Commit 1debdfa

Merge remote-tracking branch 'upstream/master'
2 parents: 0458770 + 090d691

File tree: 5 files changed, +32 −158 lines

core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala

Lines changed: 25 additions & 63 deletions
@@ -505,38 +505,27 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
   }

   test("in-memory LRU storage") {
-    store = makeBlockManager(12000)
-    val a1 = new Array[Byte](4000)
-    val a2 = new Array[Byte](4000)
-    val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_ONLY)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_ONLY)
-    assert(store.getSingle("a2").isDefined, "a2 was not in store")
-    assert(store.getSingle("a3").isDefined, "a3 was not in store")
-    assert(store.getSingle("a1") === None, "a1 was in store")
-    assert(store.getSingle("a2").isDefined, "a2 was not in store")
-    // At this point a2 was gotten last, so LRU will getSingle rid of a3
-    store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
-    assert(store.getSingle("a1").isDefined, "a1 was not in store")
-    assert(store.getSingle("a2").isDefined, "a2 was not in store")
-    assert(store.getSingle("a3") === None, "a3 was in store")
+    testInMemoryLRUStorage(StorageLevel.MEMORY_ONLY)
   }

   test("in-memory LRU storage with serialization") {
+    testInMemoryLRUStorage(StorageLevel.MEMORY_ONLY_SER)
+  }
+
+  private def testInMemoryLRUStorage(storageLevel: StorageLevel): Unit = {
     store = makeBlockManager(12000)
     val a1 = new Array[Byte](4000)
     val a2 = new Array[Byte](4000)
     val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY_SER)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_ONLY_SER)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_ONLY_SER)
+    store.putSingle("a1", a1, storageLevel)
+    store.putSingle("a2", a2, storageLevel)
+    store.putSingle("a3", a3, storageLevel)
     assert(store.getSingle("a2").isDefined, "a2 was not in store")
     assert(store.getSingle("a3").isDefined, "a3 was not in store")
     assert(store.getSingle("a1") === None, "a1 was in store")
     assert(store.getSingle("a2").isDefined, "a2 was not in store")
     // At this point a2 was gotten last, so LRU will getSingle rid of a3
-    store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY_SER)
+    store.putSingle("a1", a1, storageLevel)
     assert(store.getSingle("a1").isDefined, "a1 was not in store")
     assert(store.getSingle("a2").isDefined, "a2 was not in store")
     assert(store.getSingle("a3") === None, "a3 was in store")

@@ -618,62 +607,35 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
   }

   test("disk and memory storage") {
-    store = makeBlockManager(12000)
-    val a1 = new Array[Byte](4000)
-    val a2 = new Array[Byte](4000)
-    val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_AND_DISK)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_AND_DISK)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_AND_DISK)
-    assert(store.getSingle("a2").isDefined, "a2 was not in store")
-    assert(store.getSingle("a3").isDefined, "a3 was not in store")
-    assert(store.memoryStore.getValues("a1") == None, "a1 was in memory store")
-    assert(store.getSingle("a1").isDefined, "a1 was not in store")
-    assert(store.memoryStore.getValues("a1").isDefined, "a1 was not in memory store")
+    testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK, _.getSingle)
   }

   test("disk and memory storage with getLocalBytes") {
-    store = makeBlockManager(12000)
-    val a1 = new Array[Byte](4000)
-    val a2 = new Array[Byte](4000)
-    val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_AND_DISK)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_AND_DISK)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_AND_DISK)
-    assert(store.getLocalBytes("a2").isDefined, "a2 was not in store")
-    assert(store.getLocalBytes("a3").isDefined, "a3 was not in store")
-    assert(store.memoryStore.getValues("a1") == None, "a1 was in memory store")
-    assert(store.getLocalBytes("a1").isDefined, "a1 was not in store")
-    assert(store.memoryStore.getValues("a1").isDefined, "a1 was not in memory store")
+    testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK, _.getLocalBytes)
   }

   test("disk and memory storage with serialization") {
-    store = makeBlockManager(12000)
-    val a1 = new Array[Byte](4000)
-    val a2 = new Array[Byte](4000)
-    val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_AND_DISK_SER)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_AND_DISK_SER)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_AND_DISK_SER)
-    assert(store.getSingle("a2").isDefined, "a2 was not in store")
-    assert(store.getSingle("a3").isDefined, "a3 was not in store")
-    assert(store.memoryStore.getValues("a1") == None, "a1 was in memory store")
-    assert(store.getSingle("a1").isDefined, "a1 was not in store")
-    assert(store.memoryStore.getValues("a1").isDefined, "a1 was not in memory store")
+    testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK_SER, _.getSingle)
   }

   test("disk and memory storage with serialization and getLocalBytes") {
+    testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK_SER, _.getLocalBytes)
+  }
+
+  def testDiskAndMemoryStorage(
+      storageLevel: StorageLevel,
+      accessMethod: BlockManager => BlockId => Option[_]): Unit = {
     store = makeBlockManager(12000)
     val a1 = new Array[Byte](4000)
     val a2 = new Array[Byte](4000)
     val a3 = new Array[Byte](4000)
-    store.putSingle("a1", a1, StorageLevel.MEMORY_AND_DISK_SER)
-    store.putSingle("a2", a2, StorageLevel.MEMORY_AND_DISK_SER)
-    store.putSingle("a3", a3, StorageLevel.MEMORY_AND_DISK_SER)
-    assert(store.getLocalBytes("a2").isDefined, "a2 was not in store")
-    assert(store.getLocalBytes("a3").isDefined, "a3 was not in store")
-    assert(store.memoryStore.getValues("a1") == None, "a1 was in memory store")
-    assert(store.getLocalBytes("a1").isDefined, "a1 was not in store")
+    store.putSingle("a1", a1, storageLevel)
+    store.putSingle("a2", a2, storageLevel)
+    store.putSingle("a3", a3, storageLevel)
+    assert(accessMethod(store)("a2").isDefined, "a2 was not in store")
+    assert(accessMethod(store)("a3").isDefined, "a3 was not in store")
+    assert(store.memoryStore.getValues("a1").isEmpty, "a1 was in memory store")
+    assert(accessMethod(store)("a1").isDefined, "a1 was not in store")
     assert(store.memoryStore.getValues("a1").isDefined, "a1 was not in memory store")
   }
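
The helper's second parameter, accessMethod: BlockManager => BlockId => Option[_], takes the lookup as a curried function, so the four disk-and-memory tests collapse into one body that differs only in storage level and access path. A minimal standalone sketch of the same pattern (the Store class, its put method, and these simplified lookups are hypothetical stand-ins, not the suite's real API):

// Sketch of the curried-access-method pattern used in the refactored suite.
import scala.collection.mutable

class Store {
  private val mem = mutable.Map.empty[String, Array[Byte]]
  def put(id: String, data: Array[Byte]): Unit = mem(id) = data
  def getSingle(id: String): Option[Array[Byte]] = mem.get(id)
  def getLocalBytes(id: String): Option[Array[Byte]] = mem.get(id)
}

// One assertion body, parameterized over which lookup it exercises.
def testStorage(store: Store, accessMethod: Store => String => Option[_]): Unit = {
  store.put("a1", new Array[Byte](4))
  assert(accessMethod(store)("a1").isDefined, "a1 was not in store")
}

// `_.getSingle` eta-expands the method into a String => Option[Array[Byte]]
// function, which is exactly what the test passes around.
testStorage(new Store, _.getSingle)
testStorage(new Store, _.getLocalBytes)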

external/mqtt/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -51,7 +51,7 @@
     <dependency>
       <groupId>org.eclipse.paho</groupId>
       <artifactId>org.eclipse.paho.client.mqttv3</artifactId>
-      <version>1.0.1</version>
+      <version>1.0.2</version>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>

pom.xml

Lines changed: 0 additions & 87 deletions
@@ -226,93 +226,6 @@
         <enabled>false</enabled>
       </snapshots>
     </repository>
-    <repository>
-      <id>apache-repo</id>
-      <name>Apache Repository</name>
-      <url>https://repository.apache.org/content/repositories/releases</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>jboss-repo</id>
-      <name>JBoss Repository</name>
-      <url>https://repository.jboss.org/nexus/content/repositories/releases</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>mqtt-repo</id>
-      <name>MQTT Repository</name>
-      <url>https://repo.eclipse.org/content/repositories/paho-releases</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>cloudera-repo</id>
-      <name>Cloudera Repository</name>
-      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>spark-hive-staging</id>
-      <name>Staging Repo for Hive 1.2.1 (Spark Version)</name>
-      <url>https://oss.sonatype.org/content/repositories/orgspark-project-1113</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-    </repository>
-    <repository>
-      <id>mapr-repo</id>
-      <name>MapR Repository</name>
-      <url>http://repository.mapr.com/maven/</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <!-- returning unauthorized on some operations -->
-    <repository>
-      <id>spring-releases</id>
-      <name>Spring Release Repository</name>
-      <url>https://repo.spring.io/libs-release</url>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <!-- For transitive dependencies brougt by parquet-thrift -->
-    <repository>
-      <id>twttr-repo</id>
-      <name>Twttr Repository</name>
-      <url>http://maven.twttr.com</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
   </repositories>
   <pluginRepositories>
     <pluginRepository>

project/SparkBuild.scala

Lines changed: 6 additions & 1 deletion
@@ -141,7 +141,12 @@ object SparkBuild extends PomBuild {
     publishMavenStyle := true,
     unidocGenjavadocVersion := "0.9-spark0",

-    resolvers += Resolver.mavenLocal,
+    // Override SBT's default resolvers:
+    resolvers := Seq(
+      DefaultMavenRepository,
+      Resolver.mavenLocal
+    ),
+    externalResolvers := resolvers.value,
     otherResolvers <<= SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))),
     publishLocalConfiguration in MavenCompile <<= (packagedArtifacts, deliverLocal, ivyLoggingLevel) map {
       (arts, _, level) => new PublishConfiguration(None, "dotM2", arts, Seq(), level)
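
For context on this change: in sbt, the resolvers setting is merged into a larger default chain, while externalResolvers is the list actually searched, and by default it prepends sbt's built-in repositories (Maven Central among them). Assigning externalResolvers := resolvers.value therefore makes the explicit Seq the complete chain. A minimal build.sbt sketch of the same override, outside Spark's build:

// build.sbt -- sketch: pin the resolver chain to exactly these repositories.
// `resolvers` alone would only be merged into sbt's default chain; overriding
// `externalResolvers` makes this list the only one consulted.
resolvers := Seq(
  DefaultMavenRepository, // Maven Central
  Resolver.mavenLocal     // ~/.m2/repository
)

externalResolvers := resolvers.value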

project/plugins.sbt

Lines changed: 0 additions & 6 deletions
@@ -1,9 +1,3 @@
-resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
-
-resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
-
-resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/"
-
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

 addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")
