Changed syntax of unit returning methods
NirmalReddy committed Mar 19, 2014
commit 8c5ff3ea95723617fbb5aaddf2546d8c674869bf
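A note on what this commit changes, for readers skimming the diff: Scala's "procedure syntax" (`def m() { ... }`, with no `=`) fixes the result type to `Unit` and silently discards the value of the body's last expression, a common source of bugs; the patch rewrites such methods as `def m(): Unit = { ... }`. A minimal sketch of the difference (illustrative code, not from this patch):

```scala
object UnitSyntaxDemo {
  // Procedure syntax (what this commit removes): without "=", the result type
  // is always Unit, and the value of the last expression is discarded.
  def addProcedure(a: Int, b: Int) { a + b } // compiles, silently returns Unit

  // Explicit syntax (what this commit adopts): the result type is declared,
  // and the compiler checks the body against it.
  def add(a: Int, b: Int): Int = a + b

  def main(args: Array[String]): Unit = {
    println(addProcedure(2, 3)) // prints "()" -- the discarded-result pitfall
    println(add(2, 3))          // prints "5"
  }
}
```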
7 changes: 4 additions & 3 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1110,8 +1110,8 @@ object SparkContext extends Logging {
   }
 
   // Helper objects for converting common types to Writable
-  private def simpleWritableConverter[T, W <: Writable: ClassTag](convert: W => T):
-      WritableConverter[T] = {
+  private def simpleWritableConverter[T, W <: Writable: ClassTag](convert: W => T)
+      : WritableConverter[T] = {
     val wClass = classTag[W].runtimeClass.asInstanceOf[Class[W]]
     new WritableConverter[T](_ => wClass, x => convert(x.asInstanceOf[W]))
   }
@@ -1135,7 +1135,8 @@ object SparkContext extends Logging {
     simpleWritableConverter[Array[Byte], BytesWritable](_.getBytes)
   }
 
-  implicit def stringWritableConverter() = simpleWritableConverter[String, Text](_.toString)
+  implicit def stringWritableConverter(): WritableConverter[String] =
+    simpleWritableConverter[String, Text](_.toString)
 
   implicit def writableWritableConverter[T <: Writable]() =
     new WritableConverter[T](_.runtimeClass.asInstanceOf[Class[T]], _.asInstanceOf[T])
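Two smaller style points are visible in the SparkContext.scala hunks above: when a signature wraps, the `:` of the result type moves to the start of the continuation line, and the implicit `stringWritableConverter` gains an explicit result type. Declaring result types on implicit defs is generally considered good practice, since the declared type is the contract that implicit search resolves against. A hedged sketch (the `Converter` names are illustrative stand-ins, not Spark API):

```scala
object ConverterStyleDemo {
  // Illustrative stand-in for WritableConverter; not the Spark class.
  case class Converter[T](convert: String => T)

  // An explicit result type makes the contract that implicit search resolves
  // against visible at the declaration, instead of leaving it to inference.
  implicit def intConverter(): Converter[Int] = Converter(_.toInt)

  // When a signature wraps, the patch starts the continuation line with ":",
  // separating the parameter list from the result type.
  def converterFor[T](parse: String => T)
    : Converter[T] = Converter(parse)
}
```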
@@ -391,22 +391,22 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   /**
    * Save this RDD as a text file, using string representations of elements.
    */
-  def saveAsTextFile(path: String) {
+  def saveAsTextFile(path: String): Unit = {
     rdd.saveAsTextFile(path)
   }
 
 
   /**
    * Save this RDD as a compressed text file, using string representations of elements.
    */
-  def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]) {
+  def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]): Unit = {
     rdd.saveAsTextFile(path, codec)
   }
 
   /**
    * Save this RDD as a SequenceFile of serialized objects.
    */
-  def saveAsObjectFile(path: String) {
+  def saveAsObjectFile(path: String): Unit = {
     rdd.saveAsObjectFile(path)
   }

@@ -425,7 +425,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * executed on this RDD. It is strongly recommended that this RDD is persisted in
    * memory, otherwise saving it on a file will require recomputation.
    */
-  def checkpoint() {
+  def checkpoint(): Unit = {
     rdd.checkpoint()
   }

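The doc comment around `checkpoint()` in the hunk above recommends persisting the RDD first so that writing the checkpoint does not force recomputation. A hedged usage sketch of that advice against the Scala API (app name and path are illustrative):

```scala
import org.apache.spark.{SparkConf, SparkContext}

object CheckpointDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("checkpoint-demo").setMaster("local[2]")
    val sc = new SparkContext(conf)
    sc.setCheckpointDir("/tmp/checkpoints") // illustrative path

    val rdd = sc.parallelize(1 to 1000).map(_ * 2)
    rdd.cache()          // persist first, as the doc comment recommends,
    rdd.checkpoint()     // so checkpointing does not recompute the lineage
    println(rdd.count()) // first action triggers the checkpoint
    sc.stop()
  }
}
```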
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/util/Distribution.scala
@@ -42,16 +42,16 @@ class Distribution(val data: Array[Double], val startIdx: Int, val endIdx: Int)
    * given from 0 to 1
    * @param probabilities
    */
-  def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities):
-      IndexedSeq[Double] = {
+  def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities)
+      : IndexedSeq[Double] = {
     probabilities.toIndexedSeq.map{p:Double => data(closestIndex(p))}
   }
 
   private def closestIndex(p: Double) = {
     math.min((p * length).toInt + startIdx, endIdx - 1)
   }
 
-  def showQuantiles(out: PrintStream = System.out) {
+  def showQuantiles(out: PrintStream = System.out): Unit = {
     out.println("min\t25%\t50%\t75%\tmax")
     getQuantiles(defaultProbabilities).foreach{q => out.print(q + "\t")}
     out.println
@@ -26,7 +26,7 @@ import org.apache.spark.graphx.PartitionStrategy._
  */
 object Analytics extends Logging {
 
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val host = args(0)
     val taskType = args(1)
     val fname = args(2)
@@ -431,13 +431,11 @@ class StreamingContext private[streaming] (
    * Stop the execution of the streams.
    * @param stopSparkContext Stop the associated SparkContext or not
    */
-  def stop(stopSparkContext: Boolean = true) {
-    synchronized {
-      scheduler.stop()
-      logInfo("StreamingContext stopped successfully")
-      waiter.notifyStop()
-      if (stopSparkContext) sc.stop()
-    }
+  def stop(stopSparkContext: Boolean = true): Unit = synchronized {
+    scheduler.stop()
+    logInfo("StreamingContext stopped successfully")
+    waiter.notifyStop()
+    if (stopSparkContext) sc.stop()
   }
 }

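The `StreamingContext.stop` rewrite above does slightly more than add `: Unit =`: because `synchronized` on `AnyRef` is an ordinary method that takes a by-name block and returns its result, the block can serve as the entire method body, dropping one level of nesting. A small illustrative sketch (the `Counter` class is hypothetical, not from Spark):

```scala
// Counter is a hypothetical class used only to show the refactor pattern.
class Counter {
  private var n = 0

  // Before-style: procedure syntax wrapping an inner synchronized block.
  def incrementOld() { synchronized { n += 1 } }

  // After-style: explicit Unit result type; synchronized runs its block while
  // holding the receiver's monitor, so the call can be the whole method body.
  def increment(): Unit = synchronized { n += 1 }

  def value: Int = synchronized(n)
}
```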
@@ -49,7 +49,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
   /**
    * Print the first ten elements of each RDD generated in this DStream. This is an output
    * operator, so this DStream will be registered as an output stream and there materialized.
    */
-  def print() {
+  def print(): Unit = {
     dstream.print()
   }

@@ -477,15 +477,15 @@ class JavaStreamingContext(val ssc: StreamingContext) {
   /**
    * Start the execution of the streams.
    */
-  def start() {
+  def start(): Unit = {
     ssc.start()
   }
 
   /**
    * Wait for the execution to stop. Any exceptions that occurs during the execution
    * will be thrown in this thread.
    */
-  def awaitTermination() {
+  def awaitTermination(): Unit = {
     ssc.awaitTermination()
   }

@@ -494,22 +494,22 @@ class JavaStreamingContext(val ssc: StreamingContext) {
    * will be thrown in this thread.
    * @param timeout time to wait in milliseconds
    */
-  def awaitTermination(timeout: Long) {
+  def awaitTermination(timeout: Long): Unit = {
     ssc.awaitTermination(timeout)
   }
 
   /**
    * Stop the execution of the streams. Will stop the associated JavaSparkContext as well.
    */
-  def stop() {
+  def stop(): Unit = {
     ssc.stop()
   }
 
   /**
    * Stop the execution of the streams.
    * @param stopSparkContext Stop the associated SparkContext or not
    */
-  def stop(stopSparkContext: Boolean) {
+  def stop(stopSparkContext: Boolean): Unit = {
     ssc.stop(stopSparkContext)
   }
 }
@@ -503,7 +503,7 @@ abstract class DStream[T: ClassTag] (
    * 'this' DStream will be registered as an output stream and therefore materialized.
    */
   @deprecated("use foreachRDD", "0.9.0")
-  def foreach(foreachFunc: RDD[T] => Unit) {
+  def foreach(foreachFunc: RDD[T] => Unit): Unit = {
     this.foreachRDD(foreachFunc)
   }

@@ -512,7 +512,7 @@ abstract class DStream[T: ClassTag] (
    * 'this' DStream will be registered as an output stream and therefore materialized.
    */
   @deprecated("use foreachRDD", "0.9.0")
-  def foreach(foreachFunc: (RDD[T], Time) => Unit) {
+  def foreach(foreachFunc: (RDD[T], Time) => Unit): Unit = {
     this.foreachRDD(foreachFunc)
   }

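The last two hunks touch methods already marked `@deprecated("use foreachRDD", "0.9.0")`; both simply forward to `foreachRDD`, which exists in the same two arities. A hedged usage sketch of the replacement calls (the `register` helper and the printed output are illustrative, not from this patch):

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.Time
import org.apache.spark.streaming.dstream.DStream

object ForeachRDDDemo {
  // Illustrative helper showing migration off the deprecated foreach.
  def register(stream: DStream[String]): Unit = {
    // One-argument form, replacing the deprecated foreach(RDD[T] => Unit):
    stream.foreachRDD((rdd: RDD[String]) => println(rdd.count()))

    // Two-argument form, replacing foreach((RDD[T], Time) => Unit):
    stream.foreachRDD((rdd: RDD[String], time: Time) =>
      println(s"batch at $time: ${rdd.count()} elements"))
  }
}
```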