rename SparkHadoopWriterConfig to HadoopWriteConfigUtil
jiangxb1987 committed Nov 19, 2016
commit bedcd10fe74192fd71088c8739c786d750639af6
@@ -37,7 +37,7 @@ import org.apache.spark.util.{SerializableConfiguration, SerializableJobConf, Utils}
  * 3. Implementations should have a constructor with exactly one argument:
  *    (conf: SerializableConfiguration) or (conf: SerializableJobConf).
  */
-abstract class SparkHadoopWriterConfig[K, V: ClassTag] extends Serializable {
+abstract class HadoopWriteConfigUtil[K, V: ClassTag] extends Serializable {
 
   // --------------------------------------------------------------------------
   // Create JobContext/TaskAttemptContext
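
The scaladoc contract preserved above is the key constraint on the renamed class: a concrete write-config util must have exactly one constructor argument carrying a serializable Hadoop conf. A minimal sketch of a conforming subclass, assuming a file inside Spark's own package that already imports HadoopWriteConfigUtil (the class name and comments are mine, not part of this PR); it stays abstract because this hunk does not show the hooks a subclass must implement:

import scala.reflect.ClassTag
import org.apache.spark.internal.Logging
import org.apache.spark.util.SerializableJobConf

// Exactly one (conf) constructor argument, as the scaladoc above requires,
// so the writer can ship the conf to executors and rebuild contexts there.
private[spark] abstract class MyWriteConfigUtil[K, V: ClassTag](conf: SerializableJobConf)
  extends HadoopWriteConfigUtil[K, V] with Logging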
@@ -57,7 +57,7 @@ object SparkHadoopWriter extends Logging {
    */
   def write[K, V: ClassTag](
       rdd: RDD[(K, V)],
-      config: SparkHadoopWriterConfig[K, V]): Unit = {
+      config: HadoopWriteConfigUtil[K, V]): Unit = {
     // Extract context and configuration from RDD.
     val sparkContext = rdd.context
     val stageId = rdd.id
@@ -119,7 +119,7 @@
   /** Write a RDD partition out in a single Spark task. */
   private def executeTask[K, V: ClassTag](
       context: TaskContext,
-      config: SparkHadoopWriterConfig[K, V],
+      config: HadoopWriteConfigUtil[K, V],
       jobTrackerId: String,
       sparkStageId: Int,
       sparkPartitionId: Int,
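
The two signatures above imply a driver/executor split: write() prepares the job once, then runs executeTask() for every partition. A hedged sketch of that fan-out (not this file's verbatim body; the trailing executeTask parameters are cut off in this hunk, so the call is left as a comment):

import scala.reflect.ClassTag
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD

def writeSketch[K, V: ClassTag](
    rdd: RDD[(K, V)],
    config: HadoopWriteConfigUtil[K, V]): Unit = {
  val sparkContext = rdd.context
  // One task per partition; each task gets the shared config util plus
  // identifiers derived from its own TaskContext.
  sparkContext.runJob(rdd, (context: TaskContext, iter: Iterator[(K, V)]) => {
    // executeTask(context, config, jobTrackerId, context.stageId,
    //   context.partitionId, ...remaining parameters not shown in this diff)
  })
}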
@@ -175,8 +175,8 @@
  * A helper class that reads JobConf from older mapred API, creates output Format/Committer/Writer.
  */
 private[spark]
-class SparkHadoopMapRedWriterConfig[K, V: ClassTag](conf: SerializableJobConf)
-  extends SparkHadoopWriterConfig[K, V] with Logging {
+class HadoopMapRedWriteConfigUtil[K, V: ClassTag](conf: SerializableJobConf)

Review comment (Contributor), on the line above: Comments on this class also apply to the companion class HadoopMapReduceWriteConfigUtil.

+  extends HadoopWriteConfigUtil[K, V] with Logging {
 
   private var outputFormat: Class[_ <: OutputFormat[K, V]] = null
   private var writer: RecordWriter[K, V] = null
@@ -308,8 +308,8 @@ class SparkHadoopMapRedWriterConfig[K, V: ClassTag](conf: SerializableJobConf)
  * Format/Committer/Writer.
  */
 private[spark]
-class SparkHadoopMapReduceWriterConfig[K, V: ClassTag](conf: SerializableConfiguration)
-  extends SparkHadoopWriterConfig[K, V] with Logging {
+class HadoopMapReduceWriteConfigUtil[K, V: ClassTag](conf: SerializableConfiguration)
+  extends HadoopWriteConfigUtil[K, V] with Logging {
 
   private var outputFormat: Class[_ <: NewOutputFormat[K, V]] = null
   private var writer: NewRecordWriter[K, V] = null
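
Taken together, the two renamed concrete classes map one-to-one onto Hadoop's two output APIs: the old mapred API travels as a JobConf, the new mapreduce API as a plain Configuration. A construction sketch (both classes are private[spark], so this only compiles inside Spark itself; the value names and String type parameters are illustrative):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.util.{SerializableConfiguration, SerializableJobConf}

// Old mapred API: wrap a JobConf.
val oldApiConfig = new HadoopMapRedWriteConfigUtil[String, String](
  new SerializableJobConf(new JobConf()))
// New mapreduce API: wrap a plain Configuration.
val newApiConfig = new HadoopMapReduceWriteConfigUtil[String, String](
  new SerializableConfiguration(new Configuration()))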
@@ -1051,7 +1051,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
    * configured for a Hadoop MapReduce job.
    */
   def saveAsNewAPIHadoopDataset(conf: Configuration): Unit = self.withScope {
-    val config = new SparkHadoopMapReduceWriterConfig[K, V](new SerializableConfiguration(conf))
+    val config = new HadoopMapReduceWriteConfigUtil[K, V](new SerializableConfiguration(conf))
     SparkHadoopWriter.write(
       rdd = self,
       config = config)
@@ -1064,7 +1064,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
    * MapReduce job.
    */
   def saveAsHadoopDataset(conf: JobConf): Unit = self.withScope {
-    val config = new SparkHadoopMapRedWriterConfig[K, V](new SerializableJobConf(conf))
+    val config = new HadoopMapRedWriteConfigUtil[K, V](new SerializableJobConf(conf))
     SparkHadoopWriter.write(
       rdd = self,
       config = config)
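
For end users nothing changes: both save methods keep their signatures and simply build the renamed config util before delegating to SparkHadoopWriter.write. A usage sketch of the new-API path (standard Spark/Hadoop calls; the app name and output path are illustrative):

import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat, TextOutputFormat}
import org.apache.spark.{SparkConf, SparkContext}

val sc = new SparkContext(new SparkConf().setAppName("save-demo").setMaster("local[*]"))
val pairs = sc.parallelize(Seq(("a", "1"), ("b", "2")))
  .map { case (k, v) => (new Text(k), new Text(v)) }

val job = Job.getInstance(sc.hadoopConfiguration)
job.setOutputKeyClass(classOf[Text])
job.setOutputValueClass(classOf[Text])
job.setOutputFormatClass(classOf[TextOutputFormat[Text, Text]])
FileOutputFormat.setOutputPath(job, new Path("/tmp/save-demo"))

// Internally builds a HadoopMapReduceWriteConfigUtil and calls SparkHadoopWriter.write.
pairs.saveAsNewAPIHadoopDataset(job.getConfiguration)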