Remove deprecated API in KafkaTestUtils
HyukjinKwon committed Jul 30, 2016
commit bb9f635a556bc1ed2f9fd9593a5a3f2c6af62cf9
@@ -30,10 +30,10 @@ import scala.util.control.NonFatal
 
 import kafka.admin.AdminUtils
 import kafka.api.Request
-import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
-import kafka.serializer.StringEncoder
 import kafka.server.{KafkaConfig, KafkaServer}
 import kafka.utils.ZkUtils
+import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
+import org.apache.kafka.common.serialization.StringSerializer
 import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
 
 import org.apache.spark.SparkConf
@@ -68,7 +68,7 @@ private[kafka010] class KafkaTestUtils extends Logging {
   private var server: KafkaServer = _
 
   // Kafka producer
-  private var producer: Producer[String, String] = _
+  private var producer: KafkaProducer[String, String] = _
 
   // Flag to test whether the system is correctly started
   private var zkReady = false
@@ -178,8 +178,9 @@ private[kafka010] class KafkaTestUtils extends Logging {
 
   /** Send the array of messages to the Kafka broker */
   def sendMessages(topic: String, messages: Array[String]): Unit = {
-    producer = new Producer[String, String](new ProducerConfig(producerConfiguration))
-    producer.send(messages.map { new KeyedMessage[String, String](topic, _ ) }: _*)
+    producer = new KafkaProducer[String, String](producerConfiguration)
+    val records = messages.map { new ProducerRecord[String, String](topic, _) }
+    records.map(producer.send)
Contributor:
super minor: could use foreach instead of map since we don't care about the return value.
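For illustration, a minimal sketch of the suggested change (sendAll is a hypothetical stand-alone helper, not the PR's sendMessages; the point is only that foreach calls send for its side effect and discards the returned Future, whereas map allocates an array of Futures nobody reads):

    import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

    // Hypothetical helper mirroring sendMessages above, using foreach as suggested.
    def sendAll(producer: KafkaProducer[String, String], topic: String, messages: Array[String]): Unit = {
      val records = messages.map { new ProducerRecord[String, String](topic, _) }
      // send is called purely for its side effect; the Future[RecordMetadata]
      // it returns is intentionally ignored.
      records.foreach(record => producer.send(record))
    }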

Member Author:
Oh, thank you.

Member:
This type of thing is even essential for correctness in some cases, like https://issues.apache.org/jira/browse/SPARK-16664
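Without speaking to the specifics of SPARK-16664, the general hazard is easy to reproduce: when map is used only for its side effects over a lazy collection, the body may never run at all because nothing forces the result, while foreach always executes eagerly:

    import scala.collection.mutable.ArrayBuffer

    val sent = ArrayBuffer.empty[String]

    // Lazy: map on an Iterator builds another Iterator; until someone consumes it,
    // the body never runs, so no "sends" happen.
    Iterator("a", "b", "c").map { m => sent += m; m }
    println(sent.size)  // 0

    // Eager: foreach runs the body immediately.
    Iterator("a", "b", "c").foreach { m => sent += m }
    println(sent.size)  // 3

On a strict Array, as in sendMessages here, map is merely wasteful rather than incorrect, but foreach states the intent and avoids the trap if the collection type ever changes.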

     producer.close()
     producer = null
   }
@@ -198,10 +199,12 @@ private[kafka010] class KafkaTestUtils extends Logging {
 
   private def producerConfiguration: Properties = {
     val props = new Properties()
-    props.put("metadata.broker.list", brokerAddress)
-    props.put("serializer.class", classOf[StringEncoder].getName)
+    props.put("bootstrap.servers", brokerAddress)
+    props.put("value.serializer", classOf[StringSerializer].getName)
+    // Key serializer is required.
+    props.put("key.serializer", classOf[StringSerializer].getName)
     // wait for all in-sync replicas to ack sends
-    props.put("request.required.acks", "-1")
+    props.put("acks", "all")
     props
   }
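As a usage sketch only (the broker address and topic name below are placeholders, not values from this PR), properties like these feed straight into the new-API producer; blocking on the Future returned by send, or calling flush() before close(), is one way a test can be sure the acks=all setting was actually exercised:

    import java.util.Properties
    import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
    import org.apache.kafka.common.serialization.StringSerializer

    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092")  // placeholder broker address
    props.put("key.serializer", classOf[StringSerializer].getName)
    props.put("value.serializer", classOf[StringSerializer].getName)
    props.put("acks", "all")  // wait for all in-sync replicas to ack sends

    val producer = new KafkaProducer[String, String](props)
    try {
      // send returns a java.util.concurrent.Future[RecordMetadata]; get() blocks
      // until the broker has acknowledged the record.
      producer.send(new ProducerRecord[String, String]("test-topic", "hello")).get()
    } finally {
      producer.close()
    }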
