 with_progress = False
 
 # Kafka bootstrap server(s)
-bootstrap_servers = 'localhost'
+bootstrap_servers = None
 
+# Topic to use
+topic = 'test'
+
+# API version requests are only implemented in Kafka broker >=0.10
+# but the client handles failed API version requests gracefully for older
+# versions as well, except for 0.9.0.x which will stall for about 10s
+# on each connect with this set to True.
+api_version_request = True
 
 # global variable to be set by stats_cb call back function
 good_stats_cb_result = False
@@ -83,29 +91,30 @@ def verify_producer():
     # Producer config
     conf = {'bootstrap.servers': bootstrap_servers,
             'error_cb': error_cb,
+            'api.version.request': api_version_request,
             'default.topic.config': {'produce.offset.report': True}}
 
     # Create producer
     p = confluent_kafka.Producer(**conf)
     print('producer at %s' % p)
 
     # Produce some messages
-    p.produce('test', 'Hello Python!')
-    p.produce('test', key='Just a key')
-    p.produce('test', partition=1, value='Strictly for partition 1',
+    p.produce(topic, 'Hello Python!')
+    p.produce(topic, key='Just a key')
+    p.produce(topic, partition=1, value='Strictly for partition 1',
               key='mykey')
 
     # Produce more messages, now with delivery report callbacks in various forms.
     mydr = MyTestDr()
-    p.produce('test', value='This one has a dr callback',
+    p.produce(topic, value='This one has a dr callback',
               callback=mydr.delivery)
-    p.produce('test', value='This one has a lambda',
+    p.produce(topic, value='This one has a lambda',
               callback=lambda err, msg: MyTestDr._delivery(err, msg))
-    p.produce('test', value='This one has neither')
+    p.produce(topic, value='This one has neither')
 
     # Produce even more messages
     for i in range(0, 10):
-        p.produce('test', value='Message #%d' % i, key=str(i),
+        p.produce(topic, value='Message #%d' % i, key=str(i),
                   callback=mydr.delivery)
         p.poll(0)
 
@@ -119,11 +128,11 @@ def verify_producer():
 def verify_producer_performance(with_dr_cb=True):
     """ Time how long it takes to produce and deliver X messages """
     conf = {'bootstrap.servers': bootstrap_servers,
+            'api.version.request': api_version_request,
             'error_cb': error_cb}
 
     p = confluent_kafka.Producer(**conf)
 
-    topic = 'test'
     msgcnt = 1000000
     msgsize = 100
     msg_pattern = 'test.py performance'
@@ -144,9 +153,9 @@ def verify_producer_performance(with_dr_cb=True):
     for i in range(0, msgcnt):
         try:
             if with_dr_cb:
-                p.produce('test', value=msg_payload, callback=dr.delivery)
+                p.produce(topic, value=msg_payload, callback=dr.delivery)
             else:
-                p.produce('test', value=msg_payload)
+                p.produce(topic, value=msg_payload)
         except BufferError as e:
             # Local queue is full (slow broker connection?)
             msgs_backpressure += 1
@@ -213,6 +222,7 @@ def verify_consumer():
             'group.id': 'test.py',
             'session.timeout.ms': 6000,
             'enable.auto.commit': False,
+            'api.version.request': api_version_request,
             'on_commit': print_commit_result,
             'error_cb': error_cb,
             'default.topic.config': {
@@ -223,7 +233,7 @@ def verify_consumer():
     c = confluent_kafka.Consumer(**conf)
 
     # Subscribe to a list of topics
-    c.subscribe(["test"])
+    c.subscribe([topic])
 
     max_msgcnt = 100
     msgcnt = 0
@@ -245,10 +255,10 @@ def verify_consumer():
                 print('Consumer error: %s: ignoring' % msg.error())
                 break
 
-        if False:
-            print('%s[%d]@%d: key=%s, value=%s' % \
-                  (msg.topic(), msg.partition(), msg.offset(),
-                   msg.key(), msg.value()))
+        tstype, timestamp = msg.timestamp()
+        print('%s[%d]@%d: key=%s, value=%s, tstype=%d, timestamp=%s' % \
+              (msg.topic(), msg.partition(), msg.offset(),
+               msg.key(), msg.value(), tstype, timestamp))
 
         if (msg.offset() % 5) == 0:
             # Async commit
@@ -268,7 +278,7 @@ def verify_consumer():
 
     # Start a new client and get the committed offsets
     c = confluent_kafka.Consumer(**conf)
-    offsets = c.committed(list(map(lambda p: confluent_kafka.TopicPartition("test", p), range(0,3))))
+    offsets = c.committed(list(map(lambda p: confluent_kafka.TopicPartition(topic, p), range(0,3))))
     for tp in offsets:
         print(tp)
 
@@ -302,7 +312,7 @@ def my_on_revoke (consumer, partitions):
             print(' %s [%d] @ %d' % (p.topic, p.partition, p.offset))
         consumer.unassign()
 
-    c.subscribe(["test"], on_assign=my_on_assign, on_revoke=my_on_revoke)
+    c.subscribe([topic], on_assign=my_on_assign, on_revoke=my_on_revoke)
 
     max_msgcnt = 1000000
     bytecnt = 0
@@ -362,10 +372,11 @@ def verify_stats_cb():
     def stats_cb(stats_json_str):
         global good_stats_cb_result
         stats_json = json.loads(stats_json_str)
-        if 'test' in stats_json['topics']:
-            app_offset = stats_json['topics']['test']['partitions']['0']['app_offset']
+        if topic in stats_json['topics']:
+            app_offset = stats_json['topics'][topic]['partitions']['0']['app_offset']
             if app_offset > 0:
-                print("# app_offset stats for topic test partition 0: %d" % app_offset)
+                print("# app_offset stats for topic %s partition 0: %d" % \
+                      (topic, app_offset))
                 good_stats_cb_result = True
 
     conf = {'bootstrap.servers': bootstrap_servers,
@@ -379,7 +390,7 @@ def stats_cb(stats_json_str):
             }}
 
     c = confluent_kafka.Consumer(**conf)
-    c.subscribe(["test"])
+    c.subscribe([topic])
 
     max_msgcnt = 1000000
     bytecnt = 0
@@ -437,6 +448,11 @@ def stats_cb(stats_json_str):
 
     if len(sys.argv) > 1:
         bootstrap_servers = sys.argv[1]
+        if len(sys.argv) > 2:
+            topic = sys.argv[2]
+    else:
+        print('Usage: %s <broker> [<topic>]' % sys.argv[0])
+        sys.exit(1)
 
     print('Using confluent_kafka module version %s (0x%x)' % confluent_kafka.version())
     print('Using librdkafka version %s (0x%x)' % confluent_kafka.libversion())
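
For readers who want to try the new knobs outside the test suite, the sketch below (not part of this commit) follows the same pattern the diff introduces: broker and topic come from the command line, and api.version.request is forwarded to the client so it can be set to False against 0.9.0.x brokers to avoid the ~10s connect stall noted in the comment above. The default topic name and the payload string are placeholders.

#!/usr/bin/env python
# Minimal sketch, assuming confluent-kafka-python is installed and a broker
# is reachable at the given address. Mirrors the bootstrap_servers / topic /
# api.version.request pattern this commit adds to integration_test.py.
import sys
import confluent_kafka

if len(sys.argv) < 2:
    print('Usage: %s <broker> [<topic>]' % sys.argv[0])
    sys.exit(1)

bootstrap_servers = sys.argv[1]
topic = sys.argv[2] if len(sys.argv) > 2 else 'test'  # 'test' mirrors the script default

conf = {'bootstrap.servers': bootstrap_servers,
        # Set to False when talking to 0.9.0.x brokers to avoid the ~10s
        # stall on each connect.
        'api.version.request': True}

p = confluent_kafka.Producer(**conf)
p.produce(topic, value='hello from the sketch')
p.flush()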