Skip to content

Commit 8eb26b6

Browse files

Be explicit with tuples for %s formatting

Fix #1633

1 parent 7bd6b5d · commit 8eb26b6

26 files changed

+45
-45
lines changed

kafka/admin/kafka.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ def __init__(self, **configs):
166166
log.debug("Starting Kafka administration interface")
167167
extra_configs = set(configs).difference(self.DEFAULT_CONFIG)
168168
if extra_configs:
169-
raise KafkaConfigurationError("Unrecognized configs: %s" % extra_configs)
169+
raise KafkaConfigurationError("Unrecognized configs: %s" % (extra_configs,))
170170

171171
self.config = copy.copy(self.DEFAULT_CONFIG)
172172
self.config.update(configs)

kafka/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@ def _send_broker_unaware_request(self, payloads, encoder_fn, decoder_fn):
174174

175175
return decoder_fn(future.value)
176176

177-
raise KafkaUnavailableError('All servers failed to process request: %s' % hosts)
177+
raise KafkaUnavailableError('All servers failed to process request: %s' % (hosts,))
178178

179179
def _payloads_by_broker(self, payloads):
180180
payloads_by_broker = collections.defaultdict(list)

kafka/client_async.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -355,7 +355,7 @@ def _maybe_connect(self, node_id):
355355
conn = self._conns.get(node_id)
356356

357357
if conn is None:
358-
assert broker, 'Broker id %s not in current metadata' % node_id
358+
assert broker, 'Broker id %s not in current metadata' % (node_id,)
359359

360360
log.debug("Initiating connection to node %s at %s:%s",
361361
node_id, broker.host, broker.port)

kafka/consumer/fetcher.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ def _retrieve_offsets(self, timestamps, timeout_ms=float("inf")):
298298
remaining_ms = timeout_ms - elapsed_ms
299299

300300
raise Errors.KafkaTimeoutError(
301-
"Failed to get offsets by timestamps in %s ms" % timeout_ms)
301+
"Failed to get offsets by timestamps in %s ms" % (timeout_ms,))
302302

303303
def fetched_records(self, max_records=None):
304304
"""Returns previously fetched records and updates consumed offsets.
@@ -911,7 +911,7 @@ def record(self, partition, num_bytes, num_records):
911911
class FetchManagerMetrics(object):
912912
def __init__(self, metrics, prefix):
913913
self.metrics = metrics
914-
self.group_name = '%s-fetch-manager-metrics' % prefix
914+
self.group_name = '%s-fetch-manager-metrics' % (prefix,)
915915

916916
self.bytes_fetched = metrics.sensor('bytes-fetched')
917917
self.bytes_fetched.add(metrics.metric_name('fetch-size-avg', self.group_name,
@@ -955,15 +955,15 @@ def record_topic_fetch_metrics(self, topic, num_bytes, num_records):
955955
bytes_fetched = self.metrics.sensor(name)
956956
bytes_fetched.add(self.metrics.metric_name('fetch-size-avg',
957957
self.group_name,
958-
'The average number of bytes fetched per request for topic %s' % topic,
958+
'The average number of bytes fetched per request for topic %s' % (topic,),
959959
metric_tags), Avg())
960960
bytes_fetched.add(self.metrics.metric_name('fetch-size-max',
961961
self.group_name,
962-
'The maximum number of bytes fetched per request for topic %s' % topic,
962+
'The maximum number of bytes fetched per request for topic %s' % (topic,),
963963
metric_tags), Max())
964964
bytes_fetched.add(self.metrics.metric_name('bytes-consumed-rate',
965965
self.group_name,
966-
'The average number of bytes consumed per second for topic %s' % topic,
966+
'The average number of bytes consumed per second for topic %s' % (topic,),
967967
metric_tags), Rate())
968968
bytes_fetched.record(num_bytes)
969969

@@ -976,10 +976,10 @@ def record_topic_fetch_metrics(self, topic, num_bytes, num_records):
976976
records_fetched = self.metrics.sensor(name)
977977
records_fetched.add(self.metrics.metric_name('records-per-request-avg',
978978
self.group_name,
979-
'The average number of records in each request for topic %s' % topic,
979+
'The average number of records in each request for topic %s' % (topic,),
980980
metric_tags), Avg())
981981
records_fetched.add(self.metrics.metric_name('records-consumed-rate',
982982
self.group_name,
983-
'The average number of records consumed per second for topic %s' % topic,
983+
'The average number of records consumed per second for topic %s' % (topic,),
984984
metric_tags), Rate())
985985
records_fetched.record(num_records)

kafka/consumer/group.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -309,7 +309,7 @@ def __init__(self, *topics, **configs):
309309
# Only check for extra config keys in top-level class
310310
extra_configs = set(configs).difference(self.DEFAULT_CONFIG)
311311
if extra_configs:
312-
raise KafkaConfigurationError("Unrecognized configs: %s" % extra_configs)
312+
raise KafkaConfigurationError("Unrecognized configs: %s" % (extra_configs,))
313313

314314
self.config = copy.copy(self.DEFAULT_CONFIG)
315315
self.config.update(configs)

kafka/consumer/simple.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,7 @@ def seek(self, offset, whence=None, partition=None):
247247
self.offsets[resp.partition] = \
248248
resp.offsets[0] + deltas[resp.partition]
249249
else:
250-
raise ValueError('Unexpected value for `whence`, %d' % whence)
250+
raise ValueError('Unexpected value for `whence`, %d' % (whence,))
251251

252252
# Reset queue and fetch offsets since they are invalid
253253
self.fetch_offsets = self.offsets.copy()

kafka/consumer/subscription_state.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,7 @@ def assign_from_subscribed(self, assignments):
247247

248248
for tp in assignments:
249249
if tp.topic not in self.subscription:
250-
raise ValueError("Assigned partition %s for non-subscribed topic." % str(tp))
250+
raise ValueError("Assigned partition %s for non-subscribed topic." % (tp,))
251251

252252
# after rebalancing, we always reinitialize the assignment state
253253
self.assignment.clear()

kafka/coordinator/consumer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,7 @@ def _on_join_complete(self, generation, member_id, protocol,
216216
self._assignment_snapshot = None
217217

218218
assignor = self._lookup_assignor(protocol)
219-
assert assignor, 'Coordinator selected invalid assignment protocol: %s' % protocol
219+
assert assignor, 'Coordinator selected invalid assignment protocol: %s' % (protocol,)
220220

221221
assignment = ConsumerProtocol.ASSIGNMENT.decode(member_assignment_bytes)
222222

@@ -297,7 +297,7 @@ def time_to_next_poll(self):
297297

298298
def _perform_assignment(self, leader_id, assignment_strategy, members):
299299
assignor = self._lookup_assignor(assignment_strategy)
300-
assert assignor, 'Invalid assignment protocol: %s' % assignment_strategy
300+
assert assignor, 'Invalid assignment protocol: %s' % (assignment_strategy,)
301301
member_metadata = {}
302302
all_subscribed_topics = set()
303303
for member_id, metadata_bytes in members:
@@ -804,7 +804,7 @@ def _maybe_auto_commit_offsets_async(self):
804804
class ConsumerCoordinatorMetrics(object):
805805
def __init__(self, metrics, metric_group_prefix, subscription):
806806
self.metrics = metrics
807-
self.metric_group_name = '%s-coordinator-metrics' % metric_group_prefix
807+
self.metric_group_name = '%s-coordinator-metrics' % (metric_group_prefix,)
808808

809809
self.commit_latency = metrics.sensor('commit-latency')
810810
self.commit_latency.add(metrics.metric_name(

kafka/metrics/metrics.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ def register_metric(self, metric):
225225
with self._lock:
226226
if metric.metric_name in self.metrics:
227227
raise ValueError('A metric named "%s" already exists, cannot'
228-
' register another one.' % metric.metric_name)
228+
' register another one.' % (metric.metric_name,))
229229
self.metrics[metric.metric_name] = metric
230230
for reporter in self._reporters:
231231
reporter.metric_change(metric)

kafka/metrics/stats/percentiles.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ def __init__(self, size_in_bytes, bucketing, max_val, min_val=0.0,
2727
' to be 0.0.')
2828
self.bin_scheme = Histogram.LinearBinScheme(self._buckets, max_val)
2929
else:
30-
ValueError('Unknown bucket type: %s' % bucketing)
30+
ValueError('Unknown bucket type: %s' % (bucketing,))
3131

3232
def stats(self):
3333
measurables = []

0 commit comments

Comments (0)