Skip to content

Commit 863511a

Browse files
jeffwidman and 88manpreet
authored and committed
Stop using mutable types for default arg values
Using mutable types for default args is typically a no-no unless their surprising behavior is being explicitly abused. For an explanation, see: http://effbot.org/zone/default-values.htm. Fixes dpkp#1212.
1 parent b4c3cca commit 863511a

File tree

5 files changed

+15
-15
lines changed

5 files changed

+15
-15
lines changed

kafka/client.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -637,23 +637,23 @@ def load_metadata_for_topics(self, *topics, **kwargs):
637637
)
638638

639639
@time_metric('metadata')
640-
def send_metadata_request(self, payloads=[], fail_on_error=True,
640+
def send_metadata_request(self, payloads=(), fail_on_error=True,
641641
callback=None):
642642
encoder = KafkaProtocol.encode_metadata_request
643643
decoder = KafkaProtocol.decode_metadata_response
644644

645645
return self._send_broker_unaware_request(payloads, encoder, decoder)
646646

647647
@time_metric('consumer_metadata')
648-
def send_consumer_metadata_request(self, payloads=[], fail_on_error=True,
648+
def send_consumer_metadata_request(self, payloads=(), fail_on_error=True,
649649
callback=None):
650650
encoder = KafkaProtocol.encode_consumer_metadata_request
651651
decoder = KafkaProtocol.decode_consumer_metadata_response
652652

653653
return self._send_broker_unaware_request(payloads, encoder, decoder)
654654

655655
@time_metric('produce')
656-
def send_produce_request(self, payloads=[], acks=1, timeout=1000,
656+
def send_produce_request(self, payloads=(), acks=1, timeout=1000,
657657
fail_on_error=True, callback=None):
658658
"""
659659
Encode and send some ProduceRequests
@@ -704,7 +704,7 @@ def send_produce_request(self, payloads=[], acks=1, timeout=1000,
704704
(not fail_on_error or not self._raise_on_response_error(resp))]
705705

706706
@time_metric('fetch')
707-
def send_fetch_request(self, payloads=[], fail_on_error=True,
707+
def send_fetch_request(self, payloads=(), fail_on_error=True,
708708
callback=None, max_wait_time=100, min_bytes=4096):
709709
"""
710710
Encode and send a FetchRequest
@@ -725,7 +725,7 @@ def send_fetch_request(self, payloads=[], fail_on_error=True,
725725
if not fail_on_error or not self._raise_on_response_error(resp)]
726726

727727
@time_metric('offset')
728-
def send_offset_request(self, payloads=[], fail_on_error=True,
728+
def send_offset_request(self, payloads=(), fail_on_error=True,
729729
callback=None):
730730
resps = self._send_broker_aware_request(
731731
payloads,
@@ -736,8 +736,8 @@ def send_offset_request(self, payloads=[], fail_on_error=True,
736736
if not fail_on_error or not self._raise_on_response_error(resp)]
737737

738738
@time_metric('offset_list')
739-
def send_list_offset_request(self, payloads=[], fail_on_error=True,
740-
callback=None):
739+
def send_list_offset_request(self, payloads=(), fail_on_error=True,
740+
callback=None):
741741
resps = self._send_broker_aware_request(
742742
payloads,
743743
KafkaProtocol.encode_list_offset_request,
@@ -747,7 +747,7 @@ def send_list_offset_request(self, payloads=[], fail_on_error=True,
747747
if not fail_on_error or not self._raise_on_response_error(resp)]
748748

749749
@time_metric('offset_commit')
750-
def send_offset_commit_request(self, group, payloads=[],
750+
def send_offset_commit_request(self, group, payloads=(),
751751
fail_on_error=True, callback=None):
752752
encoder = functools.partial(
753753
KafkaProtocol.encode_offset_commit_request,
@@ -760,7 +760,7 @@ def send_offset_commit_request(self, group, payloads=[],
760760
if not fail_on_error or not self._raise_on_response_error(resp)]
761761

762762
@time_metric('offset_commit_kafka')
763-
def send_offset_commit_request_kafka(self, group, payloads=[],
763+
def send_offset_commit_request_kafka(self, group, payloads=(),
764764
fail_on_error=True, callback=None):
765765
encoder = functools.partial(
766766
KafkaProtocol.encode_offset_commit_request_kafka,
@@ -773,7 +773,7 @@ def send_offset_commit_request_kafka(self, group, payloads=[],
773773
if not fail_on_error or not self._raise_on_response_error(resp)]
774774

775775
@time_metric('offset_fetch')
776-
def send_offset_fetch_request(self, group, payloads=[],
776+
def send_offset_fetch_request(self, group, payloads=(),
777777
fail_on_error=True, callback=None):
778778

779779
encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,
@@ -785,7 +785,7 @@ def send_offset_fetch_request(self, group, payloads=[],
785785
if not fail_on_error or not self._raise_on_response_error(resp)]
786786

787787
@time_metric('offset_fetch_kafka')
788-
def send_offset_fetch_request_kafka(self, group, payloads=[],
788+
def send_offset_fetch_request_kafka(self, group, payloads=(),
789789
fail_on_error=True, callback=None):
790790

791791
encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,

test/test_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -404,7 +404,7 @@ def test_send_produce_request_raises_when_topic_unknown(self, protocol, conn):
404404
def test_correlation_rollover(self):
405405
with patch.object(SimpleClient, 'load_metadata_for_topics'):
406406
big_num = 2**31 - 3
407-
client = SimpleClient(hosts=[], correlation_id=big_num)
407+
client = SimpleClient(hosts=(), correlation_id=big_num)
408408
self.assertEqual(big_num + 1, client._next_id())
409409
self.assertEqual(big_num + 2, client._next_id())
410410
self.assertEqual(0, client._next_id())

test/test_conn.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ def test_send_max_ifr(conn):
112112
def test_send_no_response(_socket, conn):
113113
conn.connect()
114114
assert conn.state is ConnectionStates.CONNECTED
115-
req = ProduceRequest[0](required_acks=0, timeout=0, topics=[])
115+
req = ProduceRequest[0](required_acks=0, timeout=0, topics=())
116116
header = RequestHeader(req, client_id=conn.config['client_id'])
117117
payload_bytes = len(header.encode()) + len(req.encode())
118118
third = payload_bytes // 3

test/test_fetcher.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222

2323
@pytest.fixture
2424
def client(mocker):
25-
return mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
25+
return mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))
2626

2727

2828
@pytest.fixture

test/test_sender.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919

2020
@pytest.fixture
2121
def client(mocker):
22-
_cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
22+
_cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))
2323
_cli.cluster = mocker.Mock(spec=ClusterMetadata())
2424
return _cli
2525

0 commit comments

Comments (0)