Skip to content

Commit fa4acfb

Browse files
committed
use absolute imports everywhere
1 parent 4dc0899 commit fa4acfb

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

44 files changed

+148
-145
lines changed

.gitignore

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,6 +5,8 @@ build
55
dist
66
MANIFEST
77
env
8+
venv
9+
.ropeproject/
810
servers/*/kafka-bin*
911
servers/*/resources/ssl*
1012
.coverage*
@@ -13,4 +15,5 @@ docs/_build
1315
.cache*
1416
.idea/
1517
integration-test/
16-
tests-env/
18+
tests-env/
19+
.pytest_cache/

kafka/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from __future__ import absolute_import
22

33
__title__ = 'kafka'
4-
from .version import __version__
4+
from kafka.version import __version__
55
__author__ = 'Dana Powers'
66
__license__ = 'Apache License 2.0'
77
__copyright__ = 'Copyright 2016 Dana Powers, David Arthur, and Contributors'

kafka/client_async.py

Lines changed: 12 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -13,26 +13,26 @@
1313
import selectors # pylint: disable=import-error
1414
except ImportError:
1515
# vendored backport module
16-
from .vendor import selectors34 as selectors
16+
from kafka.vendor import selectors34 as selectors
1717

1818
import socket
1919
import time
2020

2121
from kafka.vendor import six
2222

23-
from .cluster import ClusterMetadata
24-
from .conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25-
from . import errors as Errors
26-
from .future import Future
27-
from .metrics import AnonMeasurable
28-
from .metrics.stats import Avg, Count, Rate
29-
from .metrics.stats.rate import TimeUnit
30-
from .protocol.metadata import MetadataRequest
31-
from .util import Dict, WeakMethod
23+
from kafka.cluster import ClusterMetadata
24+
from kafka.conn import BrokerConnection, ConnectionStates, collect_hosts, get_ip_port_afi
25+
from kafka import errors as Errors
26+
from kafka.future import Future
27+
from kafka.metrics import AnonMeasurable
28+
from kafka.metrics.stats import Avg, Count, Rate
29+
from kafka.metrics.stats.rate import TimeUnit
30+
from kafka.protocol.metadata import MetadataRequest
31+
from kafka.util import Dict, WeakMethod
3232
# Although this looks unused, it actually monkey-patches socket.socketpair()
3333
# and should be left in as long as we're using socket.socketpair() in this file
34-
from .vendor import socketpair
35-
from .version import __version__
34+
from kafka.vendor import socketpair
35+
from kafka.version import __version__
3636

3737
if six.PY2:
3838
ConnectionError = None

kafka/cluster.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,9 +8,9 @@
88

99
from kafka.vendor import six
1010

11-
from . import errors as Errors
12-
from .future import Future
13-
from .structs import BrokerMetadata, PartitionMetadata, TopicPartition
11+
from kafka import errors as Errors
12+
from kafka.future import Future
13+
from kafka.structs import BrokerMetadata, PartitionMetadata, TopicPartition
1414

1515
log = logging.getLogger(__name__)
1616

kafka/conn.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
import selectors # pylint: disable=import-error
1313
except ImportError:
1414
# vendored backport module
15-
from .vendor import selectors34 as selectors
15+
from kafka.vendor import selectors34 as selectors
1616

1717
import socket
1818
import struct
@@ -857,8 +857,8 @@ def check_version(self, timeout=2, strict=False):
857857
# vanilla MetadataRequest. If the server did not recognize the first
858858
# request, both will be failed with a ConnectionError that wraps
859859
# socket.error (32, 54, or 104)
860-
from .protocol.admin import ApiVersionRequest, ListGroupsRequest
861-
from .protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
860+
from kafka.protocol.admin import ApiVersionRequest, ListGroupsRequest
861+
from kafka.protocol.commit import OffsetFetchRequest, GroupCoordinatorRequest
862862

863863
# Socket errors are logged as exceptions and can alarm users. Mute them
864864
from logging import Filter

kafka/consumer/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .simple import SimpleConsumer
4-
from .multiprocess import MultiProcessConsumer
5-
from .group import KafkaConsumer
3+
from kafka.consumer.simple import SimpleConsumer
4+
from kafka.consumer.multiprocess import MultiProcessConsumer
5+
from kafka.consumer.group import KafkaConsumer
66

77
__all__ = [
88
'SimpleConsumer', 'MultiProcessConsumer', 'KafkaConsumer'

kafka/consumer/multiprocess.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,15 +8,15 @@
88

99
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1010

11-
from ..common import KafkaError
12-
from .base import (
11+
from kafka.common import KafkaError
12+
from kafka.consumer.base import (
1313
Consumer,
1414
AUTO_COMMIT_MSG_COUNT, AUTO_COMMIT_INTERVAL,
1515
NO_MESSAGES_WAIT_TIME_SECONDS,
1616
FULL_QUEUE_WAIT_TIME_SECONDS,
1717
MAX_BACKOFF_SECONDS,
1818
)
19-
from .simple import SimpleConsumer
19+
from kafka.consumer.simple import SimpleConsumer
2020

2121

2222
log = logging.getLogger(__name__)

kafka/consumer/simple.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
from kafka.vendor import six
1313
from kafka.vendor.six.moves import queue # pylint: disable=import-error
1414

15-
from .base import (
15+
from kafka.consumer.base import (
1616
Consumer,
1717
FETCH_DEFAULT_BLOCK_TIMEOUT,
1818
AUTO_COMMIT_MSG_COUNT,
@@ -24,7 +24,7 @@
2424
ITER_TIMEOUT_SECONDS,
2525
NO_MESSAGES_WAIT_TIME_SECONDS
2626
)
27-
from ..common import (
27+
from kafka.common import (
2828
FetchRequestPayload, KafkaError, OffsetRequestPayload,
2929
ConsumerFetchSizeTooSmall,
3030
UnknownTopicOrPartitionError, NotLeaderForPartitionError,

kafka/coordinator/assignors/range.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -5,8 +5,8 @@
55

66
from kafka.vendor import six
77

8-
from .abstract import AbstractPartitionAssignor
9-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
8+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
9+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1010

1111
log = logging.getLogger(__name__)
1212

kafka/coordinator/assignors/roundrobin.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -6,9 +6,9 @@
66

77
from kafka.vendor import six
88

9-
from .abstract import AbstractPartitionAssignor
10-
from ...common import TopicPartition
11-
from ..protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
9+
from kafka.coordinator.assignors.abstract import AbstractPartitionAssignor
10+
from kafka.common import TopicPartition
11+
from kafka.coordinator.protocol import ConsumerProtocolMemberMetadata, ConsumerProtocolMemberAssignment
1212

1313
log = logging.getLogger(__name__)
1414

kafka/coordinator/base.py

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -10,13 +10,13 @@
1010

1111
from kafka.vendor import six
1212

13-
from .heartbeat import Heartbeat
14-
from .. import errors as Errors
15-
from ..future import Future
16-
from ..metrics import AnonMeasurable
17-
from ..metrics.stats import Avg, Count, Max, Rate
18-
from ..protocol.commit import GroupCoordinatorRequest, OffsetCommitRequest
19-
from ..protocol.group import (HeartbeatRequest, JoinGroupRequest,
13+
from kafka.coordinator.heartbeat import Heartbeat
14+
from kafka import errors as Errors
15+
from kafka.future import Future
16+
from kafka.metrics import AnonMeasurable
17+
from kafka.metrics.stats import Avg, Count, Max, Rate
18+
from kafka.protocol.commit import GroupCoordinatorRequest, OffsetCommitRequest
19+
from kafka.protocol.group import (HeartbeatRequest, JoinGroupRequest,
2020
LeaveGroupRequest, SyncGroupRequest)
2121

2222
log = logging.getLogger('kafka.coordinator')

kafka/coordinator/consumer.py

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -7,17 +7,17 @@
77

88
from kafka.vendor import six
99

10-
from .base import BaseCoordinator, Generation
11-
from .assignors.range import RangePartitionAssignor
12-
from .assignors.roundrobin import RoundRobinPartitionAssignor
13-
from .protocol import ConsumerProtocol
14-
from .. import errors as Errors
15-
from ..future import Future
16-
from ..metrics import AnonMeasurable
17-
from ..metrics.stats import Avg, Count, Max, Rate
18-
from ..protocol.commit import OffsetCommitRequest, OffsetFetchRequest
19-
from ..structs import OffsetAndMetadata, TopicPartition
20-
from ..util import WeakMethod
10+
from kafka.coordinator.base import BaseCoordinator, Generation
11+
from kafka.coordinator.assignors.range import RangePartitionAssignor
12+
from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
13+
from kafka.coordinator.protocol import ConsumerProtocol
14+
from kafka import errors as Errors
15+
from kafka.future import Future
16+
from kafka.metrics import AnonMeasurable
17+
from kafka.metrics.stats import Avg, Count, Max, Rate
18+
from kafka.protocol.commit import OffsetCommitRequest, OffsetFetchRequest
19+
from kafka.structs import OffsetAndMetadata, TopicPartition
20+
from kafka.util import WeakMethod
2121

2222

2323
log = logging.getLogger(__name__)

kafka/metrics/__init__.py

Lines changed: 8 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -1,13 +1,13 @@
11
from __future__ import absolute_import
22

3-
from .compound_stat import NamedMeasurable
4-
from .dict_reporter import DictReporter
5-
from .kafka_metric import KafkaMetric
6-
from .measurable import AnonMeasurable
7-
from .metric_config import MetricConfig
8-
from .metric_name import MetricName
9-
from .metrics import Metrics
10-
from .quota import Quota
3+
from kafka.metrics.compound_stat import NamedMeasurable
4+
from kafka.metrics.dict_reporter import DictReporter
5+
from kafka.metrics.kafka_metric import KafkaMetric
6+
from kafka.metrics.measurable import AnonMeasurable
7+
from kafka.metrics.metric_config import MetricConfig
8+
from kafka.metrics.metric_name import MetricName
9+
from kafka.metrics.metrics import Metrics
10+
from kafka.metrics.quota import Quota
1111

1212
__all__ = [
1313
'AnonMeasurable', 'DictReporter', 'KafkaMetric', 'MetricConfig',

kafka/metrics/stats/__init__.py

Lines changed: 10 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -1,15 +1,15 @@
11
from __future__ import absolute_import
22

3-
from .avg import Avg
4-
from .count import Count
5-
from .histogram import Histogram
6-
from .max_stat import Max
7-
from .min_stat import Min
8-
from .percentile import Percentile
9-
from .percentiles import Percentiles
10-
from .rate import Rate
11-
from .sensor import Sensor
12-
from .total import Total
3+
from kafka.metrics.stats.avg import Avg
4+
from kafka.metrics.stats.count import Count
5+
from kafka.metrics.stats.histogram import Histogram
6+
from kafka.metrics.stats.max_stat import Max
7+
from kafka.metrics.stats.min_stat import Min
8+
from kafka.metrics.stats.percentile import Percentile
9+
from kafka.metrics.stats.percentiles import Percentiles
10+
from kafka.metrics.stats.rate import Rate
11+
from kafka.metrics.stats.sensor import Sensor
12+
from kafka.metrics.stats.total import Total
1313

1414
__all__ = [
1515
'Avg', 'Count', 'Histogram', 'Max', 'Min', 'Percentile', 'Percentiles',

kafka/partitioner/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .default import DefaultPartitioner
4-
from .hashed import HashedPartitioner, Murmur2Partitioner, LegacyPartitioner
5-
from .roundrobin import RoundRobinPartitioner
3+
from kafka.partitioner.default import DefaultPartitioner
4+
from kafka.partitioner.hashed import HashedPartitioner, Murmur2Partitioner, LegacyPartitioner
5+
from kafka.partitioner.roundrobin import RoundRobinPartitioner
66

77
__all__ = [
88
'DefaultPartitioner', 'RoundRobinPartitioner', 'HashedPartitioner',

kafka/partitioner/default.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,7 @@
22

33
import random
44

5-
from .hashed import murmur2
5+
from kafka.partitioner.hashed import murmur2
66

77

88
class DefaultPartitioner(object):

kafka/partitioner/hashed.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,7 @@
22

33
from kafka.vendor import six
44

5-
from .base import Partitioner
5+
from kafka.partitioner.base import Partitioner
66

77

88
class Murmur2Partitioner(Partitioner):

kafka/partitioner/roundrobin.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
from __future__ import absolute_import
22

3-
from .base import Partitioner
3+
from kafka.partitioner.base import Partitioner
44

55

66
class RoundRobinPartitioner(Partitioner):

kafka/producer/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
from __future__ import absolute_import
22

3-
from .kafka import KafkaProducer
4-
from .simple import SimpleProducer
5-
from .keyed import KeyedProducer
3+
from kafka.producer.kafka import KafkaProducer
4+
from kafka.producer.simple import SimpleProducer
5+
from kafka.producer.keyed import KeyedProducer
66

77
__all__ = [
88
'KafkaProducer',

kafka/producer/buffer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import threading
66
import time
77

8-
from ..metrics.stats import Rate
8+
from kafka.metrics.stats import Rate
99

1010
import kafka.errors as Errors
1111

kafka/producer/future.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,8 +3,8 @@
33
import collections
44
import threading
55

6-
from .. import errors as Errors
7-
from ..future import Future
6+
from kafka import errors as Errors
7+
from kafka.future import Future
88

99

1010
class FutureProduceResult(Future):

kafka/producer/kafka.py

Lines changed: 14 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -8,20 +8,20 @@
88
import time
99
import weakref
1010

11-
from ..vendor import six
12-
13-
from .. import errors as Errors
14-
from ..client_async import KafkaClient, selectors
15-
from ..codec import has_gzip, has_snappy, has_lz4
16-
from ..metrics import MetricConfig, Metrics
17-
from ..partitioner.default import DefaultPartitioner
18-
from ..record.default_records import DefaultRecordBatchBuilder
19-
from ..record.legacy_records import LegacyRecordBatchBuilder
20-
from ..serializer import Serializer
21-
from ..structs import TopicPartition
22-
from .future import FutureRecordMetadata, FutureProduceResult
23-
from .record_accumulator import AtomicInteger, RecordAccumulator
24-
from .sender import Sender
11+
from kafka.vendor import six
12+
13+
from kafka import errors as Errors
14+
from kafka.client_async import KafkaClient, selectors
15+
from kafka.codec import has_gzip, has_snappy, has_lz4
16+
from kafka.metrics import MetricConfig, Metrics
17+
from kafka.partitioner.default import DefaultPartitioner
18+
from kafka.record.default_records import DefaultRecordBatchBuilder
19+
from kafka.record.legacy_records import LegacyRecordBatchBuilder
20+
from kafka.serializer import Serializer
21+
from kafka.structs import TopicPartition
22+
from kafka.producer.future import FutureRecordMetadata, FutureProduceResult
23+
from kafka.producer.record_accumulator import AtomicInteger, RecordAccumulator
24+
from kafka.producer.sender import Sender
2525

2626

2727
log = logging.getLogger(__name__)

kafka/producer/keyed.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,8 +3,8 @@
33
import logging
44
import warnings
55

6-
from .base import Producer
7-
from ..partitioner import HashedPartitioner
6+
from kafka.producer.base import Producer
7+
from kafka.partitioner import HashedPartitioner
88

99

1010
log = logging.getLogger(__name__)

0 commit comments

Comments (0)