Skip to content

Commit 6e991d8

Browse files
authored
Merge pull request #42 from jgartrel/python3
Update to Python3
2 parents 4dc5045 + 3d44468 commit 6e991d8

File tree

9 files changed

+50
-53
lines changed

9 files changed

+50
-53
lines changed

_tests/test_pput.py

Lines changed: 13 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
1-
from cStringIO import StringIO
1+
from io import BytesIO
22
from datetime import datetime
3-
from Queue import Queue
3+
from queue import Queue
44
from uuid import uuid4
55
import hashlib
66

@@ -39,19 +39,19 @@ def sample_data():
3939
"""
4040
global _cached_sample_data
4141
if _cached_sample_data is None:
42-
data = StringIO()
43-
chars = "".join(chr(i) for i in xrange(256))
44-
for count in xrange(6):
42+
data = BytesIO()
43+
chars = "".join(chr(i) for i in range(256))
44+
for count in range(6):
4545
cc = chr(count)
46-
for _ in xrange(2 * 1024):
46+
for _ in range(2 * 1024):
4747
# each iteration adds 1MB
4848
# each 1MB chunk is made up of an alternation of the block's index (zero based)
4949
# and an incrementing counter (overflows to 0 several times)
5050
# the first block will be: 00 00 00 01 00 02 ... 00 ff 00 00 ... 00 ff
5151
data.write(
52-
"".join(cc+chars[i] for i in xrange(256))
52+
"".join(cc+chars[i] for i in range(256)).encode("latin1")
5353
)
54-
print "wrote {} MB" .format(data.tell() / 1024.0 / 1024.0)
54+
print("wrote {} MB" .format(data.tell() / 1024.0 / 1024.0))
5555
# give the test a read-only file to avoid accidentally modifying the data between tests
5656
_cached_sample_data = ReadOnlyFile(data)
5757
_cached_sample_data.seek(0)
@@ -71,12 +71,12 @@ def test_multipart_etag(sample_data):
7171

7272

7373
def test_stream_handler():
74-
stream_handler = StreamHandler(StringIO("aabbccdde"), chunk_size=2)
74+
stream_handler = StreamHandler(BytesIO(b"aabbccdde"), chunk_size=2)
7575
chunks = []
7676
while not stream_handler.finished:
7777
chunk = stream_handler.get_chunk()
7878
chunks.append(chunk)
79-
assert chunks == ['aa', 'bb', 'cc', 'dd', 'e']
79+
assert chunks == [b'aa', b'bb', b'cc', b'dd', b'e']
8080

8181

8282
def test_handle_results():
@@ -134,7 +134,7 @@ def test_supervisor_loop(sample_data):
134134

135135

136136
def test_zero_data(sample_data):
137-
stream_handler = StreamHandler(StringIO())
137+
stream_handler = StreamHandler(BytesIO())
138138
bucket = FakeBucket()
139139
sup = UploadSupervisor(stream_handler, 'test', bucket=bucket)
140140
with pytest.raises(UploadException):
@@ -149,6 +149,7 @@ def upload_part(self, index, chunk):
149149
return hashlib.md5(chunk).hexdigest()
150150

151151

152+
@pytest.mark.filterwarnings("ignore:Exception in thread")
152153
def test_supervisor_loop_with_worker_crash(sample_data):
153154
stream_handler = StreamHandler(sample_data)
154155
bucket = FakeBucket()
@@ -174,7 +175,7 @@ def call(self):
174175
def test_retry_decorator():
175176
boom = Boom()
176177
with pytest.raises(BoomException) as excp_info:
177-
for _ in xrange(3):
178+
for _ in range(3):
178179
boom.call()
179180
assert boom.count == 3
180181

_tests/test_snap.py

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
# pylint: disable=redefined-outer-name,protected-access
22
from collections import OrderedDict
3-
from cStringIO import StringIO
3+
from io import StringIO
44
import contextlib
55
import string
66
import sys
@@ -30,7 +30,7 @@ def __init__(self, name, metadata=None):
3030

3131

3232
class FakeBucket(object):
33-
rand_prefix = 'test-' + ''.join([random.choice(string.ascii_letters) for _ in xrange(8)]) + '/'
33+
rand_prefix = 'test-' + ''.join([random.choice(string.ascii_letters) for _ in range(8)]) + '/'
3434
fake_data = {
3535
"pool/fs@snap_0": {'parent': 'pool/fs@snap_expired'},
3636
"pool/fs@snap_1_f": {'isfull': 'true', 'compressor': 'pigz1'},
@@ -44,7 +44,7 @@ class FakeBucket(object):
4444

4545
def list(self, *a, **kwa):
4646
# boto bucket.list gives you keys without metadata, let's emulate that
47-
return (FakeKey(os.path.join(self.rand_prefix, name)) for name in self.fake_data.iterkeys())
47+
return (FakeKey(os.path.join(self.rand_prefix, name)) for name in self.fake_data.keys())
4848

4949
def get_key(self, key):
5050
name = key[len(self.rand_prefix):]
@@ -60,9 +60,9 @@ def write_s3_data():
6060
cfg = get_config()
6161
bucket = boto.connect_s3(
6262
cfg['S3_KEY_ID'], cfg['S3_SECRET']).get_bucket(cfg['BUCKET'])
63-
for name, metadata in FakeBucket.fake_data.iteritems():
63+
for name, metadata in FakeBucket.fake_data.items():
6464
key = bucket.new_key(os.path.join(FakeBucket.rand_prefix, name))
65-
headers = {("x-amz-meta-" + k): v for k, v in metadata.iteritems()}
65+
headers = {("x-amz-meta-" + k): v for k, v in metadata.items()}
6666
key.set_contents_from_string("spam", headers=headers)
6767
return bucket
6868

@@ -204,8 +204,8 @@ def test_list_local_snapshots():
204204
}
205205
snapshots = zfs._parse_snapshots()
206206
# comparing .items() because we care about the sorting in the OrderedDict's
207-
assert snapshots['pool'].items() == expected['pool'].items()
208-
assert snapshots['pool/fs'].items() == expected['pool/fs'].items()
207+
assert list(snapshots['pool'].items()) == list(expected['pool'].items())
208+
assert list(snapshots['pool/fs'].items()) == list(expected['pool/fs'].items())
209209

210210

211211
@pytest.mark.parametrize("fs_name, expected", [
@@ -324,7 +324,7 @@ def test_backup_incremental_missing_parent(s3_manager):
324324
pair_manager = PairManager(s3_manager, zfs_manager, command_executor=fake_cmd)
325325
with pytest.raises(IntegrityError) as excp_info:
326326
pair_manager.backup_incremental()
327-
assert excp_info.value.message == \
327+
assert str(excp_info.value) == \
328328
"Broken snapshot detected pool/fs@snap_5, reason: 'parent broken'"
329329
assert fake_cmd._called_commands == []
330330

@@ -345,7 +345,7 @@ def test_backup_incremental_cycle(s3_manager):
345345
pair_manager = PairManager(s3_manager, zfs_manager, command_executor=fake_cmd)
346346
with pytest.raises(IntegrityError) as excp_info:
347347
pair_manager.backup_incremental()
348-
assert excp_info.value.message == \
348+
assert str(excp_info.value) == \
349349
"Broken snapshot detected pool/fs@snap_7_cycle, reason: 'cycle detected'"
350350
assert fake_cmd._called_commands == []
351351

@@ -456,7 +456,7 @@ def test_restore_broken(s3_manager):
456456
pair_manager = PairManager(s3_manager, zfs_manager, command_executor=fake_cmd)
457457
with pytest.raises(IntegrityError) as excp_info:
458458
pair_manager.restore('pool/fs@snap_4_mp')
459-
assert excp_info.value.message == \
459+
assert str(excp_info.value) == \
460460
"Broken snapshot detected pool/fs@snap_4_mp, reason: 'missing parent'"
461461

462462

@@ -501,7 +501,7 @@ def test_get_latest():
501501
fake_cmd = FakeCommandExecutor()
502502
with pytest.raises(SoftError) as excp_info:
503503
zfs_manager.get_latest()
504-
assert excp_info.value.message == \
504+
assert str(excp_info.value) == \
505505
'Nothing to backup for filesystem "None". Are you sure ' \
506506
'SNAPSHOT_PREFIX="zfs-auto-snap:daily" is correct?'
507507
assert fake_cmd._called_commands == []

_tests/test_ssh_sync.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@
2222
)
2323

2424

25-
@pytest.mark.parametrize("pair, expected", HAPPY_PATH.values(), ids=HAPPY_PATH.keys())
25+
@pytest.mark.parametrize("pair, expected", list(HAPPY_PATH.values()), ids=list(HAPPY_PATH.keys()))
2626
def test_snapshots_to_send(pair, expected):
2727
local, remote = pair
2828
assert snapshots_to_send(local, remote) == expected
@@ -36,12 +36,12 @@ def test_snapshots_to_send(pair, expected):
3636
)
3737

3838

39-
@pytest.mark.parametrize('pair, err_msg', ERRORS.values(), ids=ERRORS.keys())
39+
@pytest.mark.parametrize('pair, err_msg', list(ERRORS.values()), ids=list(ERRORS.keys()))
4040
def test_snapshots_to_send_error(pair, err_msg):
4141
local, remote = pair
4242
with pytest.raises(AssertionError) as err:
4343
snapshots_to_send(local, remote)
44-
assert err_msg == err.value.message
44+
assert err_msg == str(err.value)
4545

4646

4747
PULL_HAPPY_PATH = dict(
@@ -59,7 +59,7 @@ def test_snapshots_to_send_error(pair, err_msg):
5959
)
6060

6161

62-
@pytest.mark.parametrize('pair, expected', PULL_HAPPY_PATH.values(), ids=PULL_HAPPY_PATH.keys())
62+
@pytest.mark.parametrize('pair, expected', list(PULL_HAPPY_PATH.values()), ids=list(PULL_HAPPY_PATH.keys()))
6363
def test_pull_command(pair, expected):
6464
commands = sync_snapshots(
6565
pair,
@@ -87,7 +87,7 @@ def test_pull_command(pair, expected):
8787
)
8888

8989

90-
@pytest.mark.parametrize('pair, expected', PUSH_HAPPY_PATH.values(), ids=PUSH_HAPPY_PATH.keys())
90+
@pytest.mark.parametrize('pair, expected', list(PUSH_HAPPY_PATH.values()), ids=list(PUSH_HAPPY_PATH.keys()))
9191
def test_push_command(pair, expected):
9292
commands = sync_snapshots(
9393
pair,

z3/config.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
import ConfigParser
1+
import configparser
22
import os
33
import os.path
44

@@ -50,7 +50,7 @@ def get(self, key, default=None, section=None):
5050
def get_config():
5151
global _settings
5252
if _settings is None:
53-
_config = ConfigParser.ConfigParser()
53+
_config = configparser.ConfigParser()
5454
default = os.path.join(z3.__path__[0], "z3.conf")
5555
_config.read(default)
5656
_config.read("/etc/z3_backup/z3.conf")

z3/get.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -26,7 +26,7 @@ def main():
2626
else:
2727
s3 = boto3.client('s3', **extra_config)
2828
try:
29-
s3.download_fileobj(cfg['BUCKET'], args.name, sys.stdout, Config=config)
29+
s3.download_fileobj(cfg['BUCKET'], args.name, sys.stdout.buffer, Config=config)
3030
except botocore.exceptions.ClientError as e:
3131
if e.response['Error']['Code'] == "404":
3232
print("The object does not exist.")

z3/pput.py

Lines changed: 10 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -4,8 +4,8 @@
44
pput bucket_name/filename
55
"""
66

7-
from Queue import Queue
8-
from cStringIO import StringIO
7+
from queue import Queue
8+
from io import BytesIO
99
from collections import namedtuple
1010
from threading import Thread
1111
import argparse
@@ -47,7 +47,7 @@ def multipart_etag(digests):
4747

4848

4949
def parse_size(size):
50-
if isinstance(size, (int, long)):
50+
if isinstance(size, int):
5151
return size
5252
size = size.strip().upper()
5353
last = size[-1]
@@ -66,7 +66,7 @@ class StreamHandler(object):
6666
def __init__(self, input_stream, chunk_size=5*1024*1024):
6767
self.input_stream = input_stream
6868
self.chunk_size = chunk_size
69-
self._partial_chunk = ""
69+
self._partial_chunk = b""
7070
self._eof_reached = False
7171

7272
@property
@@ -82,7 +82,7 @@ def get_chunk(self):
8282
self._partial_chunk += read
8383
if len(self._partial_chunk) == self.chunk_size or self._eof_reached:
8484
chunk = self._partial_chunk
85-
self._partial_chunk = ""
85+
self._partial_chunk = b""
8686
return chunk
8787
# else:
8888
# print "partial", len(self._partial_chunk)
@@ -92,7 +92,7 @@ def retry(times=int(CFG['MAX_RETRIES'])):
9292
def decorator(func):
9393
@functools.wraps(func)
9494
def wrapped(*a, **kwa):
95-
for attempt in xrange(1, times+1):
95+
for attempt in range(1, times+1):
9696
try:
9797
return func(*a, **kwa)
9898
except: # pylint: disable=bare-except
@@ -123,7 +123,7 @@ def upload_part(self, index, chunk):
123123
part.id = self.multipart.id
124124
part.key_name = self.multipart.key_name
125125
return part.upload_part_from_file(
126-
StringIO(chunk), index, replace=True).md5
126+
BytesIO(chunk), index, replace=True).md5
127127

128128
def start(self):
129129
self._thread = Thread(target=self.main_loop)
@@ -179,7 +179,7 @@ def _start_workers(self, concurrency, worker_class):
179179
inbox=work_queue,
180180
outbox=result_queue,
181181
).start()
182-
for _ in xrange(concurrency)]
182+
for _ in range(concurrency)]
183183
return workers
184184

185185
def _begin_upload(self):
@@ -321,7 +321,7 @@ def parse_args():
321321

322322
def main():
323323
args = parse_args()
324-
input_fd = os.fdopen(args.file_descriptor, 'r') if args.file_descriptor else sys.stdin
324+
input_fd = os.fdopen(args.file_descriptor, 'rb') if args.file_descriptor else sys.stdin.buffer
325325
if args.estimated is not None:
326326
chunk_size = optimize_chunksize(parse_size(args.estimated))
327327
else:
@@ -359,7 +359,7 @@ def main():
359359
sys.stderr.write("{}\n".format(excp))
360360
return 1
361361
if verbosity >= VERB_NORMAL:
362-
print json.dumps({'status': 'success', 'etag': etag})
362+
print(json.dumps({'status': 'success', 'etag': etag}))
363363

364364

365365
if __name__ == '__main__':

z3/s3_mp_cleanup.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -10,15 +10,15 @@ def cleanup_multipart(bucket, max_days=1, dry_run=False):
1010
max_age_seconds = max_days * 24 * 3600
1111
now = datetime.utcnow()
1212
fmt = "{} | {:30} | {:20}"
13-
print fmt.format("A", "key", "initiated")
13+
print(fmt.format("A", "key", "initiated"))
1414
for multi in bucket.list_multipart_uploads():
1515
delta = now-boto.utils.parse_ts(multi.initiated)
1616
if delta.total_seconds() >= max_age_seconds:
17-
print fmt.format("X", multi.key_name, multi.initiated)
17+
print(fmt.format("X", multi.key_name, multi.initiated))
1818
if not dry_run:
1919
multi.cancel_upload()
2020
else:
21-
print fmt.format(" ", multi.key_name, multi.initiated)
21+
print(fmt.format(" ", multi.key_name, multi.initiated))
2222

2323

2424
def main():

z3/snap.py

Lines changed: 5 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
from __future__ import print_function
2-
31
import argparse
42
import functools
53
import logging
@@ -148,7 +146,7 @@ def _snapshots(self):
148146
return snapshots
149147

150148
def list(self):
151-
return sorted(self._snapshots.values(), key=operator.attrgetter('name'))
149+
return sorted(list(self._snapshots.values()), key=operator.attrgetter('name'))
152150

153151
def get(self, name):
154152
return self._snapshots.get(name)
@@ -174,7 +172,7 @@ def _list_snapshots(self):
174172
# see FakeZFSManager
175173
return subprocess.check_output(
176174
['zfs', 'list', '-Ht', 'snap', '-o',
177-
'name,used,refer,mountpoint,written'])
175+
'name,used,refer,mountpoint,written'], universal_newlines=True)
178176

179177
def _parse_snapshots(self):
180178
"""Returns all snapshots grouped by filesystem, a dict of OrderedDict's
@@ -209,7 +207,7 @@ def _build_snapshots(self, fs_name):
209207
# for fs_name, fs_snaps in self._parse_snapshots().iteritems():
210208
fs_snaps = self._parse_snapshots().get(fs_name, {})
211209
parent = None
212-
for snap_name, data in fs_snaps.iteritems():
210+
for snap_name, data in fs_snaps.items():
213211
if not snap_name.startswith(self._snapshot_prefix):
214212
continue
215213
full_name = '{}@{}'.format(fs_name, snap_name)
@@ -229,7 +227,7 @@ def _snapshots(self):
229227
return self._build_snapshots(self._fs_name)
230228

231229
def list(self):
232-
return self._snapshots.values()
230+
return list(self._snapshots.values())
233231

234232
def get_latest(self):
235233
if len(self._snapshots) == 0:
@@ -238,7 +236,7 @@ def get_latest(self):
238236
'Nothing to backup for filesystem "{}". Are you sure '
239237
'SNAPSHOT_PREFIX="{}" is correct?'.format(
240238
cfg.get('FILESYSTEM'), cfg.get('SNAPSHOT_PREFIX')))
241-
return self._snapshots.values()[-1]
239+
return list(self._snapshots.values())[-1]
242240

243241
def get(self, name):
244242
return self._snapshots.get(name)

z3/ssh_sync.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
from __future__ import print_function
2-
31
import argparse
42
import subprocess
53
import sys

0 commit comments

Comments (0)