if extra_metadata:
metadata['extra_metadata'] = extra_metadata
metadata_json = json.dumps(metadata, sort_keys=True, indent=2)
+ if six.PY3:
+ metadata_json = metadata_json.encode('utf-8')
with self.get_object_writer(container, filename) as writer:
writer.write(metadata_json)
LOG.debug('_write_metadata finished. Metadata: %s.', metadata_json)
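
Note: json.dumps() returns text (str) on Python 3, while the Swift object writer consumes bytes, so the payload has to be encoded at the I/O boundary. A minimal standalone sketch of the pattern (function name hypothetical):

    import json

    import six

    def serialize_for_object_store(obj):
        payload = json.dumps(obj, sort_keys=True, indent=2)  # always str
        if six.PY3:
            # Writers expect bytes on Python 3; encode exactly once,
            # right before the write.
            payload = payload.encode('utf-8')
        return payload
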
sha256file['chunk_size'] = self.sha_block_size_bytes
sha256file['sha256s'] = sha256_list
sha256file_json = json.dumps(sha256file, sort_keys=True, indent=2)
+ if six.PY3:
+ sha256file_json = sha256file_json.encode('utf-8')
with self.get_object_writer(container, filename) as writer:
writer.write(sha256file_json)
LOG.debug('_write_sha256file finished.')
{'container': container, 'filename': filename})
with self.get_object_reader(container, filename) as reader:
metadata_json = reader.read()
+ if six.PY3:
+ metadata_json = metadata_json.decode('utf-8')
metadata = json.loads(metadata_json)
LOG.debug('_read_metadata finished. Metadata: %s.', metadata_json)
return metadata
{'container': container, 'filename': filename})
with self.get_object_reader(container, filename) as reader:
sha256file_json = reader.read()
+ if six.PY3:
+ sha256file_json = sha256file_json.decode('utf-8')
sha256file = json.loads(sha256file_json)
LOG.debug('_read_sha256file finished (%s).', sha256file)
return sha256file
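
The read side mirrors this: raw object data comes back as bytes, and json.loads() on the Python 3 releases targeted here does not accept bytes, so decode before parsing. A standalone counterpart (name hypothetical):

    import json

    import six

    def deserialize_from_object_store(raw):
        if six.PY3 and isinstance(raw, bytes):
            # json.loads() rejects bytes on older Python 3 releases;
            # decode once at the read boundary.
            raw = raw.decode('utf-8')
        return json.loads(raw)
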
while True:
data_offset = volume_file.tell()
data = volume_file.read(self.chunk_size_bytes)
- if data == '':
+ if data == b'':
break
# Calculate new shas with the datablock.
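
The sentinel change matters because a stream opened in binary mode returns b'' at EOF on Python 3, never '': comparing the bytes chunk against the text sentinel would never be true, and the loop would spin forever. A hypothetical helper showing the same termination rule:

    CHUNK_SIZE = 64 * 1024

    def iter_chunks(stream):
        # stream must be opened in binary mode: read() then yields
        # bytes and returns b'' at EOF on both Python 2 and 3.
        while True:
            data = stream.read(CHUNK_SIZE)
            if data == b'':
                break
            yield data
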
extra_metadata = metadata.get('extra_metadata')
container = backup['container']
metadata_objects = metadata['objects']
- metadata_object_names = sum((obj.keys() for obj in metadata_objects),
- [])
+ metadata_object_names = []
+ for obj in metadata_objects:
+ metadata_object_names.extend(obj.keys())
LOG.debug('metadata_object_names = %s.', metadata_object_names)
prune_list = [self._metadata_filename(backup),
self._sha256_filename(backup)]
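
The sum() idiom breaks on Python 3 because dict.keys() returns a view there, and list + dict_keys raises TypeError; list.extend() accepts any iterable, so the explicit loop works on both versions. A quick demonstration with made-up object names:

    metadata_objects = [{'backup-00001': {'offset': 0}},
                        {'backup-00002': {'offset': 20}}]

    names = []
    for obj in metadata_objects:
        names.extend(obj.keys())   # accepts list and dict view alike
    assert names == ['backup-00001', 'backup-00002']
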
raise exception.InvalidBackup(reason=err)
for metadata_object in metadata_objects:
- object_name = metadata_object.keys()[0]
+ object_name, obj = list(metadata_object.items())[0]
LOG.debug('restoring object. backup: %(backup_id)s, '
'container: %(container)s, object name: '
'%(object_name)s, volume: %(volume_id)s.',
body = reader.read()
compression_algorithm = metadata_object[object_name]['compression']
decompressor = self._get_compressor(compression_algorithm)
- volume_file.seek(metadata_object.values()[0]['offset'])
+ volume_file.seek(obj['offset'])
if decompressor is not None:
LOG.debug('decompressing data using %s algorithm',
compression_algorithm)
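
Similarly, keys() and values() can no longer be indexed on Python 3 ('dict_keys' object does not support indexing); materializing one (name, payload) pair via items() also saves the second lookup for the offset. Illustrated with a made-up entry:

    metadata_object = {'backup-00001': {'offset': 0,
                                        'compression': 'zlib'}}

    # list(...) materializes the view; [0] is safe for one-entry dicts.
    object_name, obj = list(metadata_object.items())[0]
    assert object_name == 'backup-00001' and obj['offset'] == 0
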
self.container = container
self.object_name = object_name
self.conn = conn
- self.data = ''
+ self.data = bytearray()
def __enter__(self):
return self
self.data += data
def close(self):
- reader = six.StringIO(self.data)
+ reader = six.BytesIO(self.data)
try:
etag = self.conn.put_object(self.container, self.object_name,
reader,
- content_length=reader.len)
+ content_length=len(self.data))
except socket.error as err:
raise exception.SwiftConnectionFailed(reason=err)
LOG.debug('swift MD5 for %(object_name)s: %(etag)s',
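
The writer's buffer moves from str to bytearray so that '+= bytes' keeps working on Python 3, and the upload length now comes from len() because io.BytesIO, unlike the old StringIO-based reader, exposes no .len attribute. A minimal sketch of the same accumulate-then-upload shape:

    import six

    buf = bytearray()          # mutable; '+= bytes' appends in place
    buf += b'chunk-one'
    buf += b'chunk-two'

    reader = six.BytesIO(bytes(buf))   # file-like body for put_object()
    content_length = len(buf)          # io.BytesIO has no .len
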
import zlib
from oslo_log import log as logging
+import six
from six.moves import http_client
from swiftclient import client as swift
'offset': 20}
}]
metadata_json = json.dumps(metadata, sort_keys=True, indent=2)
+ if six.PY3:
+ metadata_json = metadata_json.encode('utf-8')
fake_object_body = metadata_json
return (fake_object_header, fake_object_body)
def test_prepare_output_data_effective_compression(self):
service = swift_dr.SwiftBackupDriver(self.ctxt)
# Set up buffer of 128 zeroed bytes
- fake_data = buffer(bytearray(128))
+ fake_data = b'\0' * 128
result = service._prepare_output_data(fake_data)
self.flags(backup_compression_algorithm='none')
service = swift_dr.SwiftBackupDriver(self.ctxt)
# Set up buffer of 128 zeroed bytes
- fake_data = buffer(bytearray(128))
+ fake_data = b'\0' * 128
result = service._prepare_output_data(fake_data)
def test_prepare_output_data_ineffective_compression(self):
service = swift_dr.SwiftBackupDriver(self.ctxt)
# Set up buffer of 128 zeroed bytes
- fake_data = buffer(bytearray(128))
+ fake_data = b'\0' * 128
# Pre-compress so that compression in the driver will be ineffective.
already_compressed_data = service.compressor.compress(fake_data)
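
buffer() no longer exists on Python 3; a bytes literal gives the same read-only, zero-filled 128-byte payload on both versions (bytearray(128) would also work, at the cost of mutability):

    fake_data = b'\0' * 128
    assert isinstance(fake_data, bytes) and len(fake_data) == 128
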
self._create_backup_db_entry(backup_id2, mode)
self._create_backup_db_entry(backup_id3, mode)
- with open(VOLUME_PATH, 'rw') as volume_file:
+ with open(VOLUME_PATH, 'rb') as volume_file:
# Create two backups of the volume
backup1 = objects.Backup.get_by_id(self.ctxt, backup_id1)
self.driver.backup(backup1, volume_file)
self._create_backup_db_entry(backup_id1, mode)
self._create_backup_db_entry(backup_id2, mode)
- with open(VOLUME_PATH, 'rw') as volume_file:
+ with open(VOLUME_PATH, 'rb') as volume_file:
# Create two backups of the volume
backup1 = objects.Backup.get_by_id(self.ctxt, 123)
self.driver.backup(backup1, volume_file)
backup_id1 = 123
self._create_backup_db_entry(backup_id1, mode)
- with open(VOLUME_PATH, 'rw') as volume_file:
+ with open(VOLUME_PATH, 'rb') as volume_file:
# Create two backups of the volume
backup1 = objects.Backup.get_by_id(self.ctxt, 123)
self.assertRaises(exception.InvalidBackup,
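
On the mode strings: Python 2 handed 'rw' straight to fopen(3), which on common libc implementations silently treated it as 'r'; Python 3 validates modes and rejects 'rw' with a ValueError. These tests only read the volume during backup, and the chunk loop above now compares against b'', so the file must be opened in binary read mode rather than a text mode like 'w+' (which would also truncate the volume's contents). Illustration:

    # open('/dev/null', 'rw')   # ValueError on Python 3: invalid mode
    with open('/dev/null', 'rb') as f:
        assert f.read(64) == b''   # binary read yields bytes; b'' at EOF
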
cinder.tests.unit.test_backup \
cinder.tests.unit.test_backup_ceph \
cinder.tests.unit.test_backup_driver_base \
+ cinder.tests.unit.test_backup_swift \
+ cinder.tests.unit.test_backup_tsm \
cinder.tests.unit.test_block_device \
cinder.tests.unit.test_cloudbyte \
cinder.tests.unit.test_conf \