obj[object_name] = {}
obj[object_name]['offset'] = data_offset
obj[object_name]['length'] = len(data)
- LOG.debug('reading chunk of data from volume')
- if self.compressor is not None:
- algorithm = CONF.backup_compression_algorithm.lower()
- obj[object_name]['compression'] = algorithm
- data_size_bytes = len(data)
- data = self.compressor.compress(data)
- comp_size_bytes = len(data)
- LOG.debug('compressed %(data_size_bytes)d bytes of data '
- 'to %(comp_size_bytes)d bytes using '
- '%(algorithm)s',
- {
- 'data_size_bytes': data_size_bytes,
- 'comp_size_bytes': comp_size_bytes,
- 'algorithm': algorithm,
- })
- else:
- LOG.debug('not compressing data')
- obj[object_name]['compression'] = 'none'
-
+ LOG.debug('Backing up chunk of data from volume.')
+ algorithm, output_data = self._prepare_output_data(data)
+ obj[object_name]['compression'] = algorithm
LOG.debug('About to put_object')
with self.get_object_writer(
container, object_name, extra_metadata=extra_metadata
) as writer:
- writer.write(data)
+ writer.write(output_data)
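+ # Note: the MD5 recorded below is computed over the original chunk data,
+ # not over the (possibly compressed) bytes actually written.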
md5 = hashlib.md5(data).hexdigest()
obj[object_name]['md5'] = md5
LOG.debug('backup MD5 for %(object_name)s: %(md5)s',
          {'object_name': object_name, 'md5': md5})
LOG.debug('Calling eventlet.sleep(0)')
eventlet.sleep(0)
+ def _prepare_output_data(self, data):
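+ """Return a (compression_algorithm, data) pair for the given chunk.
+
+ Falls back to ('none', data) when no compressor is configured or when
+ compression does not reduce the chunk size.
+ """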
+ if self.compressor is None:
+ return 'none', data
+ data_size_bytes = len(data)
+ compressed_data = self.compressor.compress(data)
+ comp_size_bytes = len(compressed_data)
+ algorithm = CONF.backup_compression_algorithm.lower()
+ if comp_size_bytes >= data_size_bytes:
+ LOG.debug('Compression of this chunk was ineffective: '
+ 'original length: %(data_size_bytes)d, '
+ 'compressed length: %(compressed_size_bytes)d. '
+ 'Using original data for this chunk.',
+ {'data_size_bytes': data_size_bytes,
+ 'compressed_size_bytes': comp_size_bytes,
+ })
+ return 'none', data
+ LOG.debug('Compressed %(data_size_bytes)d bytes of data '
+ 'to %(comp_size_bytes)d bytes using %(algorithm)s.',
+ {'data_size_bytes': data_size_bytes,
+ 'comp_size_bytes': comp_size_bytes,
+ 'algorithm': algorithm,
+ })
+ return algorithm, compressed_data
+
def _finalize_backup(self, backup, container, object_meta, object_sha256):
"""Write the backup's metadata to the backup repository."""
object_list = object_meta['list']
compressor = service._get_compressor('bz2')
self.assertEqual(compressor, bz2)
self.assertRaises(ValueError, service._get_compressor, 'fake')
+
+ def test_prepare_output_data_effective_compression(self):
+ service = nfs.NFSBackupDriver(self.ctxt)
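+ # No flag override here, so the driver uses the default
+ # backup_compression_algorithm, which this test expects to be zlib.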
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+
+ result = service._prepare_output_data(fake_data)
+
+ self.assertEqual('zlib', result[0])
+ self.assertTrue(len(result[1]) < len(fake_data))
+
+ def test_prepare_output_data_no_compression(self):
+ self.flags(backup_compression_algorithm='none')
+ service = nfs.NFSBackupDriver(self.ctxt)
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+
+ result = service._prepare_output_data(fake_data)
+
+ self.assertEqual('none', result[0])
+ self.assertEqual(fake_data, result[1])
+
+ def test_prepare_output_data_ineffective_compression(self):
+ service = nfs.NFSBackupDriver(self.ctxt)
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+ # Pre-compress so that compression in the driver will be ineffective.
+ already_compressed_data = service.compressor.compress(fake_data)
+
+ result = service._prepare_output_data(already_compressed_data)
+
+ self.assertEqual('none', result[0])
+ self.assertEqual(already_compressed_data, result[1])
compressor = service._get_compressor('bz2')
self.assertEqual(compressor, bz2)
self.assertRaises(ValueError, service._get_compressor, 'fake')
+
+ def test_prepare_output_data_effective_compression(self):
+ service = swift_dr.SwiftBackupDriver(self.ctxt)
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+
+ result = service._prepare_output_data(fake_data)
+
+ self.assertEqual('zlib', result[0])
+ self.assertTrue(len(result[1]) < len(fake_data))
+
+ def test_prepare_output_data_no_compression(self):
+ self.flags(backup_compression_algorithm='none')
+ service = swift_dr.SwiftBackupDriver(self.ctxt)
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+
+ result = service._prepare_output_data(fake_data)
+
+ self.assertEqual('none', result[0])
+ self.assertEqual(fake_data, result[1])
+
+ def test_prepare_output_data_ineffective_compression(self):
+ service = swift_dr.SwiftBackupDriver(self.ctxt)
+ # Set up buffer of 128 zeroed bytes
+ fake_data = buffer(bytearray(128))
+ # Pre-compress so that compression in the driver will be ineffective.
+ already_compressed_data = service.compressor.compress(fake_data)
+
+ result = service._prepare_output_data(already_compressed_data)
+
+ self.assertEqual('none', result[0])
+ self.assertEqual(already_compressed_data, result[1])