]> review.fuel-infra Code Review - openstack-build/cinder-build.git/commitdiff
Refactor the backup method of SwiftBackupService
authorSeif Lotfy <s.lotfy@telekom.de>
Fri, 21 Jun 2013 09:28:27 +0000 (09:28 +0000)
committerSeif Lotfy <s.lotfy@telekom.de>
Fri, 21 Jun 2013 09:28:27 +0000 (09:28 +0000)
Extract the functionality of the backup method into 3 new
methods: prepare_backup, backup_chunk and finalize_backup. The
original backup method calls these methods. This allows other
drivers (e.g. the RBD driver) to combine them with different
logic.

Change-Id: I4f335bfcd03cd1da52a9e5c5c689d97581614af6

cinder/backup/services/swift.py
cinder/tests/backup/fake_swift_client.py

index 7e2aa287abdfd187221b645da2a956e4ba623d89..6aa15f961d1bb7ddb05b911b03b8127cd75cd7ad 100644 (file)
@@ -201,8 +201,8 @@ class SwiftBackupService(base.Base):
         LOG.debug(_('_read_metadata finished (%s)') % metadata)
         return metadata
 
-    def backup(self, backup, volume_file):
-        """Backup the given volume to swift using the given backup metadata."""
+    def prepare_backup(self, backup):
+        """Prepare the backup process and return the backup metadata"""
         backup_id = backup['id']
         volume_id = backup['volume_id']
         volume = self.db.volume_get(self.context, volume_id)
@@ -232,65 +232,88 @@ class SwiftBackupService(base.Base):
                       'object_prefix': object_prefix,
                       'availability_zone': availability_zone,
                   })
-        object_id = 1
-        object_list = []
-        while True:
-            data_block_size_bytes = self.data_block_size_bytes
-            object_name = '%s-%05d' % (object_prefix, object_id)
-            obj = {}
-            obj[object_name] = {}
-            obj[object_name]['offset'] = volume_file.tell()
-            data = volume_file.read(data_block_size_bytes)
-            obj[object_name]['length'] = len(data)
-            if data == '':
-                break
-            LOG.debug(_('reading chunk of data from volume'))
-            if self.compressor is not None:
-                algorithm = CONF.backup_compression_algorithm.lower()
-                obj[object_name]['compression'] = algorithm
-                data_size_bytes = len(data)
-                data = self.compressor.compress(data)
-                comp_size_bytes = len(data)
-                LOG.debug(_('compressed %(data_size_bytes)d bytes of data'
-                            ' to %(comp_size_bytes)d bytes using '
-                            '%(algorithm)s') %
-                          {
-                              'data_size_bytes': data_size_bytes,
-                              'comp_size_bytes': comp_size_bytes,
-                              'algorithm': algorithm,
-                          })
-            else:
-                LOG.debug(_('not compressing data'))
-                obj[object_name]['compression'] = 'none'
+        object_meta = {'id': 1, 'list': [], 'prefix': object_prefix}
+        return object_meta, container
+
+    def backup_chunk(self, backup, container, data, data_offset, object_meta):
+        """Back up one chunk of volume data as a single Swift object.
+
+        :param backup: backup record (passed for driver-interface symmetry;
+                       not read directly here)
+        :param container: Swift container that receives the object
+        :param data: raw chunk bytes read from the volume
+        :param data_offset: byte offset of this chunk within the volume
+        :param object_meta: mutable dict carrying 'id' (next object number),
+                            'list' (per-object metadata) and 'prefix' across
+                            successive calls; updated in place
+        """
+        object_prefix = object_meta['prefix']
+        object_list = object_meta['list']
+        object_id = object_meta['id']
+        # Object names are zero-padded so they sort in write order.
+        object_name = '%s-%05d' % (object_prefix, object_id)
+        obj = {}
+        obj[object_name] = {}
+        obj[object_name]['offset'] = data_offset
+        obj[object_name]['length'] = len(data)
+        LOG.debug(_('reading chunk of data from volume'))
+        if self.compressor is not None:
+            algorithm = CONF.backup_compression_algorithm.lower()
+            obj[object_name]['compression'] = algorithm
+            data_size_bytes = len(data)
+            data = self.compressor.compress(data)
+            comp_size_bytes = len(data)
+            LOG.debug(_('compressed %(data_size_bytes)d bytes of data '
+                        'to %(comp_size_bytes)d bytes using '
+                        '%(algorithm)s') %
+                      {
+                          'data_size_bytes': data_size_bytes,
+                          'comp_size_bytes': comp_size_bytes,
+                          'algorithm': algorithm,
+                      })
+        else:
+            LOG.debug(_('not compressing data'))
+            obj[object_name]['compression'] = 'none'
 
+        reader = StringIO.StringIO(data)
+        LOG.debug(_('About to put_object'))
+        try:
+            etag = self.conn.put_object(container, object_name, reader)
+        except socket.error as err:
+            raise exception.SwiftConnectionFailed(reason=str(err))
+        LOG.debug(_('swift MD5 for %(object_name)s: %(etag)s') %
+                  {'object_name': object_name, 'etag': etag, })
+        # Compare Swift's etag (MD5 of the stored object) against our own
+        # MD5 of the (possibly compressed) data to detect corruption in
+        # transit.
+        md5 = hashlib.md5(data).hexdigest()
+        obj[object_name]['md5'] = md5
+        LOG.debug(_('backup MD5 for %(object_name)s: %(md5)s') %
+                  {'object_name': object_name, 'md5': md5})
+        if etag != md5:
+            err = _('error writing object to swift, MD5 of object in '
+                    'swift %(etag)s is not the same as MD5 of object sent '
+                    'to swift %(md5)s') % {'etag': etag, 'md5': md5}
+            raise exception.InvalidBackup(reason=err)
+        object_list.append(obj)
+        object_id += 1
+        # Persist the advanced state back into the shared dict so the next
+        # call (and finalize_backup) sees it.
+        object_meta['list'] = object_list
+        object_meta['id'] = object_id
+        LOG.debug(_('Calling eventlet.sleep(0)'))
+        # Yield to other greenthreads between chunks to avoid starving them.
+        eventlet.sleep(0)
+
+    def finalize_backup(self, backup, container, object_meta):
+        """Write the backup's metadata object to Swift and record the count.
+
+        :param backup: backup record being finalized
+        :param container: Swift container holding the backup objects
+        :param object_meta: dict with the accumulated 'list' of per-object
+                            metadata and the next object 'id'
+        """
+        object_list = object_meta['list']
+        # NOTE(review): object_meta['id'] is one past the last object
+        # written, yet it is stored as 'object_count' — this mirrors the
+        # pre-refactor behavior; verify the intended off-by-one.
+        object_id = object_meta['id']
         try:
-            self._write_metadata(backup, volume_id, container, object_list)
+            self._write_metadata(backup,
+                                 backup['volume_id'],
+                                 container,
+                                 object_list)
         except socket.error as err:
             raise exception.SwiftConnectionFailed(reason=str(err))
-        self.db.backup_update(self.context, backup_id, {'object_count':
-                                                        object_id})
-        LOG.debug(_('backup %s finished.') % backup_id)
+        self.db.backup_update(self.context, backup['id'],
+                              {'object_count': object_id})
+        LOG.debug(_('backup %s finished.') % backup['id'])
+
+    def backup(self, backup, volume_file):
+        """Backup the given volume to swift using the given backup metadata.
+
+        Drives the chunked backup: prepare, loop over fixed-size chunks,
+        then finalize.
+        """
+        object_meta, container = self.prepare_backup(backup)
+        while True:
+            # Record the chunk's START offset before reading. The original
+            # implementation called volume_file.tell() prior to read();
+            # calling tell() after read() would store the END offset of the
+            # chunk instead and corrupt the per-object 'offset' metadata.
+            data_offset = volume_file.tell()
+            data = volume_file.read(self.data_block_size_bytes)
+            if data == '':
+                break
+            self.backup_chunk(backup, container, data,
+                              data_offset, object_meta)
+        self.finalize_backup(backup, container, object_meta)
 
     def _restore_v1(self, backup, volume_id, metadata, volume_file):
         """Restore a v1 swift volume backup from swift."""
index ff7424679707d79515987671cfd1e5571a48e2a4..ccf9982876f60d8fd80dd279bc5f61f9d98c455e 100644 (file)
@@ -98,7 +98,9 @@ class FakeSwiftConnection(object):
         fake_object_body = os.urandom(1024 * 1024)
         return (fake_header, zlib.compress(fake_object_body))
 
-    def put_object(self, container, name, reader):
+    def put_object(self, container, name, reader, content_length=None,
+                   etag=None, chunk_size=None, content_type=None,
+                   headers=None, query_string=None):
         LOG.debug("fake put_object(%s, %s)" % (container, name))
         if container == 'socket_error_on_put':
             raise socket.error(111, 'ECONNREFUSED')