def complete_rsync(self, drive, db_file, args):
    """
    Complete an rsync-based replication: give the freshly received
    database a new unique id and move it from the tmp area into place.
    Returns 404 if the target db already exists or the rsynced file
    is missing.
    """
    old_filename = os.path.join(self.root, drive, 'tmp', args[0])
    if os.path.exists(db_file):
        return HTTPNotFound()
    if not os.path.exists(old_filename):
        return HTTPNotFound()
    broker = self.broker_class(old_filename)
    broker.newid(args[0])
    renamer(old_filename, db_file)
    return HTTPNoContent()
def rsync_then_merge(self, drive, db_file, args):
    """
    Merge the rows of the existing database into a freshly rsynced copy
    in batches, then give the new database a fresh id and move it into
    place over the old one.
    """
    old_filename = os.path.join(self.root, drive, 'tmp', args[0])
    if not os.path.exists(db_file) or not os.path.exists(old_filename):
        return HTTPNotFound()
    new_broker = self.broker_class(old_filename)
    existing_broker = self.broker_class(db_file)
    point = -1
    objects = existing_broker.get_items_since(point, 1000)
    while len(objects):
        new_broker.merge_items(objects)
        point = objects[-1]['ROWID']
        objects = existing_broker.get_items_since(point, 1000)
        sleep()  # yield between batches
    new_broker.newid(args[0])
    renamer(old_filename, db_file)
    return HTTPNoContent()
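# A minimal, self-contained sketch (not Swift code) of the batched catch-up
# loop in rsync_then_merge above: rows are read from the existing database in
# ROWID-ordered chunks and merged into the freshly rsynced copy, so the whole
# table never has to be held in memory.  The 'item' table and the helper
# names here are hypothetical stand-ins for the broker's schema and methods.
import sqlite3

def get_items_since(conn, point, count):
    # Fetch up to `count` rows whose ROWID is greater than `point`.
    rows = conn.execute(
        'SELECT ROWID, name FROM item WHERE ROWID > ? ORDER BY ROWID LIMIT ?',
        (point, count))
    return [{'ROWID': rowid, 'name': name} for rowid, name in rows]

def merge_items(conn, items):
    # Append the batch to the destination database.
    conn.executemany('INSERT INTO item (name) VALUES (?)',
                     [(item['name'],) for item in items])
    conn.commit()

def catch_up(existing_conn, new_conn, batch_size=1000):
    point = -1
    items = get_items_since(existing_conn, point, batch_size)
    while items:
        merge_items(new_conn, items)
        point = items[-1]['ROWID']
        items = get_items_since(existing_conn, point, batch_size)

if __name__ == '__main__':
    existing = sqlite3.connect(':memory:')
    new = sqlite3.connect(':memory:')
    for conn in (existing, new):
        conn.execute('CREATE TABLE item (name TEXT)')
    existing.executemany('INSERT INTO item (name) VALUES (?)',
                         [('obj-%d' % i,) for i in range(2500)])
    catch_up(existing, new)
    print(new.execute('SELECT COUNT(*) FROM item').fetchone()[0])  # 2500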
def process_object_update(self, update_path, device):
    """
    Process the object information to be updated and update.

    :param update_path: path to pickled object update file
    :param device: path to device
    """
    try:
        update = pickle.load(open(update_path, 'rb'))
    except Exception:
        self.logger.exception(
            _('ERROR Pickle problem, quarantining %s'), update_path)
        renamer(update_path, os.path.join(device, 'quarantined', 'objects',
                                          os.path.basename(update_path)))
        return
    successes = update.get('successes', [])
    part, nodes = self.get_container_ring().get_nodes(
        update['account'], update['container'])
    obj = '/%s/%s/%s' % \
        (update['account'], update['container'], update['obj'])
    success = True
    for node in nodes:
        if node['id'] not in successes:
            status = self.object_update(node, part, update['op'], obj,
                                        update['headers'])
            if not (200 <= status < 300) and status != 404:
                success = False
            else:
                successes.append(node['id'])
    if success:
        self.successes += 1
        self.logger.debug(_('Update sent for %(obj)s %(path)s'),
                          {'obj': obj, 'path': update_path})
        os.unlink(update_path)
    else:
        self.failures += 1
        self.logger.debug(_('Update failed for %(obj)s %(path)s'),
                          {'obj': obj, 'path': update_path})
        update['successes'] = successes
        write_pickle(update, update_path, os.path.join(device, 'tmp'))
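# A rough standalone sketch of the retry bookkeeping used by
# process_object_update above: nodes that have already accepted the update
# are remembered in the pickle's 'successes' list, so a later pass only
# retries the nodes that failed.  process_update() and send_update() are
# hypothetical stand-ins, not Swift APIs; send_update() plays the role of the
# real per-node HTTP request.

def process_update(update, nodes, send_update):
    successes = update.get('successes', [])
    all_ok = True
    for node in nodes:
        if node['id'] in successes:
            continue  # this node already has the update
        status = send_update(node, update)
        if 200 <= status < 300 or status == 404:
            successes.append(node['id'])
        else:
            all_ok = False
    update['successes'] = successes
    return all_ok  # caller deletes the pickle on True, rewrites it on False

if __name__ == '__main__':
    update = {'op': 'PUT', 'successes': []}
    nodes = [{'id': 1}, {'id': 2}, {'id': 3}]
    # Pretend node 2 is down.
    ok = process_update(update, nodes,
                        lambda n, u: 503 if n['id'] == 2 else 201)
    print(ok)                    # False -- the update file would be rewritten
    print(update['successes'])   # [1, 3] -- only node 2 gets retried later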
def put(self, fd, tmppath, metadata, extension='.data'):
    """
    Finalize writing the file on disk, and renames it from the temp file
    to the real location.  This should be called after the data has been
    written to the temp file.

    :param fd: file descriptor of the temp file
    :param tmppath: path to the temporary file being used
    :param metadata: dictionary of metadata to be written
    :param extension: extension to be used when making the file
    """
    metadata['name'] = self.name
    timestamp = normalize_timestamp(metadata['X-Timestamp'])
    write_metadata(fd, metadata)
    if 'Content-Length' in metadata:
        self.drop_cache(fd, 0, int(metadata['Content-Length']))
    tpool.execute(os.fsync, fd)
    invalidate_hash(os.path.dirname(self.datadir))
    renamer(tmppath, os.path.join(self.datadir, timestamp + extension))
    self.metadata = metadata
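# A minimal sketch of the temp-file / fsync / rename pattern that put()
# relies on: data is written and synced under a temporary name, and only the
# final rename makes it visible at its real path, so readers never see a
# partially written file.  atomic_write() is a hypothetical helper built on
# plain os calls, not part of Swift.
import os
import tempfile

def atomic_write(path, data):
    dirpath = os.path.dirname(path) or '.'
    fd, tmppath = tempfile.mkstemp(dir=dirpath)
    try:
        os.write(fd, data)
        os.fsync(fd)             # make sure the bytes are on disk first
    finally:
        os.close(fd)
    os.rename(tmppath, path)     # atomic publish (same filesystem)

if __name__ == '__main__':
    atomic_write('example.data', b'hello')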
def quarantine_db(object_file, server_type):
    """
    In the case that a corrupt file is found, move it to a quarantined area
    to allow replication to fix it.

    :param object_file: path to corrupt file
    :param server_type: type of file that is corrupt
                        ('container' or 'account')
    """
    object_dir = os.path.dirname(object_file)
    quarantine_dir = os.path.abspath(
        os.path.join(object_dir, '..', '..', '..', '..', 'quarantined',
                     server_type + 's', os.path.basename(object_dir)))
    try:
        renamer(object_dir, quarantine_dir)
    except OSError, e:
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        quarantine_dir = "%s-%s" % (quarantine_dir, uuid.uuid4().hex)
        renamer(object_dir, quarantine_dir)
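# A small standalone sketch of the collision handling that quarantine_db
# above (and quarantine_renamer below) relies on: try to rename the directory
# into the quarantine area, and if an entry with that name already exists
# (EEXIST / ENOTEMPTY), retry once with a unique uuid suffix.
# move_to_quarantine() is a hypothetical helper written against plain
# os.rename rather than Swift's renamer().
import errno
import os
import uuid

def move_to_quarantine(src_dir, quarantine_dir):
    try:
        os.rename(src_dir, quarantine_dir)
    except OSError as e:
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        # A quarantine entry with this name already exists; pick a unique one.
        quarantine_dir = "%s-%s" % (quarantine_dir, uuid.uuid4().hex)
        os.rename(src_dir, quarantine_dir)
    return quarantine_dir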
def quarantine_renamer(device_path, corrupted_file_path):
    """
    In the case that a file is corrupted, move it to a quarantined area to
    allow replication to fix it.

    :param device_path: The path to the device the corrupted file is on.
    :param corrupted_file_path: The path to the file you want quarantined.

    :returns: path (str) of directory the file was moved to
    :raises OSError: re-raises non errno.EEXIST / errno.ENOTEMPTY exceptions
                     from rename
    """
    from_dir = dirname(corrupted_file_path)
    to_dir = join(device_path, 'quarantined', 'objects', basename(from_dir))
    invalidate_hash(dirname(from_dir))
    try:
        renamer(from_dir, to_dir)
    except OSError, e:
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        to_dir = "%s-%s" % (to_dir, uuid.uuid4().hex)
        renamer(from_dir, to_dir)
    return to_dir