def delete(self, name):
    """Remove *name* from the container, best-effort.

    A ``swiftclient.ClientException`` (e.g. a 404 for an object that is
    already gone) is deliberately swallowed so the delete behaves
    idempotently for callers.
    """
    try:
        swiftclient.delete_object(
            self.storage_url,
            self.token,
            self.container_name,
            name,
            http_conn=self.http_conn,
        )
    except swiftclient.ClientException:
        # Best-effort semantics: failure to delete is not fatal here.
        pass
def container_sync_row(self, row, sync_to, sync_key, broker, info): """ Sends the update the row indicates to the sync_to container. :param row: The updated row in the local database triggering the sync update. :param sync_to: The URL to the remote container. :param sync_key: The X-Container-Sync-Key to use when sending requests to the other container. :param broker: The local container database broker. :param info: The get_info result from the local container database broker. :returns: True on success """ try: start_time = time() if row['deleted']: try: delete_object(sync_to, name=row['name'], headers={'x-timestamp': row['created_at'], 'x-container-sync-key': sync_key}, proxy=self.proxy) except ClientException, err: if err.http_status != HTTP_NOT_FOUND: raise self.container_deletes += 1 self.logger.increment('deletes') self.logger.timing_since('deletes.timing', start_time) else:
def delete(self, keyname):
    """REFACTORED FOR PYRAX.

    Delete *keyname* from ``self.container`` via the pyrax ``files``
    client. If the object does not exist, only a warning is logged and
    nothing is deleted.
    """
    if not self.object_exists(self.container, keyname):
        log.instance.logger.warning('Cannot delete, file noexist: ' + str(keyname))
    else:
        # NOTE: removed leftover debug statement (print 'yyy') that was
        # polluting stdout on every remote delete.
        log.instance.logger.info('Remote delete for ' + str(keyname))
        files.delete_object(self.auth.url, self.auth.token, self.container, keyname)
def rmtree(self, abs_path):
    """Delete every object in the container whose name starts with *abs_path*.

    Mimics a recursive directory removal on a flat object namespace:
    lists the container once, then issues one delete per matching name.
    """
    # get_container returns (headers, listing); only the listing is needed.
    listing = swiftclient.get_container(
        self.storage_url, self.token, self.container_name)[1]
    for entry in listing:
        entry_name = entry['name']
        if entry_name.startswith(abs_path):
            swiftclient.delete_object(
                self.storage_url,
                token=self.token,
                container=self.container_name,
                name=entry_name,
            )
def delete_object(self, container_name, object_name):
    """Best-effort delete of *object_name* from *container_name*.

    Fetches a fresh token, then calls the module-level ``delete_object``
    helper. A ``ClientException`` from the delete itself is ignored;
    token-fetch errors still propagate.
    """
    token = self.fetch_token()
    endpoint = self.auth['endpoint_url']
    try:
        delete_object(url=endpoint, token=token,
                      container=container_name, name=object_name)
    except ClientException:
        # Deliberately swallowed: delete is treated as idempotent.
        pass
def _run(self, thread):
    # Benchmark worker loop body: delete one object per call, either
    # through the proxy (client) or straight to a storage node
    # (direct_client), counting failures. (Python 2 except syntax.)
    #
    # Emit a status heartbeat at most once every 15 seconds.
    if time.time() - self.heartbeat >= 15:
        self.heartbeat = time.time()
        self._log_status("DEL")
    # Each queued name carries the placement info needed for direct mode.
    device, partition, name, container_name = self.names.pop()
    with self.connection() as conn:
        try:
            if self.use_proxy:
                client.delete_object(self.url, self.token, container_name, name, http_conn=conn)
            else:
                # Direct mode bypasses the proxy and talks to the node itself.
                node = {"ip": self.ip, "port": self.port, "device": device}
                direct_client.direct_delete_object(node, partition, self.account, container_name, name)
        except client.ClientException, e:
            # Failed deletes are logged at debug level and tallied,
            # never raised — the benchmark keeps running.
            self.logger.debug(str(e))
            self.failures += 1
def _run(self, thread): if time.time() - self.heartbeat >= 15: self.heartbeat = time.time() self._log_status('DEL') device, partition, name, container_name = self.names.pop() with self.connection() as conn: try: if self.use_proxy: client.delete_object(self.url, self.token, container_name, name, http_conn=conn) else: node = {'ip': self.ip, 'port': self.port, 'device': device} direct_client.direct_delete_object(node, partition, self.account, container_name, name) except client.ClientException, e: self.logger.debug(str(e)) self.failures += 1
def remove(self, fname):
    """Delete the object described by listing entry *fname*.

    *fname* is a container-listing dict; only its ``'name'`` key is used.
    Errors from swiftclient propagate to the caller.
    """
    object_name = fname['name']
    swiftclient.delete_object(self.sturl, self.token,
                              self.container, object_name)
token, cont, obj, obj_fp, content_length=obj_size) except ClientException, err: return err.http_status return HTTP_CREATED return HTTP_BAD_REQUEST if action == 'obj_get': (obj_status, hunk) = get_object(storage_url, token, cont, obj) #resp.headerlist = obj_status.items() #resp.body_file = hunk if action == 'obj_delete': try: delete_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status return HTTP_NO_CONTENT if action == 'obj_metadata': if meta_headers: try: headers = head_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status headers = self.get_current_meta(headers) headers.update(meta_headers) headers = self.clean_blank_meta(headers) # to delete object metadata: exclude meta to delete from existing metas. try: post_object(storage_url, token, cont, obj, headers)
def container_sync_row(self, row, sync_to, sync_key, broker, info):
    """
    Sends the update the row indicates to the sync_to container.

    :param row: The updated row in the local database triggering the
                sync update.
    :param sync_to: The URL to the remote container.
    :param sync_key: The X-Container-Sync-Key to use when sending
                     requests to the other container.
    :param broker: The local container database broker.
    :param info: The get_info result from the local container database
                 broker.
    :returns: True on success
    """
    try:
        start_time = time()
        if row['deleted']:
            # Mirror a local deletion: DELETE on the remote container,
            # treating a remote 404 as success (already gone).
            try:
                delete_object(sync_to, name=row['name'],
                              headers={'x-timestamp': row['created_at'],
                                       'x-container-sync-key': sync_key},
                              proxy=self.proxy)
            except ClientException as err:
                if err.http_status != HTTP_NOT_FOUND:
                    raise
            self.container_deletes += 1
            self.logger.increment('deletes')
            self.logger.timing_since('deletes.timing', start_time)
        else:
            # Live object: fetch the newest replica from the local object
            # ring, then PUT it to the remote container.
            part, nodes = self.object_ring.get_nodes(
                info['account'], info['container'], row['name'])
            shuffle(nodes)
            exc = None
            # We need a copy at least as new as the row that triggered us.
            looking_for_timestamp = float(row['created_at'])
            timestamp = -1
            headers = body = None
            for node in nodes:
                try:
                    these_headers, this_body = direct_get_object(
                        node, part, info['account'], info['container'],
                        row['name'], resp_chunk_size=65536)
                    this_timestamp = float(these_headers['x-timestamp'])
                    # Keep whichever replica is newest across all nodes.
                    if this_timestamp > timestamp:
                        timestamp = this_timestamp
                        headers = these_headers
                        body = this_body
                except ClientException as err:
                    # If any errors are not 404, make sure we report the
                    # non-404 one. We don't want to mistakenly assume the
                    # object no longer exists just because one says so and
                    # the others errored for some other reason.
                    if not exc or exc.http_status == HTTP_NOT_FOUND:
                        exc = err
                except (Exception, Timeout) as err:
                    exc = err
            if timestamp < looking_for_timestamp:
                # No replica was new enough: surface the saved error, or a
                # generic one if the GETs failed without an exception.
                if exc:
                    raise exc
                raise Exception(
                    _('Unknown exception trying to GET: %(node)r '
                      '%(account)r %(container)r %(object)r'),
                    {'node': node, 'part': part,
                     'account': info['account'],
                     'container': info['container'],
                     'object': row['name']})
            # Strip per-response headers before replaying; unquote the etag.
            for key in ('date', 'last-modified'):
                if key in headers:
                    del headers[key]
            if 'etag' in headers:
                headers['etag'] = headers['etag'].strip('"')
            headers['x-timestamp'] = row['created_at']
            headers['x-container-sync-key'] = sync_key
            put_object(sync_to, name=row['name'], headers=headers,
                       contents=FileLikeIter(body), proxy=self.proxy)
            self.container_puts += 1
            self.logger.increment('puts')
            self.logger.timing_since('puts.timing', start_time)
    except ClientException as err:
        # Remote-side HTTP errors: log at info for expected 401/404,
        # full traceback otherwise; in all cases count a failure.
        if err.http_status == HTTP_UNAUTHORIZED:
            self.logger.info(
                _('Unauth %(sync_from)r => %(sync_to)r'),
                {'sync_from': '%s/%s' % (quote(info['account']),
                                         quote(info['container'])),
                 'sync_to': sync_to})
        elif err.http_status == HTTP_NOT_FOUND:
            self.logger.info(
                _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'),
                {'sync_from': '%s/%s' % (quote(info['account']),
                                         quote(info['container'])),
                 'sync_to': sync_to, 'obj_name': row['name']})
        else:
            self.logger.exception(
                _('ERROR Syncing %(db_file)s %(row)s'),
                {'db_file': broker.db_file, 'row': row})
        self.container_failures += 1
        self.logger.increment('failures')
        return False
    except (Exception, Timeout) as err:
        # Anything unexpected (including eventlet Timeout) is a failure.
        self.logger.exception(
            _('ERROR Syncing %(db_file)s %(row)s'),
            {'db_file': broker.db_file, 'row': row})
        self.container_failures += 1
        self.logger.increment('failures')
        return False
    return True
def container_sync_row(self, row, sync_to, sync_key, broker, info):
    """
    Sends the update the row indicates to the sync_to container.

    :param row: The updated row in the local database triggering the
                sync update.
    :param sync_to: The URL to the remote container.
    :param sync_key: The X-Container-Sync-Key to use when sending
                     requests to the other container.
    :param broker: The local container database broker.
    :param info: The get_info result from the local container database
                 broker.
    :returns: True on success
    """
    try:
        start_time = time()
        if row["deleted"]:
            # Propagate a local deletion; a remote 404 means it is
            # already gone and is treated as success.
            try:
                delete_object(
                    sync_to,
                    name=row["name"],
                    headers={"x-timestamp": row["created_at"], "x-container-sync-key": sync_key},
                    proxy=self.proxy,
                )
            except ClientException as err:
                if err.http_status != HTTP_NOT_FOUND:
                    raise
            self.container_deletes += 1
            self.logger.increment("deletes")
            self.logger.timing_since("deletes.timing", start_time)
        else:
            # Live object: locate replicas on the local object ring and
            # fetch the newest one to replay to the remote container.
            part, nodes = self.object_ring.get_nodes(info["account"], info["container"], row["name"])
            shuffle(nodes)
            exc = None
            # Require a copy at least as new as the triggering row.
            looking_for_timestamp = float(row["created_at"])
            timestamp = -1
            headers = body = None
            for node in nodes:
                try:
                    these_headers, this_body = direct_get_object(
                        node, part, info["account"], info["container"], row["name"], resp_chunk_size=65536
                    )
                    this_timestamp = float(these_headers["x-timestamp"])
                    # Track the newest replica seen so far.
                    if this_timestamp > timestamp:
                        timestamp = this_timestamp
                        headers = these_headers
                        body = this_body
                except ClientException as err:
                    # If any errors are not 404, make sure we report the
                    # non-404 one. We don't want to mistakenly assume the
                    # object no longer exists just because one says so and
                    # the others errored for some other reason.
                    if not exc or exc.http_status == HTTP_NOT_FOUND:
                        exc = err
                except (Exception, Timeout) as err:
                    exc = err
            if timestamp < looking_for_timestamp:
                # No replica was new enough: re-raise the saved error or
                # a generic one if no exception was recorded.
                if exc:
                    raise exc
                raise Exception(
                    _("Unknown exception trying to GET: %(node)r " "%(account)r %(container)r %(object)r"),
                    {
                        "node": node,
                        "part": part,
                        "account": info["account"],
                        "container": info["container"],
                        "object": row["name"],
                    },
                )
            # Drop response-specific headers and unquote the etag before
            # replaying the object to the remote side.
            for key in ("date", "last-modified"):
                if key in headers:
                    del headers[key]
            if "etag" in headers:
                headers["etag"] = headers["etag"].strip('"')
            headers["x-timestamp"] = row["created_at"]
            headers["x-container-sync-key"] = sync_key
            put_object(sync_to, name=row["name"], headers=headers, contents=FileLikeIter(body), proxy=self.proxy)
            self.container_puts += 1
            self.logger.increment("puts")
            self.logger.timing_since("puts.timing", start_time)
    except ClientException as err:
        # Remote HTTP errors: expected 401/404 at info level, anything
        # else with a traceback; every case counts as a failure.
        if err.http_status == HTTP_UNAUTHORIZED:
            self.logger.info(
                _("Unauth %(sync_from)r => %(sync_to)r"),
                {"sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to},
            )
        elif err.http_status == HTTP_NOT_FOUND:
            self.logger.info(
                _("Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r"),
                {
                    "sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])),
                    "sync_to": sync_to,
                    "obj_name": row["name"],
                },
            )
        else:
            self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row})
        self.container_failures += 1
        self.logger.increment("failures")
        return False
    except (Exception, Timeout) as err:
        # Catch-all (including eventlet Timeout): log and count a failure.
        self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row})
        self.container_failures += 1
        self.logger.increment("failures")
        return False
    return True
if obj: if len(obj) > 1024: return HTTP_PRECONDITION_FAILED try: put_object(storage_url, token, cont, obj, obj_fp) except ClientException, err: return err.http_status return HTTP_CREATED return HTTP_BAD_REQUEST if action == 'obj_get': (obj_status, hunk) = get_object(storage_url, token, cont, obj) #resp.headerlist = obj_status.items() #resp.body_file = hunk if action == 'obj_delete': try: delete_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status return HTTP_NO_CONTENT if action == 'obj_metadata': if meta_headers: try: headers = head_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status headers = self.get_current_meta(headers) headers.update(meta_headers) headers = self.clean_blank_meta(headers) # to delete object metadata: exclude meta to delete from existing metas. try: post_object(storage_url, token, cont, obj, headers)
def container_sync_row(self, row, sync_to, sync_key, broker, info):
    """
    Sends the update the row indicates to the sync_to container.

    :param row: The updated row in the local database triggering the
                sync update.
    :param sync_to: The URL to the remote container.
    :param sync_key: The X-Container-Sync-Key to use when sending
                     requests to the other container.
    :param broker: The local container database broker.
    :param info: The get_info result from the local container database
                 broker.
    :returns: True on success
    """
    try:
        start_time = time()
        if row['deleted']:
            # Replay the deletion remotely; a 404 means the object is
            # already absent and counts as success.
            try:
                delete_object(sync_to, name=row['name'], headers={
                    'x-timestamp': row['created_at'],
                    'x-container-sync-key': sync_key
                }, proxy=self.proxy)
            except ClientException as err:
                if err.http_status != HTTP_NOT_FOUND:
                    raise
            self.container_deletes += 1
            self.logger.increment('deletes')
            self.logger.timing_since('deletes.timing', start_time)
        else:
            # Live object: find its replicas on the local ring and fetch
            # the newest copy to push to the remote container.
            part, nodes = self.object_ring.get_nodes(
                info['account'], info['container'], row['name'])
            shuffle(nodes)
            exc = None
            # A usable copy must be at least as new as the triggering row.
            looking_for_timestamp = float(row['created_at'])
            timestamp = -1
            headers = body = None
            for node in nodes:
                try:
                    these_headers, this_body = direct_get_object(
                        node, part, info['account'], info['container'],
                        row['name'], resp_chunk_size=65536)
                    this_timestamp = float(these_headers['x-timestamp'])
                    # Remember the newest replica found so far.
                    if this_timestamp > timestamp:
                        timestamp = this_timestamp
                        headers = these_headers
                        body = this_body
                except ClientException as err:
                    # If any errors are not 404, make sure we report the
                    # non-404 one. We don't want to mistakenly assume the
                    # object no longer exists just because one says so and
                    # the others errored for some other reason.
                    if not exc or exc.http_status == HTTP_NOT_FOUND:
                        exc = err
                except (Exception, Timeout) as err:
                    exc = err
            if timestamp < looking_for_timestamp:
                # Nothing new enough was found: raise the recorded error,
                # or a generic one if none was recorded.
                if exc:
                    raise exc
                raise Exception(
                    _('Unknown exception trying to GET: %(node)r '
                      '%(account)r %(container)r %(object)r'), {
                        'node': node,
                        'part': part,
                        'account': info['account'],
                        'container': info['container'],
                        'object': row['name']
                    })
            # Remove response-only headers and unquote the etag before
            # replaying the object.
            for key in ('date', 'last-modified'):
                if key in headers:
                    del headers[key]
            if 'etag' in headers:
                headers['etag'] = headers['etag'].strip('"')
            headers['x-timestamp'] = row['created_at']
            headers['x-container-sync-key'] = sync_key
            put_object(sync_to, name=row['name'], headers=headers,
                       contents=FileLikeIter(body), proxy=self.proxy)
            self.container_puts += 1
            self.logger.increment('puts')
            self.logger.timing_since('puts.timing', start_time)
    except ClientException as err:
        # Remote HTTP errors: 401/404 logged at info, others with a
        # traceback; all of them count as sync failures.
        if err.http_status == HTTP_UNAUTHORIZED:
            self.logger.info(
                _('Unauth %(sync_from)r => %(sync_to)r'), {
                    'sync_from': '%s/%s' % (quote(info['account']),
                                            quote(info['container'])),
                    'sync_to': sync_to
                })
        elif err.http_status == HTTP_NOT_FOUND:
            self.logger.info(
                _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'), {
                    'sync_from': '%s/%s' % (quote(info['account']),
                                            quote(info['container'])),
                    'sync_to': sync_to,
                    'obj_name': row['name']
                })
        else:
            self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), {
                'db_file': str(broker),
                'row': row
            })
        self.container_failures += 1
        self.logger.increment('failures')
        return False
    except (Exception, Timeout) as err:
        # Catch-all (including eventlet Timeout): log and count a failure.
        self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), {
            'db_file': str(broker),
            'row': row
        })
        self.container_failures += 1
        self.logger.increment('failures')
        return False
    return True
def delete_from_swift(name):
    """Best-effort delete of *name* from CONTAINER_NAME.

    Opens a fresh connection via ``connect_swift()`` for the request.
    Any ``swiftclient.ClientException`` — from connecting or deleting —
    is swallowed so the call is effectively idempotent.
    """
    try:
        conn = connect_swift()
        swiftclient.delete_object(
            PRE_AUTH_URL,
            PRE_AUTH_TOKEN,
            CONTAINER_NAME,
            name,
            http_conn=conn,
        )
    except swiftclient.ClientException:
        pass