def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When *sfile* is given, the CDN copy of that object is removed with a
    DELETE; otherwise the container's CDN state is toggled and pushed
    with a PUT.

    :param url:
    :param container:
    """
    rty_count = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        cheaders = self.payload["headers"]
        if sfile is not None:
            rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
            # perform CDN Object DELETE
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.delete_request(url=url, rpath=rpath, headers=cheaders)
                self.resp_exception(resp=resp)
        else:
            rpath = http.quoter(url=url.path, cont=container)
            # NOTE(review): cdn_toggle presumably flips the enabled header
            # in-place on cheaders — confirm in the http helper module.
            http.cdn_toggle(headers=cheaders)

            # perform CDN Enable PUT
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.put_request(url=url, rpath=rpath, headers=cheaders)
                self.resp_exception(resp=resp)

        report.reporter(
            msg="OBJECT %s MESSAGE %s %s %s" % (rpath,
                                                resp.status_code,
                                                resp.reason,
                                                resp.request),
            prt=False,
            lvl="debug",
        )
def object_deleter(self, url, container, u_file):
    """Deletes an objects in a container.

    HEADs the object first and only issues the DELETE when the object
    exists (HEAD status is not 404).

    :param url:
    :param container:
    :param u_file:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            rpath = http.quoter(url=url.path, cont=container, ufile=u_file)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=rpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            if not resp.status == 404:
                # Perform delete.
                self._deleter(conn=conn,
                              rpath=rpath,
                              fheaders=self.payload['headers'],
                              retry=retry)
def object_downloader(self, url, container, source, u_file):
    """Download an Object from a Container.

    The object is written out under *source* by the ``_downloader``
    helper.

    :param url:
    :param container:
    :param u_file:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)

        # Perform operation
        with meth.operation(retry, conn):
            fheaders = self.payload['headers']
            rpath = http.quoter(url=url.path, cont=container, ufile=u_file)
            # Perform Download.
            self._downloader(conn=conn,
                             rpath=rpath,
                             fheaders=fheaders,
                             lfile=u_file,
                             source=source,
                             retry=retry)
def container_create(self, url, container):
    """Create a container if it is not Found.

    HEADs the container path first; when the HEAD returns 404 the
    container is created with a PUT.

    :param url: parsed endpoint URL for the storage service.
    :param container: name of the container to create.
    :return bool: True when the container was created, False when found.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=5, obj=container):
        rpath = http.quoter(url=url.path, cont=container)
        fheaders = self.payload['headers']
        with meth.operation(retry, obj='%s %s' % (fheaders, rpath)):
            resp = self._header_getter(url=url, rpath=rpath, fheaders=fheaders)

            # Check that the status was a good one
            if resp.status_code == 404:
                report.reporter(msg='Creating Container => %s' % container)
                # BUGFIX: capture and validate the PUT response; the old
                # code discarded it and re-checked the stale 404 HEAD
                # response, so a failed create went unnoticed.  (The conn
                # based variant of this method checks the PUT response.)
                resp = http.put_request(url=url, rpath=rpath,
                                        headers=fheaders)
                self.resp_exception(resp=resp)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count: pre-fetched object count; when None it is read
                         from the container HEAD response.
    :param last_obj: marker of the last object seen (for resuming).
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Object List"):
        fheaders = self.payload["headers"]
        fpath = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj="%s %s" % (fheaders, fpath)):
            resp = self._header_getter(url=url, rpath=fpath, fheaders=fheaders)
            if resp.status_code == 404:
                report.reporter(msg="Not found. %s | %s" % (resp.status_code,
                                                            resp.request))
                return None, None, None
            else:
                if object_count is None:
                    object_count = resp.headers.get("x-container-object-count")
                    if object_count:
                        object_count = int(object_count)
                        # Bail out when the container reports no objects.
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def container_lister(self, url, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Container List'):
        fheaders = self.payload['headers']
        fpath = http.quoter(url=url.path)
        with meth.operation(retry, obj='%s %s' % (fheaders, fpath)):
            resp = self._header_getter(url=url, rpath=fpath, fheaders=fheaders)
            head_check = resp.headers
            container_count = head_check.get('x-account-container-count')
            if container_count:
                container_count = int(container_count)
                # No containers on the account — nothing to list.
                if not container_count > 0:
                    return None
            else:
                return None

            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def object_updater(self, url, container, u_file):
    """Update an existing object in a swift container.

    This method will place new headers on an existing object.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        # HTML Encode the path for the file
        remote_path = http.quoter(url=url.path, cont=container, ufile=u_file)

        # Fold any user-supplied headers / saved permissions into the
        # base payload headers before posting.
        headers = self.payload['headers']
        extra_headers = ARGS.get('object_headers')
        if extra_headers is not None:
            headers.update(extra_headers)
        if ARGS.get('save_perms') is not None:
            headers.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj='%s %s' % (headers, u_file)):
            self._header_poster(url=url, rpath=remote_path, fheaders=headers)
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When *sfile* is given the CDN copy of that object is DELETEd;
    otherwise the container CDN state is toggled with a PUT.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        # Open Connection
        conn = http.open_connection(url=url)
        with meth.operation(retry, conn):
            cheaders = self.payload['headers']
            if sfile is not None:
                rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
                # perform CDN Object DELETE
                conn.request('DELETE', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
            else:
                rpath = http.quoter(url=url.path, cont=container)
                http.cdn_toggle(headers=cheaders)
                # perform CDN Enable (a PUT despite the old "POST" note)
                conn.request('PUT', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s' % (rpath,
                                                     resp.status,
                                                     resp.reason,
                                                     resp.msg)),
                prt=False,
                lvl='debug')
def container_lister(self, url, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Container List"):
        fheaders = self.payload["headers"]
        fpath = http.quoter(url=url.path)
        with meth.operation(retry, obj="%s %s" % (fheaders, fpath)):
            resp = self._header_getter(url=url, rpath=fpath, fheaders=fheaders)
            head_check = resp.headers
            container_count = head_check.get("x-account-container-count")
            if container_count:
                container_count = int(container_count)
                # No containers on the account — nothing to list.
                if not container_count > 0:
                    return None
            else:
                return None

            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def container_create(self, url, container):
    """Create a container if it is not Found.

    HEADs the container path first; when the HEAD returns 404 the
    container is created with a PUT.

    :param url: parsed endpoint URL for the storage service.
    :param container: name of the container to create.
    :return bool: True when the container was created, False when found.
    """
    rty_count = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=rty_count, delay=5, obj=container):
        rpath = http.quoter(url=url.path, cont=container)
        fheaders = self.payload["headers"]
        with meth.operation(retry, obj="%s %s" % (fheaders, rpath)):
            resp = self._header_getter(url=url, rpath=rpath, fheaders=fheaders)

            # Check that the status was a good one
            if resp.status_code == 404:
                report.reporter(msg="Creating Container => %s" % container)
                # BUGFIX: capture and validate the PUT response; the old
                # code discarded it and re-checked the stale 404 HEAD
                # response, so a failed create went unnoticed.  (The conn
                # based variant of this method checks the PUT response.)
                resp = http.put_request(url=url, rpath=rpath,
                                        headers=fheaders)
                self.resp_exception(resp=resp)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When *sfile* is given the CDN copy of that object is DELETEd;
    otherwise the container CDN state is toggled with a PUT.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        # Open Connection
        conn = http.open_connection(url=url)
        with meth.operation(retry, conn):
            cheaders = self.payload['headers']
            if sfile is not None:
                rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
                # perform CDN Object DELETE
                conn.request('DELETE', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
            else:
                rpath = http.quoter(url=url.path, cont=container)
                http.cdn_toggle(headers=cheaders)
                # perform CDN Enable (a PUT despite the old "POST" note)
                conn.request('PUT', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s' % (rpath,
                                                     resp.status,
                                                     resp.reason,
                                                     resp.msg)),
                prt=False,
                lvl='debug'
            )
def detail_show(self, url):
    """Return Details on an object or container.

    HEADs either the object named by the CLI args (when set) or the
    container itself, and returns the response.
    """
    container = ARGS.get("container")
    attempts = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=attempts, delay=5, obj=container):
        obj_name = ARGS.get("object")
        if obj_name is None:
            rpath = http.quoter(url=url.path, cont=container)
        else:
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=obj_name)
        headers = self.payload["headers"]
        with meth.operation(retry, obj="%s %s" % (headers, rpath)):
            return self._header_getter(url=url, rpath=rpath,
                                       fheaders=headers)
def container_deleter(self, url, container):
    """Delete all objects in a container.

    :param url:
    :param container:
    """
    attempts = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=container):
        headers = self.payload["headers"]
        remote_path = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj="%s %s" % (headers, container)):
            # Perform delete.
            self._deleter(url=url, rpath=remote_path, fheaders=headers)
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count: pre-fetched object count; when None it is read
                         from the container HEAD response.
    :param last_obj: marker of the last object seen (for resuming).
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            # Determine how many files are in the container
            fpath = http.quoter(url=url.path, cont=container)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=fpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            if resp.status == 404:
                report.reporter(
                    msg='Not found. %s | %s' % (resp.status, resp.msg)
                )
                return None, None, None
            else:
                if object_count is None:
                    head_check = dict(resp.getheaders())
                    object_count = head_check.get(
                        'x-container-object-count'
                    )
                    if object_count:
                        object_count = int(object_count)
                        # Bail out on an empty container.
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

            # Set the number of loops that we are going to do
            return self._list_getter(conn=conn,
                                     count=object_count,
                                     filepath=fpath,
                                     fheaders=self.payload['headers'],
                                     last_obj=last_obj)
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is miss-matched the local
    file is uploaded to the repository. If custom meta data is
    specified, and the object exists the method will put the metadata
    onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn, obj=u_file):
            # Get the path ready for action
            sfile = basic.get_sfile(ufile=u_file, source=source)
            rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
            fheaders = self.payload['headers']

            # Perform Upload.
            self._putter(conn=conn,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders,
                         retry=retry)

            # Put headers on the object if custom headers, or save perms.
            if any([
                ARGS.get('object_headers') is not None,
                ARGS.get('save_perms') is not None
            ]):
                if ARGS.get('object_headers') is not None:
                    fheaders.update(ARGS.get('object_headers'))
                if ARGS.get('save_perms') is not None:
                    fheaders.update(basic.stat_file(local_file=u_file))
                self._header_poster(conn=conn,
                                    rpath=rpath,
                                    fheaders=fheaders,
                                    retry=retry)
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is miss-matched the local
    file is uploaded to the repository. If custom meta data is
    specified, and the object exists the method will put the metadata
    onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn, obj=u_file):
            # Get the path ready for action
            sfile = basic.get_sfile(ufile=u_file, source=source)
            rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
            fheaders = self.payload['headers']

            # Perform Upload.
            self._putter(conn=conn,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders,
                         retry=retry)

            # Put headers on the object if custom headers, or save perms.
            if any([ARGS.get('object_headers') is not None,
                    ARGS.get('save_perms') is not None]):
                if ARGS.get('object_headers') is not None:
                    fheaders.update(ARGS.get('object_headers'))
                if ARGS.get('save_perms') is not None:
                    fheaders.update(basic.stat_file(local_file=u_file))
                self._header_poster(conn=conn,
                                    rpath=rpath,
                                    fheaders=fheaders,
                                    retry=retry)
def object_downloader(self, url, container, source, u_file):
    """Download an Object from a Container.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        headers = self.payload["headers"]
        remote_path = http.quoter(url=url.path,
                                  cont=container,
                                  ufile=u_file)
        with meth.operation(retry, obj="%s %s" % (headers, u_file)):
            # Fetch the object and hand it to the download helper.
            self._downloader(url=url,
                             rpath=remote_path,
                             fheaders=headers,
                             lfile=u_file,
                             source=source)
def container_deleter(self, url, container):
    """Delete all objects in a container.

    :param url:
    :param container:
    """
    attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=container):
        headers = self.payload['headers']
        remote_path = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj='%s %s' % (headers, container)):
            # Perform delete.
            self._deleter(url=url, rpath=remote_path, fheaders=headers)
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When *sfile* is given, the CDN copy of that object is removed with a
    DELETE; otherwise the container's CDN state is toggled and pushed
    with a PUT.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        cheaders = self.payload['headers']
        if sfile is not None:
            rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
            # perform CDN Object DELETE
            adddata = '%s %s' % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.delete_request(url=url, rpath=rpath, headers=cheaders)
                self.resp_exception(resp=resp)
        else:
            rpath = http.quoter(url=url.path, cont=container)
            # NOTE(review): cdn_toggle presumably flips the enabled header
            # in-place on cheaders — confirm in the http helper module.
            http.cdn_toggle(headers=cheaders)

            # perform CDN Enable PUT
            adddata = '%s %s' % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.put_request(url=url, rpath=rpath, headers=cheaders)
                self.resp_exception(resp=resp)

        report.reporter(
            msg='OBJECT %s MESSAGE %s %s %s' % (rpath,
                                                resp.status_code,
                                                resp.reason,
                                                resp.request),
            prt=False,
            lvl='debug')
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count: pre-fetched object count; when None it is read
                         from the container HEAD response.
    :param last_obj: marker of the last object seen (for resuming).
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            # Determine how many files are in the container
            fpath = http.quoter(url=url.path, cont=container)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=fpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            if resp.status == 404:
                report.reporter(msg='Not found. %s | %s' % (resp.status,
                                                            resp.msg))
                return None, None, None
            else:
                if object_count is None:
                    head_check = dict(resp.getheaders())
                    object_count = head_check.get(
                        'x-container-object-count')
                    if object_count:
                        object_count = int(object_count)
                        # Bail out on an empty container.
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

            # Set the number of loops that we are going to do
            return self._list_getter(conn=conn,
                                     count=object_count,
                                     filepath=fpath,
                                     fheaders=self.payload['headers'],
                                     last_obj=last_obj)
def detail_show(self, url):
    """Return Details on an object or container.

    HEADs either the object named by the CLI args (when set) or the
    container itself, and returns the response.
    """
    container = ARGS.get('container')
    attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=attempts, delay=5, obj=container):
        obj_name = ARGS.get('object')
        if obj_name is None:
            rpath = http.quoter(url=url.path, cont=container)
        else:
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=obj_name)
        headers = self.payload['headers']
        with meth.operation(retry, obj='%s %s' % (headers, rpath)):
            return self._header_getter(url=url, rpath=rpath,
                                       fheaders=headers)
def object_downloader(self, url, container, source, u_file):
    """Download an Object from a Container.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        headers = self.payload['headers']
        remote_path = http.quoter(url=url.path,
                                  cont=container,
                                  ufile=u_file)
        with meth.operation(retry, obj='%s %s' % (headers, u_file)):
            # Fetch the object and hand it to the download helper.
            self._downloader(url=url,
                             rpath=remote_path,
                             fheaders=headers,
                             lfile=u_file,
                             source=source)
def object_deleter(self, url, container, u_file):
    """Deletes an objects in a container.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        headers = self.payload["headers"]
        remote_path = http.quoter(url=url.path,
                                  cont=container,
                                  ufile=u_file)
        # Make a connection
        with meth.operation(retry, obj="%s %s" % (headers, remote_path)):
            resp = self._header_getter(url=url,
                                       rpath=remote_path,
                                       fheaders=headers)
            # Only issue the DELETE when the object actually exists.
            if resp.status_code != 404:
                self._deleter(url=url,
                              rpath=remote_path,
                              fheaders=headers)
def object_deleter(self, url, container, u_file):
    """Deletes an objects in a container.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        headers = self.payload['headers']
        remote_path = http.quoter(url=url.path,
                                  cont=container,
                                  ufile=u_file)
        # Make a connection
        with meth.operation(retry, obj='%s %s' % (headers, remote_path)):
            resp = self._header_getter(url=url,
                                       rpath=remote_path,
                                       fheaders=headers)
            # Only issue the DELETE when the object actually exists.
            if resp.status_code != 404:
                self._deleter(url=url,
                              rpath=remote_path,
                              fheaders=headers)
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is miss-matched the local
    file is uploaded to the repository. If custom meta data is
    specified, and the object exists the method will put the metadata
    onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """
    # BUGFIX: resolve the --dir prefixed container name once, outside the
    # retry loop.  The old code mutated ``container`` inside the loop, so
    # every retry appended the prefix again ("cont/dir/dir/...").
    if ARGS.get('dir'):
        container = '%s/%s' % (container, ARGS['dir'].strip('/'))

    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Get the path ready for action
        sfile = basic.get_sfile(ufile=u_file, source=source)
        rpath = http.quoter(url=url.path, cont=container, ufile=sfile)

        # Fold custom headers / saved permissions into the upload headers.
        fheaders = self.payload['headers']
        if ARGS.get('object_headers') is not None:
            fheaders.update(ARGS.get('object_headers'))
        if ARGS.get('save_perms') is not None:
            fheaders.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj='%s %s' % (fheaders, u_file)):
            self._putter(url=url,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders)
def container_deleter(self, url, container):
    """Delete all objects in a container.

    :param url:
    :param container:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=container):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            rpath = http.quoter(url=url.path, cont=container)
            # Perform delete.
            self._deleter(conn=conn,
                          rpath=rpath,
                          fheaders=self.payload['headers'],
                          retry=retry)
def container_lister(self, url, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Container List'):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            # Determine how many files are in the container
            fpath = http.quoter(url=url.path)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=fpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            head_check = dict(resp.getheaders())
            container_count = head_check.get('x-account-container-count')
            if container_count:
                container_count = int(container_count)
                # No containers on the account — nothing to list.
                if not container_count > 0:
                    return None
            else:
                return None

            # Set the number of loops that we are going to do
            return self._list_getter(conn=conn,
                                     count=container_count,
                                     filepath=fpath,
                                     fheaders=self.payload['headers'],
                                     last_obj=last_obj)
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count: pre-fetched object count; when None it is read
                         from the container HEAD response.
    :param last_obj: marker of the last object seen (for resuming).
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        fheaders = self.payload['headers']
        fpath = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj='%s %s' % (fheaders, fpath)):
            resp = self._header_getter(url=url, rpath=fpath, fheaders=fheaders)
            if resp.status_code == 404:
                report.reporter(msg='Not found. %s | %s' % (resp.status_code,
                                                            resp.request))
                return None, None, None
            else:
                if object_count is None:
                    object_count = resp.headers.get(
                        'x-container-object-count')
                    if object_count:
                        object_count = int(object_count)
                        # Bail out on an empty container.
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def container_create(self, url, container):
    """Create a container if it is not Found.

    HEADs the container path first; when the HEAD returns 404 the
    container is created with a PUT.

    :param url:
    :param container:
    :return bool: True when created, False when already present.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=5, obj=container):
        conn = http.open_connection(url=url)
        rpath = http.quoter(url=url.path, cont=container)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            resp = self._header_getter(conn=conn,
                                       rpath=rpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)

            # Check that the status was a good one
            if resp.status == 404:
                report.reporter(
                    msg='Creating Container ==> %s' % container
                )
                conn.request('PUT', rpath, headers=self.payload['headers'])
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def object_updater(self, url, container, u_file):
    """Update an existing object in a swift container.

    This method will place new headers on an existing object.

    :param url:
    :param container:
    :param u_file:
    """
    attempts = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=attempts, delay=2, obj=u_file):
        # HTML Encode the path for the file
        remote_path = http.quoter(url=url.path, cont=container, ufile=u_file)

        # Fold any user-supplied headers / saved permissions into the
        # base payload headers before posting.
        headers = self.payload["headers"]
        extra_headers = ARGS.get("object_headers")
        if extra_headers is not None:
            headers.update(extra_headers)
        if ARGS.get("save_perms") is not None:
            headers.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj="%s %s" % (headers, u_file)):
            self._header_poster(url=url, rpath=remote_path, fheaders=headers)
def container_create(self, url, container):
    """Create a container if it is not Found.

    HEADs the container path first; when the HEAD returns 404 the
    container is created with a PUT.

    :param url:
    :param container:
    :return bool: True when created, False when already present.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=5, obj=container):
        conn = http.open_connection(url=url)
        rpath = http.quoter(url=url.path, cont=container)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            resp = self._header_getter(conn=conn,
                                       rpath=rpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)

            # Check that the status was a good one
            if resp.status == 404:
                report.reporter(msg='Creating Container ==> %s' % container)
                conn.request('PUT', rpath, headers=self.payload['headers'])
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def indicator(self):
    """Produce the spinner.

    Writes an animated progress line to stdout while ``self.system`` is
    truthy, showing the work queue depth when it can be read.
    """
    with methods.operation(retry=turbo.emergency_kill):
        # Loop-invariant: hoisted out of the while loop.
        busy_chars = ['|', '/', '-', '\\']
        while self.system:
            for _cr in busy_chars:
                # Fixes Errors with OS X due to no sem_getvalue support
                if self.work_q is not None:
                    if not sys.platform.startswith('darwin'):
                        size = self.work_q.qsize()
                        if size > 0:
                            _qz = 'Number of Jobs in Queue = %s ' % size
                        else:
                            _qz = 'Waiting for in-process Jobs to finish '
                    else:
                        _qz = 'Waiting for in-process Jobs to finish. '
                else:
                    _qz = 'Please Wait... '
                sys.stdout.write('\rProcessing - [ %(spin)s ] - %(qsize)s'
                                 % {"qsize": _qz, "spin": _cr})
                sys.stdout.flush()
                time.sleep(.1)
                # Removed dead no-op "self.system = self.system"; the
                # while condition re-reads the attribute on every pass.
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is miss-matched the local
    file is uploaded to the repository. If custom meta data is
    specified, and the object exists the method will put the metadata
    onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """
    # BUGFIX: resolve the --dir prefixed container name once, outside the
    # retry loop.  The old code mutated ``container`` inside the loop, so
    # every retry appended the prefix again ("cont/dir/dir/...").
    if ARGS.get("dir"):
        container = "%s/%s" % (container, ARGS["dir"].strip("/"))

    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 delay=2,
                                 obj=u_file):
        # Get the path ready for action
        sfile = basic.get_sfile(ufile=u_file, source=source)
        rpath = http.quoter(url=url.path, cont=container, ufile=sfile)

        # Fold custom headers / saved permissions into the upload headers.
        fheaders = self.payload["headers"]
        if ARGS.get("object_headers") is not None:
            fheaders.update(ARGS.get("object_headers"))
        if ARGS.get("save_perms") is not None:
            fheaders.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj="%s %s" % (fheaders, u_file)):
            self._putter(url=url,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders)
def _list_getter(self, conn, count, filepath, fheaders, last_obj=None):
    """Get a list of all objects in a container.

    Walks the paginated listing with marker-based recursion; results are
    accumulated by mutating ``f_list`` in place (the recursive call's
    return value is intentionally unused).

    :param conn:
    :param count:
    :param filepath:
    :param fheaders:
    :return list:
    """

    def _marker_type(base, last):
        """Set and return the marker.

        :param base:
        :param last:
        :return str:
        """
        if last is None:
            return base
        else:
            return _last_marker(f_path=base, l_obj=last)

    def _last_marker(f_path, l_obj):
        """Set Marker.

        :param f_path:
        :param l_obj:
        :return str:
        """
        return '%s&marker=%s' % (f_path, http.quoter(url=l_obj))

    def _obj_index(b_path, m_path, l_obj, f_list):
        # One GET per page; recursion advances the marker.
        conn.request('GET', m_path, headers=fheaders)
        resp, read = http.response_get(conn=conn, retry=retry)
        self.resp_exception(resp=resp, rty=retry)
        return_list = basic.json_encode(read)

        for obj in return_list:
            # NOTE(review): this passes the CLI ``time_offset`` value to
            # time_delta, not the object's own last_modified — the sibling
            # requests-based implementation filters on obj last_modified;
            # confirm which behaviour is intended.
            time_offset = ARGS.get('time_offset')
            if time_offset is not None:
                # Get the last_modified data from the Object.
                if cloud.time_delta(lmobj=time_offset) is True:
                    f_list.append(obj)
            else:
                f_list.append(obj)

        last_obj_in_list = f_list[-1].get('name')
        # NOTE(review): identity (``is``) comparison of marker strings —
        # works here only because l_obj is always the same object passed
        # back in; equality (==) would be safer.
        if l_obj is last_obj_in_list:
            return f_list
        else:
            marker = _marker_type(base=b_path, last=last_obj_in_list)
            _obj_index(
                b_path, marker, last_obj_in_list, f_list
            )

    # Quote the file path.
    base_path = marked_path = (
        '%s/?limit=10000&format=json' % basic.ustr(filepath)
    )
    if last_obj is not None:
        marked_path = _last_marker(
            f_path=base_path,
            l_obj=http.quoter(url=last_obj)
        )

    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List Creation'):
        with meth.operation(retry):
            file_list = []
            _obj_index(
                base_path, marked_path, last_obj, file_list
            )
            final_list = basic.unique_list_dicts(
                dlist=file_list, key='name'
            )
            list_count = len(final_list)
            report.reporter(
                msg='INFO: %d object(s) found' % len(final_list),
                log=True
            )
            return final_list, list_count, last_obj
def _list_getter(self, url, filepath, fheaders, last_obj=None):
    """Get a list of all objects in a container.

    Pages through the listing 10000 objects at a time using the marker
    query parameter until the marker stops advancing, the listing is
    exhausted, or the configured job cap is reached.

    :param url:
    :param filepath:
    :param fheaders:
    :return list:
    """

    def _marker_type(base, last):
        """Set and return the marker.

        :param base:
        :param last:
        :return str:
        """
        if last is None:
            return base
        else:
            return _last_marker(f_path=base, l_obj=last)

    def _last_marker(f_path, l_obj):
        """Set Marker.

        :param f_path:
        :param l_obj:
        :return str:
        """
        return "%s&marker=%s" % (f_path, http.quoter(url=l_obj))

    def _obj_index(b_path, m_path):
        f_list = []
        l_obj = None
        while True:
            resp = http.get_request(url=url, rpath=m_path, headers=fheaders)
            self.resp_exception(resp=resp)
            return_list = resp.json()

            for obj in return_list:
                # Optionally filter by the object's last_modified stamp.
                time_offset = obj.get("last_modified")
                if time_offset is not None and ARGS.get("time_offset"):
                    # Get the last_modified data from the Object.
                    if cloud.time_delta(lmobj=time_offset) is False:
                        f_list.append(obj)
                else:
                    f_list.append(obj)
            else:
                # for-else (no break in the loop): guard against an
                # empty / fully filtered listing before f_list[-1].
                if not f_list:
                    return list()

            last_obj_in_list = f_list[-1].get("name")
            if ARGS.get("max_jobs", ARGS.get("object_index")) is not None:
                max_jobs = ARGS.get("max_jobs", ARGS.get("object_index"))
                if max_jobs <= len(f_list):
                    # Cap reached — truncate and stop paging.
                    return f_list[:max_jobs]
                elif l_obj is last_obj_in_list:
                    # Marker did not advance — listing exhausted.
                    return f_list
                else:
                    l_obj = last_obj_in_list
                    m_path = _marker_type(base=b_path, last=last_obj_in_list)
            else:
                if l_obj is last_obj_in_list:
                    return f_list
                else:
                    l_obj = last_obj_in_list
                    m_path = _marker_type(base=b_path, last=last_obj_in_list)

    # Quote the file path.
    base_path = marked_path = ("%s/?limit=10000&format=json"
                               % basic.ustr(filepath))
    if last_obj is not None:
        marked_path = _last_marker(f_path=base_path,
                                   l_obj=http.quoter(url=last_obj))

    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Object List Creation"):
        with meth.operation(retry, obj="%s %s" % (fheaders, filepath)):
            file_list = _obj_index(base_path, marked_path)
            final_list = basic.unique_list_dicts(dlist=file_list, key="name")
            list_count = len(final_list)
            report.reporter(msg="INFO: %d object(s) found" % len(final_list),
                            log=True)
            if not final_list:
                return final_list, list_count, None
            elif "name" in file_list[-1]:
                return final_list, list_count, file_list[-1]["name"]
            else:
                return final_list, list_count, file_list[-1]
def _list_getter(self, url, filepath, fheaders, last_obj=None):
    """Get a list of all objects in a container.

    Pages through the container listing 10000 objects at a time using a
    ``marker`` query parameter, optionally filters entries by their
    ``last_modified`` stamp, de-duplicates by object name, and reports
    the count found.

    :param url: parsed URL object; ``url`` anchors every GET request.
    :param filepath: container path used to build the listing request.
    :param fheaders: request headers sent with every GET.
    :param last_obj: optional object name to resume the listing after.
    :return tuple: ``(final_list, list_count, last_obj_name)`` --
        ``(final_list, 0, None)`` when the container is empty.
    """

    def _marker_type(base, last):
        """Set and return the marker.

        :param base:
        :param last:
        :return str:
        """
        if last is None:
            return base
        else:
            return _last_marker(f_path=base, l_obj=last)

    def _last_marker(f_path, l_obj):
        """Set Marker.

        :param f_path:
        :param l_obj:
        :return str:
        """
        return '%s&marker=%s' % (f_path, http.quoter(url=l_obj))

    def _obj_index(b_path, m_path):
        f_list = []
        l_obj = None
        while True:
            resp = http.get_request(url=url, rpath=m_path, headers=fheaders)
            self.resp_exception(resp=resp)
            return_list = resp.json()
            for obj in return_list:
                # BUGFIX: filter on the Object's own last_modified stamp.
                # Previously the CLI 'time_offset' value itself was
                # passed to time_delta as lmobj, so the Object's age was
                # never consulted.
                time_offset = obj.get('last_modified')
                if time_offset is not None and ARGS.get('time_offset'):
                    # Keep only objects outside the requested window.
                    if cloud.time_delta(lmobj=time_offset) is False:
                        f_list.append(obj)
                else:
                    f_list.append(obj)

            # BUGFIX: an empty container previously raised IndexError on
            # f_list[-1]; stop cleanly instead.
            if not f_list:
                return f_list

            last_obj_in_list = f_list[-1].get('name')
            if ARGS.get('max_jobs', ARGS.get('object_index')) is not None:
                max_jobs = ARGS.get('max_jobs', ARGS.get('object_index'))
                if max_jobs <= len(f_list):
                    # Cap the listing at the requested job count.
                    return f_list[:max_jobs]

            # BUGFIX: compare by equality, not identity -- equal but
            # distinct marker strings made pagination spin forever.
            if l_obj == last_obj_in_list:
                return f_list
            l_obj = last_obj_in_list
            m_path = _marker_type(base=b_path, last=last_obj_in_list)

    # Quote the file path.
    base_path = marked_path = ('%s/?limit=10000&format=json'
                               % basic.ustr(filepath))
    if last_obj is not None:
        marked_path = _last_marker(f_path=base_path,
                                   l_obj=http.quoter(url=last_obj))

    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List Creation'):
        with meth.operation(retry, obj='%s %s' % (fheaders, filepath)):
            file_list = _obj_index(base_path, marked_path)
            final_list = basic.unique_list_dicts(dlist=file_list, key='name')
            list_count = len(final_list)
            report.reporter(msg='INFO: %d object(s) found' % len(final_list),
                            log=True)
            # BUGFIX: guard the empty-container case before indexing
            # file_list[-1].
            if not final_list:
                return final_list, list_count, None
            elif 'name' in file_list[-1]:
                return final_list, list_count, file_list[-1]['name']
            else:
                return final_list, list_count, file_list[-1]
def _list_getter(self, conn, count, filepath, fheaders, last_obj=None): """Get a list of all objects in a container. :param conn: :param count: :param filepath: :param fheaders: :return list: """ def _marker_type(base, last): """Set and return the marker. :param base: :param last: :return str: """ if last is None: return base else: return _last_marker(f_path=base, l_obj=last) def _last_marker(f_path, l_obj): """Set Marker. :param f_path: :param l_obj: :return str: """ return '%s&marker=%s' % (f_path, http.quoter(url=l_obj)) def _obj_index(b_path, m_path, l_obj, f_list): conn.request('GET', m_path, headers=fheaders) resp, read = http.response_get(conn=conn, retry=retry) self.resp_exception(resp=resp, rty=retry) return_list = basic.json_encode(read) for obj in return_list: time_offset = ARGS.get('time_offset') if time_offset is not None: # Get the last_modified data from the Object. if cloud.time_delta(lmobj=time_offset) is True: f_list.append(obj) else: f_list.append(obj) last_obj_in_list = f_list[-1].get('name') if l_obj is last_obj_in_list: return f_list else: marker = _marker_type(base=b_path, last=last_obj_in_list) _obj_index(b_path, marker, last_obj_in_list, f_list) # Quote the file path. base_path = marked_path = ('%s/?limit=10000&format=json' % basic.ustr(filepath)) if last_obj is not None: marked_path = _last_marker(f_path=base_path, l_obj=http.quoter(url=last_obj)) for retry in basic.retryloop(attempts=ARGS.get('error_retry'), obj='Object List Creation'): with meth.operation(retry): file_list = [] _obj_index(base_path, marked_path, last_obj, file_list) final_list = basic.unique_list_dicts(dlist=file_list, key='name') list_count = len(final_list) report.reporter(msg='INFO: %d object(s) found' % len(final_list), log=True) return final_list, list_count, last_obj
def object_syncer(self, surl, turl, scontainer, tcontainer, u_file):
    """Download an Object from one Container and the upload it to a target.

    Compares the target object (status / etag / optionally timestamps)
    against the source entry; when a copy is needed, downloads the
    source object to a temp file, PUTs it to the target, and optionally
    clones the source headers onto the target. The temp file is always
    removed.

    :param surl: parsed source URL object.
    :param turl: parsed target URL object.
    :param scontainer: source container name.
    :param tcontainer: target container name.
    :param u_file: object dict with at least 'name', 'hash' and
        'last_modified' keys.
    """

    def _cleanup():
        """Ensure that our temp file is removed."""
        # NOTE: locals() in CPython includes bound closure cells, so
        # this guards against tfile never having been assigned (e.g. an
        # exception before create_tmp). CPython-specific but preserved.
        if locals().get('tfile') is not None:
            basic.remove_file(tfile)

    def _time_difference(obj_resp, obj):
        """Return True when the object should be copied based on age."""
        if ARGS.get('save_newer') is True:
            # Get the source object last modified time.
            # BUGFIX: requests responses expose '.headers'; '.header'
            # raised AttributeError whenever --save-newer was used.
            compare_time = obj_resp.headers.get('last_modified')
            if compare_time is None:
                # No timestamp on the target -> copy.
                return True
            elif cloud.time_delta(compare_time=compare_time,
                                  lmobj=obj['last_modified']) is True:
                # Target is newer -> skip.
                return False
            else:
                return True
        else:
            return True

    def _compare(obj_resp, obj):
        """Return True when the source object must be uploaded."""
        if obj_resp.status_code == 404:
            report.reporter(msg='Target Object %s not found' % obj['name'],
                            prt=False)
            return True
        elif ARGS.get('add_only'):
            # --add-only: never overwrite an existing target object.
            report.reporter(msg='Target Object %s already exists' % obj['name'],
                            prt=True)
            return False
        elif obj_resp.headers.get('etag') != obj['hash']:
            report.reporter(msg=('Checksum Mismatch on Target Object %s'
                                 % u_file['name']),
                            prt=False,
                            lvl='debug')
            return _time_difference(obj_resp, obj)
        else:
            return False

    fheaders = self.payload['headers']
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=5,
                                 obj=u_file['name']):
        # Open connection and perform operation
        spath = http.quoter(url=surl.path,
                            cont=scontainer,
                            ufile=u_file['name'])
        tpath = http.quoter(url=turl.path,
                            cont=tcontainer,
                            ufile=u_file['name'])

        with meth.operation(retry, obj='%s %s' % (fheaders, tpath)):
            resp = self._header_getter(url=turl,
                                       rpath=tpath,
                                       fheaders=fheaders)
            # If object comparison is True GET then PUT object
            if _compare(resp, u_file) is not True:
                return None
        try:
            # Open Connection for source Download
            with meth.operation(retry, obj='%s %s' % (fheaders, spath)):
                # make a temp file.
                tfile = basic.create_tmp()
                # Make a connection
                resp = self._header_getter(url=surl,
                                           rpath=spath,
                                           fheaders=fheaders)
                sheaders = resp.headers
                self._downloader(url=surl,
                                 rpath=spath,
                                 fheaders=fheaders,
                                 lfile=tfile,
                                 source=None,
                                 skip=True)
            for _retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                          delay=5,
                                          obj=u_file):
                # open connection for target upload.
                adddata = '%s %s' % (fheaders, u_file)
                with meth.operation(_retry, obj=adddata, cleanup=_cleanup):
                    resp = self._header_getter(url=turl,
                                               rpath=tpath,
                                               fheaders=fheaders)
                    self.resp_exception(resp=resp)
                    # PUT remote object
                    self._putter(url=turl,
                                 fpath=tfile,
                                 rpath=tpath,
                                 fheaders=fheaders,
                                 skip=True)
                    # let the system rest for 1 second.
                    basic.stupid_hack(wait=1)
                    # With the source headers POST new headers on target
                    if ARGS.get('clone_headers') is True:
                        theaders = resp.headers
                        for key in sheaders.keys():
                            if key not in theaders:
                                fheaders.update({key: sheaders[key]})
                        # Force the SOURCE content Type on the Target.
                        fheaders.update(
                            {'content-type': sheaders.get('content-type')})
                        self._header_poster(url=turl,
                                            rpath=tpath,
                                            fheaders=fheaders)
        finally:
            _cleanup()
def object_syncer(self, surl, turl, scontainer, tcontainer, u_file):
    """Download an Object from one Container and the upload it to a target.

    Compares the target object (status / etag / optionally timestamps)
    against the source entry; when a copy is needed, downloads the
    source object to a temp file, PUTs it to the target, and optionally
    clones the source headers onto the target. The temp file is always
    removed.

    :param surl: parsed source URL object.
    :param turl: parsed target URL object.
    :param scontainer: source container name.
    :param tcontainer: target container name.
    :param u_file: object dict with at least 'name', 'hash' and
        'last_modified' keys.
    """

    def _cleanup():
        """Ensure that our temp file is removed."""
        # NOTE: locals() in CPython includes bound closure cells, so
        # this guards against tfile never having been assigned (e.g. an
        # exception before create_tmp). CPython-specific but preserved.
        if locals().get("tfile") is not None:
            basic.remove_file(tfile)

    def _time_difference(obj_resp, obj):
        """Return True when the object should be copied based on age."""
        if ARGS.get("save_newer") is True:
            # Get the source object last modified time.
            # BUGFIX: requests responses expose '.headers'; '.header'
            # raised AttributeError whenever --save-newer was used.
            compare_time = obj_resp.headers.get("last_modified")
            if compare_time is None:
                # No timestamp on the target -> copy.
                return True
            elif cloud.time_delta(compare_time=compare_time,
                                  lmobj=obj["last_modified"]) is True:
                # Target is newer -> skip.
                return False
            else:
                return True
        else:
            return True

    def _compare(obj_resp, obj):
        """Return True when the source object must be uploaded."""
        if obj_resp.status_code == 404:
            report.reporter(msg="Target Object %s not found" % obj["name"],
                            prt=False)
            return True
        elif ARGS.get("add_only"):
            # --add-only: never overwrite an existing target object.
            report.reporter(msg="Target Object %s already exists" % obj["name"],
                            prt=True)
            return False
        elif obj_resp.headers.get("etag") != obj["hash"]:
            report.reporter(msg=("Checksum Mismatch on Target Object %s"
                                 % u_file["name"]),
                            prt=False,
                            lvl="debug")
            return _time_difference(obj_resp, obj)
        else:
            return False

    fheaders = self.payload["headers"]
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 delay=5,
                                 obj=u_file["name"]):
        # Open connection and perform operation
        spath = http.quoter(url=surl.path,
                            cont=scontainer,
                            ufile=u_file["name"])
        tpath = http.quoter(url=turl.path,
                            cont=tcontainer,
                            ufile=u_file["name"])

        with meth.operation(retry, obj="%s %s" % (fheaders, tpath)):
            resp = self._header_getter(url=turl,
                                       rpath=tpath,
                                       fheaders=fheaders)
            # If object comparison is True GET then PUT object
            if _compare(resp, u_file) is not True:
                return None
        try:
            # Open Connection for source Download
            with meth.operation(retry, obj="%s %s" % (fheaders, spath)):
                # make a temp file.
                tfile = basic.create_tmp()
                # Make a connection
                resp = self._header_getter(url=surl,
                                           rpath=spath,
                                           fheaders=fheaders)
                sheaders = resp.headers
                self._downloader(url=surl,
                                 rpath=spath,
                                 fheaders=fheaders,
                                 lfile=tfile,
                                 source=None,
                                 skip=True)
            for _retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                          delay=5,
                                          obj=u_file):
                # open connection for target upload.
                adddata = "%s %s" % (fheaders, u_file)
                with meth.operation(_retry, obj=adddata, cleanup=_cleanup):
                    resp = self._header_getter(url=turl,
                                               rpath=tpath,
                                               fheaders=fheaders)
                    self.resp_exception(resp=resp)
                    # PUT remote object
                    self._putter(url=turl,
                                 fpath=tfile,
                                 rpath=tpath,
                                 fheaders=fheaders,
                                 skip=True)
                    # let the system rest for 1 second.
                    basic.stupid_hack(wait=1)
                    # With the source headers POST new headers on target
                    if ARGS.get("clone_headers") is True:
                        theaders = resp.headers
                        for key in sheaders.keys():
                            if key not in theaders:
                                fheaders.update({key: sheaders[key]})
                        # Force the SOURCE content Type on the Target.
                        fheaders.update(
                            {"content-type": sheaders.get("content-type")})
                        self._header_poster(url=turl,
                                            rpath=tpath,
                                            fheaders=fheaders)
        finally:
            _cleanup()
def object_syncer(self, surl, turl, scontainer, tcontainer, obj):
    """Download an Object from one Container and the upload it to a target.

    Compares the target object (status / etag / optionally timestamps)
    against the source entry; when a copy is needed, downloads the
    source object to a temp file, PUTs it to the target, and optionally
    clones the source headers onto the target. The temp file is always
    removed.

    :param surl: parsed source URL object.
    :param turl: parsed target URL object.
    :param scontainer: source container name.
    :param tcontainer: target container name.
    :param obj: object dict with at least 'name', 'hash' and
        'last_modified' keys.
    """

    def _cleanup():
        """Ensure that our temp file is removed."""
        # NOTE: locals() in CPython includes bound closure cells, so
        # this guards against tfile never having been assigned (e.g. an
        # exception before create_tmp). CPython-specific but preserved.
        if locals().get('tfile') is not None:
            basic.remove_file(tfile)

    def _time_difference(resp, obj):
        """Return True when the object should be copied based on age."""
        if ARGS.get('save_newer') is True:
            # Get the source object last modified time.
            compare_time = resp.getheader('last_modified')
            if compare_time is None:
                # No timestamp on the target -> copy.
                return True
            elif cloud.time_delta(compare_time=compare_time,
                                  lmobj=obj['last_modified']) is True:
                # Target is newer -> skip.
                return False
            else:
                return True
        else:
            return True

    def _compare(resp, obj):
        """Return True when the source object must be uploaded."""
        if resp.status == 404:
            report.reporter(msg='Target Object %s not found' % obj['name'],
                            prt=False)
            return True
        elif resp.getheader('etag') != obj['hash']:
            report.reporter(msg='Checksum Mismatch on Target Object %s'
                                % obj['name'],
                            prt=False,
                            lvl='debug')
            return _time_difference(resp, obj)
        else:
            return False

    fheaders = self.payload['headers']
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=5,
                                 obj=obj['name']):
        # Open connection and perform operation
        fmt, date, date_delta, now = basic.time_stamp()
        spath = http.quoter(url=surl.path,
                            cont=scontainer,
                            ufile=obj['name'])
        tpath = http.quoter(url=turl.path,
                            cont=tcontainer,
                            ufile=obj['name'])
        conn = http.open_connection(url=turl)
        with meth.operation(retry, conn=conn, obj=obj):
            resp = self._header_getter(conn=conn,
                                       rpath=tpath,
                                       fheaders=fheaders,
                                       retry=retry)
            # If object comparison is True GET then PUT object
            if _compare(resp=resp, obj=obj) is not True:
                return None
        try:
            # Open Connection for source Download
            conn = http.open_connection(url=surl)
            with meth.operation(retry, conn=conn, obj=obj):
                # make a temp file.
                tfile = basic.create_tmp()
                # Make a connection
                resp = self._header_getter(conn=conn,
                                           rpath=spath,
                                           fheaders=fheaders,
                                           retry=retry)
                sheaders = dict(resp.getheaders())
                # TODO(kevin) add the ability to short upload if timestamp
                # TODO(kevin) ... is newer on the target.
                # GET remote Object
                self._downloader(conn=conn,
                                 rpath=spath,
                                 fheaders=fheaders,
                                 lfile=tfile,
                                 source=None,
                                 retry=retry,
                                 skip=True)
            for nretry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                          delay=5,
                                          obj=obj):
                # open connection for target upload.
                conn = http.open_connection(url=turl)
                # BUGFIX: this operation belongs to the INNER retry loop;
                # it previously received the outer 'retry' object while
                # every call inside used 'nretry'.
                with meth.operation(nretry,
                                    conn=conn,
                                    obj=obj,
                                    cleanup=_cleanup):
                    resp = self._header_getter(conn=conn,
                                               rpath=tpath,
                                               fheaders=fheaders,
                                               retry=nretry)
                    self.resp_exception(resp=resp, rty=nretry)
                    # PUT remote object
                    self._putter(conn=conn,
                                 fpath=tfile,
                                 rpath=tpath,
                                 fheaders=fheaders,
                                 retry=nretry,
                                 skip=True)
                    # let the system rest for 3 seconds.
                    basic.stupid_hack(wait=3)
                    # With the source headers POST new headers on target
                    if ARGS.get('clone_headers') is True:
                        resp = self._header_getter(conn=conn,
                                                   rpath=tpath,
                                                   fheaders=fheaders,
                                                   retry=nretry)
                        theaders = dict(resp.getheaders())
                        for key in sheaders.keys():
                            if key not in theaders:
                                fheaders.update({key: sheaders[key]})
                        # Force the SOURCE content Type on the Target.
                        fheaders.update(
                            {'content-type': sheaders.get('content-type')})
                        self._header_poster(conn=conn,
                                            rpath=tpath,
                                            fheaders=fheaders,
                                            retry=nretry)
        finally:
            _cleanup()
def object_syncer(self, surl, turl, scontainer, tcontainer, obj):
    """Download an Object from one Container and the upload it to a target.

    Compares the target object (status / etag / optionally timestamps)
    against the source entry; when a copy is needed, downloads the
    source object to a temp file, PUTs it to the target, and optionally
    clones the source headers onto the target. The temp file is always
    removed.

    :param surl: parsed source URL object.
    :param turl: parsed target URL object.
    :param scontainer: source container name.
    :param tcontainer: target container name.
    :param obj: object dict with at least 'name', 'hash' and
        'last_modified' keys.
    """

    def _cleanup():
        """Ensure that our temp file is removed."""
        # NOTE: locals() in CPython includes bound closure cells, so
        # this guards against tfile never having been assigned (e.g. an
        # exception before create_tmp). CPython-specific but preserved.
        if locals().get('tfile') is not None:
            basic.remove_file(tfile)

    def _time_difference(resp, obj):
        """Return True when the object should be copied based on age."""
        if ARGS.get('save_newer') is True:
            # Get the source object last modified time.
            compare_time = resp.getheader('last_modified')
            if compare_time is None:
                # No timestamp on the target -> copy.
                return True
            elif cloud.time_delta(compare_time=compare_time,
                                  lmobj=obj['last_modified']) is True:
                # Target is newer -> skip.
                return False
            else:
                return True
        else:
            return True

    def _compare(resp, obj):
        """Return True when the source object must be uploaded."""
        if resp.status == 404:
            report.reporter(
                msg='Target Object %s not found' % obj['name'],
                prt=False
            )
            return True
        elif resp.getheader('etag') != obj['hash']:
            report.reporter(
                msg='Checksum Mismatch on Target Object %s' % obj['name'],
                prt=False,
                lvl='debug'
            )
            return _time_difference(resp, obj)
        else:
            return False

    fheaders = self.payload['headers']
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=5,
                                 obj=obj['name']):
        # Open connection and perform operation
        fmt, date, date_delta, now = basic.time_stamp()
        spath = http.quoter(url=surl.path,
                            cont=scontainer,
                            ufile=obj['name'])
        tpath = http.quoter(url=turl.path,
                            cont=tcontainer,
                            ufile=obj['name'])
        conn = http.open_connection(url=turl)
        with meth.operation(retry, conn=conn, obj=obj):
            resp = self._header_getter(conn=conn,
                                       rpath=tpath,
                                       fheaders=fheaders,
                                       retry=retry)
            # If object comparison is True GET then PUT object
            if _compare(resp=resp, obj=obj) is not True:
                return None
        try:
            # Open Connection for source Download
            conn = http.open_connection(url=surl)
            with meth.operation(retry, conn=conn, obj=obj):
                # make a temp file.
                tfile = basic.create_tmp()
                # Make a connection
                resp = self._header_getter(conn=conn,
                                           rpath=spath,
                                           fheaders=fheaders,
                                           retry=retry)
                sheaders = dict(resp.getheaders())
                # TODO(kevin) add the ability to short upload if timestamp
                # TODO(kevin) ... is newer on the target.
                # GET remote Object
                self._downloader(
                    conn=conn,
                    rpath=spath,
                    fheaders=fheaders,
                    lfile=tfile,
                    source=None,
                    retry=retry,
                    skip=True
                )
            for nretry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                          delay=5,
                                          obj=obj):
                # open connection for target upload.
                conn = http.open_connection(url=turl)
                # BUGFIX: this operation belongs to the INNER retry loop;
                # it previously received the outer 'retry' object while
                # every call inside used 'nretry'.
                with meth.operation(nretry,
                                    conn=conn,
                                    obj=obj,
                                    cleanup=_cleanup):
                    resp = self._header_getter(conn=conn,
                                               rpath=tpath,
                                               fheaders=fheaders,
                                               retry=nretry)
                    self.resp_exception(resp=resp, rty=nretry)
                    # PUT remote object
                    self._putter(conn=conn,
                                 fpath=tfile,
                                 rpath=tpath,
                                 fheaders=fheaders,
                                 retry=nretry,
                                 skip=True)
                    # let the system rest for 3 seconds.
                    basic.stupid_hack(wait=3)
                    # With the source headers POST new headers on target
                    if ARGS.get('clone_headers') is True:
                        resp = self._header_getter(conn=conn,
                                                   rpath=tpath,
                                                   fheaders=fheaders,
                                                   retry=nretry)
                        theaders = dict(resp.getheaders())
                        for key in sheaders.keys():
                            if key not in theaders:
                                fheaders.update({key: sheaders[key]})
                        # Force the SOURCE content Type on the Target.
                        fheaders.update(
                            {'content-type': sheaders.get('content-type')}
                        )
                        self._header_poster(
                            conn=conn,
                            rpath=tpath,
                            fheaders=fheaders,
                            retry=nretry
                        )
        finally:
            _cleanup()