class SwiftStorage(object): def __init__(self, account, url, user, key): self.conn = Connection(authurl=url, user=user, key=key) self.cont = self.conn.get_container(account) self.account = account def list(self): return [x["name"] for x in self.cont[1]] def get_attrs(self, uuid): attrs = {} k = self.conn.get_object(container=self.account, obj=uuid)[0] print k attrs['tags'] = json.loads(k['x-object-meta-tags']) attrs['flags'] = json.loads(k['x-object-meta-flags']) attrs['stored'] = int(k['x-object-meta-stored']) return attrs def get_message(self, uuid): return self.conn.get_object(container=self.account, obj=uuid)[1] def put_message(self, uuid, msg, attrs): self.conn.put_object(container=self.account, obj=uuid, contents=msg, headers=attrs) self.put_attrs(uuid, attrs) def put_attrs(self, uuid, attrs): newattrs = {'X-Object-Meta-Tags': json.dumps(attrs['tags']), 'X-Object-Meta-Flags': json.dumps(attrs['flags']), 'X-Object-Meta-Stored': str(attrs['stored'])} print newattrs self.conn.post_object(container=self.account, obj=uuid, headers=newattrs) def del_message(self, uuid): self.conn.delete_object(container=self.account, obj=uuid)
def main(args):
    """Reduce step: concatenate partial reducer outputs from Swift and
    store the final reduced result as reduce_res.json.

    args must carry 'output_bucket', 'input_reducer_bucket' and 'url'
    (the Swift proxy host).  Returns {"res": "good"} on success.
    """
    output_bucket = args.get("output_bucket")
    input_reducer_bucket = args.get("input_reducer_bucket")
    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    # Collect every partial-result object name from the input container.
    all_keys = [obj['name']
                for obj in conn.get_container(input_reducer_bucket)[1]]
    final_str = []
    for key in all_keys:
        # BUGFIX: get_object returns (headers, body-bytes); the old code
        # called .read() on the returned tuple, which raised
        # AttributeError.  Unpack and decode the bytes body instead.
        _, body = conn.get_object(input_reducer_bucket, key)
        contents = body.decode()
        final_str.append(to_string(getResultReduce(contents)))
    final_str = "\n".join(final_str)
    print(final_str)
    # Run the reducer once more over the concatenated partial results.
    final_res = getResultReduce(final_str)
    conn.put_object(output_bucket, "reduce_res.json",
                    contents=json.dumps(final_res))
    return {"res": "good"}
def main(args):
    """Map step: for each key listed in args['keys'] (slash-separated),
    read the object from the mapper bucket and write back "<word> 1"
    lines under the mapper's id.

    Returns {"res": "good"} on success.
    """
    mapper_bucket = args.get("mapper_bucket")
    input_bucket = args.get("input_bucket")  # part of the args contract; unused here
    mapper_id = args.get("mapper_id")
    src_keys = args.get("keys")
    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    keys = src_keys.split('/')
    # Download and process all keys
    for key in keys:
        print(key)
        # BUGFIX: get_object returns (headers, body-bytes); the body is
        # plain bytes, so the old response.read() raised AttributeError.
        _, body = conn.get_object(mapper_bucket, key)
        words = body.strip().split()
        result = "".join(word.decode() + " 1\n" for word in words)
        # NOTE(review): the output object name is the same for every key,
        # so later keys overwrite earlier ones — preserved from original.
        conn.put_object(mapper_bucket, str(mapper_id), contents=result)
    return {"res": "good"}
class SwiftStorageEngine(BaseStorageEngine):
    """Storage engine that persists media objects in OpenStack Swift.

    Locations have the form "container/object"; the part after the last
    slash is the object name.
    """

    def __init__(self, authurl, user, key, **kwargs):
        self.client = Connection(authurl, user, key, **kwargs)

    def save(self, fp, location):
        """Store the contents of file-like fp at location.

        Raises ClientException when the upload fails.
        """
        location = location.strip("/")
        i = location.rfind("/")
        container = location[:i]
        # NOTE(review): this slice keeps the leading "/" in the object
        # name; read() uses the same convention so the two stay
        # consistent — confirm this is intended before changing it.
        object_ = location[i:]
        try:
            return self.client.put_object(container, object_, fp.read())
        except ClientException:
            # BUGFIX: was '"...{0}" % location', which raised TypeError
            # while handling the original error (and wrongly said "get").
            logger.error("Failed to save media object: %s", location)
            raise

    def read(self, location):
        """Return the body of the object at location.

        Raises ClientException when the download fails.
        """
        location = location.strip("/")
        i = location.rfind("/")
        container = location[:i]
        object_ = location[i:]
        try:
            # get_object returns (headers, body); only the body is wanted.
            return self.client.get_object(container, object_)[1]
        except ClientException:
            # BUGFIX: same broken '%'-on-{0} formatting as in save().
            logger.error("Failed to get media object: %s", location)
            raise

    def get_url(self, request, location):
        """Build the media URL for location via the request's url helper."""
        return request.url('media', location=location)
def prepareffmpeg(url):
    """Fetch the ffmpeg binary from Swift and install it at FFMPEG_PATH.

    url is the Swift proxy host; the executable is made world-executable.
    """
    _authurl = "http://" + url + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    # get_object returns (headers, body-bytes).
    _, ffmpegExec = conn.get_object(container_name, "ffmpeg")
    # BUGFIX: the body is binary; writing it in text mode ('w') raises
    # TypeError (bytes to a str stream) — open in 'wb'.
    with open(FFMPEG_PATH, 'wb') as local:
        local.write(ffmpegExec)
    # Make the downloaded binary executable.
    os.chmod(FFMPEG_PATH, 0o0755)
def run(self, terms, variables=None, **kwargs):
    """Returns Ironic Inspector introspection data.

    Access swift and return introspection data for all nodes.

    :returns a list of tuples, one for each node.
    """
    session = get_auth_session(kwargs.get('auth_url'), "ironic",
                               "service", kwargs.get('password'))
    swift_client = Connection(session=session)
    # get_container returns (headers, listing); only the listing matters.
    listing = swift_client.get_container("ironic-inspector")[1]
    results = []
    for entry in listing:
        name = entry['name']
        # Only finished introspection blobs are of interest: they are
        # named inspector_data* and must not be the UNPROCESSED variant.
        if not name.startswith('inspector_data'):
            continue
        if name.endswith("UNPROCESSED"):
            continue
        data = swift_client.get_object("ironic-inspector", name)
        results.append((name, data))
    return results
class Swift(object):
    """Swift storage backend mirroring the S3 backend's interface."""

    def __init__(self, bucket, noop):
        """Setup the Swift storage backend with the container we will use.

        bucket -- Swift container name
        noop   -- when True, mutating operations are only logged
        """
        # This is our Swift container
        self.bucket = bucket
        self.noop = noop
        # We assume your environment variables are set correctly just like
        # you would for the swift command line util
        try:
            self.conn = Connection(authurl=os.environ["ST_AUTH"],
                                   user=os.environ["ST_USER"],
                                   key=os.environ["ST_KEY"],
                                   timeout=30)
        except KeyError:
            logger.warning(
                "Missing environment variables for Swift authentication")
            logger.warning("Bailing...")
            sys.exit(1)
        # BUGFIX: get_account(self.bucket) passed the container name into
        # the marker parameter, skipping part of the listing.
        headers, containers = self.conn.get_account()
        for i in containers:
            logger.debug("Searching for bucket %s == %s" % (self.bucket, i))
        # BUGFIX: the listing holds dicts, so `self.bucket not in objs`
        # compared a str to dicts and was always True, re-creating the
        # container on every startup.  Compare against the names instead.
        names = [c.get("name") for c in containers]
        if not noop and self.bucket not in names:
            self.conn.put_container(self.bucket)

    def list(self, prefix=None):
        """Yield every object name in this bucket, transparently paging."""
        headers, objs = self.conn.get_container(self.bucket, prefix=prefix)
        while objs:
            # Handle paging: remember the last name yielded as the marker.
            i = {}
            for i in objs:
                yield i["name"]
            headers, objs = self.conn.get_container(self.bucket,
                                                    marker=i["name"],
                                                    prefix=prefix)

    def get(self, src):
        """Return the contents of src as a string, or None if missing."""
        try:
            headers, obj = self.conn.get_object(self.bucket, src)
            return obj
        except ClientException:
            # Request failed....object doesn't exist
            return None

    def put(self, dst, data):
        """Store the string data at the key dst (honoring noop)."""
        if self.noop:
            logger.info("No-Op Put: %s" % dst)
        else:
            self.conn.put_object(self.bucket, dst, data)

    def delete(self, src):
        """Delete the object named src (honoring noop)."""
        if self.noop:
            logger.info("No-Op Delete: %s" % src)
        else:
            self.conn.delete_object(self.bucket, src)
class SwiftFileServer(object):
    """ Swift File server """

    def __init__(self, region_name):
        self.region_name = region_name
        # Hard-coded dev credentials (the cfg.FileSvc reads were already
        # commented out in the original).
        # NOTE(review): move these back to configuration before production.
        self.authurl = 'http://10.204.248.228:35357/v2.0'
        self.auth_version = '2.0'
        self.user = '******'
        self.key = 'passw0rd'
        self.tenant_name = 'admin'
        self.temp_url_key = 'mykey'
        self.temp_url_key_2 = 'mykey2'
        # Name of the attribute holding the key used to sign temp URLs.
        self.chosen_temp_url_key = 'temp_url_key'
        self.container_name = 'mycontainer'
        self.storageurl = None
        self.swift_conn = None

    def connect_to_swift(self):
        """Connect to the swift fileserver, retrying up to RETRY_CNT times."""
        for i in range(RETRY_CNT):
            i += 1
            try:
                self.__init_swift_fileserver()
                break
            except Exception as err:
                # BUGFIX: print() does not do %-interpolation on extra
                # args; format the message explicitly.
                print('[Try %d/%d ]: Connecting swift fileserver failed. %s %s'
                      % (i, RETRY_CNT, err, traceback.format_exc()))
                if i == RETRY_CNT:
                    raise

    def __init_swift_fileserver(self):
        """Create the connection, register temp-url keys and the container."""
        print_opts = None
        try:
            options = {
                'authurl': self.authurl,
                'user': self.user,
                'key': self.key,
                'auth_version': self.auth_version,
                'tenant_name': self.tenant_name,
                'insecure': True,
                'timeout': CONN_TIMEOUT,
                'os_options': {'region_name': self.region_name},
            }
            headers = {'Temp-URL-Key': self.temp_url_key,
                       'Temp-URL-Key-2': self.temp_url_key_2}
            # Never log the real password.
            print_opts = copy.deepcopy(options)
            print_opts['key'] = '<password stripped>'
            self.swift_conn = Connection(**options)
            self.swift_conn.post_account(headers)
            self.swift_conn.put_container(self.container_name)
            self.storageurl = self.swift_conn.get_auth()[0]
            # BUGFIX: the original literal contained a raw newline (a
            # syntax error) and relied on print() interpolating %s args.
            print('swift-file-server: Connected. options %s storageurl %s'
                  % (print_opts, self.storageurl))
        except Exception as err:
            print('swift-file-server: Connect FAILED %s options %s'
                  % (err, print_opts))
            raise

    def upload_file(self, fpath, fname=None, expires=3600):
        """ upload the file in 'filepath' on to the file server and return
        a temporary url for the users to download """
        if not self.swift_conn:
            self.connect_to_swift()
        if fname is None:
            fname = os.path.basename(fpath)
        # BUGFIX: the file handle was never closed; use a context manager.
        with open(fpath, 'rb') as finp:
            try:
                self.swift_conn.put_object(container=self.container_name,
                                           obj=fname,
                                           contents=finp)
                print('swift-file-server: Uploading file %s ... [OK]' % fname)
            except Exception as err:
                logging.error(
                    'swift-file-server: Unable to upload the file %s: %s',
                    fname, err)
                raise
        # return self.get_temp_download_url(fname,expires)

    def get_temp_download_url(self, fname, expires):
        """ return the temporary download url """
        file_uri = '%s/%s/%s' % (self.storageurl, self.container_name, fname)
        file_path = urlparse(file_uri).path
        key = getattr(self, self.chosen_temp_url_key)
        try:
            temp_url = generate_temp_url(file_path, expires, key, 'GET')
        except Exception as err:
            logging.error(
                'swift-file-server: Generating temp url for %s failed %s',
                fname, err)
            raise
        download_url = self.storageurl.replace(
            urlparse(self.storageurl).path, temp_url)
        print('swift-file-server: Temporary download URL for file %s: %s'
              % (fname, download_url))
        return download_url

    def delete_file(self, fpath):
        """ Delete the file from the file server """
        fname = os.path.basename(fpath)
        try:
            self.swift_conn.delete_object(container=self.container_name,
                                          obj=fname)
            print('swift-file-server: Deleting file %s ... [OK]' % fname)
        except Exception as err:
            # Deletion failures are deliberately best-effort and ignored.
            print('swift-file-server: Deleting file %s ... [FAIL]: %s %s (IGNORED) '
                  % (fname, err, traceback.format_exc()))

    def getObjectContent(self, filename):
        """Return (headers, body) for an object in the container."""
        return self.swift_conn.get_object(container=self.container_name,
                                          obj=filename)
class OpenstackDriver(BaseDriver):
    """OpenStackDriver for Object Storage"""

    def __init__(self, cloud_config):
        super(OpenstackDriver, self).__init__()
        self.auth_url = cloud_config['os_auth_url']
        self.project_name = cloud_config['os_project_name']
        self.username = cloud_config['os_username']
        self.password = cloud_config['os_password']
        # BUGFIX: the user/project domain config keys were swapped
        # (user_domain_name read os_project_domain_name and vice versa).
        self.user_domain_name = \
            cloud_config.get('os_user_domain_name', 'default')
        self.project_domain_name = \
            cloud_config.get('os_project_domain_name', 'default')
        self.driver_name = cloud_config.get('driver_name', 'default')
        self.tenant_id = cloud_config.get('tenant_id', None)
        self.limit = cloud_config.get('limit', None)
        self.auth_version = cloud_config.get('os_auth_version', '2')
        self._setup()

    def _setup(self):
        """Build the keystone v3 session and the swift client + quota."""
        auth = v3.Password(auth_url=self.auth_url,
                           user_domain_name=self.user_domain_name,
                           username=self.username,
                           password=self.password,
                           project_domain_name=self.project_domain_name,
                           project_name=self.project_name)
        sess = session.Session(auth=auth)
        self.client = Connection(auth_version=self.auth_version,
                                 session=sess)
        self.quota = OpenStackQuota(self.client, self.tenant_id, self.limit)

    @staticmethod
    def _prefix_metadata(metadata, prefix):
        """Return metadata with every key carrying the given header prefix.

        BUGFIX: keys that already carried the prefix used to be silently
        dropped from the result; they are now passed through unchanged.
        """
        formatted = {}
        for key, value in metadata.items():
            key = key.strip()
            if not key.startswith(prefix):
                key = prefix + key
            formatted[key] = value
        return formatted

    def create_container(self, container, **kwargs):
        return self.client.put_container(container)

    def delete_container(self, container):
        return self.client.delete_container(container)

    def list_containers(self):
        # get_account returns (headers, listing); only the listing.
        return self.client.get_account()[1]

    def stat_container(self, container):
        return self.client.head_container(container)

    def update_container(self, container, metadata, **kwargs):
        metadata = self._prefix_metadata(metadata, 'x-container-meta-')
        return self.client.post_container(container, metadata, **kwargs)

    def upload_object(self, container, obj, contents, content_length=None,
                      metadata=None, **kwargs):
        if metadata:
            metadata = self._prefix_metadata(metadata, 'x-object-meta-')
        return self.client.put_object(container, obj, contents=contents,
                                      content_length=content_length,
                                      headers=metadata, **kwargs)

    def download_object(self, container, obj, **kwargs):
        return self.client.get_object(container, obj, **kwargs)

    def stat_object(self, container, obj):
        return self.client.head_object(container, obj)

    def delete_object(self, container, obj, **kwargs):
        return self.client.delete_object(container, obj, **kwargs)

    def list_container_objects(self, container, prefix=None, delimiter=None):
        # BUGFIX: prefix/delimiter were passed positionally and landed in
        # get_container's marker/limit parameters; pass them by keyword.
        return self.client.get_container(container, prefix=prefix,
                                         delimiter=delimiter)[1]

    def update_object(self, container, obj, metadata, **kwargs):
        # Format metadata keys with the object-meta header prefix.
        metadata = self._prefix_metadata(metadata, 'x-object-meta-')
        return self.client.post_object(container, obj, metadata, **kwargs)

    def copy_object(self, container, obj, metadata=None, destination=None,
                    **kwargs):
        return self.client.copy_object(container, obj, headers=metadata,
                                       destination=destination, **kwargs)
class OpenstackDriver(BaseDriver):
    """OpenStackDriver for Object Storage"""

    def __init__(self, cloud_config):
        super(OpenstackDriver, self).__init__()
        self.auth_url = cloud_config['os_auth_url']
        self.project_name = cloud_config['os_project_name']
        self.username = cloud_config['os_username']
        self.password = cloud_config['os_password']
        # BUGFIX: the user/project domain config keys were swapped
        # (user_domain_name read os_project_domain_name and vice versa).
        self.user_domain_name = \
            cloud_config.get('os_user_domain_name', 'default')
        self.project_domain_name = \
            cloud_config.get('os_project_domain_name', 'default')
        self.driver_name = cloud_config.get('driver_name', 'default')
        self.tenant_id = cloud_config.get('tenant_id', None)
        self.limit = cloud_config.get('limit', None)
        self.auth_version = cloud_config.get('os_auth_version', '2')
        self._setup()

    def _setup(self):
        """Build the keystone v3 session and the swift client + quota."""
        auth = v3.Password(auth_url=self.auth_url,
                           user_domain_name=self.user_domain_name,
                           username=self.username,
                           password=self.password,
                           project_domain_name=self.project_domain_name,
                           project_name=self.project_name)
        sess = session.Session(auth=auth)
        self.client = Connection(auth_version=self.auth_version,
                                 session=sess)
        self.quota = OpenStackQuota(self.client, self.tenant_id, self.limit)

    @staticmethod
    def _prefix_metadata(metadata, prefix):
        """Return metadata with every key carrying the given header prefix.

        BUGFIX: keys that already carried the prefix used to be silently
        dropped from the result; they are now passed through unchanged.
        """
        formatted = {}
        for key, value in metadata.items():
            key = key.strip()
            if not key.startswith(prefix):
                key = prefix + key
            formatted[key] = value
        return formatted

    def create_container(self, container, **kwargs):
        return self.client.put_container(container)

    def delete_container(self, container):
        return self.client.delete_container(container)

    def list_containers(self):
        # get_account returns (headers, listing); only the listing.
        return self.client.get_account()[1]

    def stat_container(self, container):
        return self.client.head_container(container)

    def update_container(self, container, metadata, **kwargs):
        metadata = self._prefix_metadata(metadata, 'x-container-meta-')
        return self.client.post_container(container, metadata, **kwargs)

    def upload_object(self, container, obj, contents, content_length=None,
                      metadata=None, **kwargs):
        if metadata:
            metadata = self._prefix_metadata(metadata, 'x-object-meta-')
        return self.client.put_object(container, obj, contents=contents,
                                      content_length=content_length,
                                      headers=metadata, **kwargs)

    def download_object(self, container, obj, **kwargs):
        return self.client.get_object(container, obj, **kwargs)

    def stat_object(self, container, obj):
        return self.client.head_object(container, obj)

    def delete_object(self, container, obj, **kwargs):
        return self.client.delete_object(container, obj, **kwargs)

    def list_container_objects(self, container, prefix=None, delimiter=None):
        # BUGFIX: prefix/delimiter were passed positionally and landed in
        # get_container's marker/limit parameters; pass them by keyword.
        return self.client.get_container(container, prefix=prefix,
                                         delimiter=delimiter)[1]

    def update_object(self, container, obj, metadata, **kwargs):
        # Format metadata keys with the object-meta header prefix.
        metadata = self._prefix_metadata(metadata, 'x-object-meta-')
        return self.client.post_object(container, obj, metadata, **kwargs)

    def copy_object(self, container, obj, metadata=None, destination=None,
                    **kwargs):
        return self.client.copy_object(container, obj, headers=metadata,
                                       destination=destination, **kwargs)
class SwiftFileServer(object):
    """ Swift File server """

    def __init__(self, region_name):
        self.region_name = region_name
        # Hard-coded dev credentials (the cfg.FileSvc reads were already
        # commented out in the original).
        # NOTE(review): move these back to configuration before production.
        self.authurl = 'http://10.204.248.228:35357/v2.0'
        self.auth_version = '2.0'
        self.user = '******'
        self.key = 'passw0rd'
        self.tenant_name = 'admin'
        self.temp_url_key = 'mykey'
        self.temp_url_key_2 = 'mykey2'
        # Name of the attribute holding the key used to sign temp URLs.
        self.chosen_temp_url_key = 'temp_url_key'
        self.container_name = 'mycontainer'
        self.storageurl = None
        self.swift_conn = None

    def connect_to_swift(self):
        """Connect to the swift fileserver, retrying up to RETRY_CNT times."""
        for i in range(RETRY_CNT):
            i += 1
            try:
                self.__init_swift_fileserver()
                break
            except Exception as err:
                # BUGFIX: print() does not do %-interpolation on extra
                # args; format the message explicitly.
                print('[Try %d/%d ]: Connecting swift fileserver failed. %s %s'
                      % (i, RETRY_CNT, err, traceback.format_exc()))
                if i == RETRY_CNT:
                    raise

    def __init_swift_fileserver(self):
        """Create the connection, register temp-url keys and the container."""
        print_opts = None
        try:
            options = {
                'authurl': self.authurl,
                'user': self.user,
                'key': self.key,
                'auth_version': self.auth_version,
                'tenant_name': self.tenant_name,
                'insecure': True,
                'timeout': CONN_TIMEOUT,
                'os_options': {'region_name': self.region_name},
            }
            headers = {'Temp-URL-Key': self.temp_url_key,
                       'Temp-URL-Key-2': self.temp_url_key_2}
            # Never log the real password.
            print_opts = copy.deepcopy(options)
            print_opts['key'] = '<password stripped>'
            self.swift_conn = Connection(**options)
            self.swift_conn.post_account(headers)
            self.swift_conn.put_container(self.container_name)
            self.storageurl = self.swift_conn.get_auth()[0]
            # BUGFIX: the original literal contained a raw newline (a
            # syntax error) and relied on print() interpolating %s args.
            print('swift-file-server: Connected. options %s storageurl %s'
                  % (print_opts, self.storageurl))
        except Exception as err:
            print('swift-file-server: Connect FAILED %s options %s'
                  % (err, print_opts))
            raise

    def upload_file(self, fpath, fname=None, expires=3600):
        """ upload the file in 'filepath' on to the file server and return
        a temporary url for the users to download """
        if not self.swift_conn:
            self.connect_to_swift()
        if fname is None:
            fname = os.path.basename(fpath)
        # BUGFIX: the file handle was never closed; use a context manager.
        with open(fpath, 'rb') as finp:
            try:
                self.swift_conn.put_object(container=self.container_name,
                                           obj=fname,
                                           contents=finp)
                print('swift-file-server: Uploading file %s ... [OK]' % fname)
            except Exception as err:
                logging.error(
                    'swift-file-server: Unable to upload the file %s: %s',
                    fname, err)
                raise
        # return self.get_temp_download_url(fname,expires)

    def get_temp_download_url(self, fname, expires):
        """ return the temporary download url """
        file_uri = '%s/%s/%s' % (self.storageurl, self.container_name, fname)
        file_path = urlparse(file_uri).path
        key = getattr(self, self.chosen_temp_url_key)
        try:
            temp_url = generate_temp_url(file_path, expires, key, 'GET')
        except Exception as err:
            logging.error(
                'swift-file-server: Generating temp url for %s failed %s',
                fname, err)
            raise
        download_url = self.storageurl.replace(
            urlparse(self.storageurl).path, temp_url)
        print('swift-file-server: Temporary download URL for file %s: %s'
              % (fname, download_url))
        return download_url

    def delete_file(self, fpath):
        """ Delete the file from the file server """
        fname = os.path.basename(fpath)
        try:
            self.swift_conn.delete_object(container=self.container_name,
                                          obj=fname)
            print('swift-file-server: Deleting file %s ... [OK]' % fname)
        except Exception as err:
            # Deletion failures are deliberately best-effort and ignored.
            print('swift-file-server: Deleting file %s ... [FAIL]: %s %s (IGNORED) '
                  % (fname, err, traceback.format_exc()))

    def getObjectContent(self, filename):
        """Return (headers, body) for an object in the container."""
        return self.swift_conn.get_object(container=self.container_name,
                                          obj=filename)
def main(args):
    """Rotate a randomly chosen sample image from Swift by args['angle']
    and store the result back.

    Returns a dict of output size and per-phase (extract / transform /
    load) timings, plus memory usage when MEMORY_USAGE is enabled.
    """
    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(
        authurl=_authurl,
        user=_user,
        key=_key,
        tenant_name=_tenant_name,
        auth_version=_auth_version
    )
    # Perfs counters
    container_name = "expe-faas"
    process = psutil.Process(os.getpid()) if MEMORY_USAGE else None

    start_time = perf_counter()  # Get program starttime
    rand_images = ['1KB.jpg', '16KB.jpg', '32KB.jpg', '64KB.jpg',
                   '126KB.jpg', '257KB.jpg', '517KB.jpg', '1.3MB.jpg',
                   '2MB.jpg', '3.2MB.jpg']
    random.seed()
    # BUGFIX: randrange(0, len-1, 1) excludes the stop value, so the last
    # image ('3.2MB.jpg') could never be selected; choice() draws
    # uniformly from the whole list.
    image, angle = random.choice(rand_images), args['angle']

    extract_time_start = perf_counter()  # Get extract phase starttime
    _, imgstream = conn.get_object(container_name, image)
    extract_time_stop = perf_counter()  # End recording the extract phase

    transform_time_start = perf_counter()
    with Image(blob=imgstream) as img:
        img.rotate(angle)
        outputsize = len(img.make_blob('jpg'))
        # Change this to persist to S3 with a given bucket
        img.save(filename='out.jpg')
    transform_time_stop = perf_counter()

    load_time_start = perf_counter()
    with open('out.jpg', 'rb') as local:
        conn.put_object(container_name,
                        'resRotate' + str(random.randrange(0, 100, 2)) + '.jpg',
                        contents=local,
                        content_type="image/jpeg")
    load_time_stop = perf_counter()
    end_time = perf_counter()

    return {
        'outputsize': outputsize,
        'elapsed_time': end_time - start_time,
        'extract_time': extract_time_stop - extract_time_start,
        'transform_time': transform_time_stop - transform_time_start,
        'load_time': load_time_stop - load_time_start,
        'memory_usage': process.memory_info()[0] >> 20 if MEMORY_USAGE
                        else 'Not defined',
    }
class ObjectStore:
    """Thin wrapper around a swiftclient Connection for one container."""

    RESP_LIMIT = 10000  # serverside limit of the response

    def __init__(self, container, name):
        # Pick the credential set for the requested store.
        if name == 'bag_brk':
            OBJECTSTORE['user'] = '******'
            OBJECTSTORE['key'] = BAG_OBJECTSTORE_PASSWORD
            OBJECTSTORE['os_options'][
                'tenant_id'] = '4f2f4b6342444c84b3580584587cfd18'
            OBJECTSTORE['tenant_name'] = 'BGE000081_BAG'
        else:
            OBJECTSTORE['user'] = '******'
            OBJECTSTORE['key'] = BGT_OBJECTSTORE_PASSWORD
            OBJECTSTORE['os_options'][
                'tenant_id'] = '1776010a62684386a08b094d89ce08d9'
            OBJECTSTORE['tenant_name'] = 'BGE000081_BGT'
        self.conn = Connection(**OBJECTSTORE)
        self.container = container

    def get_store_object(self, name):
        """Return the body of the single object called name."""
        return self.conn.get_object(self.container, name)[1]

    def get_store_objects(self, path):
        """Return every listing entry under the given prefix."""
        return self._get_full_container_list([], prefix=path)

    def _get_full_container_list(self, seed, **kwargs):
        """Page through the listing until a short page signals the end."""
        kwargs['limit'] = self.RESP_LIMIT
        while True:
            if seed:
                # Resume after the last entry already collected.
                kwargs['marker'] = seed[-1]['name']
            _, page = self.conn.get_container(self.container, **kwargs)
            seed.extend(page)
            if len(page) < self.RESP_LIMIT:
                return seed

    def folders(self, path):
        """Return the sub-folder names directly under path."""
        listing = self._get_full_container_list(
            [], delimiter='/', prefix=path)
        return [entry['subdir'] for entry in listing if 'subdir' in entry]

    def files(self, path, file_id):
        """Return listing entries for path+file_id, tagged with container."""
        matches = self._get_full_container_list(
            [], delimiter='/', prefix=path + file_id)
        for entry in matches:
            entry['container'] = self.container
        return matches

    def put_to_objectstore(self, object_name, object_content, content_type):
        """Upload object_content under object_name with content_type."""
        return self.conn.put_object(self.container, object_name,
                                    contents=object_content,
                                    content_type=content_type)

    def delete_from_objectstore(self, object_name):
        """Delete object_name from the container."""
        return self.conn.delete_object(self.container, object_name)
def handler(event, context):
    """Download a video from Swift, decode it to JPEG frames, then upload
    (or just list) the output.

    Returns a dict with statusCode and per-phase timing / size stats.
    Raises on invalid decode quality or decode failure.
    """
    ensure_clean_state()
    videoUrl = event['videoUrl']
    if 'decodeFps' in event:
        decodeFps = int(event['decodeFps'])
    else:
        decodeFps = DEFAULT_DECODE_FPS
    if 'decodeQuality' in event:
        decodeQuality = int(event['decodeQuality'])
    else:
        decodeQuality = DEFAULT_DECODE_QUALITY
    if decodeQuality < MIN_DECODE_QUALITY or \
            decodeQuality > MAX_DECODE_QUALITY:
        # BUGFIX: Exception('...%d', q) never interpolated the value;
        # format the message explicitly.
        raise Exception('Invalid decode quality: %d' % decodeQuality)
    logLevel = DEFAULT_LOG_LEVEL
    if 'logLevel' in event:
        logLevel = event['logLevel']
    outputBucket = None
    if 'outputBucket' in event:
        outputBucket = event['outputBucket']
        outputPrefix = event['outputPrefix']
    else:
        print('Warning: no output location specified')
    outputBatchSize = DEFAULT_OUTPUT_BATCH_SIZE
    if 'outputBatchSize' in event:
        outputBatchSize = int(event['outputBatchSize'])
    keepOutput = DEFAULT_KEEP_OUTPUT
    if 'keepOutput' in event:
        keepOutput = event['keepOutput'].lower() == 'true'

    start_extract = now()
    print('Downloading file: %s' % videoUrl)
    _authurl = "http://" + event['url'] + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    _, videostream = conn.get_object(container_name, videoUrl)
    # BUGFIX: the video body is bytes; writing it in text mode ('w')
    # raises TypeError — open in binary mode.
    with open(INPUT_FILE_PATH, 'wb') as local:
        local.write(videostream)
    print('Download complete')
    end_extract = now()

    start_processing = now()
    if not os.path.exists(INPUT_FILE_PATH):
        raise Exception('%s does not exist' % INPUT_FILE_PATH)
    inputSize = os.path.getsize(INPUT_FILE_PATH)
    os.chmod(INPUT_FILE_PATH, 0o0755)
    print(' [%dKB] %s' % (inputSize >> 10, INPUT_FILE_PATH))
    print(' [md5] %s' % get_md5(INPUT_FILE_PATH))
    os.mkdir(TEMP_OUTPUT_DIR)
    assert os.path.exists(TEMP_OUTPUT_DIR)
    try:
        try:
            if not convert_video_to_jpegs(decodeFps, decodeQuality, logLevel):
                raise Exception('Failed to decode video')
        finally:
            # The input file is removed whether or not decoding succeeded.
            os.remove(INPUT_FILE_PATH)
        if outputBatchSize > 1:
            combine_output_files(outputBatchSize)
        end_processing = now()
        start_load = now()
        if outputBucket:
            fileCount, totalSize = upload_output_to_s3(outputBucket,
                                                       outputPrefix,
                                                       event['url'])
        else:
            fileCount, totalSize = list_output_directory()
        end_load = now()
    finally:
        if not keepOutput:
            shutil.rmtree(TEMP_OUTPUT_DIR)
    return {
        'statusCode': 200,
        'body': {
            'fileCount': fileCount,
            'totalSize': totalSize,
            'extract_time': end_extract - start_extract,
            'transform_time': end_processing - start_processing,
            'load_time': end_load - start_load,
        },
    }
class Swift(object):
    """Swift storage backend mirroring the S3 backend's interface."""

    def __init__(self, bucket, noop):
        """Setup the Swift storage backend with the container we will use.

        bucket -- Swift container name
        noop   -- when True, mutating operations are only logged
        """
        # This is our Swift container
        self.bucket = bucket
        self.noop = noop
        # We assume your environment variables are set correctly just like
        # you would for the swift command line util
        try:
            self.conn = Connection(authurl=os.environ["ST_AUTH"],
                                   user=os.environ["ST_USER"],
                                   key=os.environ["ST_KEY"])
        except KeyError:
            logger.warning("Missing environment variables for Swift authentication")
            logger.warning("Bailing...")
            sys.exit(1)
        # BUGFIX: get_account(self.bucket) passed the container name into
        # the marker parameter, skipping part of the listing.
        headers, containers = self.conn.get_account()
        for i in containers:
            logger.debug("Searching for bucket %s == %s" % (self.bucket, i))
        # BUGFIX: the listing holds dicts, so `self.bucket not in objs`
        # compared a str to dicts and was always True, re-creating the
        # container on every startup.  Compare against the names instead.
        names = [c.get("name") for c in containers]
        if not noop and self.bucket not in names:
            self.conn.put_container(self.bucket)

    def list(self, prefix=None):
        """Yield every object name in this bucket, transparently paging."""
        headers, objs = self.conn.get_container(self.bucket, prefix=prefix)
        while objs:
            # Handle paging: remember the last name yielded as the marker.
            i = {}
            for i in objs:
                yield i["name"]
            headers, objs = self.conn.get_container(self.bucket,
                                                    marker=i["name"],
                                                    prefix=prefix)

    def get(self, src):
        """Return the contents of src as a string, or None if missing."""
        try:
            headers, obj = self.conn.get_object(self.bucket, src)
            return obj
        except ClientException:
            # Request failed....object doesn't exist
            return None

    def put(self, dst, data):
        """Store the string data at the key dst (honoring noop)."""
        if self.noop:
            logger.info("No-Op Put: %s" % dst)
        else:
            self.conn.put_object(self.bucket, dst, data)

    def delete(self, src):
        """Delete the object named src (honoring noop)."""
        if self.noop:
            logger.info("No-Op Delete: %s" % src)
        else:
            self.conn.delete_object(self.bucket, src)
class ObjectStore:
    """Thin wrapper around a swiftclient Connection for one container."""

    RESP_LIMIT = 10000  # serverside limit of the response

    def __init__(self, container, name):
        # Pick the credential set for the requested store.
        if name == 'bag_brk':
            OBJECTSTORE['user'] = '******'
            OBJECTSTORE['key'] = BAG_OBJECTSTORE_PASSWORD
            OBJECTSTORE['os_options'][
                'tenant_id'] = '4f2f4b6342444c84b3580584587cfd18'
            OBJECTSTORE['tenant_name'] = 'BGE000081_BAG'
        else:
            OBJECTSTORE['user'] = '******'
            OBJECTSTORE['key'] = BGT_OBJECTSTORE_PASSWORD
            OBJECTSTORE['os_options'][
                'tenant_id'] = '1776010a62684386a08b094d89ce08d9'
            OBJECTSTORE['tenant_name'] = 'BGE000081_BGT'
        self.conn = Connection(**OBJECTSTORE)
        self.container = container

    def get_store_object(self, name):
        """Return the body of the single object called name."""
        return self.conn.get_object(self.container, name)[1]

    def get_store_objects(self, path):
        """Return every listing entry under the given prefix."""
        return self._get_full_container_list([], prefix=path)

    def _get_full_container_list(self, seed, **kwargs):
        """Page through the listing until a short page signals the end."""
        kwargs['limit'] = self.RESP_LIMIT
        while True:
            if seed:
                # Resume after the last entry already collected.
                kwargs['marker'] = seed[-1]['name']
            _, page = self.conn.get_container(self.container, **kwargs)
            seed.extend(page)
            if len(page) < self.RESP_LIMIT:
                return seed

    def folders(self, path):
        """Return the sub-folder names directly under path."""
        listing = self._get_full_container_list(
            [], delimiter='/', prefix=path)
        return [entry['subdir'] for entry in listing if 'subdir' in entry]

    def files(self, path, file_id):
        """Return listing entries for path+file_id, tagged with container."""
        matches = self._get_full_container_list(
            [], delimiter='/', prefix=path + file_id)
        for entry in matches:
            entry['container'] = self.container
        return matches

    def put_to_objectstore(self, object_name, object_content, content_type):
        """Upload object_content under object_name with content_type."""
        return self.conn.put_object(self.container, object_name,
                                    contents=object_content,
                                    content_type=content_type)

    def delete_from_objectstore(self, object_name):
        """Delete object_name from the container."""
        return self.conn.delete_object(self.container, object_name)