def object_exists(self, container, object):
    """Return True if *object* exists in *container*, False otherwise.

    Issues a HEAD request via the Swift common client; any failure
    (missing object, auth error, network error) is reported as "does
    not exist".
    """
    try:
        # Only the HEAD's success/failure matters; discard the headers
        # (the old code needlessly rebound the 'object' parameter).
        files.head_object(self.auth.url, self.auth.token, container, object)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return False
def agent():
    """Fetch new tweets for the configured screen name, resuming where the
    last archived batch left off.

    Resolves the object-store endpoint for ca.conf['region'] from the
    Keystone service catalog, reads the newest archive object under
    ca.conf['path'] (its x-object-meta-last-tweet-id header marks the
    last tweet seen), then pulls up to 200 newer tweets with Twython.
    Calls ca.log_fail() and exit() when the region is missing from the
    catalog or Twitter is unreachable.
    """
    # Authenticate with the token/tenant the agent framework already holds.
    keystone = client.Client(token=ca.creds['token'],
                             tenant_id=ca.creds['tenantId'],
                             auth_url=ca.creds['identity_url'])
    object_store_catalog = keystone.service_catalog.get_endpoints()['object-store']
    # Scan for the endpoint entry matching our region (last match wins).
    region_endpoints = None
    for endpoints in object_store_catalog:
        if endpoints['region'] == ca.conf['region']:
            region_endpoints = endpoints
    if not region_endpoints:
        ca.log_fail("Failing, region not found in endpoint list.")
        exit()
    t = Twython()
    # Figure out what files already exist, and what our latest tweet is.
    files = []
    try:
        (headers, files) = swiftclient.get_container(region_endpoints['publicURL'], ca.creds['token'],
                                                     ca.conf['container'], full_listing=True,
                                                     prefix=ca.conf['path'])
    except swiftclient.client.ClientException:
        # Container (or prefix) doesn't exist yet — treat as an empty archive.
        pass
    # Lexicographic sort by name; the last entry is the newest archive object.
    files = sorted(files, key=itemgetter('name'))
    last_tweet = 0
    last_file = ''
    tweet_list = []
    if files:
        # Body of the newest object is the previous JSON tweet list; its
        # headers carry the id of the last tweet archived.
        (headers, last_file) = swiftclient.get_object(region_endpoints['publicURL'], ca.creds['token'],
                                                      ca.conf['container'], files[-1]['name'])
        headers = swiftclient.head_object(region_endpoints['publicURL'], ca.creds['token'],
                                          ca.conf['container'], files[-1]['name'])
        last_tweet = headers.get('x-object-meta-last-tweet-id', 0)
        tweet_list = json.loads(last_file)
    # Grab our tweet list (tweets since last tweet up to 3200), optimized for
    # fewest requests.
    try:
        if last_tweet:
            tweets = t.getUserTimeline(screen_name=ca.conf['screen_name'],
                                       count=200, since_id=last_tweet,
                                       include_rts=True)
        else:
            tweets = t.getUserTimeline(screen_name=ca.conf['screen_name'],
                                       count=200, include_rts=True)
    except TwythonError, e:
        ca.log_fail("Error accessing twitter stream. User not found or twitter down.")
        exit()
def head_object(self, container_name, object_name):
    """HEAD *object_name* inside *container_name*.

    Returns the response from the underlying client, or None when the
    store raises ClientException (e.g. the object is missing).
    """
    # Resolve credentials before entering the try-block so auth errors
    # are not masked as a missing object.
    auth_token = self.fetch_token()
    endpoint = self.auth['endpoint_url']
    try:
        return head_object(url=endpoint, token=auth_token,
                           container=container_name, name=object_name)
    except ClientException:
        return None
def get_headers(self, name):
    """Return the Swift object headers for *name*, caching the last result.

    Only one (name, headers) pair is remembered; repeated calls for the
    same name reuse it instead of re-issuing the HEAD request. When the
    caller is collectstatic this roughly halves the API calls, giving a
    ~2x speed-up.
    """
    if name == self.last_headers_name:
        # Cache hit: same name as the previous call.
        return self.last_headers_value
    # Cache miss: fetch fresh headers and remember them.
    fetched = swiftclient.head_object(self.url, self.token,
                                      self.container_name, name,
                                      http_conn=self.http_conn)
    self.last_headers_value = fetched
    self.last_headers_name = name
    return fetched
def get_headers(self, name):
    """Fetch object headers for *name* with a one-entry memo.

    A single (name -> headers) slot is kept so that back-to-back calls
    for the same object make only one HEAD request. During collectstatic
    this cuts the API traffic in half (measured ~2x speed-up).
    """
    cache_stale = (name != self.last_headers_name)
    if cache_stale:
        # miss -> refresh the slot from Swift
        self.last_headers_value = swiftclient.head_object(
            self.storage_url,
            self.token,
            self.container_name,
            name,
            http_conn=self.http_conn,
        )
        self.last_headers_name = name
    return self.last_headers_value
def agent():
    """Verify that an expected object exists in a Swift container.

    Resolves the configured region's object-store endpoint from the
    Keystone catalog, expands ca.conf['name'] through strftime when a
    natural-language 'date' is configured (parsed via parsedatetime),
    then HEADs the object: logs on success, logs and emails on a
    ClientException (object missing).
    """
    ca.log("Starting!")
    keystone = client.Client(token=ca.creds['token'],
                             tenant_id=ca.creds['tenantId'],
                             auth_url=ca.creds['identity_url'])
    object_store_catalog = keystone.service_catalog.get_endpoints(
    )['object-store']
    # Pick the endpoint entry for our region (last match wins).
    region_endpoints = None
    for endpoints in object_store_catalog:
        if endpoints['region'] == ca.conf['region']:
            region_endpoints = endpoints
    if not region_endpoints:
        ca.log_fail("Failing, region not found in endpoint list.")
        exit()
    # Optionally expand a relative date ("yesterday", ...) into the
    # object-name template via strftime.
    if ca.conf.get('date'):
        p = pdt.Calendar()
        result = p.parse(ca.conf['date'])
        dt = datetime.datetime.fromtimestamp(mktime(result[0]))
        path = dt.strftime(ca.conf['name'])
    else:
        path = ca.conf['name']
    try:
        headers = swiftclient.head_object(region_endpoints['publicURL'],
                                          ca.creds['token'],
                                          ca.conf['container'], path)
        # Header values are strings: cast before the numeric comparison.
        # (The old "str >= 0" test was vacuously true under Python 2.)
        if int(headers['content-length']) >= 0:
            ca.log("File exists!")
    except swiftclient.client.ClientException:
        ca.log("File doesn't exist!")
        ca.email(
            "File missing: " + ca.conf['container'] + "/" + path, '''
The container '%s' appears to be missing the file '%s'.
''' % (ca.conf['container'], path))
def index_object(self, account, container, obj, verify=False):
    """Get details for object and index it.

    HEADs the object through the local proxy to collect its headers,
    derives the document id and properties with ObjectUpdateHandler,
    then either indexes them (default) or, with verify=True, queries
    the search backend and logs any property mismatches. With
    verify=True an AssertionError is raised unless exactly one document
    is indexed for the id.
    """
    # Build the proxy URL for this account and HEAD the object.
    url = '/'.join(['http://127.0.0.1', 'v1', account])
    headers = head_object(url, self.auth_token, container, obj,
                          http_conn=self._get_conn(url))
    self.logger.debug("Indexing Object: %s/%s/%s" % (account.encode('utf8'),
                                                     container.encode('utf8'),
                                                     obj.encode('utf8')))
    update_handler = ObjectUpdateHandler(self.indexer, account, container,
                                         obj, index=self.index)
    _id, props = update_handler.parse_props(headers)
    if verify:
        # Check Indexed State of Object
        client = get_search_client(self.index_config)
        # NOTE(review): duplicate of the parse_props() call above —
        # presumably redundant; confirm parse_props is side-effect free
        # before removing.
        _id, props = update_handler.parse_props(headers)
        q = pyes.TermQuery("_id", _id)
        results = client.search(query=q, indices=[
            self.index_config.get(
                'search_index_name', 'os_default')
        ])
        assert results.total == 1, '%s indexed instead of 1' % \
            results.total
        # Compare every indexed property against the freshly parsed one.
        for name, val in results[0].iteritems():
            if encode_utf8(val) != encode_utf8(props[name]):
                self.logger.error('Indexed Property Does Not Match')
                self.logger.error(val)
                self.logger.error(props[name])
    else:
        # block=True: wait for the index operation to complete.
        update_handler.index(_id, props, block=True)
def agent():
    """Check whether a configured Swift object exists and alert if not.

    Mirrors the existence check: locate the region's object-store
    endpoint, optionally strftime-expand the object name from a parsed
    'date' config entry, HEAD the object, and email an alert when the
    HEAD raises ClientException.
    """
    ca.log("Starting!")
    keystone = client.Client(token=ca.creds['token'],
                             tenant_id=ca.creds['tenantId'],
                             auth_url=ca.creds['identity_url'])
    object_store_catalog = keystone.service_catalog.get_endpoints()['object-store']
    # Locate the endpoint for the configured region.
    region_endpoints = None
    for endpoints in object_store_catalog:
        if endpoints['region'] == ca.conf['region']:
            region_endpoints = endpoints
    if not region_endpoints:
        ca.log_fail("Failing, region not found in endpoint list.")
        exit()
    # Expand a natural-language date into the name template if given.
    if ca.conf.get('date'):
        p = pdt.Calendar()
        result = p.parse(ca.conf['date'])
        dt = datetime.datetime.fromtimestamp(mktime(result[0]))
        path = dt.strftime(ca.conf['name'])
    else:
        path = ca.conf['name']
    try:
        headers = swiftclient.head_object(region_endpoints['publicURL'], ca.creds['token'],
                                          ca.conf['container'], path)
        # Fix: content-length is a string header; the previous
        # "str >= int" comparison was always true under Python 2.
        if int(headers['content-length']) >= 0:
            ca.log("File exists!")
    except swiftclient.client.ClientException:
        ca.log("File doesn't exist!")
        ca.email("File missing: " + ca.conf['container'] + "/" + path, '''
The container '%s' appears to be missing the file '%s'.
''' % (ca.conf['container'], path))
path_type = len([i for i in [vrs, acc, cont, obj] if i]) # pick only one object for confiming and editing edit_param = [delete_confirm, meta_edit] if any(edit_param): edit_obj = filter(None, edit_param)[0] obj_list = [ obj for obj in _whole_obj_list if 'name' in obj and obj['name'] == edit_obj ] # get matadata for each objects for i in obj_list: try: if 'subdir' in i: mata = {} else: meta = head_object(storage_url, token, cont, i['name']) except ClientException, err: resp = Response(charset='utf8') resp.status = err.http_status return resp if 'subdir' in i: obj_meta[i['subdir']] = {} obj_unquote_name[i['subdir']] = unquote(i['subdir']) else: obj_meta[i['name']] = dict([ (m[len('x-object-meta-'):].capitalize(), meta[m]) for m in meta.keys() if m.startswith('x-object-meta') ]) obj_unquote_name[i['name']] = unquote(i['name']) if 'x-delete-at' in meta: obj_delete_set_time[i['name']] = meta.get('x-delete-at')
def return_md5_for_remotefile(self, keyname):
    """Return the md5sum (etag header) of a remote object.

    Uses the factored-in Swift common client to HEAD *keyname* in this
    wrapper's container on cloudfiles and reads the 'etag' field from
    the response headers.
    """
    headers = files.head_object(self.auth.url, self.auth.token,
                                self.container, keyname)
    return headers['etag']
obj_meta = {} obj_unquote_name = {} obj_delete_set_time = {} path_type = len([i for i in [vrs, acc, cont, obj] if i]) # pick only one object for confiming and editing edit_param = [delete_confirm, meta_edit] if any(edit_param): edit_obj = filter(None, edit_param)[0] obj_list = [obj for obj in _whole_obj_list if 'name' in obj and obj['name'] == edit_obj] # get matadata for each objects for i in obj_list: try: if 'subdir' in i: mata = {} else: meta = head_object(storage_url, token, cont, i['name']) except ClientException, err: resp = Response(charset='utf8') resp.status = err.http_status return resp if 'subdir' in i: obj_meta[i['subdir']] = {} obj_unquote_name[i['subdir']] = unquote(i['subdir']) else: obj_meta[i['name']] = dict( [(m[len('x-object-meta-'):].capitalize(), meta[m]) for m in meta.keys() if m.startswith('x-object-meta')]) obj_unquote_name[i['name']] = unquote(i['name']) if 'x-delete-at' in meta: obj_delete_set_time[i['name']] = meta.get('x-delete-at') # calc markers for paging.