def _run(self, thread):
    """Run one PUT operation of the benchmark: upload a randomly named
    object (via the proxy server or direct to an object server) and
    tally success/failure counters on self.
    """
    # Emit a status line at most once every 15 seconds.
    if time.time() - self.heartbeat >= 15:
        self.heartbeat = time.time()
        self._log_status('PUTS')
    name = uuid.uuid4().hex
    # Pick the payload: a pre-supplied source file, a random size within the
    # configured [lower, upper] range, or the fixed configured size.
    if self.object_sources:
        source = random.choice(self.files)
    elif self.upper_object_size > self.lower_object_size:
        source = SourceFile(random.randint(self.lower_object_size,
                                           self.upper_object_size))
    else:
        source = SourceFile(self.object_size)
    device = random.choice(self.devices)
    partition = str(random.randint(1, 3000))
    container_name = random.choice(self.containers)
    with self.connection() as conn:
        try:
            if self.use_proxy:
                client.put_object(self.url, self.token, container_name, name,
                                  source, content_length=len(source),
                                  http_conn=conn)
            else:
                # Bypass the proxy and PUT straight to one object server.
                node = {'ip': self.ip, 'port': self.port, 'device': device}
                direct_client.direct_put_object(node, partition, self.account,
                                                container_name, name, source,
                                                content_length=len(source))
        except client.ClientException as e:
            self.logger.debug(str(e))
            self.failures += 1
        else:
            # Record what was stored so later GET/DELETE phases can reuse it.
            self.names.append((device, partition, name, container_name))
            self.complete += 1
def _save(self, name, content):
    """Upload *content* to Swift under *name* and return the stored name.

    The MIME type is either sniffed from the first KiB of data or guessed
    from the file name; font files additionally get a wildcard CORS header
    so browsers may load them cross-origin.
    """
    if self.name_prefix:
        name = self.name_prefix + name
    if self.content_type_from_fd:
        content_type = magic.from_buffer(content.read(1024), mime=True)
        # Go back to the beginning of the file
        content.seek(0)
    else:
        content_type = mimetypes.guess_type(name)[0]
    # Hack in CORS for all fonts
    headers = {}
    if content_type:
        headers['Content-Type'] = content_type
    if IS_FONT_RE.match(name):
        headers['Access-Control-Allow-Origin'] = '*'
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, headers=headers,
                           http_conn=self.http_conn,
                           content_type=content_type)
    return name
def _save(self, name, content):
    """Upload *content* to the configured container as *name*; return *name*."""
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn)
    return name
def _save(self, name, content):
    """Upload *content* under the (optionally prefixed) *name*.

    Returns the name actually used for the object.
    """
    full_name = (self.name_prefix + name) if self.name_prefix else name
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           full_name, content, http_conn=self.http_conn)
    return full_name
def save_into_swift(name, content):
    """Upload *content* as *name* to the pre-authenticated Swift container.

    The MIME type is guessed from the file name; returns *name*.
    """
    mime_type, _encoding = guess_type(name)
    swiftclient.put_object(
        PRE_AUTH_URL,
        PRE_AUTH_TOKEN,
        CONTAINER_NAME,
        name,
        content,
        http_conn=connect_swift(),
        content_type=mime_type,
    )
    return name
def _save(self, name, content):
    """Store *content* in Swift as *name* (prefix applied if configured).

    Content type is guessed from the name; returns the stored name.
    """
    if self.name_prefix:
        name = self.name_prefix + name
    guessed_type, _encoding = mimetypes.guess_type(name)
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn,
                           content_type=guessed_type)
    return name
def rescale(source, largethumb, smallthumb, token):
    """Resize a source image into a large (800px) and small (150px) thumbnail
    and upload both to Swift.

    :param source: dict with 'url', 'container', 'name' of the original image.
    :param largethumb: dict with 'url', 'container', 'name' for the 800px copy.
    :param smallthumb: dict with 'url', 'container', 'name' for the 150px copy.
    :param token: auth token used for the GET and both PUTs.
    :raises ValueError: if the source name has an unsupported extension.
    """
    (headers, source_file) = swiftclient.get_object(
        source['url'], token, source['container'], source['name'])
    large_image_file = StringIO.StringIO(source_file)
    small_image_file = StringIO.StringIO(source_file)
    # endswith() accepts a tuple of suffixes.  Previously an unsupported
    # extension left the format variable unbound and crashed later with a
    # NameError; fail fast with a clear error instead.  Also avoid shadowing
    # the builtin `type`.
    if source['name'].endswith(("jpg", "JPG", "jpeg", "JPEG")):
        img_format = "JPEG"
    elif source['name'].endswith(("png", "PNG")):
        img_format = "PNG"
    else:
        raise ValueError("Unsupported image type: %s" % source['name'])
    large = Image.open(large_image_file)
    small = Image.open(small_image_file)
    # Rotate images whose EXIF data indicates that they need rotation.
    for orientation in ExifTags.TAGS.keys():
        if ExifTags.TAGS[orientation] == 'Orientation':
            break
    if hasattr(large, '_getexif'):
        e = large._getexif()
        if e is not None:
            exif = dict(e.items())
            # .get(): images with EXIF but no Orientation tag previously
            # raised KeyError; treat them as "no rotation needed".
            orientation = exif.get(orientation)
            if orientation == 3:
                large = large.transpose(Image.ROTATE_180)
                small = small.transpose(Image.ROTATE_180)
            elif orientation == 6:
                large = large.transpose(Image.ROTATE_270)
                small = small.transpose(Image.ROTATE_270)
            elif orientation == 8:
                large = large.transpose(Image.ROTATE_90)
                small = small.transpose(Image.ROTATE_90)
    large.thumbnail((800, 800), Image.ANTIALIAS)
    small.thumbnail((150, 150), Image.ANTIALIAS)
    largeoutput = StringIO.StringIO()
    large.save(largeoutput, format=img_format)
    largedata = largeoutput.getvalue()
    largeoutput.close()
    smalloutput = StringIO.StringIO()
    small.save(smalloutput, format=img_format)
    smalldata = smalloutput.getvalue()
    smalloutput.close()
    swiftclient.put_object(
        str(largethumb['url'] + "/" + largethumb['container'] + "/" +
            urllib2.quote(largethumb['name'])).encode('utf-8'),
        token=token, contents=largedata)
    swiftclient.put_object(
        str(smallthumb['url'] + "/" + smallthumb['container'] + "/" +
            urllib2.quote(smallthumb['name'])).encode('utf-8'),
        token=token, contents=smalldata)
def copy_object(url, token, from_cont, from_obj, to_cont, to_obj=None,
                http_conn=None, proxy=None):
    """Server-side copy helper missing from swiftclient.

    Copies from_cont/from_obj to to_cont/to_obj (same object name when
    to_obj is None) by issuing a zero-length PUT with X-Copy-From.
    """
    # If URL-encoded strings were set in X-Copy-From, URL encoding is
    # applied twice, e.g.
    # '%E6%97%A5%E6%9C%AC%E8%AA%9E'
    # -> %25E6%2597%25A5%25E6%259C%25AC%25E8%25AA%259E
    # See: line 810 in swift-1.8.0/swift/proxy/controllers/obj.py
    copy_source = '/%s/%s' % (quote(from_cont), quote(from_obj))
    destination = to_obj if to_obj else from_obj
    return put_object(
        url, token, to_cont,
        name=destination,
        contents=None,
        content_length=0,
        headers={'X-Copy-From': copy_source},
        http_conn=http_conn,
        proxy=proxy)
def upload(self, remotefilename, filename, expectmd5=None, cb=False):
    """SWIFT COMMON CLIENT factored in.

    Uploads *filename* as *remotefilename* unless the remote object already
    exists.  When *expectmd5* is given, verifies the stored copy's MD5
    against it.  Returns True on success, False otherwise.
    """
    if self.object_exists(self.container, remotefilename):
        # Do not upload over an existing remote object.
        log.instance.logger.error('Remote file ' + remotefilename + ' exists, we cant back up our local copy... skipping. ')
        return False
    if not expectmd5:
        # In case your service class doesn't support md5 verification.
        # Just fire and forget.
        files.put_object(self.auth.url, self.auth.token, self.container,
                         remotefilename, filename)
        return True
    files.put_object(self.auth.url, self.auth.token, self.container,
                     remotefilename, filename)
    md5 = self.return_md5_for_remotefile(remotefilename)
    if md5 == expectmd5:
        log.instance.logger.info('Upload success! MD5 of local and remote copy match ' + str(expectmd5))
        return True
    return False
def worker(endpoint, container, token, key_name, bucket, accesskey, secretkey):
    """Copy a single S3 key into a Swift container.

    Zero-byte keys ending in '/' are stored as 'application/directory'
    placeholder objects; everything else is streamed as-is.
    """
    #ca.log("Starting "+str(key_name))
    my_s3 = S3Connection(accesskey, secretkey)
    my_bucket = Bucket(connection=my_s3, name=bucket)
    key = Key(my_bucket)
    key.key = key_name
    # endswith() avoids the IndexError that key.name[-1] raises when the
    # key name is empty.
    if key.name.endswith("/") and key.size == 0:
        swiftclient.put_object(endpoint, container=container, token=token,
                               name=key.name, contents=key,
                               content_type="application/directory")
    else:
        swiftclient.put_object(endpoint, container=container, token=token,
                               name=key.name, contents=key)
    copied_files.append(key.name)
    ca.log("Uploaded " + key.name)
def _save(self, name, content):
    """Store *content* in Swift under the (optionally prefixed) *name*.

    MIME type is sniffed from the data or guessed from the name;
    returns the stored name.
    """
    if self.name_prefix:
        name = self.name_prefix + name
    if self.content_type_from_fd:
        content_type = magic.from_buffer(content.read(1024), mime=True)
        content.seek(0)  # rewind after sniffing the first KiB
    else:
        content_type = mimetypes.guess_type(name)[0]
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn,
                           content_type=content_type)
    return name
def _save(self, name, content):
    """Upload *content* under the (optionally prefixed) *name* and return it.

    The leftover debug print statements were removed: they printed the auth
    token and storage URL to stdout on every save, which both clutters
    output and leaks credentials into logs.
    """
    if self.name_prefix:
        name = self.name_prefix + name
    content_type = mimetypes.guess_type(name)[0]
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn,
                           content_type=content_type)
    return name
def put_object(self, container_name, object_name, content):
    """Upload *content* as *object_name* into *container_name*.

    Returns the swiftclient response (etag) on success, or None when
    the request fails with a ClientException.
    """
    token = self.fetch_token()
    url = self.auth['endpoint_url']
    try:
        return put_object(url=url, token=token, container=container_name,
                          name=object_name, contents=content)
    except ClientException:
        return None
def _save(self, name, content, headers=None):
    """Upload *content* under the prefixed name, return the unprefixed name.

    MIME type is sniffed from the data or guessed from the name; the
    object's content length is taken from content.size.
    """
    original_name = name
    name = self.name_prefix + name
    if self.content_type_from_fd:
        content_type = magic.from_buffer(content.read(1024), mime=True)
        content.seek(0)  # rewind after sniffing the first KiB
    else:
        content_type = mimetypes.guess_type(name)[0]
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn,
                           content_type=content_type,
                           content_length=content.size,
                           headers=headers)
    return original_name
def worker(endpoint, container, token, key_name, bucket, accesskey, secretkey):
    """Copy one S3 key into a Swift container.

    Zero-byte keys ending in '/' become 'application/directory' placeholder
    objects; all other keys are streamed as-is.
    """
    #ca.log("Starting "+str(key_name))
    my_s3 = S3Connection(accesskey, secretkey)
    my_bucket = Bucket(connection=my_s3, name=bucket)
    key = Key(my_bucket)
    key.key = key_name
    # endswith() avoids the IndexError that key.name[-1] raises for an
    # empty key name.
    if key.name.endswith("/") and key.size == 0:
        swiftclient.put_object(endpoint, container=container, token=token,
                               name=key.name, contents=key,
                               content_type="application/directory")
    else:
        swiftclient.put_object(endpoint, container=container, token=token,
                               name=key.name, contents=key)
    copied_files.append(key.name)
    ca.log("Uploaded " + key.name)
def _save(self, name, content):
    """Upload *content*, normalising './'-prefixed names and refreshing
    the auth token first; return the stored name.
    """
    # quick fix for '.' directories that end up on SoftLayer:
    # strip the leading './' from the name
    if name.startswith('./'):
        name = name[2:]
    if self.name_prefix:
        name = self.name_prefix + name
    if self.content_type_from_fd:
        content_type = magic.from_buffer(content.read(1024), mime=True)
        content.seek(0)  # rewind after sniffing the first KiB
    else:
        content_type = mimetypes.guess_type(name)[0]
    self.get_token()  # refresh in case the cached token is old
    swiftclient.put_object(self.storage_url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn,
                           content_type=content_type)
    return name
def copy_object(url, token, from_cont, from_obj, to_cont, to_obj=None,
                http_conn=None, proxy=None):
    """Server-side object copy (helper missing from swiftclient).

    Issues a zero-length PUT to to_cont/to_obj (defaulting to the source
    object name) with an X-Copy-From header naming the source.
    """
    to_obj_name = to_obj if to_obj else from_obj
    # If URL-encoded strings were set in X-Copy-From, URL encoding is
    # applied twice by the proxy:
    # '%E6%97%A5%E6%9C%AC%E8%AA%9E'
    # -> %25E6%2597%25A5%25E6%259C%25AC%25E8%25AA%259E
    # See: line 810 in swift-1.8.0/swift/proxy/controllers/obj.py
    copy_headers = {'X-Copy-From': '/%s/%s' % (quote(from_cont),
                                               quote(from_obj))}
    return put_object(url, token, to_cont, name=to_obj_name, contents=None,
                      content_length=0, headers=copy_headers,
                      http_conn=http_conn, proxy=proxy)
break # Combine with the last tweet storage file contents tweet_list = sorted(tweet_list, key=itemgetter('id')) # Split into monthly files and save tweet_store = {'tweets': {},'mins':{},'maxes':{}} for tweet in tweet_list: time_struct = time.strptime(tweet['created_at'], "%a %b %d %H:%M:%S +0000 %Y") key = time.strftime("tweets-%Y%m.json",time_struct) if not tweet_store['tweets'].get(key): tweet_store['tweets'][key] = [tweet] else: tweet_store['tweets'][key].extend([tweet]) if not tweet_store['mins'].get(key) or tweet_store['mins'].get(key) > tweet['id']: tweet_store['mins'][key] = tweet['id'] if not tweet_store['maxes'].get(key) or tweet_store['maxes'].get(key) < tweet['id']: tweet_store['maxes'][key] = tweet['id'] swiftclient.put_container(region_endpoints['publicURL'],ca.creds['token'],ca.conf['container']) for key in sorted(tweet_store['tweets']): ca.log("Storing file: "+ca.conf['container']+"/"+ca.conf['path']+"/"+key) swiftclient.put_object(region_endpoints['publicURL']+"/"+ca.conf['container']+"/"+ca.conf['path']+"/"+key, token=ca.creds['token'], contents=json.dumps(tweet_store['tweets'][key]), headers={"X-Object-Meta-First-Tweet-ID": tweet_store['mins'][key], "X-Object-Meta-Last-Tweet-Id": tweet_store['maxes'][key]}) ca.run(agent)
return HTTP_PRECONDITION_FAILED obj_size = None if params.get('file_size'): obj_size = int(params['file_size']) else: try: obj_fp.seek(0, 2) obj_size = obj_fp.tell() obj_fp.seek(0, 0) except IOError, err: pass self.logger.debug('Upload obj size: %s' % obj_size) try: put_object(storage_url, token, cont, obj, obj_fp, content_length=obj_size) except ClientException, err: return err.http_status return HTTP_CREATED return HTTP_BAD_REQUEST if action == 'obj_get': (obj_status, hunk) = get_object(storage_url, token, cont, obj) #resp.headerlist = obj_status.items() #resp.body_file = hunk if action == 'obj_delete': try: delete_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status
except ClientException, err: return err.http_status, json.dumps(headers) return HTTP_OK, json.dumps(headers) if action == 'obj_meta_list': headers = {} try: headers = head_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status, headers return HTTP_OK, headers if action == 'obj_create': if obj: if len(obj) > 1024: return HTTP_PRECONDITION_FAILED try: put_object(storage_url, token, cont, obj, obj_fp) except ClientException, err: return err.http_status return HTTP_CREATED return HTTP_BAD_REQUEST if action == 'obj_get': (obj_status, hunk) = get_object(storage_url, token, cont, obj) #resp.headerlist = obj_status.items() #resp.body_file = hunk if action == 'obj_delete': try: delete_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status return HTTP_NO_CONTENT if action == 'obj_metadata':
def container_sync_row(self, row, sync_to, sync_key, broker, info): """ Sends the update the row indicates to the sync_to container. :param row: The updated row in the local database triggering the sync update. :param sync_to: The URL to the remote container. :param sync_key: The X-Container-Sync-Key to use when sending requests to the other container. :param broker: The local container database broker. :param info: The get_info result from the local container database broker. :returns: True on success """ try: start_time = time() if row['deleted']: try: delete_object(sync_to, name=row['name'], headers={ 'x-timestamp': row['created_at'], 'x-container-sync-key': sync_key }, proxy=self.proxy) except ClientException as err: if err.http_status != HTTP_NOT_FOUND: raise self.container_deletes += 1 self.logger.increment('deletes') self.logger.timing_since('deletes.timing', start_time) else: part, nodes = self.object_ring.get_nodes( info['account'], info['container'], row['name']) shuffle(nodes) exc = None looking_for_timestamp = float(row['created_at']) timestamp = -1 headers = body = None for node in nodes: try: these_headers, this_body = direct_get_object( node, part, info['account'], info['container'], row['name'], resp_chunk_size=65536) this_timestamp = float(these_headers['x-timestamp']) if this_timestamp > timestamp: timestamp = this_timestamp headers = these_headers body = this_body except ClientException as err: # If any errors are not 404, make sure we report the # non-404 one. We don't want to mistakenly assume the # object no longer exists just because one says so and # the others errored for some other reason. 
if not exc or exc.http_status == HTTP_NOT_FOUND: exc = err except (Exception, Timeout) as err: exc = err if timestamp < looking_for_timestamp: if exc: raise exc raise Exception( _('Unknown exception trying to GET: %(node)r ' '%(account)r %(container)r %(object)r'), { 'node': node, 'part': part, 'account': info['account'], 'container': info['container'], 'object': row['name'] }) for key in ('date', 'last-modified'): if key in headers: del headers[key] if 'etag' in headers: headers['etag'] = headers['etag'].strip('"') headers['x-timestamp'] = row['created_at'] headers['x-container-sync-key'] = sync_key put_object(sync_to, name=row['name'], headers=headers, contents=FileLikeIter(body), proxy=self.proxy) self.container_puts += 1 self.logger.increment('puts') self.logger.timing_since('puts.timing', start_time) except ClientException as err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _('Unauth %(sync_from)r => %(sync_to)r'), { 'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to }) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'), { 'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to, 'obj_name': row['name'] }) else: self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), { 'db_file': str(broker), 'row': row }) self.container_failures += 1 self.logger.increment('failures') return False except (Exception, Timeout) as err: self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), { 'db_file': str(broker), 'row': row }) self.container_failures += 1 self.logger.increment('failures') return False return True
def _save(self, name, content):
    """Upload *content* to the configured container as *name*; return *name*."""
    swiftclient.put_object(self.url, self.token, self.container_name,
                           name, content, http_conn=self.http_conn)
    return name
def container_sync_row(self, row, sync_to, sync_key, broker, info): """ Sends the update the row indicates to the sync_to container. :param row: The updated row in the local database triggering the sync update. :param sync_to: The URL to the remote container. :param sync_key: The X-Container-Sync-Key to use when sending requests to the other container. :param broker: The local container database broker. :param info: The get_info result from the local container database broker. :returns: True on success """ try: start_time = time() if row['deleted']: try: delete_object(sync_to, name=row['name'], headers={'x-timestamp': row['created_at'], 'x-container-sync-key': sync_key}, proxy=self.proxy) except ClientException as err: if err.http_status != HTTP_NOT_FOUND: raise self.container_deletes += 1 self.logger.increment('deletes') self.logger.timing_since('deletes.timing', start_time) else: part, nodes = self.object_ring.get_nodes( info['account'], info['container'], row['name']) shuffle(nodes) exc = None looking_for_timestamp = float(row['created_at']) timestamp = -1 headers = body = None for node in nodes: try: these_headers, this_body = direct_get_object( node, part, info['account'], info['container'], row['name'], resp_chunk_size=65536) this_timestamp = float(these_headers['x-timestamp']) if this_timestamp > timestamp: timestamp = this_timestamp headers = these_headers body = this_body except ClientException as err: # If any errors are not 404, make sure we report the # non-404 one. We don't want to mistakenly assume the # object no longer exists just because one says so and # the others errored for some other reason. 
if not exc or exc.http_status == HTTP_NOT_FOUND: exc = err except (Exception, Timeout) as err: exc = err if timestamp < looking_for_timestamp: if exc: raise exc raise Exception( _('Unknown exception trying to GET: %(node)r ' '%(account)r %(container)r %(object)r'), {'node': node, 'part': part, 'account': info['account'], 'container': info['container'], 'object': row['name']}) for key in ('date', 'last-modified'): if key in headers: del headers[key] if 'etag' in headers: headers['etag'] = headers['etag'].strip('"') headers['x-timestamp'] = row['created_at'] headers['x-container-sync-key'] = sync_key put_object(sync_to, name=row['name'], headers=headers, contents=FileLikeIter(body), proxy=self.proxy) self.container_puts += 1 self.logger.increment('puts') self.logger.timing_since('puts.timing', start_time) except ClientException as err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _('Unauth %(sync_from)r => %(sync_to)r'), {'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to}) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'), {'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to, 'obj_name': row['name']}) else: self.logger.exception( _('ERROR Syncing %(db_file)s %(row)s'), {'db_file': broker.db_file, 'row': row}) self.container_failures += 1 self.logger.increment('failures') return False except (Exception, Timeout) as err: self.logger.exception( _('ERROR Syncing %(db_file)s %(row)s'), {'db_file': broker.db_file, 'row': row}) self.container_failures += 1 self.logger.increment('failures') return False return True
raise Exception( _('Unknown exception trying to GET: %(node)r ' '%(account)r %(container)r %(object)r'), {'node': node, 'part': part, 'account': info['account'], 'container': info['container'], 'object': row['name']}) for key in ('date', 'last-modified'): if key in headers: del headers[key] if 'etag' in headers: headers['etag'] = headers['etag'].strip('"') headers['x-timestamp'] = row['created_at'] headers['x-container-sync-key'] = sync_key put_object(sync_to, name=row['name'], headers=headers, contents=FileLikeIter(body), proxy=self.proxy) self.container_puts += 1 self.logger.increment('puts') self.logger.timing_since('puts.timing', start_time) except ClientException, err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _('Unauth %(sync_from)r => %(sync_to)r'), {'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to}) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'),
"node": node, "part": part, "account": info["account"], "container": info["container"], "object": row["name"], }, ) for key in ("date", "last-modified"): if key in headers: del headers[key] if "etag" in headers: headers["etag"] = headers["etag"].strip('"') headers["x-timestamp"] = row["created_at"] headers["x-container-sync-key"] = sync_key put_object( sync_to, name=row["name"], headers=headers, contents=_Iter2FileLikeObject(body), proxy=self.proxy ) self.container_puts += 1 self.logger.increment("puts") self.logger.timing_since("puts.timing", start_time) except ClientException, err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _("Unauth %(sync_from)r => %(sync_to)r"), {"sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to}, ) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _( "Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r"
# create a container """ element_id += 1 container_meta = {'X-Container-Meta-ID' : element_id, 'X-Container-Meta-Creator' : user} rv = SWIFT.put_container(swift_url, auth_token, container_name, headers=container_meta ) print 'put_container:', rv """ # get container info stats = SWIFT.head_container(swift_url, auth_token, container_name) print 'stats for testdir:', stats # store 10 objects for x in xrange(10): # create an object element_id += 1 object_meta = {'X-Object-Meta-ID' : element_id, 'X-Object-Meta-Creator' : user} obj_etag = SWIFT.put_object(swift_url, auth_token, container=container_name, name=obj_name+str(element_id), contents=contents, headers=object_meta ) print 'put_object:', obj_etag """ # get container info and list objects in container objs = SWIFT.get_container(swift_url, auth_token, container_name, marker='testobj10') print 'dir size:', objs[0]['x-container-bytes-used'] for obj in objs[1]: print obj['name'] """
if obj: if len(obj) > 1024: return HTTP_PRECONDITION_FAILED obj_size = None if params.get('file_size'): obj_size = int(params['file_size']) else: try: obj_fp.seek(0,2) obj_size = obj_fp.tell() obj_fp.seek(0,0) except IOError, err: pass self.logger.debug('Upload obj size: %s' % obj_size) try: put_object(storage_url, token, cont, obj, obj_fp, content_length=obj_size) except ClientException, err: return err.http_status return HTTP_CREATED return HTTP_BAD_REQUEST if action == 'obj_get': (obj_status, hunk) = get_object(storage_url, token, cont, obj) #resp.headerlist = obj_status.items() #resp.body_file = hunk if action == 'obj_delete': try: delete_object(storage_url, token, cont, obj) except ClientException, err: return err.http_status return HTTP_NO_CONTENT if action == 'obj_metadata':
def makedirs(self, dirs):
    """Create a pseudo-directory by storing an empty '<dirs>/.' marker object."""
    marker = '%s/.' % dirs
    swiftclient.put_object(self.storage_url, token=self.token,
                           container=self.container_name,
                           name=marker, contents='')
def container_sync_row(self, row, sync_to, sync_key, broker, info): """ Sends the update the row indicates to the sync_to container. :param row: The updated row in the local database triggering the sync update. :param sync_to: The URL to the remote container. :param sync_key: The X-Container-Sync-Key to use when sending requests to the other container. :param broker: The local container database broker. :param info: The get_info result from the local container database broker. :returns: True on success """ try: start_time = time() if row["deleted"]: try: delete_object( sync_to, name=row["name"], headers={"x-timestamp": row["created_at"], "x-container-sync-key": sync_key}, proxy=self.proxy, ) except ClientException as err: if err.http_status != HTTP_NOT_FOUND: raise self.container_deletes += 1 self.logger.increment("deletes") self.logger.timing_since("deletes.timing", start_time) else: part, nodes = self.object_ring.get_nodes(info["account"], info["container"], row["name"]) shuffle(nodes) exc = None looking_for_timestamp = float(row["created_at"]) timestamp = -1 headers = body = None for node in nodes: try: these_headers, this_body = direct_get_object( node, part, info["account"], info["container"], row["name"], resp_chunk_size=65536 ) this_timestamp = float(these_headers["x-timestamp"]) if this_timestamp > timestamp: timestamp = this_timestamp headers = these_headers body = this_body except ClientException as err: # If any errors are not 404, make sure we report the # non-404 one. We don't want to mistakenly assume the # object no longer exists just because one says so and # the others errored for some other reason. 
if not exc or exc.http_status == HTTP_NOT_FOUND: exc = err except (Exception, Timeout) as err: exc = err if timestamp < looking_for_timestamp: if exc: raise exc raise Exception( _("Unknown exception trying to GET: %(node)r " "%(account)r %(container)r %(object)r"), { "node": node, "part": part, "account": info["account"], "container": info["container"], "object": row["name"], }, ) for key in ("date", "last-modified"): if key in headers: del headers[key] if "etag" in headers: headers["etag"] = headers["etag"].strip('"') headers["x-timestamp"] = row["created_at"] headers["x-container-sync-key"] = sync_key put_object(sync_to, name=row["name"], headers=headers, contents=FileLikeIter(body), proxy=self.proxy) self.container_puts += 1 self.logger.increment("puts") self.logger.timing_since("puts.timing", start_time) except ClientException as err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _("Unauth %(sync_from)r => %(sync_to)r"), {"sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to}, ) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _( "Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r" ), { "sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to, "obj_name": row["name"], }, ) else: self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row}) self.container_failures += 1 self.logger.increment("failures") return False except (Exception, Timeout) as err: self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row}) self.container_failures += 1 self.logger.increment("failures") return False return True
def rescale(source, largethumb, smallthumb, token):
    """Resize a source image into a large (800px) and small (150px) thumbnail
    and upload both to Swift.

    :param source: dict with 'url', 'container', 'name' of the original image.
    :param largethumb: dict with 'url', 'container', 'name' for the 800px copy.
    :param smallthumb: dict with 'url', 'container', 'name' for the 150px copy.
    :param token: auth token used for the GET and both PUTs.
    :raises ValueError: if the source name has an unsupported extension.
    """
    (headers, source_file) = swiftclient.get_object(source['url'], token,
                                                    source['container'],
                                                    source['name'])
    large_image_file = StringIO.StringIO(source_file)
    small_image_file = StringIO.StringIO(source_file)
    # endswith() accepts a tuple of suffixes.  Previously an unsupported
    # extension left the format variable unbound and crashed later with a
    # NameError; fail fast instead.  Also avoid shadowing the builtin `type`.
    if source['name'].endswith(("jpg", "JPG", "jpeg", "JPEG")):
        img_format = "JPEG"
    elif source['name'].endswith(("png", "PNG")):
        img_format = "PNG"
    else:
        raise ValueError("Unsupported image type: %s" % source['name'])
    large = Image.open(large_image_file)
    small = Image.open(small_image_file)
    # Rotate images whose EXIF data indicates that they need rotation.
    for orientation in ExifTags.TAGS.keys():
        if ExifTags.TAGS[orientation] == 'Orientation':
            break
    if hasattr(large, '_getexif'):
        e = large._getexif()
        if e is not None:
            exif = dict(e.items())
            # .get(): images with EXIF but no Orientation tag previously
            # raised KeyError; treat them as "no rotation needed".
            orientation = exif.get(orientation)
            if orientation == 3:
                large = large.transpose(Image.ROTATE_180)
                small = small.transpose(Image.ROTATE_180)
            elif orientation == 6:
                large = large.transpose(Image.ROTATE_270)
                small = small.transpose(Image.ROTATE_270)
            elif orientation == 8:
                large = large.transpose(Image.ROTATE_90)
                small = small.transpose(Image.ROTATE_90)
    large.thumbnail((800, 800), Image.ANTIALIAS)
    small.thumbnail((150, 150), Image.ANTIALIAS)
    largeoutput = StringIO.StringIO()
    large.save(largeoutput, format=img_format)
    largedata = largeoutput.getvalue()
    largeoutput.close()
    smalloutput = StringIO.StringIO()
    small.save(smalloutput, format=img_format)
    smalldata = smalloutput.getvalue()
    smalloutput.close()
    swiftclient.put_object(
        str(largethumb['url'] + "/" + largethumb['container'] + "/" +
            urllib2.quote(largethumb['name'])).encode('utf-8'),
        token=token, contents=largedata)
    swiftclient.put_object(
        str(smallthumb['url'] + "/" + smallthumb['container'] + "/" +
            urllib2.quote(smallthumb['name'])).encode('utf-8'),
        token=token, contents=smalldata)
def makedirs(self, dirs):
    """Create a pseudo-directory marker object for *dirs*, name-prefix applied."""
    marker_name = '%s/.' % (self.name_prefix + dirs)
    swiftclient.put_object(self.storage_url, token=self.token,
                           container=self.container_name,
                           name=marker_name, contents='')
def agent(): ca.log("Starting!") keystone = client.Client(token=ca.creds['token'], tenant_id=ca.creds['tenantId'], auth_url=ca.creds['identity_url']) object_store_catalog = keystone.service_catalog.get_endpoints()['object-store'] source_endpoint = None for endpoints in object_store_catalog: if endpoints['region'] == ca.conf['region']: source_endpoint = endpoints if not source_endpoint: ca.log_fail("Failing, source region not found in endpoint list.") exit() target_endpoint = None for endpoints in object_store_catalog: if endpoints['region'] == ca.conf['galleryregion']: target_endpoint = endpoints if not source_endpoint: ca.log_fail("Failing, target region not found in endpoint list.") exit() try: container = swiftclient.head_container(target_endpoint['publicURL'],ca.creds['token'], ca.conf['gallerycontainer']) if not '.r:*' in container.get('x-container-read'): ca.warn("Gallery container exists, but may not be publicly readable.","") except: ca.log("Gallery container doesn't exist, creating new publicly readable container.","") swiftclient.put_container(target_endpoint['publicURL'],ca.creds['token'], ca.conf['gallerycontainer'],{"X-Container-Read": ".r:*"}) ca.log("Getting target listing.","") targetlisting = get_swift_container(target_endpoint['publicURL']+"/"+ca.conf['gallerycontainer']+"?prefix="+ca.conf.get('gallerypath'),ca.creds['token']) ca.log("Getting source listing.","") sourcelisting = get_swift_container(source_endpoint['publicURL']+"/"+ca.conf['container']+"?prefix="+ca.conf.get('path'),ca.creds['token']) good_files = re.compile(".*\.(JPG|jpg|JPEG|jpeg|PNG|png)$") something_changed = False # We should add some stuff to clean up paths, ie: ensure they end in a slash. Later. 
for file in sourcelisting: if good_files.match(file): justfile = file.replace(ca.conf.get("path"), "") (name,ext) = justfile.rsplit(".",1) if not ca.conf.get("gallerypath")+name+"-large."+ext in targetlisting or not ca.conf.get("gallerypath")+name+"-small."+ext in targetlisting: something_changed = True ca.log("Scaling "+justfile+" to smaller sizes.","") rescale( {"url": source_endpoint['publicURL'],"container": ca.conf['container'],"name":ca.conf.get('path')+justfile}, {"url": target_endpoint['publicURL'],"container": ca.conf['gallerycontainer'],"name":ca.conf.get('gallerypath')+name+"-large."+ext}, {"url": target_endpoint['publicURL'],"container": ca.conf['gallerycontainer'],"name":ca.conf.get('gallerypath')+name+"-small."+ext}, ca.creds['token']) targetlisting.append(ca.conf['gallerypath']+name+"-large."+ext) tree = {} subs = {} if something_changed: # Rebuild our templates. for file in sourcelisting: if good_files.match(file): name = file.replace(ca.conf.get("path"), "") path = name.split("/") if len(path) > 1: subpath = "/".join(path[:-1]) if tree.get(subpath): tree[subpath].extend([path[-1]]) else: tree[subpath] = [path[-1]] else: if tree.get(""): tree[""].extend(path) else: tree[""] = path if len(path) > 1: level_above = "/".join(path[0:-2]) if subs.get(level_above): subs[level_above].add(path[-2]) else: subs[level_above] = set([path[-2]]) head = """ <head><title>%s Gallery</title> <link href="//netdna.bootstrapcdn.com/twitter-bootstrap/2.1.1/css/bootstrap-combined.min.css" rel="stylesheet"> <link href="https://region-a.geo-1.objects.hpcloudsvc.com:443/v1.0/16026287679679/testcontainer/colorbox/colorbox.css" rel="stylesheet"> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.1/jquery.min.js"></script> <script src="//netdna.bootstrapcdn.com/twitter-bootstrap/2.1.1/js/bootstrap.min.js"></script> <script src="http://cdn.jsdelivr.net/colorbox/1.3.19.3/jquery.colorbox-min.js"></script> <script> $(document).ready(function(){ $('a.gallery').colorbox({ 
rel:'group1' }); }); </script> <style> .thumbnail { height: 150px, width: 150px } </style> </head> <body> <div class="container"> <h1>%s Gallery</h1> """ sublist = """ <h2>Sub-Galleries</h2> <ul class="nav nav-tabs nav-stacked"> """ back_up = """ <ul class="nav nav-tabs nav-stacked"> <li><a href="../">Back to %s</a></li> </ul> """ gallery_top = """ <ul class="thumbnails"> """ gallery_bottom = """ </ul> """ footer = """ </div> </body> """ for name, template in tree.items(): ca.log("Writing gallery template for "+name,"") pretty_name = name.split("/")[-1] html = head % (pretty_name,pretty_name) if name: if len(name.split("/")) == 1: html += back_up % ("Home") elif len(name.split("/")) > 1: html += back_up % (name.split("/")[-2]) if subs.get(name): html += sublist for subpath in subs[name]: html += '<li><a href="%s/">%s</a></li>\n' % (subpath, subpath) html += "</ul>\n" html += gallery_top for file in template: (filename,extension) = file.rsplit(".",1) html += "<li class='span3'>\n<a href='%s' class='gallery thumbnail'><img src='%s'></a></li>\n" % (filename+"-large."+extension,filename+"-small."+extension) html += gallery_bottom html += footer filename = name if filename and filename[-1] != '/': filename = filename +"/" swiftclient.put_object(target_endpoint['publicURL']+"/"+ca.conf['gallerycontainer']+"/"+ca.conf.get('gallerypath')+filename, contents=html, content_type="text/html", token=ca.creds['token']) ca.log("Gallery updated at "+target_endpoint['publicURL']+"/"+ca.conf['gallerycontainer']+"/"+ca.conf.get('gallerypath'),"") else: ca.log("No new files found.","")
def create(self, fname, data):
    """Store *data* in this instance's Swift container under the name *fname*.

    Thin wrapper over ``swiftclient.put_object`` using the storage URL,
    auth token, and container configured on the instance.
    """
    swiftclient.put_object(
        self.sturl,
        self.token,
        self.container,
        fname,
        data,
    )
def _find_endpoint(catalog, region):
    """Return the last object-store catalog entry whose 'region' matches.

    Preserves the original scan's last-match-wins behavior; returns None
    when the region is not present in the catalog.
    """
    found = None
    for endpoint in catalog:
        if endpoint['region'] == region:
            found = endpoint
    return found


def agent():
    """Build and publish an HTML photo gallery from a Swift container.

    Scans the configured source container for image files, rescales any
    image missing its "-large"/"-small" variant into the gallery
    container (via ``rescale``), and — only when something changed —
    regenerates one HTML index page per directory level and uploads it.

    Side effects: may create the (publicly readable) gallery container,
    writes scaled images and HTML objects to Swift, and logs progress via
    ``ca``.  Exits the process when a configured region is missing from
    the keystone service catalog.
    """
    ca.log("Starting!")
    keystone = client.Client(token=ca.creds['token'],
                             tenant_id=ca.creds['tenantId'],
                             auth_url=ca.creds['identity_url'])
    object_store_catalog = keystone.service_catalog.get_endpoints(
    )['object-store']

    source_endpoint = _find_endpoint(object_store_catalog, ca.conf['region'])
    if not source_endpoint:
        ca.log_fail("Failing, source region not found in endpoint list.")
        exit()

    target_endpoint = _find_endpoint(object_store_catalog,
                                     ca.conf['galleryregion'])
    # Bug fix: the original re-tested source_endpoint here, so a missing
    # *target* region was never caught.
    if not target_endpoint:
        ca.log_fail("Failing, target region not found in endpoint list.")
        exit()

    try:
        container = swiftclient.head_container(target_endpoint['publicURL'],
                                               ca.creds['token'],
                                               ca.conf['gallerycontainer'])
        # Header may be absent; "or ''" avoids `in None` raising TypeError
        # (which previously fell through to the except branch).
        if '.r:*' not in (container.get('x-container-read') or ''):
            ca.warn(
                "Gallery container exists, but may not be publicly readable.",
                "")
    except Exception:
        # head_container raises when the container does not exist; create
        # it world-readable so the gallery can be served directly.
        ca.log(
            "Gallery container doesn't exist, creating new publicly readable container.",
            "")
        swiftclient.put_container(target_endpoint['publicURL'],
                                  ca.creds['token'],
                                  ca.conf['gallerycontainer'],
                                  {"X-Container-Read": ".r:*"})

    ca.log("Getting target listing.", "")
    targetlisting = get_swift_container(
        target_endpoint['publicURL'] + "/" + ca.conf['gallerycontainer'] +
        "?prefix=" + ca.conf.get('gallerypath'), ca.creds['token'])
    ca.log("Getting source listing.", "")
    sourcelisting = get_swift_container(
        source_endpoint['publicURL'] + "/" + ca.conf['container'] +
        "?prefix=" + ca.conf.get('path'), ca.creds['token'])

    # Raw string so "\." stays a regex escape for a literal dot.
    good_files = re.compile(r".*\.(JPG|jpg|JPEG|jpeg|PNG|png)$")
    something_changed = False
    # We should add some stuff to clean up paths, ie: ensure they end in a slash. Later.
    for file in sourcelisting:
        if not good_files.match(file):
            continue
        justfile = file.replace(ca.conf.get("path"), "")
        (name, ext) = justfile.rsplit(".", 1)
        large_name = ca.conf.get("gallerypath") + name + "-large." + ext
        small_name = ca.conf.get("gallerypath") + name + "-small." + ext
        if large_name not in targetlisting or small_name not in targetlisting:
            something_changed = True
            ca.log("Scaling " + justfile + " to smaller sizes.", "")
            rescale(
                {
                    "url": source_endpoint['publicURL'],
                    "container": ca.conf['container'],
                    "name": ca.conf.get('path') + justfile
                }, {
                    "url": target_endpoint['publicURL'],
                    "container": ca.conf['gallerycontainer'],
                    "name": large_name
                }, {
                    "url": target_endpoint['publicURL'],
                    "container": ca.conf['gallerycontainer'],
                    "name": small_name
                }, ca.creds['token'])
            # Remember the new object so we don't rescale it again this run.
            targetlisting.append(large_name)

    # tree maps each directory path -> list of image filenames in it;
    # subs maps each directory path -> set of immediate sub-galleries.
    tree = {}
    subs = {}
    if something_changed:
        # Rebuild our templates.
        for file in sourcelisting:
            if not good_files.match(file):
                continue
            name = file.replace(ca.conf.get("path"), "")
            path = name.split("/")
            if len(path) > 1:
                subpath = "/".join(path[:-1])
                tree.setdefault(subpath, []).append(path[-1])
                # Register this directory as a sub-gallery of its parent.
                level_above = "/".join(path[0:-2])
                subs.setdefault(level_above, set()).add(path[-2])
            else:
                tree.setdefault("", []).extend(path)

        head = """
<head><title>%s Gallery</title>
<link href="//netdna.bootstrapcdn.com/twitter-bootstrap/2.1.1/css/bootstrap-combined.min.css" rel="stylesheet">
<link href="https://region-a.geo-1.objects.hpcloudsvc.com:443/v1.0/16026287679679/testcontainer/colorbox/colorbox.css" rel="stylesheet">
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.1/jquery.min.js"></script>
<script src="//netdna.bootstrapcdn.com/twitter-bootstrap/2.1.1/js/bootstrap.min.js"></script>
<script src="http://cdn.jsdelivr.net/colorbox/1.3.19.3/jquery.colorbox-min.js"></script>
<script>
$(document).ready(function(){
$('a.gallery').colorbox({ rel:'group1' });
});
</script>
<style>
.thumbnail { height: 150px, width: 150px }
</style>
</head>
<body>
<div class="container">
<h1>%s Gallery</h1>
"""
        sublist = """
<h2>Sub-Galleries</h2>
<ul class="nav nav-tabs nav-stacked">
"""
        back_up = """
<ul class="nav nav-tabs nav-stacked">
<li><a href="../">Back to %s</a></li>
</ul>
"""
        gallery_top = """
<ul class="thumbnails">
"""
        gallery_bottom = """
</ul>
"""
        footer = """
</div>
</body>
"""
        for name, template in tree.items():
            ca.log("Writing gallery template for " + name, "")
            pretty_name = name.split("/")[-1]
            html = head % (pretty_name, pretty_name)
            if name:
                # Non-root page: link back to the parent gallery.
                if len(name.split("/")) == 1:
                    html += back_up % ("Home")
                elif len(name.split("/")) > 1:
                    html += back_up % (name.split("/")[-2])
            if subs.get(name):
                html += sublist
                for subpath in subs[name]:
                    html += '<li><a href="%s/">%s</a></li>\n' % (subpath,
                                                                 subpath)
                html += "</ul>\n"
            html += gallery_top
            for file in template:
                (filename, extension) = file.rsplit(".", 1)
                html += "<li class='span3'>\n<a href='%s' class='gallery thumbnail'><img src='%s'></a></li>\n" % (
                    filename + "-large." + extension,
                    filename + "-small." + extension)
            html += gallery_bottom
            html += footer
            # The page is stored as a pseudo-directory object, so ensure a
            # trailing slash on non-root gallery names.
            filename = name
            if filename and filename[-1] != '/':
                filename = filename + "/"
            swiftclient.put_object(target_endpoint['publicURL'] + "/" +
                                   ca.conf['gallerycontainer'] + "/" +
                                   ca.conf.get('gallerypath') + filename,
                                   contents=html,
                                   content_type="text/html",
                                   token=ca.creds['token'])
        ca.log(
            "Gallery updated at " + target_endpoint['publicURL'] + "/" +
            ca.conf['gallerycontainer'] + "/" + ca.conf.get('gallerypath'),
            "")
    else:
        ca.log("No new files found.", "")