def fetch_file_remote(self, key, file_path_local):
    """ Fetch a key from S3 into a local file. Returns True if the key was found, False otherwise."""
    conn = self.conn
    message = "Fetching to local file: '{}' from a key: '{}' on s3 bucket: '{}' set on region: '{}'".format(
        file_path_local, key, config.get('aws', 'bucket'), config.get('aws', 'region'))
    Logger().info(message)
    bucket = conn.get_bucket(config.get('aws', 'bucket'))
    # If the bucket location cannot be resolved through the default endpoint,
    # reconnect explicitly to the configured region and look the bucket up again.
    if not bucket.get_location():
        conn = boto.s3.connect_to_region(config.get('aws', 'region'))
        bucket = conn.get_bucket(config.get('aws', 'bucket'))
    key_object = bucket.get_key(key)
    if key_object is not None:
        key_object.get_contents_to_filename(file_path_local)
        return True
    return False

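# Usage sketch (an illustration, not from the original source): these snippets
# read like methods of an S3 wrapper class; 'S3Client' is a hypothetical name
# for it, and the key and path values below are made up.
client = S3Client()
if client.fetch_file_remote('reports/2017/summary.csv', '/tmp/summary.csv'):
    print('key found and downloaded')
else:
    print('key not found on the bucket')
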
def __init__(self):
    parameters = {
        'host': config.get('database', 'host'),
        'dbname': config.get('database', 'dbname'),
        'user': config.get('database', 'user'),
        'password': config.get('database', 'password'),
        'port': config.get('database', 'port')
    }
    # The user/password placeholders were masked out in the original source;
    # they are reconstructed here from the keys of the parameters dict above.
    self.conn = psycopg2.connect(
        "host='%(host)s' dbname='%(dbname)s' user='%(user)s' "
        "password='%(password)s' port='%(port)s'" % parameters)
    self.conn.autocommit = True

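# Usage sketch (assumption: the constructor above belongs to a small psycopg2
# wrapper, hypothetically named 'Database'). With autocommit enabled, each
# execute() is committed immediately and no explicit commit() is needed.
db = Database()
cursor = db.conn.cursor()
cursor.execute("SELECT version()")
print(cursor.fetchone()[0])
cursor.close()
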
def __init__(self, identifier=None):
    logger_identifier = identifier if identifier is not None else config.get('logger', 'default_identifier')
    self.logger = logging.getLogger(logger_identifier)
    logging.basicConfig(
        filename=os.getcwd() + os.path.normpath(config.get('logger', 'log_file')),
        level=config.get('logger', 'log_level'),
        format='[%(asctime)-15s] {}.%(levelname)s: %(message)s'.format(config.get('logger', 'app_name')),
        datefmt='%Y-%m-%d %H:%M:%S'
    )

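# Example of the [logger] config section this constructor reads. The key names
# come straight from the code above; the values are illustrative assumptions.
#
#     [logger]
#     default_identifier = app
#     app_name = myapp
#     log_file = /logs/app.log
#     log_level = INFO
#
# Note that the filename is built as os.getcwd() + normpath(log_file), so
# log_file is expected to start with a path separator.
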
def key_delete(self, key):
    """ Delete a key from the s3 bucket."""
    conn = self.conn
    message = "Deleting key: '{}' on s3 bucket: '{}' set on region: '{}'".format(
        key, config.get('aws', 'bucket'), config.get('aws', 'region'))
    Logger().info(message)
    bucket = conn.get_bucket(config.get('aws', 'bucket'))
    # Same region fallback as in fetch_file_remote above.
    if not bucket.get_location():
        conn = boto.s3.connect_to_region(config.get('aws', 'region'))
        bucket = conn.get_bucket(config.get('aws', 'bucket'))
    return bucket.delete_key(key)

def send_file_local(self, key, file_path_local):
    """ Upload a local file to the s3 bucket under the given key."""
    conn = self.conn
    message = "Sending local file: '{}' under a key: '{}' on s3 bucket: '{}' set on region: '{}'".format(
        file_path_local, key, config.get('aws', 'bucket'), config.get('aws', 'region'))
    Logger().debug(message)
    bucket = conn.get_bucket(config.get('aws', 'bucket'))
    # Same region fallback as in fetch_file_remote above.
    if not bucket.get_location():
        conn = boto.s3.connect_to_region(config.get('aws', 'region'))
        bucket = conn.get_bucket(config.get('aws', 'bucket'))
    return bucket.new_key(key).set_contents_from_filename(file_path_local)

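# Round-trip sketch (same hypothetical 'S3Client' wrapper as above; file names
# are made up): upload a local file under a key, then fetch it back to a
# second path. The assert presumes read-after-write consistency for new keys.
client = S3Client()
client.send_file_local('tests/fixture.txt', '/tmp/fixture.txt')
assert client.fetch_file_remote('tests/fixture.txt', '/tmp/fixture_copy.txt')
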
def rename_files(self, prefix_old, prefix_new):
    """ Copy every key under prefix_old to the same key under prefix_new, then delete the originals."""
    conn = self.conn
    bucket = conn.get_bucket(config.get('aws', 'bucket'))
    bucket_entries = bucket.list(prefix=prefix_old)
    count = 0
    for entry in bucket_entries:
        new_key_name = entry.name.replace(prefix_old, prefix_new)
        entry.copy(config.get('aws', 'bucket'), new_key_name)
        entry.delete()
        count += 1
    if count > 0:
        message = "{} files renamed from prefix: '{}' to new prefix: '{}' on s3 bucket: '{}' set on region: '{}'".format(
            count, prefix_old, prefix_new, config.get('aws', 'bucket'), config.get('aws', 'region'))
    else:
        message = "Tried to rename files from prefix: '{}' to new prefix: '{}' on s3 bucket: '{}' set on region: '{}' but none found.".format(
            prefix_old, prefix_new, config.get('aws', 'bucket'), config.get('aws', 'region'))
    Logger().debug(message)

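# Usage sketch (hypothetical 'S3Client' wrapper; prefixes are illustrative).
# S3 has no native rename, hence the copy-then-delete pattern above: each key
# is copied within the bucket and the original removed.
client = S3Client()
client.rename_files('uploads/tmp/', 'uploads/done/')
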
def __init__(self):
    # ConfigParser values arrive as strings, so cast port and db to int.
    self.r = redis.StrictRedis(
        host=config.get('redis', 'host'),
        port=int(config.get('redis', 'port')),
        db=int(config.get('redis', 'db')))

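# Usage sketch (assumption: the class above is a thin Redis wrapper,
# hypothetically named 'Cache'; the key and value are made up).
cache = Cache()
cache.r.set('greeting', 'hello')
print(cache.r.get('greeting'))  # StrictRedis returns bytes: b'hello'
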
def __init__(self):
    self.conn = S3Connection(config.get('aws', 'access_key_id'),
                             config.get('aws', 'secret_access_key'))
    self.logger = Logger()

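# Example of the [aws] config section the S3 snippets in this file read.
# The key names are the ones used above; the values are placeholder
# assumptions. The snippets also presume module-level imports along the
# lines of:
#
#     import boto.s3
#     from boto.s3.connection import S3Connection
#
#     [aws]
#     access_key_id = YOUR_ACCESS_KEY_ID
#     secret_access_key = YOUR_SECRET_ACCESS_KEY
#     bucket = my-app-bucket
#     region = eu-west-1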