Example #1
import os


def download_S3_Bucket(boto3_client, str_bucket_name, str_bucket_path,
                       str_local_relative_target):
    """
    Recursively downloads the given S3 prefix to the target directory.
    :param boto3_client: the S3 client to use.
    :param str_bucket_name: the name of the bucket to download from.
    :param str_bucket_path: the S3 prefix (directory) to download.
    :param str_local_relative_target: the local directory to download the files to.
    """

    # Handle missing / at end of prefix
    if not str_bucket_path.endswith('/'):
        str_bucket_path += '/'

    paginator = boto3_client.get_paginator('list_objects_v2')
    for result in paginator.paginate(Bucket=str_bucket_name,
                                     Prefix=str_bucket_path):
        # Download each file individually; pages with no 'Contents' are skipped
        for key in result.get('Contents', []):
            # Calculate relative path
            rel_path = key['Key'][len(str_bucket_path):]
            # Skip paths ending in /
            if not key['Key'].endswith('/'):
                local_file_path = os.path.join(str_local_relative_target,
                                               rel_path)
                # Make sure directories exist
                local_file_dir = os.path.dirname(local_file_path)
                create_Folders_Along_Path(local_file_dir)
                boto3_client.download_file(str_bucket_name, key['Key'],
                                           local_file_path)
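A minimal usage sketch for the function above; the client creation is standard boto3, while the bucket name, prefix, and local directory are hypothetical placeholders, not values from the source project.

import boto3

# Hypothetical values, for illustration only.
s3_client = boto3.client('s3')
download_S3_Bucket(s3_client,
                   str_bucket_name='my-example-bucket',
                   str_bucket_path='reports/2020',
                   str_local_relative_target='./downloads')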
Example #2
File: log.py Project: zach-oliver/ref
    def append(self, str_log_line):
        create_Folders_Along_Path(self.loc)

        log_line = str(self.FUNCTION) + str(self.SEPARATOR) + str(str_log_line)

        if self.DEBUG:
            print(log_line)
        with open(self.loc, "a") as f:
            f.write(log_line)
            f.write("\n")
Example #3
import boto3
import botocore


def download_S3_Object(str_bucket_name,
                       str_bucket_object_key,
                       str_local_dir,
                       DEBUG=False):
    if DEBUG:
        print('aws_s3_functions.py |:| download_S3_Object: START')
        print('aws_s3_functions.py |:| download_S3_Object: str_bucket_name=%s' %
              str(str_bucket_name))
        print('aws_s3_functions.py |:| download_S3_Object: str_bucket_object_key=%s' %
              str(str_bucket_object_key))
        print('aws_s3_functions.py |:| download_S3_Object: str_local_dir=%s' %
              str(str_local_dir))
    else:
        print('download_S3_Object: s3://%s/%s |:| %s' %
              (str(str_bucket_name), str(str_bucket_object_key), str(str_local_dir)))

    s3_bucket = boto3.resource('s3').Bucket(str_bucket_name)

    if DEBUG:
        print('aws_s3_functions.py |:| download_S3_Object: s3 RESOURCE INSTANTIATED')

    try:
        if DEBUG:
            print('aws_s3_functions.py |:| download_S3_Object |:| os_functions.py |:| create_Folders_Along_Path(str_local_dir)')
        create_Folders_Along_Path(str_local_dir)
        if DEBUG:
            print('aws_s3_functions.py |:| download_S3_Object |:| s3_bucket.download_file')
        s3_bucket.download_file(str_bucket_object_key, str_local_dir)
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == "404":
            if DEBUG:
                print('aws_s3_functions.py |:| download_S3_Object: s3 OBJECT DOESNT EXIST')
                print('aws_s3_functions.py |:| download_S3_Object: FINISH')
            return False
        else:
            # Any other client error is re-raised to the caller.
            raise

    if DEBUG:
        print('aws_s3_functions.py |:| download_S3_Object: s3 BUCKET OBJECT CREATED')
        print('aws_s3_functions.py |:| download_S3_Object: FINISH')

    return True
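A hedged usage sketch for download_S3_Object; the bucket, object key, and local target path are placeholders, and the function already prints its own progress messages.

# Hypothetical call: fetches s3://my-example-bucket/data/report.csv into
# ./downloads/report.csv and returns False when the key does not exist.
ok = download_S3_Object('my-example-bucket',
                        'data/report.csv',
                        './downloads/report.csv',
                        DEBUG=True)
if not ok:
    print('object not found')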
Example #4
File: log.py Project: zach-oliver/ref
    def write(self, log_line):
        create_Folders_Along_Path(self.loc)

        with open(self.loc, "a") as f:
            f.write(log_line)
            f.write("\n")
Example #5
def df_export_csv(df, path, include_index=False):
    create_Folders_Along_Path(path)
    df.to_csv(path, index=include_index)
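Every example above relies on create_Folders_Along_Path (apparently defined in the project's os_functions.py), which this listing never shows. A minimal sketch of what such a helper presumably does is given below; the file-versus-folder heuristic is a guess, not the project's actual logic.

import os

def create_Folders_Along_Path(str_path):
    # Ensure every folder component of str_path exists.
    # Assumption: a final component containing a dot is treated as a file
    # name, so only its parent directories are created.
    last_component = os.path.basename(str_path.rstrip('/'))
    dir_path = os.path.dirname(str_path.rstrip('/')) if '.' in last_component else str_path
    if dir_path:
        os.makedirs(dir_path, exist_ok=True)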