Example 1
from boto3.session import Session


def put_recipe(bucket, keyname):
    """Store an S3 object whose body is simply its own key name."""
    s3 = Session().resource('s3')
    bucket = s3.Bucket(bucket)
    obj = bucket.Object(keyname)
    body = keyname
    response = obj.put(Body=body.encode('utf-8'),
                       ContentEncoding='utf-8',
                       ContentType='text/plain')
    return response
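A minimal usage sketch of the function above; the bucket and key names are placeholders and the default AWS credential chain is assumed. Reading the object back uses the same resource API and the Session import shown above:

put_recipe('my-recipe-bucket', 'pancakes')  # placeholder bucket and key

# Read the object back; get() returns a dict whose 'Body' is a streaming body.
obj = Session().resource('s3').Object('my-recipe-bucket', 'pancakes')
print(obj.get()['Body'].read().decode('utf-8'))  # -> 'pancakes'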
Example 2
from boto3.session import Session


def put_img_list(bucket, img_list):
    """Join a list of image names and store it as one comma-separated S3 object."""
    s3 = Session().resource('s3')
    bucket = s3.Bucket(bucket)
    keyname = cfg['KEY']  # key name comes from module-level configuration
    #  keyname = "{0:%Y-%m-%d}".format(datetime.today())
    obj = bucket.Object(keyname)
    body = ','.join(img_list)
    response = obj.put(
            Body=body.encode('utf-8'),
            ContentEncoding='utf-8',
            ContentType='text/plain'
            )
    return response
Example 3
from boto3.session import Session


class S3Driver(Driver):  # the Driver base class comes from the surrounding module
    def _setup(self, driver_conf):
        self._client = Session(
            aws_access_key_id=driver_conf.access_key,
            aws_secret_access_key=driver_conf.secret_key,
        ).resource('s3', endpoint_url=driver_conf.endpoint)
        try:
            # Create the bucket if it does not exist yet; errors such as
            # "bucket already exists" on repeated setups are deliberately ignored.
            self._client.Bucket(self._bucket).create()
        except Exception:
            pass

    def _put(self, bucket, key, data):
        # `data` is expected to be a binary file-like object.
        s3_client = self._client.Object(bucket, key)
        s3_client.upload_fileobj(data)
        return True

    def _get(self, bucket, key, output):
        # Stream the object's contents into the `output` file-like object.
        s3_client = self._client.Object(bucket, key)
        s3_client.download_fileobj(output)
        return True
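A sketch of how this driver might be exercised. The configuration object and its values are assumptions (any object exposing access_key, secret_key and endpoint would do), and _setup/_put/_get are called directly here only for illustration; in the original they are presumably invoked through the Driver base class:

import io
from types import SimpleNamespace

driver = S3Driver()                      # assumes the base class needs no constructor args here
driver._bucket = 'driver-test-bucket'    # placeholder; normally set elsewhere
driver._setup(SimpleNamespace(
    access_key='placeholder-access-key',
    secret_key='placeholder-secret-key',
    endpoint='http://localhost:9000',    # e.g. a local S3-compatible endpoint
))

driver._put('driver-test-bucket', 'hello.txt', io.BytesIO(b'hello'))

out = io.BytesIO()
driver._get('driver-test-bucket', 'hello.txt', out)
print(out.getvalue())                    # b'hello'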
Example 4
import argparse
import os
import sys

from boto3.session import Session

# check_exists, upload, unzipped_args and zipped_args are defined elsewhere in
# the original script; a possible check_exists is sketched after this example.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', '-v', required=True,
                        help='Version to deploy')
    args = parser.parse_args()
    s3 = Session(
        aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY'),
        region_name=os.environ.get('AWS_REGION'),
    ).resource('s3')
    bucket_name = os.environ.get('S3_BUCKET_NAME')
    bucket = s3.Bucket(bucket_name)

    files = [
        f'amplitude-{args.version}.js',
        f'amplitude-{args.version}-min.js',
        f'amplitude-{args.version}.umd.js',
        f'amplitude-{args.version}-min.umd.js'
    ]
    for file in files:
        # Refuse to overwrite an already-published artifact.
        if check_exists(s3.Object(bucket_name, os.path.join('libs', file))):
            sys.exit(f"ERROR: {file} already exists and shouldn't be republished. Consider releasing a new version")
        print(f'Uploading {file}')
        upload(bucket, file, unzipped_args)

    gz_files = [
        f'amplitude-{args.version}-min.gz.js',
        f'amplitude-{args.version}-min.umd.gz.js'
    ]
    for file in gz_files:
        if check_exists(s3.Object(bucket_name, file)):
            sys.exit(f'{file} already exists!')
        print(f'Uploading {file}')
        upload(bucket, file, zipped_args)

    print(f'Success: S3 upload completed. Example: https://cdn.amplitude.com/libs/amplitude-{args.version}.js')
    return 0
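The check_exists helper is not shown in this example. One possible implementation against the boto3 resource API, offered as a sketch rather than the original code:

from botocore.exceptions import ClientError


def check_exists(s3_object):
    """Return True if the given boto3 Object already exists in the bucket (sketch)."""
    try:
        s3_object.load()  # issues a HEAD request for the object's metadata
        return True
    except ClientError as err:
        if err.response['Error']['Code'] == '404':
            return False
        raise  # propagate unexpected errors (permissions, networking, ...)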
Example 5
from boto3.session import Session


def get_recipe_list(bucket):
    """Return the key of every object in the given bucket."""
    s3 = Session().resource('s3')
    bucket = s3.Bucket(bucket)
    return [obj.key for obj in bucket.objects.all()]
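Note that bucket.objects.all() pages through the listing automatically, so this also works for buckets holding more than 1000 objects. If only a subset of keys is needed, the same collection can be filtered by prefix; a small variation on the function above (the function name and prefix value are just examples):

def get_recipe_list_by_prefix(bucket, prefix):
    """Return only the keys that start with the given prefix."""
    s3 = Session().resource('s3')
    return [obj.key for obj in s3.Bucket(bucket).objects.filter(Prefix=prefix)]

#  get_recipe_list_by_prefix('my-recipe-bucket', 'desserts/')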
Example 6
import shutil
from glob import glob
from os.path import abspath, basename, join
from typing import Dict, Tuple

from boto3.session import Session

# ArchiveDiffer (the base class) and the Files type alias are defined elsewhere
# in the module this excerpt comes from.


class S3ArchiveDiffer(ArchiveDiffer):
    """
    AWS S3 backend for archiving.

    Archives CSV files into an S3 bucket, with keys "{indicator_prefix}/{csv_file_name}".
    Ideally, versioning should be enabled in this bucket to track versions of each CSV file.
    """
    def __init__(
        self,
        cache_dir: str,
        export_dir: str,
        bucket_name: str,
        indicator_prefix: str,
        aws_credentials: Dict[str, str],
    ):
        """
        Initialize an S3ArchiveDiffer.

        See this link for possible aws_credentials kwargs:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session

        Parameters
        ----------
        cache_dir: str
            The directory storing the most recently archived/uploaded CSVs, used as the baseline to diff from.
            Usually 'cache'.
        export_dir: str
            The directory containing the most recently exported CSVs to diff against.
            Usually 'receiving'.
        bucket_name: str
            The S3 bucket to upload files to.
        indicator_prefix: str
            The prefix for S3 keys related to this indicator.
        aws_credentials: Dict[str, str]
            kwargs to create a boto3.Session, containing AWS credentials/profile to use.
        """
        super().__init__(cache_dir, export_dir)
        self.s3 = Session(**aws_credentials).resource("s3")
        self.bucket = self.s3.Bucket(bucket_name)
        self.indicator_prefix = indicator_prefix

    def update_cache(self):
        """Make sure cache_dir is updated with all latest files from the S3 bucket."""
        # List all indicator-related objects from S3
        archive_objects = self.bucket.objects.filter(
            Prefix=self.indicator_prefix).all()
        archive_objects = [
            obj for obj in archive_objects if obj.key.endswith(".csv")
        ]

        # Check against what we have locally and download missing ones
        cached_files = set(
            basename(f) for f in glob(join(self.cache_dir, "*.csv")))
        for obj in archive_objects:
            archive_file = basename(obj.key)
            cached_file = join(self.cache_dir, archive_file)

            if archive_file not in cached_files:
                print(f"Updating cache with {cached_file}")
                obj.Object().download_file(cached_file)

        self._cache_updated = True

    def archive_exports(
            self,  # pylint: disable=arguments-differ
            exported_files: Files,
            update_cache: bool = True,
            update_s3: bool = True) -> Tuple[Files, Files]:
        """
        Handle actual archiving of files to the S3 bucket.

        Parameters
        ----------
        exported_files: Files
            List of files to be archived. Usually new and changed files.
        update_cache: bool
            Whether to also copy the archived files into the local cache directory.
        update_s3: bool
            Whether to upload the archived files to the S3 bucket.

        Returns
        -------
        (successes, fails): Tuple[Files, Files]
            successes: List of successfully archived files
            fails: List of unsuccessfully archived files
        """
        archive_success = []
        archive_fail = []

        for exported_file in exported_files:
            cached_file = abspath(join(self.cache_dir,
                                       basename(exported_file)))
            archive_key = join(self.indicator_prefix, basename(exported_file))

            try:
                if update_cache:
                    # Update local cache
                    shutil.copyfile(exported_file, cached_file)

                if update_s3:
                    self.bucket.Object(archive_key).upload_file(exported_file)

                archive_success.append(exported_file)
            except FileNotFoundError:
                archive_fail.append(exported_file)

        self._exports_archived = True

        return archive_success, archive_fail
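To illustrate the intended call sequence: update the local cache from S3 first, then archive the newly exported files. The values below are placeholders chosen to match the constructor's docstring, not the original deployment:

arch_diff = S3ArchiveDiffer(
    cache_dir='cache',                    # placeholder paths and names
    export_dir='receiving',
    bucket_name='my-indicator-archive',
    indicator_prefix='my_indicator',
    aws_credentials={'profile_name': 'default'},  # any boto3.session.Session kwargs
)

arch_diff.update_cache()                  # pull the latest archived CSVs locally
exported = ['receiving/20200101_state_cases.csv']  # hypothetical exported file
successes, fails = arch_diff.archive_exports(exported)
print(successes, fails)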