Example #1
def upload(bucket, files):
    for filename, local_path, s3_key in files:
        logger.info("Uploading %s to s3://%s/%s" % (local_path, bucket, s3_key))
        extra_args = {}
        mime_type = mimetypes.guess_type(filename)[0]
        if mime_type is not None:
            extra_args["ContentType"] = mime_type
        s3_client.upload_file(local_path, bucket, s3_key, ExtraArgs=extra_args)
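
upload() expects an iterable of (filename, local_path, s3_key) triples, but none of the excerpts show how that list is produced. A minimal sketch, assuming the triples are built by walking the extraction directory (build_file_list is a hypothetical helper, not part of the original code):

import os

def build_file_list(dest, s3_prefix):
    # Hypothetical helper: walk the extracted tree and emit the
    # (filename, local_path, s3_key) triples that upload() expects.
    files = []
    for root, _dirs, names in os.walk(dest):
        for name in names:
            local_path = os.path.join(root, name)
            rel_path = os.path.relpath(local_path, dest)
            files.append((name, local_path, os.path.join(s3_prefix, rel_path)))
    return files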
Example #2
def cleanup_delete_files(s3_target, files):
    s3_target_bucket, s3_target_key = parse_s3_url(s3_target)

    objects = []
    for filename in files:
        logger.info("Deleting %s" % filename)
        objects.append({"Key": os.path.join(s3_target_key, filename)})

    result = s3_client.delete_objects(Bucket=s3_target_bucket,
                                      Delete={"Objects": objects})
    if "Errors" in result and len(result["Errors"]) > 0:
        logger.warn("Errors during delete: %s" % result["Errors"])
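
parse_s3_url is used throughout these snippets but is not among the excerpts. A minimal sketch, assuming URLs of the form s3://bucket/key:

from urllib.parse import urlparse

def parse_s3_url(s3_url):
    # Hypothetical reconstruction: split "s3://bucket/key" into (bucket, key).
    parsed = urlparse(s3_url)
    if parsed.scheme != "s3":
        raise ValueError("Not an S3 URL: %s" % s3_url)
    return parsed.netloc, parsed.path.lstrip("/")

Note that S3's delete_objects accepts at most 1,000 keys per request, so cleanup_delete_files as written assumes batches stay below that limit.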
Example #3
def create_invalidation(distribution_id):
    logger.info("Creating CloudFront invalidation for %s" % distribution_id)

    ref = str(int(time.time() * 1000))
    cf_client.create_invalidation(
        DistributionId=distribution_id,
        InvalidationBatch={
            "Paths": {
                "Quantity": 1,
                "Items": ["/*"]
            },
            "CallerReference": ref,
        },
    )
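
create_invalidation returns immediately; CloudFront processes the invalidation asynchronously, and the millisecond CallerReference keeps each request unique. If the caller needed to block until completion, boto3 ships a waiter for that. A sketch of such a variant (assumed, not in the original code; relies on the cf_client and time imports from the shared setup):

def create_invalidation_and_wait(distribution_id):
    # Hypothetical variant that blocks until CloudFront reports the
    # invalidation as completed.
    ref = str(int(time.time() * 1000))
    response = cf_client.create_invalidation(
        DistributionId=distribution_id,
        InvalidationBatch={
            "Paths": {"Quantity": 1, "Items": ["/*"]},
            "CallerReference": ref,
        },
    )
    cf_client.get_waiter("invalidation_completed").wait(
        DistributionId=distribution_id,
        Id=response["Invalidation"]["Id"],
    )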
Example #4
def process(s3_artifact):
    deploy_items = fetch_deployment_log(deploy_log_bucket_url)

    deploy_items = deploy_items + deploy(s3_artifact, target_bucket_url,
                                         exclude_pattern)

    older_than = int(time.time()) - expire_seconds
    deploy_items = cleanup(deploy_items, target_bucket_url, older_than)

    store_deployment_log(deploy_items, deploy_log_bucket_url)

    if cf_distribution_id is not None:
        create_invalidation(cf_distribution_id)

    logger.info("All done")
Example #5
def fetch_deployment_log(s3_deployments_log):
    logger.info("Fetching deployments log from %s" % s3_deployments_log)
    try:
        bucket, key = parse_s3_url(s3_deployments_log)
        res = s3_client.get_object(Bucket=bucket, Key=key)
        body = res["Body"]
        result = []
        with closing(body):
            for line in body.iter_lines():
                result.append(DeployItem.from_line(line.decode("utf-8")))
        return result

    except ClientError as e:
        # Ignore "NoSuchKey": on the first run the log does not exist yet.
        if e.response["Error"]["Code"] != "NoSuchKey":
            raise

        return []
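
fetch_deployment_log and store_deployment_log round-trip DeployItem records through a line-oriented text file, but the class itself is not among the excerpts. A hypothetical reconstruction, assuming a tab-separated timestamp and filename:

from dataclasses import dataclass

@dataclass
class DeployItem:
    timestamp: int
    filename: str

    @classmethod
    def from_line(cls, line):
        # Parse one log line back into an item.
        timestamp, filename = line.rstrip("\n").split("\t", 1)
        return cls(int(timestamp), filename)

    def to_line(self):
        # Serialize with a trailing newline so joined lines stay separated.
        return "%d\t%s\n" % (self.timestamp, self.filename)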
Example #6
def extract(artifact_s3_url, source, dest, exclude_pattern):
    pattern_compiled = (re.compile(exclude_pattern)
                        if exclude_pattern is not None else None)

    def handle_file(container_path, container_extract):
        # Strip only a leading "./"; lstrip("./") would also eat the leading
        # dot of names like ".htaccess".
        name = container_path[2:] if container_path.startswith("./") else container_path
        if pattern_compiled is not None and pattern_compiled.search(name):
            logger.info("Skipping %s" % name)
        else:
            container_extract(container_path, path=dest + "/")
            logger.info("Extracted %s" % name)

    logger.info("Extracting archive")

    if artifact_s3_url.endswith(".zip"):
        logger.info("Found zip file")
        with zipfile.ZipFile(source, "r") as zip_file:
            for name in zip_file.namelist():
                handle_file(name, zip_file.extract)

    elif artifact_s3_url.endswith(".tgz"):
        logger.info("Found tgz file")
        with tarfile.open(source, "r:gz") as tar:
            for tar_resource in tar:
                if tar_resource.isfile():
                    handle_file(tar_resource.name, tar.extract)

    else:
        raise ValueError(f"Unsupported extension: {artifact_s3_url}")
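
deploy(), called from process(), is also missing from the listing. A sketch of how it could chain get_artifact, extract, and upload, reusing the hypothetical build_file_list and DeployItem sketched above:

import os
import tempfile
import time

def deploy(s3_artifact, s3_target, exclude_pattern):
    # Hypothetical orchestration: download, extract, upload, and return
    # one DeployItem per uploaded file.
    bucket, prefix = parse_s3_url(s3_target)
    with tempfile.TemporaryDirectory() as tmp:
        local_artifact = os.path.join(tmp, os.path.basename(s3_artifact))
        get_artifact(s3_artifact, local_artifact)

        dest = os.path.join(tmp, "extracted")
        os.makedirs(dest)
        extract(s3_artifact, local_artifact, dest, exclude_pattern)

        files = build_file_list(dest, prefix)
        upload(bucket, files)

    now = int(time.time())
    # Record paths relative to the target prefix so cleanup_delete_files
    # can rebuild the S3 keys later.
    return [DeployItem(now, os.path.relpath(local_path, dest))
            for _, local_path, _ in files]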
Example #7
def get_artifact(s3_artifact, local_path):
    bucket, key = parse_s3_url(s3_artifact)

    logger.info("Downloading %s" % s3_artifact)
    s3_client.download_file(bucket, key, local_path)
Example #8
def handler(event, context):
    logger.info("Handler was called - starting processing")
    process(event["artifactS3Url"])
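
handler is the Lambda entry point and only reads artifactS3Url from the event. An illustrative local invocation (values assumed):

# Values are illustrative; the context argument is unused by handler.
handler({"artifactS3Url": "s3://my-artifacts/site-build.zip"}, None)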
Example #9
def store_deployment_log(items, s3_deployments_log):
    logger.info("Uploading deployments log to %s" % s3_deployments_log)
    bucket, key = parse_s3_url(s3_deployments_log)
    s3_client.put_object(Bucket=bucket,
                         Key=key,
                         Body="".join([i.to_line() for i in items]))