def upload(config_file):
    bucket, config = read_static_file_config(config_file)

    # upload local files not already in the bucket
    for root, dirs, files in os.walk(config["static_root"]):
        for file in files:
            absolute_path = os.path.join(root, file)

            # The key name mirrors the file's path relative to the static root.
            key_name = os.path.relpath(absolute_path,
                                       start=config["static_root"])

            # Skip files whose MIME type can't be guessed from the extension.
            content_type, encoding = mimetypes.guess_type(file)
            if not content_type:
                continue

            # Send the MIME type plus a never-expire Expires header so
            # browsers can cache the file indefinitely.
            headers = {}
            headers['Expires'] = NEVER
            headers['Content-Type'] = content_type
            if encoding:
                headers['Content-Encoding'] = encoding

            # Compare the local file's MD5 with the ETag of any existing copy;
            # if they match, the file is already up to date on S3.
            existing_key = bucket.get_key(key_name)
            key = bucket.new_key(key_name)
            with open(absolute_path, 'rb') as f:
                etag, base64_tag = key.compute_md5(f)

                if existing_key and existing_key.etag.strip('"') == etag:
                    continue

                print "uploading", key_name, "to S3..."
                key.set_contents_from_file(
                    f,
                    headers=headers,
                    policy='public-read',
                    md5=(etag, base64_tag),
                )
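This example (and the cleanup example below) leans on helpers defined elsewhere in the module, notably read_static_file_config() and the NEVER expiry constant. The sketch that follows is one plausible shape for them, assuming an INI-style config file and the boto 2 API the examples already call; the [static_files] section name and its option names are made up for illustration.

import configparser

import boto

# Hypothetical far-future Expires value; the real constant lives elsewhere.
NEVER = "Thu, 31 Dec 2037 23:59:59 GMT"

def read_static_file_config(config_file):
    # Assumed layout: one [static_files] section holding the AWS credentials,
    # the bucket name, the local static_root, and ignored_prefixes.
    parser = configparser.ConfigParser()
    parser.read(config_file)
    config = dict(parser.items("static_files"))

    connection = boto.connect_s3(config["aws_access_key_id"],
                                 config["aws_secret_access_key"])
    bucket = connection.get_bucket(config["bucket_name"])
    return bucket, config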
Example #2
def clean_static_files(config_file):
    bucket, config = read_static_file_config(config_file)
    ignored_prefixes = tuple(p.strip() for p in
                             config["ignored_prefixes"].split(","))

    plugins = PluginLoader()

    # Collect every static file the site still references, both files copied
    # from the source tree and files produced by the build.
    reachable_files = itertools.chain(
        get_source_static_files(plugins),
        get_generated_static_files(),
    )

    # Start from every sufficiently old file on S3, then spare anything the
    # site still reaches.
    condemned_files = get_mature_files_on_s3(bucket)
    for reachable_file in reachable_files:
        if reachable_file in condemned_files:
            del condemned_files[reachable_file]

    # Whatever is left is unreachable; delete it unless it sits under an
    # ignored prefix.
    for filename, key in condemned_files.items():
        if not filename.startswith(ignored_prefixes):
            key.delete()
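clean_static_files() also assumes a get_mature_files_on_s3() helper that maps key names to boto keys old enough to delete safely. A minimal sketch, assuming boto 2's bucket.list() and a made-up one-hour grace period so files uploaded mid-deploy are never condemned:

from datetime import datetime, timedelta

def get_mature_files_on_s3(bucket, grace_period=timedelta(hours=1)):
    # Hypothetical helper: return {key name: key} for every object whose
    # last_modified timestamp is older than the grace period.
    cutoff = datetime.utcnow() - grace_period
    mature = {}
    for key in bucket.list():
        # boto 2 reports last_modified as an ISO 8601 string,
        # e.g. "2013-07-10T22:45:17.000Z".
        last_modified = datetime.strptime(key.last_modified,
                                          "%Y-%m-%dT%H:%M:%S.%fZ")
        if last_modified < cutoff:
            mature[key.name] = key
    return mature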