# Example 1
# 0
def upload_compiled_js_to_s3(local_path, s3_path):
    with file(local_path, 'rb') as handle:
        raw_filedata = handle.read()

    filedata = gzip_string(raw_filedata)

    headers = {
        'Cache-Control': 'max-age=315360000, public',
        'Expires': 'Thu, 31 Dec 2037 23:55:55 GMT',
        'Content-Encoding': 'gzip',
        'Content-Type': 'text/javascript',
    }

    conn = S3Connection(*aws)
    bucket = conn.get_bucket(Config['compress_bucket'])

    key = Key(bucket)
    key.key = s3_path
    try:
        if key.exists():
            print "Skipping", s3_path, " already exists."
        else:
            print "Uploading %s (%s kb)" % (s3_path, len(filedata) // 1024)
            key.set_contents_from_string(filedata, headers=headers)
    except BotoServerError, bse:
        print bse
# Example 2
# 0
def upload_compressed_files_to_s3(sender, type, mode, context, **kwargs):
    if mode != "file":
        return

    if not settings.COMPRESS_OFFLINE:
        return

    url = context['url']

    path = urlparse.urlparse(url).path

    source_name = "/var/canvas/website/" + path
    destination_name = path

    with file(source_name, 'rb') as handle:
        raw_filedata = handle.read()

    filedata = gzip_string(raw_filedata)

    content_types = {
        'css': 'text/css',
        'js': 'text/javascript',
    }

    content_type = content_types[type]

    headers = {
        "Cache-Control": "max-age=315360000, public",
        "Expires": "Thu, 31 Dec 2037 23:55:55 GMT",
        "Content-Encoding": "gzip",
        'Content-Type': content_type,
    }

    conn = S3Connection(*aws)
    bucket = conn.get_bucket(Config['compress_bucket'])

    key = Key(bucket)
    key.key = destination_name
    try:
        if key.exists():
            print "Skipping", destination_name, " already exists."
        else:
            print "Uploading %s (%s kb)" % (destination_name,
                                            len(filedata) // 1024)
            key.set_contents_from_string(filedata, headers=headers)
    except BotoServerError, bse:
        print bse
# NOTE(review): this is a byte-for-byte duplicate of the
# upload_compressed_files_to_s3 definition earlier in this file; at import
# time this later definition shadows the earlier one. Confirm whether the
# duplication is intentional before removing either copy.
def upload_compressed_files_to_s3(sender, type, mode, context, **kwargs):
    """Signal handler: gzip a compressed asset file and mirror it to S3.

    Acts only on "file"-mode signals and only when offline compression is
    enabled; the asset's URL path doubles as its S3 key. The upload is
    skipped when the key already exists, and any BotoServerError is
    printed rather than propagated (best-effort).
    """
    if mode != "file":
        return

    if not settings.COMPRESS_OFFLINE:
        return

    url = context['url']

    # The URL path is reused both to locate the file on disk and as the
    # destination key in the bucket.
    path = urlparse.urlparse(url).path

    source_name = "/var/canvas/website/" + path
    destination_name = path

    with file(source_name, 'rb') as handle:
        raw_filedata = handle.read()

    filedata = gzip_string(raw_filedata)

    # Map the signal's asset type onto an HTTP content type
    # (raises KeyError for any type other than 'css'/'js').
    content_types = {
        'css': 'text/css',
        'js': 'text/javascript',
    }

    content_type = content_types[type]

    # Far-future cache headers: uploaded content is treated as immutable.
    headers = {
        "Cache-Control": "max-age=315360000, public",
        "Expires": "Thu, 31 Dec 2037 23:55:55 GMT",
        "Content-Encoding": "gzip",
        'Content-Type': content_type,
    }

    conn = S3Connection(*aws)
    bucket = conn.get_bucket(Config['compress_bucket'])

    key = Key(bucket)
    key.key = destination_name
    try:
        if key.exists():
            print "Skipping", destination_name, " already exists."
        else:
            print "Uploading %s (%s kb)" % (destination_name, len(filedata) // 1024)
            key.set_contents_from_string(filedata, headers=headers)
    except BotoServerError, bse:
        print bse