def replacer(match):
    """Replaces a static asset reference with an inline base64 data: URI.

    Expects a regex match whose groups are (before, url_prefix, rel_path,
    after).  Returns the original match text untouched when the file on
    disk or the encoded data URI would be too large; otherwise counts the
    asset in seen_assets and returns the reference with the data URI
    spliced in.  (Closure: relies on self, seen_assets, MAX_FILE_SIZE,
    MAX_DATA_URI_SIZE and KB from the enclosing scope.)
    """
    before, url_prefix, rel_path, after = match.groups()
    path = make_absolute_static_path(self.settings['static_dir'], rel_path)
    assert os.path.isfile(path), (path, str(self))
    if os.stat(path).st_size > MAX_FILE_SIZE:
        logging.debug('Not inlining %s (%.2fKB)', path,
                      os.stat(path).st_size / KB)
        return match.group(0)
    else:
        # Read in binary mode and close the handle promptly; the previous
        # code leaked the file object and used text-mode reads, which can
        # corrupt binary assets on some platforms.
        with open(path, 'rb') as f:
            encoded = base64.b64encode(f.read())
        mime_type, _ = mimetypes.guess_type(path)
        if not mime_type:
            # Fallbacks for extensions the mimetypes registry may not know.
            # Case-sensitive on purpose, matching the original endswith()
            # checks.
            fallback_mime_types = {
                '.otf': 'application/octet-stream',
                '.ttf': 'font/ttf',
                '.eot': 'application/vnd.ms-fontobject',
                '.woff': 'application/x-font-woff',
                '.json': 'application/json',
                '.svg': 'image/svg+xml',
            }
            _, ext = os.path.splitext(path)
            mime_type = fallback_mime_types.get(ext)
        data_uri = 'data:%s;base64,%s' % (mime_type, encoded)
        if len(data_uri) >= MAX_DATA_URI_SIZE:
            logging.debug('Not inlining %s (%.2fKB encoded)', path,
                          len(data_uri) / KB)
            return match.group(0)
        seen_assets['%s%s' % (url_prefix, rel_path)] += 1
        return ''.join([before, data_uri, after])
def iter_static_deps(static_dir, src_path, static_url_prefix):
    """Yields static resources (ie, image files) from the given source path.

    Scans the contents of src_path with static_finder and yields the
    absolute path of each referenced static file that exists under
    static_dir; missing references are logged and skipped.
    """
    assert os.path.isfile(src_path), src_path
    # Read through a context manager so the file handle is closed promptly
    # (the original code leaked the open file object).
    with open(src_path) as f:
        src = f.read()
    for match in static_finder(src, static_url_prefix):
        dep_path = make_absolute_static_path(static_dir, match.group(2))
        if os.path.isfile(dep_path):
            yield dep_path
        else:
            # logging.warning is the non-deprecated spelling of warn();
            # same output.
            logging.warning('Missing dep %s (src: %s)', dep_path, src_path)
def upload_assets_to_s3(manifest, settings, skip_s3_upload=False): """Uploads any assets that are in the given manifest and in our compiled output dir but missing from our static assets bucket to that bucket on S3. """ # We will gather a set of (file_name, file_path) tuples to be uploaded to_upload = set() # We know we want to upload each asset block (these correspond to the # assetman.include_* blocks in each template) for depspec in manifest.blocks.itervalues(): file_name = depspec['versioned_path'] file_path = make_output_path(settings['compiled_asset_root'], file_name) assert os.path.isfile( file_path), 'Missing compiled asset %s' % file_path to_upload.add((file_name, file_path)) # And we know that we'll want to upload any statically-referenced assets # (from assetman.static_url calls or referenced in any compiled assets), # but we'll need to filter out other entries in the complete 'assets' # block of the manifest. should_skip = re.compile(r'\.(scss|less|css|js|html)$', re.I).search for rel_path, depspec in manifest.assets.iteritems(): if should_skip(rel_path): continue file_path = make_absolute_static_path(settings['static_dir'], rel_path) assert os.path.isfile(file_path), 'Missing static asset %s' % file_path file_name = depspec['versioned_path'] to_upload.add((file_name, file_path)) logging.info('Found %d assets to upload to S3', len(to_upload)) if skip_s3_upload: logging.info('Skipping asset upload to S3 %s', to_upload) return to_upload # Upload assets to S3 using 5 threads queue = Queue.Queue() errors = [] for i in xrange(5): uploader = S3UploadThread(queue, errors, manifest, settings) uploader.setDaemon(True) uploader.start() map(queue.put, to_upload) queue.join() if errors: raise Exception(errors) return to_upload
def replacer(match):
    """Replaces a static asset reference with an inline base64 data: URI.

    Expects a regex match whose groups are (before, url_prefix, rel_path,
    after).  Returns the original match text untouched when the file on
    disk or the encoded data URI would be too large; otherwise counts the
    asset in seen_assets and returns the reference with the data URI
    spliced in.  (Closure: relies on self, seen_assets, MAX_FILE_SIZE,
    MAX_DATA_URI_SIZE and KB from the enclosing scope.)
    """
    before, url_prefix, rel_path, after = match.groups()
    path = make_absolute_static_path(self.settings['static_dir'], rel_path)
    assert os.path.isfile(path), (path, str(self))
    if os.stat(path).st_size > MAX_FILE_SIZE:
        logging.debug('Not inlining %s (%.2fKB)', path,
                      os.stat(path).st_size / KB)
        return match.group(0)
    else:
        # Read in binary mode and close the handle promptly; the previous
        # code leaked the file object and used text-mode reads, which can
        # corrupt binary assets on some platforms.
        with open(path, 'rb') as f:
            encoded = base64.b64encode(f.read())
        # NOTE(review): guess_type may return None, producing a
        # 'data:None;base64,...' URI — the L1 variant of this function adds
        # fallbacks; confirm whether this copy should too.
        mime, _ = mimetypes.guess_type(path)
        data_uri = 'data:%s;base64,%s' % (mime, encoded)
        if len(data_uri) >= MAX_DATA_URI_SIZE:
            logging.debug('Not inlining %s (%.2fKB encoded)', path,
                          len(data_uri) / KB)
            return match.group(0)
        seen_assets['%s%s' % (url_prefix, rel_path)] += 1
        return ''.join([before, data_uri, after])
def upload_assets_to_s3(manifest, settings, skip_s3_upload=False):
    """Uploads any assets that are in the given manifest and in our compiled
    output dir but missing from our static assets bucket to that bucket on S3.

    Gathers (file_name, file_path) tuples from the manifest's compiled
    blocks and from its non-source static assets, then pushes them through
    a small pool of S3UploadThread workers.  Returns the gathered set.
    """
    to_upload = set()

    # Every compiled block (one per assetman.include_* call) must exist on
    # disk and gets uploaded under its versioned name.
    for depspec in manifest.blocks.itervalues():
        versioned = depspec['versioned_path']
        compiled_path = make_output_path(settings['compiled_asset_root'], versioned)
        assert os.path.isfile(compiled_path), 'Missing compiled asset %s' % compiled_path
        to_upload.add((versioned, compiled_path))

    # Statically referenced assets also go up, but source/template entries
    # in the manifest's 'assets' map are filtered out by extension.
    skip_pattern = re.compile(r'\.(scss|less|css|js|html)$', re.I)
    for rel_path, depspec in manifest.assets.iteritems():
        if skip_pattern.search(rel_path):
            continue
        static_path = make_absolute_static_path(settings['static_dir'], rel_path)
        assert os.path.isfile(static_path), 'Missing static asset %s' % static_path
        to_upload.add((depspec['versioned_path'], static_path))

    logging.info('Found %d assets to upload to S3', len(to_upload))
    if skip_s3_upload:
        logging.info('Skipping asset upload to S3 %s', to_upload)
        return to_upload

    # Fan the work out to five daemon worker threads; workers report
    # failures by appending to the shared errors list.
    queue = Queue.Queue()
    errors = []
    for _ in xrange(5):
        worker = S3UploadThread(queue, errors, manifest, settings)
        worker.setDaemon(True)
        worker.start()
    map(queue.put, to_upload)
    queue.join()
    if errors:
        raise Exception(errors)
    return to_upload
def iter_template_deps(static_dir, src_path, static_url_prefix):
    """Yields static resources included as {{assetman.static_url()}} calls in
    the given source path, which should be a Tornado template.

    Raises ParseError if a static_url call's argument is not a quoted
    string literal.  Missing referenced files are logged and skipped.

    TODO: need one of these for every supported template language?
    """
    # Read through a context manager so the file handle is closed promptly
    # (the original code leaked the open file object).
    with open(src_path) as f:
        src = f.read()
    for match in static_url_call_finder(src):
        arg = match.group(1)
        quotes = '\'"'
        if arg[0] not in quotes or arg[-1] not in quotes:
            # Only literal string arguments can be resolved at compile time.
            msg = 'Vars not allowed in static_url calls: %s' % match.group(0)
            raise ParseError(src_path, msg)
        else:
            dep_path = make_absolute_static_path(static_dir, arg.strip(quotes))
            if os.path.isfile(dep_path):
                yield dep_path
            else:
                # logging.warning is the non-deprecated spelling of warn();
                # same output.
                logging.warning('Missing dep %s (src: %s)', dep_path, src_path)
def version_dependency(path, manifest):
    """A dependency's version is calculated using this recursive formula:

        version = md5(md5(path_contents) + version(deps))

    So, the version of a path is based on the hash of its own file contents
    as well as those of each of its dependencies.  Computed versions are
    memoized back into the manifest entry, so each path is hashed once.
    """
    assert path in manifest.assets, path
    entry = manifest.assets[path]
    # Already computed on an earlier visit — reuse it.
    if entry['version']:
        return entry['version']
    digest = hashlib.md5()
    abs_path = make_absolute_static_path(manifest.settings['static_dir'], path)
    digest.update(get_file_hash(abs_path))
    for dep in entry['deps']:
        digest.update(version_dependency(dep, manifest))
    version = digest.hexdigest()
    ext = os.path.splitext(path)[1]
    entry['version'] = version
    entry['versioned_path'] = version + ext
    return entry['version']
def version_dependency(path, manifest):
    """A dependency's version is calculated using this recursive formula:

        version = md5(md5(path_contents) + version(deps))

    So, the version of a path is based on the hash of its own file contents
    as well as those of each of its dependencies.

    Returns the hex digest string; also memoizes it (and the derived
    versioned_path) into manifest.assets[path].  NOTE(review): assumes the
    dependency graph is acyclic — a cycle would recurse forever; confirm
    upstream guarantees this.
    """
    assert path in manifest.assets, path
    # Memoized result from an earlier call — return it directly.
    if manifest.assets[path]['version']:
        return manifest.assets[path]['version']
    h = hashlib.md5()
    # Hash of this file's own contents...
    h.update(
        get_file_hash(
            make_absolute_static_path(manifest.settings['static_dir'], path)))
    # ...combined with the (recursively computed) version of each dep.
    for dep_path in manifest.assets[path]['deps']:
        h.update(version_dependency(dep_path, manifest))
    version = h.hexdigest()
    _, ext = os.path.splitext(path)
    manifest.assets[path]['version'] = version
    # versioned_path keeps the original extension, e.g. '<md5>.png'.
    manifest.assets[path]['versioned_path'] = version + ext
    return manifest.assets[path]['version']