def stylesheet_url(path, only_path=False, cache_buster=True):
    """
    Generates a path to an asset found relative to the project's css
    directory.  Passing a true value as the second argument will cause only
    the path to be returned instead of a `url()` function.

    path -- asset path, relative to ``config.STATIC_ROOT``
    only_path -- when true, return a plain String instead of a Url value
    cache_buster -- when true, append the file mtime as a query parameter
    """
    filepath = String.unquoted(path).value
    if callable(config.STATIC_ROOT):
        # STATIC_ROOT may be a finder callable yielding (file, storage)
        # pairs (e.g. a Django staticfiles bridge).
        try:
            _file, _storage = list(config.STATIC_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
    else:
        _path = os.path.join(config.STATIC_ROOT, filepath.strip('/'))
        filetime = getmtime(_path)
    if filetime is None:
        # 'NA' marks an asset whose mtime could not be determined; the
        # cache buster is still appended with that literal value.
        filetime = 'NA'

    BASE_URL = config.STATIC_URL
    url = '%s%s' % (BASE_URL, filepath)
    if cache_buster:
        url = add_cache_buster(url, filetime)
    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
def _font_url(path, only_path=False, cache_buster=True, inline=False):
    """Resolve a font asset to a URL, or to a data URI when ``inline``.

    path -- font path, relative to the fonts root
    only_path -- when true, return a plain String instead of a Url value
    cache_buster -- when true (and the mtime is known), append it as a
        query parameter; ignored for inline data URIs
    inline -- when true, embed the font contents as a base64 data URI

    Raises Exception when an inline font's type cannot be determined.
    """
    filepath = String.unquoted(path).value
    fobj = None  # renamed from `file` to avoid shadowing the builtin
    FONTS_ROOT = _fonts_root()
    if callable(FONTS_ROOT):
        # FONTS_ROOT may be a finder callable yielding (file, storage) pairs.
        try:
            _file, _storage = list(FONTS_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
        if filetime is None:
            filetime = 'NA'  # unknown mtime disables cache busting below
        elif inline:
            fobj = _storage.open(_file)
    else:
        _path = os.path.join(FONTS_ROOT, filepath.strip('/'))
        filetime = getmtime(_path)
        if filetime is None:
            filetime = 'NA'
        elif inline:
            fobj = open(_path, 'rb')

    BASE_URL = config.FONTS_URL or config.STATIC_URL
    if fobj and inline:
        # Extract the extension (ignoring any ?query suffix) to pick a MIME
        # type.  Match once and reuse — the original evaluated the regex
        # twice on the same input.
        font_type = None
        m = re.match(r'^([^?]+)[.](.*)([?].*)?$', path.value)
        if m:
            font_type = String.unquoted(m.groups()[1]).value
        try:
            mime = FONT_TYPES[font_type]
        except KeyError:
            raise Exception('Could not determine font type for "%s"'
                            % path.value)
        # (Removed a dead `mime = FONT_TYPES.get(font_type)` that merely
        # re-fetched the value the try block above already assigned.)
        if font_type == 'woff':
            mime = 'application/font-woff'
        elif font_type == 'eot':
            mime = 'application/vnd.ms-fontobject'
        url = make_data_url(
            (mime if '/' in mime else 'font/%s' % mime), fobj.read())
        fobj.close()
    else:
        url = '%s/%s' % (BASE_URL.rstrip('/'), filepath.lstrip('/'))
        if cache_buster and filetime != 'NA':
            url = add_cache_buster(url, filetime)

    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
def _font_url(path, only_path=False, cache_buster=True, inline=False):
    # Resolve a font asset to a URL, or to a base64 data URI when ``inline``
    # is true.  ``only_path`` returns a bare String instead of a url()
    # value; ``cache_buster`` appends the file mtime as a query parameter.
    filepath = String.unquoted(path).value
    file = None
    FONTS_ROOT = _fonts_root()
    if callable(FONTS_ROOT):
        # FONTS_ROOT may be a finder callable yielding (file, storage) pairs.
        try:
            _file, _storage = list(FONTS_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
        if filetime is None:
            filetime = 'NA'  # unknown mtime disables cache busting below
        elif inline:
            file = _storage.open(_file)
    else:
        _path = os.path.join(FONTS_ROOT, filepath.strip('/'))
        filetime = getmtime(_path)
        if filetime is None:
            filetime = 'NA'
        elif inline:
            file = open(_path, 'rb')
    BASE_URL = config.FONTS_URL or config.STATIC_URL
    if file and inline:
        # Derive the font type from the file extension (ignoring ?query).
        # NOTE(review): the regex is evaluated twice on the same input; it
        # could be matched once and reused.
        font_type = None
        if re.match(r'^([^?]+)[.](.*)([?].*)?$', path.value):
            font_type = String.unquoted(
                re.match(r'^([^?]+)[.](.*)([?].*)?$',
                         path.value).groups()[1]).value
        try:
            mime = FONT_TYPES[font_type]
        except KeyError:
            raise Exception('Could not determine font type for "%s"'
                            % path.value)
        # NOTE(review): this lookup is redundant -- ``mime`` was already set
        # by the try above; ``.get`` re-fetches the same value.
        mime = FONT_TYPES.get(font_type)
        if font_type == 'woff':
            mime = 'application/font-woff'
        elif font_type == 'eot':
            mime = 'application/vnd.ms-fontobject'
        # Bare subtypes are namespaced under font/ for the data URI.
        url = make_data_url(
            (mime if '/' in mime else 'font/%s' % mime), file.read())
        file.close()
    else:
        url = '%s/%s' % (BASE_URL.rstrip('/'), filepath.lstrip('/'))
        if cache_buster and filetime != 'NA':
            url = add_cache_buster(url, filetime)
    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
def evaluate(self, calculator, divide=False):
    """Evaluate the wrapped child node and re-wrap it as a CSS function.

    A ``url`` function produces a Url value; any other name produces a
    generic Function value.
    """
    inner = self.child.evaluate(calculator, divide)
    if isinstance(inner, String):
        contents, quotes = inner.value, inner.quotes
    else:
        # TODO compress
        contents, quotes = inner.render(), None

    # TODO unclear if this is the right place for this logic, or if it
    # should go in the Function constructor, or should be passed in
    # explicitly by the grammar, or even if Url should go away entirely
    if self.function_name != "url":
        return Function(contents, self.function_name, quotes=quotes)
    return Url(contents, quotes=quotes)
def font_url(sheet, type_, only_path=False, cache_buster=True):
    """Look up the URL of one generated font file in a cached font sheet.

    Returns an empty String when the sheet or the requested font type is
    unknown; otherwise a String (``only_path``) or Url value.
    """
    font_sheet = font_sheets.get(sheet.render())
    type_ = String.unquoted(type_).render()
    if font_sheet:
        asset_file = font_sheet['*f*'].get(type_)
        if asset_file:
            url = '%s%s' % (config.ASSETS_URL, asset_file)
            params = []
            if cache_buster:
                # '*t*' holds the generation timestamp of the sheet.
                params.append('v=%s' % font_sheet['*t*'])
            if type_ == 'svg':
                # SVG fonts are addressed by a fragment: the glyph name.
                params.append('#' + font_sheet['*n*'])
            if params:
                url = '%s?%s' % (url, '&'.join(params))
            return String.unquoted(url) if only_path else Url.unquoted(url)
    # Unknown sheet or type: fall through to an empty string.
    return String.unquoted('')
def font_sheet(g, **kwargs):
    """Build (or fetch from cache) an icon font from a glob of SVG glyphs.

    ``g`` is a comma-separated list of glob patterns relative to
    ``config.STATIC_ROOT``.  Generates eot/woff/ttf/svg font files (or
    inline data URIs with ``inline=True``) via fontforge and returns a List
    of (url, format) pairs.  Results are memoized in the ``font_sheets``
    cache and pickled next to the generated assets.

    Keyword options: inline, cache_buster, autowidth, autohint.
    """
    if not fontforge:
        raise SassMissingDependency('fontforge', 'font manipulation')

    font_sheets = _get_cache('font_sheets')
    now_time = time.time()

    globs = String(g, quotes=None).value
    globs = sorted(g.strip() for g in globs.split(','))

    _k_ = ','.join(globs)  # memoization key for this set of globs

    files = None
    rfiles = None
    tfiles = None
    base_name = None
    glob_path = None
    glyph_name = None

    if _k_ in font_sheets:
        # Already built during this run; just refresh its LRU timestamp.
        font_sheets[_k_]['*'] = now_time
    else:
        files = []
        rfiles = []
        tfiles = []
        for _glob in globs:
            if '..' not in _glob:  # Protect against going to prohibited places...
                if callable(config.STATIC_ROOT):
                    _glob_path = _glob
                    _rfiles = _files = sorted(config.STATIC_ROOT(_glob))
                else:
                    _glob_path = os.path.join(config.STATIC_ROOT, _glob)
                    _files = glob.glob(_glob_path)
                    _files = sorted((f, None) for f in _files)
                    # rfiles: paths relative to STATIC_ROOT.
                    _rfiles = [(rf[len(config.STATIC_ROOT):], s)
                               for rf, s in _files]
                if _files:
                    files.extend(_files)
                    rfiles.extend(_rfiles)
                    # The glyph family name comes from the glob's directory
                    # name; an optional ".suffix" becomes a type prefix.
                    base_name = os.path.basename(os.path.dirname(_glob))
                    _glyph_name, _, _glyph_type = base_name.partition('.')
                    if _glyph_type:
                        _glyph_type += '-'
                    if not glyph_name:
                        glyph_name = _glyph_name
                    tfiles.extend([_glyph_type] * len(_files))
                else:
                    glob_path = _glob_path

    if files is not None:
        if not files:
            log.error("Nothing found at '%s'", glob_path)
            return String.unquoted('')

        # Content-addressed asset names: glyph name + hash of inputs.
        key = [f for (f, s) in files] + [repr(kwargs), config.ASSETS_URL]
        key = glyph_name + '-' + make_filename_hash(key)
        asset_files = {
            'eot': key + '.eot',
            'woff': key + '.woff',
            'ttf': key + '.ttf',
            'svg': key + '.svg',
        }
        ASSETS_ROOT = _assets_root()
        asset_paths = dict((type_, os.path.join(ASSETS_ROOT, asset_file))
                           for type_, asset_file in asset_files.items())
        cache_path = os.path.join(
            config.CACHE_ROOT or ASSETS_ROOT, key + '.cache')

        inline = Boolean(kwargs.get('inline', False))

        font_sheet = None
        asset = None
        file_assets = {}
        inline_assets = {}
        # Try to reuse a previously pickled sheet when all generated files
        # exist (or when inlining, where no files are needed on disk).
        if all(os.path.exists(asset_path)
               for asset_path in asset_paths.values()) or inline:
            try:
                save_time, file_assets, inline_assets, font_sheet, codepoints = pickle.load(
                    open(cache_path))
                if file_assets:
                    file_asset = List(
                        [file_asset for file_asset in file_assets.values()],
                        separator=",")
                    font_sheets[file_asset.render()] = font_sheet
                if inline_assets:
                    inline_asset = List(
                        [inline_asset
                         for inline_asset in inline_assets.values()],
                        separator=",")
                    font_sheets[inline_asset.render()] = font_sheet
                if inline:
                    asset = inline_asset
                else:
                    asset = file_asset
            except:
                # NOTE(review): bare except silently ignores any cache
                # problem (missing/corrupt pickle) and rebuilds from scratch.
                pass

        if font_sheet:
            # Invalidate the cached sheet when any source glyph is newer.
            for file_, storage in files:
                _time = getmtime(file_, storage)
                if save_time < _time:
                    if _time > now_time:
                        log.warning(
                            "File '%s' has a date in the future (cache ignored)"
                            % file_)
                    font_sheet = None  # Invalidate cached custom font
                    break

        if font_sheet is None or asset is None:
            cache_buster = Boolean(kwargs.get('cache_buster', True))
            autowidth = Boolean(kwargs.get('autowidth', False))
            autohint = Boolean(kwargs.get('autohint', True))

            # Build a fresh fontforge font holding one char per SVG glyph.
            font = fontforge.font()
            font.encoding = 'UnicodeFull'
            font.design_size = 16
            font.em = GLYPH_HEIGHT
            font.ascent = GLYPH_ASCENT
            font.descent = GLYPH_DESCENT
            font.fontname = glyph_name
            font.familyname = glyph_name
            font.fullname = glyph_name

            def glyphs(f=lambda x: x):
                # Yield (tempfile path, width, height) for each source SVG,
                # after sanitizing markup fontforge cannot import.
                for file_, storage in f(files):
                    if storage is not None:
                        _file = storage.open(file_)
                    else:
                        _file = open(file_)
                    svgtext = _file.read()
                    svgtext = svgtext.replace('<switch>', '')
                    svgtext = svgtext.replace('</switch>', '')
                    svgtext = svgtext.replace(
                        '<svg>', '<svg xmlns="http://www.w3.org/2000/svg">')
                    m = GLYPH_WIDTH_RE.search(svgtext)
                    if m:
                        width = float(m.group(1))
                    else:
                        width = None
                    m = GLYPH_HEIGHT_RE.search(svgtext)
                    if m:
                        height = float(m.group(1))
                    else:
                        height = None
                    _glyph = tempfile.NamedTemporaryFile(delete=False,
                                                         suffix=".svg")
                    _glyph.file.write(svgtext)
                    _glyph.file.close()
                    yield _glyph.name, width, height

            names = tuple(
                os.path.splitext(os.path.basename(file_))[0]
                for file_, storage in files)
            tnames = tuple(tfiles[i] + n for i, n in enumerate(names))

            codepoints = []
            for i, (glyph_filename, glyph_width,
                    glyph_height) in enumerate(glyphs()):
                if glyph_height and glyph_height != GLYPH_HEIGHT:
                    warnings.warn("Glyphs should be %spx-high" % GLYPH_HEIGHT)
                codepoint = i + GLYPH_START
                name = names[i]
                codepoints.append(codepoint)
                glyph = font.createChar(codepoint, name)
                glyph.importOutlines(glyph_filename)
                os.unlink(glyph_filename)
                glyph.width = glyph_width or GLYPH_WIDTH
                if autowidth:
                    # Autowidth removes side bearings
                    glyph.left_side_bearing = glyph.right_side_bearing = 0
                    glyph.round()

            filetime = int(now_time)

            # Generate font files
            if not inline:
                urls = {}
                for type_ in reversed(FONT_TYPES):
                    asset_path = asset_paths[type_]
                    try:
                        if type_ == 'eot':
                            # eot is converted from the already-generated ttf.
                            ttf_path = asset_paths['ttf']
                            with open(ttf_path) as ttf_fh:
                                contents = ttf2eot(ttf_fh.read())
                            if contents is not None:
                                with open(asset_path, 'wb') as asset_fh:
                                    asset_fh.write(contents)
                        else:
                            font.generate(asset_path)
                            if type_ == 'ttf':
                                contents = None
                                if autohint:
                                    with open(asset_path) as asset_fh:
                                        contents = ttfautohint(asset_fh.read())
                                if contents is not None:
                                    with open(asset_path, 'wb') as asset_fh:
                                        asset_fh.write(contents)
                        asset_file = asset_files[type_]
                        url = '%s%s' % (config.ASSETS_URL, asset_file)
                        params = []
                        # NOTE(review): '#iefix' is appended only to the
                        # first type processed; confirm FONT_TYPES ordering
                        # makes that the eot URL.
                        if not urls:
                            params.append('#iefix')
                        if cache_buster:
                            params.append('v=%s' % filetime)
                        if type_ == 'svg':
                            params.append('#' + glyph_name)
                        if params:
                            url += '?' + '&'.join(params)
                        urls[type_] = url
                    except IOError:
                        # NOTE(review): this assignment is dead -- this
                        # branch only runs when ``inline`` is already falsy;
                        # presumably `inline = True` (fall back to data
                        # URIs) was intended, as in _image_url.
                        inline = False

            if inline:
                urls = {}
                for type_ in reversed(FONT_TYPES):
                    contents = None
                    if type_ == 'eot':
                        ttf_path = asset_paths['ttf']
                        with open(ttf_path) as ttf_fh:
                            contents = ttf2eot(ttf_fh.read())
                        if contents is None:
                            continue
                    else:
                        _tmp = tempfile.NamedTemporaryFile(delete=False,
                                                           suffix='.' + type_)
                        _tmp.file.close()
                        font.generate(_tmp.name)
                        with open(_tmp.name) as asset_fh:
                            if autohint:
                                if type_ == 'ttf':
                                    _contents = asset_fh.read()
                                    contents = ttfautohint(_contents)
                            # NOTE(review): ``_contents`` is only assigned
                            # in the autohint+ttf branch above, so this may
                            # raise NameError (or reuse a stale value) for
                            # other font types -- confirm intent.
                            if contents is None:
                                contents = _contents
                        os.unlink(_tmp.name)
                    mime_type = FONT_MIME_TYPES[type_]
                    url = make_data_url(mime_type, contents)
                    urls[type_] = url

            # Pair every URL with its CSS format() hint.
            assets = {}
            for type_, url in urls.items():
                format_ = FONT_FORMATS[type_]
                if inline:
                    assets[type_] = inline_assets[type_] = List(
                        [Url.unquoted(url), String.unquoted(format_)])
                else:
                    assets[type_] = file_assets[type_] = List(
                        [Url.unquoted(url), String.unquoted(format_)])
            asset = List(
                [assets[type_] for type_ in FONT_TYPES if type_ in assets],
                separator=",")

            # Add the new object:
            font_sheet = dict(zip(tnames, zip(rfiles, codepoints)))
            font_sheet['*'] = now_time
            font_sheet['*f*'] = asset_files
            font_sheet['*k*'] = key
            font_sheet['*n*'] = glyph_name
            font_sheet['*t*'] = filetime

            # Persist atomically: write to a temp file, then rename.
            codepoints = zip(files, codepoints)
            cache_tmp = tempfile.NamedTemporaryFile(delete=False,
                                                    dir=ASSETS_ROOT)
            pickle.dump(
                (now_time, file_assets, inline_assets, font_sheet,
                 codepoints), cache_tmp)
            cache_tmp.close()
            os.rename(cache_tmp.name, cache_path)

            # Use the sorted list to remove older elements (keep only 500 objects):
            if len(font_sheets) > MAX_FONT_SHEETS:
                for a in sorted(font_sheets,
                                key=lambda a: font_sheets[a]['*'],
                                reverse=True)[KEEP_FONT_SHEETS:]:
                    del font_sheets[a]
                log.warning("Exceeded maximum number of font sheets (%s)"
                            % MAX_FONT_SHEETS)
            font_sheets[asset.render()] = font_sheet

        font_sheet_cache = _get_cache('font_sheet_cache')
        for file_, codepoint in codepoints:
            font_sheet_cache[file_] = codepoint

    # TODO this sometimes returns an empty list, or is never assigned to
    return asset
def _image_url(path, only_path=False, cache_buster=True, dst_color=None,
               src_color=None, inline=False, mime_type=None, spacing=None,
               collapse_x=None, collapse_y=None):
    """
    Resolve an image asset to a URL, optionally recoloring, padding,
    collapsing, or inlining it (the processed copy is written under
    the assets root).

    src_color - a list of or a single color to be replaced by each
        corresponding dst_color colors
    spacing - spaces to be added to the image
    collapse_x, collapse_y - collapsable (layered) image of the given
        size (x, y)
    """
    if inline or dst_color or spacing:
        # Any pixel-level work needs PIL.
        if not Image:
            raise Exception("Images manipulation require PIL")
    filepath = String.unquoted(path).value
    fileext = os.path.splitext(filepath)[1].lstrip('.').lower()
    if mime_type:
        mime_type = String.unquoted(mime_type).value
    if not mime_type:
        mime_type = mimetypes.guess_type(filepath)[0]
    if not mime_type:
        mime_type = 'image/%s' % fileext
    # ``path`` is reused below as an open file handle (or None when no
    # pixel-level processing is required).
    path = None
    IMAGES_ROOT = _images_root()
    if callable(IMAGES_ROOT):
        # IMAGES_ROOT may be a finder callable yielding (file, storage)
        # pairs.
        try:
            _file, _storage = list(IMAGES_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
        if filetime is None:
            filetime = 'NA'  # unknown mtime disables cache busting below
        elif inline or dst_color or spacing:
            path = _storage.open(_file)
    else:
        _path = os.path.join(IMAGES_ROOT.rstrip(os.sep),
                             filepath.strip('\\/'))
        filetime = getmtime(_path)
        if filetime is None:
            filetime = 'NA'
        elif inline or dst_color or spacing:
            path = open(_path, 'rb')

    BASE_URL = config.IMAGES_URL or config.STATIC_URL
    if path:
        # Normalize recolor arguments into parallel src/dst color lists of
        # equal length (cycling the shorter one).
        dst_colors = [list(Color(v).value[:3])
                      for v in List.from_maybe(dst_color) if v]

        src_color = Color.from_name('black') if src_color is None else src_color
        src_colors = [tuple(Color(v).value[:3])
                      for v in List.from_maybe(src_color)]

        len_colors = max(len(dst_colors), len(src_colors))
        dst_colors = (dst_colors * len_colors)[:len_colors]
        src_colors = (src_colors * len_colors)[:len_colors]

        # Spacing is CSS-style: (top, right, bottom, left), cycled to 4.
        spacing = Number(0) if spacing is None else spacing
        spacing = [int(Number(v).value) for v in List.from_maybe(spacing)]
        spacing = (spacing * 4)[:4]

        # Content-addressed name for the processed copy.
        file_name, file_ext = os.path.splitext(
            os.path.normpath(filepath).replace(os.sep, '_'))
        key = (filetime, src_color, dst_color, spacing)
        asset_file = file_name + '-' + make_filename_hash(key) + file_ext
        ASSETS_ROOT = config.ASSETS_ROOT or os.path.join(
            config.STATIC_ROOT, 'assets')
        asset_path = os.path.join(ASSETS_ROOT, asset_file)

        if os.path.exists(asset_path):
            # Processed copy already exists; serve it directly.
            filepath = asset_file
            BASE_URL = config.ASSETS_URL
            if inline:
                path = open(asset_path, 'rb')
                url = make_data_url(mime_type, path.read())
            else:
                url = '%s%s' % (BASE_URL, filepath)
                if cache_buster:
                    filetime = getmtime(asset_path)
                    url = add_cache_buster(url, filetime)
        else:
            simply_process = False
            image = None

            if fileext in ('cur',):
                # PIL cannot open .cur files; pass them through untouched.
                simply_process = True
            else:
                try:
                    image = Image.open(path)
                except IOError:
                    if not collapse_x and not collapse_y and not dst_colors:
                        simply_process = True

            if simply_process:
                # No pixel work possible/needed: inline or link the
                # original bytes.
                if inline:
                    url = make_data_url(mime_type, path.read())
                else:
                    url = '%s%s' % (BASE_URL, filepath)
                    if cache_buster:
                        filetime = getmtime(asset_path)
                        url = add_cache_buster(url, filetime)
            else:
                width, height = (collapse_x or image.size[0],
                                 collapse_y or image.size[1])
                new_image = Image.new(
                    mode='RGBA',
                    size=(width + spacing[1] + spacing[3],
                          height + spacing[0] + spacing[2]),
                    color=(0, 0, 0, 0)
                )

                # Recolor: replace each src color with its dst color,
                # preserving the alpha channel when present.
                for i, dst_color in enumerate(dst_colors):
                    src_color = src_colors[i]
                    pixdata = image.load()
                    for _y in xrange(image.size[1]):
                        for _x in xrange(image.size[0]):
                            pixel = pixdata[_x, _y]
                            if pixel[:3] == src_color:
                                pixdata[_x, _y] = tuple(
                                    [int(c) for c in dst_color] +
                                    [pixel[3] if len(pixel) == 4 else 255])

                iwidth, iheight = image.size

                if iwidth != width or iheight != height:
                    # Collapse: layer width x height tiles of the source on
                    # top of each other.
                    cy = 0
                    while cy < iheight:
                        cx = 0
                        while cx < iwidth:
                            cropped_image = image.crop(
                                (cx, cy, cx + width, cy + height))
                            new_image.paste(
                                cropped_image,
                                (int(spacing[3]), int(spacing[0])),
                                cropped_image)
                            cx += width
                        cy += height
                else:
                    new_image.paste(image,
                                    (int(spacing[3]), int(spacing[0])))

                if not inline:
                    try:
                        new_image.save(asset_path)
                        filepath = asset_file
                        BASE_URL = config.ASSETS_URL
                        if cache_buster:
                            filetime = getmtime(asset_path)
                    except IOError:
                        log.exception("Error while saving image")
                        inline = True  # Retry inline version
                    url = os.path.join(config.ASSETS_URL.rstrip(os.sep),
                                       asset_file.lstrip(os.sep))
                    if cache_buster:
                        url = add_cache_buster(url, filetime)
                if inline:
                    output = six.BytesIO()
                    new_image.save(output, format='PNG')
                    contents = output.getvalue()
                    output.close()
                    url = make_data_url(mime_type, contents)
    else:
        # No processing requested: plain URL under the images root.
        url = os.path.join(BASE_URL.rstrip('/'), filepath.lstrip('\\/'))
        if cache_buster and filetime != 'NA':
            url = add_cache_buster(url, filetime)

    if not os.sep == '/':
        # Windows: URLs always use forward slashes.
        url = url.replace(os.sep, '/')

    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
def _image_url(path, only_path=False, cache_buster=True, dst_color=None,
               src_color=None, inline=False, mime_type=None, spacing=None,
               collapse_x=None, collapse_y=None):
    """
    Resolve an image asset to a URL, optionally recoloring, padding,
    collapsing, or inlining it (the processed copy is written under
    the assets root).

    src_color - a list of or a single color to be replaced by each
        corresponding dst_color colors
    spacing - spaces to be added to the image
    collapse_x, collapse_y - collapsable (layered) image of the given
        size (x, y)
    """
    if inline or dst_color or spacing:
        # Any pixel-level work needs PIL.
        if not Image:
            raise SassMissingDependency('PIL', 'image manipulation')
    filepath = String.unquoted(path).value
    fileext = os.path.splitext(filepath)[1].lstrip('.').lower()
    if mime_type:
        mime_type = String.unquoted(mime_type).value
    if not mime_type:
        mime_type = mimetypes.guess_type(filepath)[0]
    if not mime_type:
        mime_type = 'image/%s' % fileext
    # ``path`` is reused below as an open file handle (or None when no
    # pixel-level processing is required).
    path = None
    IMAGES_ROOT = _images_root()
    if callable(IMAGES_ROOT):
        # IMAGES_ROOT may be a finder callable yielding (file, storage)
        # pairs.
        try:
            _file, _storage = list(IMAGES_ROOT(filepath))[0]
        except IndexError:
            filetime = None
        else:
            filetime = getmtime(_file, _storage)
        if filetime is None:
            filetime = 'NA'  # unknown mtime disables cache busting below
        elif inline or dst_color or spacing:
            path = _storage.open(_file)
    else:
        _path = os.path.join(IMAGES_ROOT.rstrip(os.sep),
                             filepath.strip('\\/'))
        filetime = getmtime(_path)
        if filetime is None:
            filetime = 'NA'
        elif inline or dst_color or spacing:
            path = open(_path, 'rb')

    BASE_URL = config.IMAGES_URL or config.STATIC_URL
    if path:
        # Normalize recolor arguments into parallel src/dst color lists of
        # equal length (cycling the shorter one).
        dst_colors = [
            list(Color(v).value[:3]) for v in List.from_maybe(dst_color) if v
        ]

        src_color = Color.from_name(
            'black') if src_color is None else src_color
        src_colors = [
            tuple(Color(v).value[:3]) for v in List.from_maybe(src_color)
        ]

        len_colors = max(len(dst_colors), len(src_colors))
        dst_colors = (dst_colors * len_colors)[:len_colors]
        src_colors = (src_colors * len_colors)[:len_colors]

        # Spacing is CSS-style: (top, right, bottom, left), cycled to 4.
        spacing = Number(0) if spacing is None else spacing
        spacing = [int(Number(v).value) for v in List.from_maybe(spacing)]
        spacing = (spacing * 4)[:4]

        # Content-addressed name for the processed copy.
        file_name, file_ext = os.path.splitext(
            os.path.normpath(filepath).replace(os.sep, '_'))
        key = (filetime, src_color, dst_color, spacing)
        asset_file = file_name + '-' + make_filename_hash(key) + file_ext
        ASSETS_ROOT = _assets_root()
        asset_path = os.path.join(ASSETS_ROOT, asset_file)

        if os.path.exists(asset_path):
            # Processed copy already exists; serve it directly.
            filepath = asset_file
            BASE_URL = config.ASSETS_URL
            if inline:
                path = open(asset_path, 'rb')
                url = make_data_url(mime_type, path.read())
            else:
                url = '%s%s' % (BASE_URL, filepath)
                if cache_buster:
                    filetime = getmtime(asset_path)
                    url = add_cache_buster(url, filetime)
        else:
            simply_process = False
            image = None

            if fileext in ('cur',):
                # PIL cannot open .cur files; pass them through untouched.
                simply_process = True
            else:
                try:
                    image = Image.open(path)
                except IOError:
                    if not collapse_x and not collapse_y and not dst_colors:
                        simply_process = True

            if simply_process:
                # No pixel work possible/needed: inline or link the
                # original bytes.
                if inline:
                    url = make_data_url(mime_type, path.read())
                else:
                    url = '%s%s' % (BASE_URL, filepath)
                    if cache_buster:
                        filetime = getmtime(asset_path)
                        url = add_cache_buster(url, filetime)
            else:
                width, height = collapse_x or image.size[
                    0], collapse_y or image.size[1]
                new_image = Image.new(mode='RGBA',
                                      size=(width + spacing[1] + spacing[3],
                                            height + spacing[0] + spacing[2]),
                                      color=(0, 0, 0, 0))

                # Recolor: replace each src color with its dst color,
                # preserving the alpha channel when present.
                for i, dst_color in enumerate(dst_colors):
                    src_color = src_colors[i]
                    pixdata = image.load()
                    for _y in xrange(image.size[1]):
                        for _x in xrange(image.size[0]):
                            pixel = pixdata[_x, _y]
                            if pixel[:3] == src_color:
                                pixdata[_x, _y] = tuple(
                                    [int(c) for c in dst_color] +
                                    [pixel[3] if len(pixel) == 4 else 255])

                iwidth, iheight = image.size

                if iwidth != width or iheight != height:
                    # Collapse: layer width x height tiles of the source on
                    # top of each other.
                    cy = 0
                    while cy < iheight:
                        cx = 0
                        while cx < iwidth:
                            cropped_image = image.crop(
                                (cx, cy, cx + width, cy + height))
                            new_image.paste(cropped_image,
                                            (int(spacing[3]),
                                             int(spacing[0])), cropped_image)
                            cx += width
                        cy += height
                else:
                    new_image.paste(image,
                                    (int(spacing[3]), int(spacing[0])))

                if not inline:
                    try:
                        new_image.save(asset_path)
                        filepath = asset_file
                        BASE_URL = config.ASSETS_URL
                        if cache_buster:
                            filetime = getmtime(asset_path)
                    except IOError:
                        log.exception("Error while saving image")
                        inline = True  # Retry inline version
                    url = os.path.join(config.ASSETS_URL.rstrip(os.sep),
                                       asset_file.lstrip(os.sep))
                    if cache_buster:
                        url = add_cache_buster(url, filetime)
                if inline:
                    output = six.BytesIO()
                    new_image.save(output, format='PNG')
                    contents = output.getvalue()
                    output.close()
                    url = make_data_url(mime_type, contents)
    else:
        # No processing requested: plain URL under the images root.
        url = os.path.join(BASE_URL.rstrip('/'), filepath.lstrip('\\/'))
        if cache_buster and filetime != 'NA':
            url = add_cache_buster(url, filetime)

    if not os.sep == '/':
        # Windows: URLs always use forward slashes.
        url = url.replace(os.sep, '/')

    if only_path:
        return String.unquoted(url)
    else:
        return Url.unquoted(url)
def font_sheet(g, **kwargs):
    """Build (or fetch from cache) an icon font from a glob of SVG glyphs.

    ``g`` is a comma-separated list of glob patterns relative to
    ``config.STATIC_ROOT``.  Generates eot/woff/ttf/svg font files (or
    inline data URIs with ``inline=True``) via fontforge and returns a List
    of (url, format) pairs.  Results are memoized in the ``font_sheets``
    cache and pickled next to the generated assets.

    Keyword options: inline, cache_buster, autowidth, autohint.
    """
    if not fontforge:
        raise Exception("Fonts manipulation require fontforge")

    now_time = time.time()

    globs = String(g, quotes=None).value
    globs = sorted(g.strip() for g in globs.split(','))

    _k_ = ','.join(globs)  # memoization key for this set of globs

    files = None
    rfiles = None
    tfiles = None
    base_name = None
    glob_path = None
    glyph_name = None

    if _k_ in font_sheets:
        # Already built during this run; just refresh its LRU timestamp.
        font_sheets[_k_]['*'] = now_time
    else:
        files = []
        rfiles = []
        tfiles = []
        for _glob in globs:
            if '..' not in _glob:  # Protect against going to prohibited places...
                if callable(config.STATIC_ROOT):
                    _glob_path = _glob
                    _rfiles = _files = sorted(config.STATIC_ROOT(_glob))
                else:
                    _glob_path = os.path.join(config.STATIC_ROOT, _glob)
                    _files = glob.glob(_glob_path)
                    _files = sorted((f, None) for f in _files)
                    # rfiles: paths relative to STATIC_ROOT.
                    _rfiles = [(rf[len(config.STATIC_ROOT):], s)
                               for rf, s in _files]
                if _files:
                    files.extend(_files)
                    rfiles.extend(_rfiles)
                    # Glyph family name comes from the glob's directory
                    # name; an optional ".suffix" becomes a type prefix.
                    base_name = os.path.basename(os.path.dirname(_glob))
                    _glyph_name, _, _glyph_type = base_name.partition('.')
                    if _glyph_type:
                        _glyph_type += '-'
                    if not glyph_name:
                        glyph_name = _glyph_name
                    tfiles.extend([_glyph_type] * len(_files))
                else:
                    glob_path = _glob_path

    if files is not None:
        if not files:
            log.error("Nothing found at '%s'", glob_path)
            return String.unquoted('')

        # Content-addressed asset names: glyph name + hash of inputs.
        key = [f for (f, s) in files] + [repr(kwargs), config.ASSETS_URL]
        key = glyph_name + '-' + make_filename_hash(key)
        asset_files = {
            'eot': key + '.eot',
            'woff': key + '.woff',
            'ttf': key + '.ttf',
            'svg': key + '.svg',
        }
        ASSETS_ROOT = config.ASSETS_ROOT or os.path.join(
            config.STATIC_ROOT, 'assets')
        asset_paths = dict((type_, os.path.join(ASSETS_ROOT, asset_file))
                           for type_, asset_file in asset_files.items())
        cache_path = os.path.join(
            config.CACHE_ROOT or ASSETS_ROOT, key + '.cache')

        inline = Boolean(kwargs.get('inline', False))

        font_sheet = None
        asset = None
        file_assets = {}
        inline_assets = {}
        # Try to reuse a previously pickled sheet when all generated files
        # exist (or when inlining, where no files are needed on disk).
        if all(os.path.exists(asset_path)
               for asset_path in asset_paths.values()) or inline:
            try:
                save_time, file_assets, inline_assets, font_sheet, codepoints = pickle.load(
                    open(cache_path))
                if file_assets:
                    file_asset = List(
                        [file_asset for file_asset in file_assets.values()],
                        separator=",")
                    font_sheets[file_asset.render()] = font_sheet
                if inline_assets:
                    inline_asset = List(
                        [inline_asset
                         for inline_asset in inline_assets.values()],
                        separator=",")
                    font_sheets[inline_asset.render()] = font_sheet
                if inline:
                    asset = inline_asset
                else:
                    asset = file_asset
            except Exception:
                # Any cache problem (missing/corrupt pickle) just means we
                # rebuild from scratch.  (Narrowed from a bare except so
                # KeyboardInterrupt/SystemExit still propagate.)
                pass

        if font_sheet:
            # Invalidate the cached sheet when any source glyph is newer.
            for file_, storage in files:
                _time = getmtime(file_, storage)
                if save_time < _time:
                    if _time > now_time:
                        log.warning(
                            "File '%s' has a date in the future (cache ignored)"
                            % file_)
                    font_sheet = None  # Invalidate cached custom font
                    break

        if font_sheet is None or asset is None:
            cache_buster = Boolean(kwargs.get('cache_buster', True))
            autowidth = Boolean(kwargs.get('autowidth', False))
            autohint = Boolean(kwargs.get('autohint', True))

            # Build a fresh fontforge font holding one char per SVG glyph.
            font = fontforge.font()
            font.encoding = 'UnicodeFull'
            font.design_size = 16
            font.em = GLYPH_HEIGHT
            font.ascent = GLYPH_ASCENT
            font.descent = GLYPH_DESCENT
            font.fontname = glyph_name
            font.familyname = glyph_name
            font.fullname = glyph_name

            def glyphs(f=lambda x: x):
                # Yield (tempfile path, width, height) for each source SVG,
                # after sanitizing markup fontforge cannot import.
                for file_, storage in f(files):
                    if storage is not None:
                        _file = storage.open(file_)
                    else:
                        _file = open(file_)
                    svgtext = _file.read()
                    svgtext = svgtext.replace('<switch>', '')
                    svgtext = svgtext.replace('</switch>', '')
                    svgtext = svgtext.replace(
                        '<svg>', '<svg xmlns="http://www.w3.org/2000/svg">')
                    m = GLYPH_WIDTH_RE.search(svgtext)
                    if m:
                        width = float(m.group(1))
                    else:
                        width = None
                    m = GLYPH_HEIGHT_RE.search(svgtext)
                    if m:
                        height = float(m.group(1))
                    else:
                        height = None
                    _glyph = tempfile.NamedTemporaryFile(delete=False,
                                                         suffix=".svg")
                    _glyph.file.write(svgtext)
                    _glyph.file.close()
                    yield _glyph.name, width, height

            names = tuple(
                os.path.splitext(os.path.basename(file_))[0]
                for file_, storage in files)
            tnames = tuple(tfiles[i] + n for i, n in enumerate(names))

            codepoints = []
            for i, (glyph_filename, glyph_width,
                    glyph_height) in enumerate(glyphs()):
                if glyph_height and glyph_height != GLYPH_HEIGHT:
                    warnings.warn("Glyphs should be %spx-high" % GLYPH_HEIGHT)
                codepoint = i + GLYPH_START
                name = names[i]
                codepoints.append(codepoint)
                glyph = font.createChar(codepoint, name)
                glyph.importOutlines(glyph_filename)
                os.unlink(glyph_filename)
                glyph.width = glyph_width or GLYPH_WIDTH
                if autowidth:
                    # Autowidth removes side bearings
                    glyph.left_side_bearing = glyph.right_side_bearing = 0
                    glyph.round()

            filetime = int(now_time)

            # Generate font files
            if not inline:
                urls = {}
                for type_ in reversed(FONT_TYPES):
                    asset_path = asset_paths[type_]
                    try:
                        if type_ == 'eot':
                            # eot is converted from the already-generated ttf.
                            ttf_path = asset_paths['ttf']
                            with open(ttf_path) as ttf_fh:
                                contents = ttf2eot(ttf_fh.read())
                            if contents is not None:
                                with open(asset_path, 'wb') as asset_fh:
                                    asset_fh.write(contents)
                        else:
                            font.generate(asset_path)
                            if type_ == 'ttf':
                                contents = None
                                if autohint:
                                    with open(asset_path) as asset_fh:
                                        contents = ttfautohint(asset_fh.read())
                                if contents is not None:
                                    with open(asset_path, 'wb') as asset_fh:
                                        asset_fh.write(contents)
                        asset_file = asset_files[type_]
                        url = '%s%s' % (config.ASSETS_URL, asset_file)
                        params = []
                        if not urls:
                            params.append('#iefix')
                        if cache_buster:
                            params.append('v=%s' % filetime)
                        if type_ == 'svg':
                            params.append('#' + glyph_name)
                        if params:
                            url += '?' + '&'.join(params)
                        urls[type_] = url
                    except IOError:
                        # BUGFIX: was `inline = False`, a dead assignment
                        # (this branch only runs when inline is already
                        # falsy), which silently dropped the font with no
                        # fallback.  Fall back to inline data URIs instead,
                        # mirroring _image_url's "retry inline" behavior.
                        inline = True

            if inline:
                urls = {}
                for type_ in reversed(FONT_TYPES):
                    contents = None
                    if type_ == 'eot':
                        ttf_path = asset_paths['ttf']
                        with open(ttf_path) as ttf_fh:
                            contents = ttf2eot(ttf_fh.read())
                        if contents is None:
                            continue
                    else:
                        _tmp = tempfile.NamedTemporaryFile(delete=False,
                                                           suffix='.' + type_)
                        _tmp.file.close()
                        font.generate(_tmp.name)
                        with open(_tmp.name) as asset_fh:
                            # BUGFIX: read the generated file for every
                            # type; previously the read happened only in
                            # the autohint+ttf branch, leaving ``_contents``
                            # unbound (or stale) for woff/svg.
                            _contents = asset_fh.read()
                            if autohint:
                                if type_ == 'ttf':
                                    contents = ttfautohint(_contents)
                            if contents is None:
                                contents = _contents
                        os.unlink(_tmp.name)
                    mime_type = FONT_MIME_TYPES[type_]
                    url = make_data_url(mime_type, contents)
                    urls[type_] = url

            # Pair every URL with its CSS format() hint.
            assets = {}
            for type_, url in urls.items():
                format_ = FONT_FORMATS[type_]
                if inline:
                    assets[type_] = inline_assets[type_] = List(
                        [Url.unquoted(url), String.unquoted(format_)])
                else:
                    assets[type_] = file_assets[type_] = List(
                        [Url.unquoted(url), String.unquoted(format_)])
            asset = List(
                [assets[type_] for type_ in FONT_TYPES if type_ in assets],
                separator=",")

            # Add the new object:
            font_sheet = dict(zip(tnames, zip(rfiles, codepoints)))
            font_sheet['*'] = now_time
            font_sheet['*f*'] = asset_files
            font_sheet['*k*'] = key
            font_sheet['*n*'] = glyph_name
            font_sheet['*t*'] = filetime

            # Persist atomically: write to a temp file, then rename.
            codepoints = zip(files, codepoints)
            cache_tmp = tempfile.NamedTemporaryFile(delete=False,
                                                    dir=ASSETS_ROOT)
            pickle.dump(
                (now_time, file_assets, inline_assets, font_sheet,
                 codepoints), cache_tmp)
            cache_tmp.close()
            os.rename(cache_tmp.name, cache_path)

            # Use the sorted list to remove older elements (keep only 500 objects):
            if len(font_sheets) > MAX_FONT_SHEETS:
                for a in sorted(font_sheets,
                                key=lambda a: font_sheets[a]['*'],
                                reverse=True)[KEEP_FONT_SHEETS:]:
                    del font_sheets[a]
                log.warning("Exceeded maximum number of font sheets (%s)"
                            % MAX_FONT_SHEETS)
            font_sheets[asset.render()] = font_sheet

        for file_, codepoint in codepoints:
            _font_sheet_cache[file_] = codepoint

    # TODO this sometimes returns an empty list, or is never assigned to
    return asset
def atom(self):
    """Parse a single atomic expression (yapps-generated parser rule).

    Dispatches on the lookahead token: parenthesized expression/map/list,
    url(...) literal, function call, !important, bareword, number (with
    optional units), quoted strings, hex color, or variable reference.
    """
    _token_ = self._peek(self.u_expr_chks)
    if _token_ == 'LPAR':
        # Parenthesized: empty list, a map, or an expression list.
        LPAR = self._scan('LPAR')
        _token_ = self._peek(self.atom_rsts)
        if _token_ == 'RPAR':
            v = ListLiteral([], comma=False)
        elif _token_ not in self.argspec_item_chks:
            expr_map = self.expr_map()
            v = expr_map
        else:  # in self.argspec_item_chks
            expr_lst = self.expr_lst()
            v = expr_lst
        RPAR = self._scan('RPAR')
        return Parentheses(v)
    elif _token_ == '"url"':
        # url(...) gets special lexing: the URL body is one token, with
        # optional surrounding single or double quotes.
        self._scan('"url"')
        LPAR = self._scan('LPAR')
        _token_ = self._peek(self.atom_rsts_)
        if _token_ == 'URL':
            URL = self._scan('URL')
            quotes = None
        elif _token_ == '"\\""':
            self._scan('"\\""')
            URL = self._scan('URL')
            self._scan('"\\""')
            quotes = '"'
        else:  # == '"\'"'
            self._scan('"\'"')
            URL = self._scan('URL')
            self._scan('"\'"')
            quotes = "'"
        RPAR = self._scan('RPAR')
        return Literal(Url(URL, quotes=quotes))
    elif _token_ == 'FNCT':
        # Ordinary function call: name(argspec).
        FNCT = self._scan('FNCT')
        LPAR = self._scan('LPAR')
        argspec = self.argspec()
        RPAR = self._scan('RPAR')
        return CallOp(FNCT, argspec)
    elif _token_ == 'BANG_IMPORTANT':
        BANG_IMPORTANT = self._scan('BANG_IMPORTANT')
        return Literal(String(BANG_IMPORTANT, quotes=None))
    elif _token_ == 'ID':
        ID = self._scan('ID')
        return Literal(parse_bareword(ID))
    elif _token_ == 'NUM':
        NUM = self._scan('NUM')
        UNITS = None
        if self._peek(self.atom_rsts__) == 'UNITS':
            UNITS = self._scan('UNITS')
        return Literal(Number(float(NUM), unit=UNITS))
    elif _token_ == 'STR':
        # Single-quoted string.
        STR = self._scan('STR')
        return Literal(String(dequote(STR), quotes="'"))
    elif _token_ == 'QSTR':
        # Double-quoted string.
        QSTR = self._scan('QSTR')
        return Literal(String(dequote(QSTR), quotes='"'))
    elif _token_ == 'COLOR':
        COLOR = self._scan('COLOR')
        return Literal(Color.from_hex(COLOR, literal=True))
    else:  # == 'VAR'
        VAR = self._scan('VAR')
        return Variable(VAR)