def css(request, file):
    """Serve a rendered stylesheet template, minified, as text/css."""
    response = render(request, "core/" + file + ".css", content_type='text/css')
    # Minify, then collapse any remaining whitespace runs to single spaces.
    minified = compress(response.content.decode("UTF-8"))
    response.content = re.sub(r'[\s]+', ' ', minified)
    return response
def render_report(deps):
    """Render a report with a specific output.

    :param deps: mapping of dependency objects; each must expose
        ``requires_updates`` (collection) and ``package`` (sort key).
    :return: the minified HTML report as a string.
    """
    base = os.path.join(os.path.dirname(__file__), "html")

    # Keep only the dependencies that actually need updating, sorted by
    # package name (truthiness test instead of `not len(...)`).
    requires_updates = [dep for dep in deps.values() if dep.requires_updates]
    requires_updates.sort(key=lambda i: i.package)

    # Get the minified stylesheet.
    with open(os.path.join(base, "style.css")) as f:
        minified_stylesheet = csscompressor.compress(f.read())

    # Get the base64-encoded favicon; orange signals pending updates.
    favicon = "orange.ico" if requires_updates else "green.ico"
    with open(os.path.join(base, favicon), "rb") as f:
        base64_favicon = base64.b64encode(f.read()).decode("ascii")

    # Build the rendering environment.
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(base))
    env.filters["format_day"] = format_day
    tmpl = env.get_template("template.html")
    res = tmpl.render(requires_updates=requires_updates,
                      minified_stylesheet=minified_stylesheet,
                      base64_favicon=base64_favicon)
    return htmlmin.minify.html_minify(res)
def __init__(self, pelican):
    """
    Minifies the files.

    :param pelican: the pelican object
    :type pelican: pelican.Pelican
    """
    # Walk every generated file and minify it in place according to type.
    for path, subdirs, files in os.walk(pelican.output_path):
        for name in files:
            path_file = os.path.join(path, name)
            if fnmatch(name, '*.html'):
                html_minifier = lambda content: htmlmin.minify(
                    content,
                    remove_comments=True,
                    remove_empty_space=True,
                    reduce_boolean_attributes=True,
                    keep_pre=True,
                )
                self.write_to_file(path_file, html_minifier)
            elif fnmatch(name, '*.css'):
                self.write_to_file(
                    path_file,
                    lambda content: csscompressor.compress(content),
                )
def prepare_js_css_minify():
    """Concatenate and minify the static JS and CSS assets.

    Rebuilds ``../static/js/app.js`` and ``app.min.js`` from the individual
    script files (excluding webworker scripts), and
    ``../static/css/style.min.css`` from ``style.css``. The original working
    directory is restored even if a step fails.
    """
    from csscompressor import compress
    from jsmin import jsmin

    cwd = os.getcwd()
    try:
        os.chdir("../static/js")
        list_files = os.listdir()
        # Remove previous build artifacts so they aren't re-concatenated.
        for artifact in ("app.min.js", "app.js"):
            if artifact in list_files:
                os.remove(artifact)
                list_files.remove(artifact)
        data_js = ['"use strict";']
        for file in list_files:
            # endswith() avoids matching names that merely *contain* ".js"
            # (e.g. "data.json"), which the old `".js" in file` check did.
            if file.endswith(".js") and 'webworker' not in file:
                with open(file) as f:
                    data_js.append(f.read().replace('"use strict";', ''))
        concat = "".join(data_js)
        with open("app.js", "w") as f:
            f.write(concat)
        minified = jsmin(concat, quote_chars="'\"`")
        with open("app.min.js", "w") as f:
            f.write(minified)

        os.chdir("../css")
        if os.path.exists("style.min.css"):
            os.remove("style.min.css")
        with open("style.css") as f:
            minified_css = compress(f.read())
        with open("style.min.css", "w") as f:
            f.write(minified_css)
    finally:
        # Always restore the caller's working directory.
        os.chdir(cwd)
def minimize_css(output_dir: str) -> None:
    """Minify every .css file found under *output_dir*, in place."""
    for css_path in get_files(output_dir, ".css"):
        source = css_path.read_text()
        # preserve_exclamation_comments=False also strips /*! ... */ banners.
        css_path.write_text(
            csscompressor.compress(source, preserve_exclamation_comments=False))
def build_css(self, sass_file):
    """Compile the Sass code to CSS and compress it.

    Despite the historical wording of this docstring family, compilation
    here is done by shelling out to the ``sass`` CLI via ``npx`` (not
    libsass-python); the output is then compressed with the
    "csscompressor" package. Apply the custom filter on CSS by calling
    the `_filter_css` method.

    :raises SassCompileException: if sass writes to stderr or exits non-zero.
    """
    logger.debug("Reading the Sass file.")
    # NOTE(review): this content is never passed to the compiler — "npx sass"
    # reads main.scss from disk itself. The read appears kept only for its
    # side effect on the open handle; confirm before removing.
    sass_content = sass_file.read()
    logger.debug("Compiling the Sass file.")
    process = subprocess.run(
        ["npx", "-q", "sass", "--no-charset", "main.scss"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    # Any stderr output at all (even a warning) is treated as a failure.
    if process.stderr:
        raise SassCompileException(process.stderr, sass_file.name)
    if process.returncode != 0:
        raise SassCompileException(
            f"Sass gave non-zero exit code {process.returncode}",
            sass_file.name,
        )
    css_content = process.stdout
    logger.debug(
        "Compiled the Sass file. CSS content size: {} bytes".format(
            len(css_content),
        ))
    logger.debug("Filtering the CSS content.")
    css_content = self._filter_css(css_content)
    logger.debug("Compressing the CSS content.")
    css_content = csscompressor.compress(css_content)
    self.assets.css_content = css_content
def build_css(self, sass_file):
    """Compile the Sass code to CSS using the "libsass-python" package
    and compress the CSS using the "csscompressor" package.

    Apply the custom filter on CSS by calling the `_filter_css` method.

    :param sass_file: open file object for the Sass source (``.name`` used).
    :raises SassCompileException: if the Sass source fails to compile.
    """
    logger.debug("Reading the Sass file.")
    sass_content = sass_file.read()
    try:
        logger.debug("Compiling the Sass file.")
        css_content = sass.compile(filename=sass_file.name)
    except sass.CompileError as error:
        # Chain the original error so the underlying compiler traceback
        # is preserved for debugging.
        raise SassCompileException(error, sass_file.name) from error
    logger.debug(
        "Compiled the Sass file. CSS content size: {} bytes".format(
            len(css_content),
        ))
    logger.debug("Filtering the CSS content.")
    css_content = self._filter_css(css_content)
    logger.debug("Compressing the CSS content.")
    css_content = csscompressor.compress(css_content)
    self.assets.css_content = css_content
def minify_scripts():
    """Best-effort minification of the bundled UI assets.

    For every JS/CSS file under ``ui/`` whose ``.min`` sibling is missing
    or stale, writes a freshly minified ``.min.js`` / ``.min.css`` next to
    it. If the optional minifier packages are not installed, the whole
    step is silently skipped.
    """
    try:
        import slimit
        for fn in load_static_file_paths('ui/*.js'):
            # NOTE(review): `return` (not `continue`) aborts the whole
            # function on the first packaged or read-only path —
            # presumably an installed, non-writable deployment where no
            # file can be written; confirm this is intended.
            if 'packages' in fn or not os.access(os.path.dirname(fn), os.W_OK):
                return
            if fn.endswith('.min.js'):
                continue
            min_fn = fn.replace('.js', '.min.js')
            # Skip when the minified sibling is already up to date.
            if os.path.exists(min_fn) and os.path.getmtime(fn) <= os.path.getmtime(min_fn):
                continue
            with codecs.open(fn, encoding='utf-8') as inf:
                content = inf.read()
            minified = slimit.minify(content, mangle=True, mangle_toplevel=True)
            with codecs.open(min_fn, 'w', encoding='utf-8') as outf:
                outf.write(minified)
        import csscompressor
        for fn in load_static_file_paths('ui/*.css'):
            if 'packages' in fn or not os.access(os.path.dirname(fn), os.W_OK):
                return
            if fn.endswith('.min.css'):
                continue
            min_fn = fn.replace('.css', '.min.css')
            if os.path.exists(min_fn) and os.path.getmtime(fn) <= os.path.getmtime(min_fn):
                continue
            with codecs.open(fn, encoding='utf-8') as inf:
                content = inf.read()
            minified = csscompressor.compress(content)
            with codecs.open(min_fn, 'w', encoding='utf-8') as outf:
                outf.write(minified)
    except ImportError:
        # The minifiers are optional dependencies; serve unminified assets.
        pass
def main():
    """Build the combined and minified stylesheets described by config.json.

    Reads config.json, bumps its "build" counter, concatenates the listed
    CSS files under a generated header comment, writes the unminified and
    minified outputs, and saves the updated config back to disk.
    """
    # Open config file
    with open("config.json", 'r') as config_file:
        config = json.load(config_file)
    # Incriment the build counter
    config["build"] += 1
    # Print starting script message
    print("\n{} CSS; build #{}\n\nStarting...\n".format(
        config["name"], config["build"]))
    # Generate the CSS file header comment
    compiled_css = ""
    author_comment = "/*\n\tStylesheet for {}; build #{}\n\tAuthor{}: {}\n\tBuild Date: {}\n*/\n\n".format(
        config["name"], config["build"],
        "s" if len(config["authors"]) > 1 else "",
        " & ".join(", ".join(config["authors"]).rsplit(', ', 1)),
        datetime.datetime.utcnow().strftime("%m/%d/%Y @ %H:%M UTC"))
    # Print messages for css file
    print("Reading from CSS files:")
    # Open indivual CSS files
    for file in config["files"]:
        # Try opening file
        try:
            with open('{}/'.format(config["css_directory"]) + file, 'r') as css_file:
                # Add css file to final CSS file
                compiled_css += "/* {}\n------------------------------------------------------------------------------ */\n".format(
                    file) + css_file.read() + "\n\n"
                print("\t Succesfully added \"{}/{}\"".format(
                    config["css_directory"], file))
        # Print error if file not found
        except FileNotFoundError:
            print("\tError reading \"{}/{}\": File not Found".format(
                config["css_directory"], file))
    # Write css to file — the with-statement closes the handle; the old
    # explicit close() calls inside the blocks were redundant.
    with open(config["unminified_file"], 'w') as output_file:
        output_file.write(author_comment + compiled_css)
    # Write minified css to file
    with open(config["minified_file"], 'w') as output_file:
        output_file.write(author_comment + compress(compiled_css))
    # Write config to file
    with open("config.json", 'w') as config_file:
        config_file.write(json.dumps(config, sort_keys=True, indent=4))
    # Print confirming CSS file was succesfully generated
    print("\nSuccesfully generated css files!")
    print("\tunminified: {}, minified: {}".format(config["unminified_file"],
                                                  config["minified_file"]))
def css(content, comment='', encoding=None):
    """Minify CSS, optionally prefixed with a /* banner */ comment.

    Python 2 code: *content* is decoded to unicode with *encoding* first
    when needed.
    """
    banner = '/* %s */\n' % comment if comment else ''
    if encoding and type(content) is not unicode:
        content = unicode(content, encoding)
    return banner + csscompressor.compress(content)
def test_linelen_1(self):
    """max_linelen must break lines between rules, never inside strings."""
    # Renamed from `input` to avoid shadowing the builtin.
    src = '''
        a {content: '}}'}
        b {content: '}'}
        c {content: '{'}
    '''
    output = compress(src, max_linelen=2)
    assert output == "a{content:'}}'}\nb{content:'}'}\nc{content:'{'}"
def test_compress_1(self):
    """An unterminated comment swallows everything after it."""
    # Renamed from `input` to avoid shadowing the builtin.
    src = '''
        a {content: '}}'}
        /*
        b {content: '}'}
        c {content: '{'}
        d {content: '{'}
    '''
    output = compress(src)
    assert output == "a{content:'}}'}"
def test_linelen_3(self):
    """A generous max_linelen should break only once the limit is reached."""
    # Renamed from `input` to avoid shadowing the builtin.
    src = '''
        a {content: '}}'}
        b {content: '}'}
        c {content: '{'}
        d {content: '{'}
    '''
    output = compress(src, max_linelen=100)
    assert output == "a{content:'}}'}b{content:'}'}c{content:'{'}\nd{content:'{'}"
def main():
    """Write a .min.css sibling for every non-minified stylesheet in ./css."""
    sources = [f"./css/{name}" for name in os.listdir("./css")
               if not name.endswith(".min.css")]
    for path in sources:
        print(path)
        with open(path, "r") as src:
            minified = compress(src.read())
        target = f"{path.replace('.css', '')}.min.css"
        with open(target, "w") as dst:
            dst.write(minified)
def _minify_css(self, path):
    """Return the minified CSS at *path*, keeping /*! bang */ comments."""
    with self.open(path, 'rb') as css_file:
        source = css_file.read().decode('utf-8')
    return compress(source, preserve_exclamation_comments=True)
def generate_style():
    """Compile the site's Sass entry point and write the compressed CSS."""
    print('genreated css')
    style_path = lib.helper.join_path(Config.INPUT, Config.STYLE)
    source_path = lib.helper.join_path(style_path, Config.STYLE_INPUT_FILE_NAME)
    with open(source_path) as handle:
        compiled = sass.compile(string=handle.read(), include_paths=[style_path])
    out_dir = lib.helper.join_path(Config.OUTPUT, Config.STYLE_OUTPUT_DIR)
    lib.helper.makedirs(out_dir)
    lib.helper.write_file(
        lib.helper.join_path(out_dir, Config.STYLE_OUTPUT_FILE_NAME),
        csscompressor.compress(compiled))
def compile_scss(src):
    """Compile a .scss file to a minified .css file next to it.

    Partials (basenames starting with '_') are skipped — they are only
    pulled in via @import from other sheets.
    """
    if not ntpath.basename(src).startswith('_'):
        scss_compiled = scss.compiler.compile_file(src)
        css_filename = src.replace('.scss', '.css')
        # Context manager guarantees the handle is closed even if the
        # write fails (the original open/close pair could leak it).
        with open(css_filename, 'w') as css_file:
            css_file.write(compress(scss_compiled))
        print(Fore.GREEN + css_filename + ' ' + 'compiled')
def minify_css_js(folder):
    """Minify, in place, every .css and .js file directly inside *folder*."""
    for name in os.listdir(folder):
        full_path = os.path.join(folder, name)
        if name.endswith(".css"):
            with open(full_path, 'r') as handle:
                source = handle.read()
            with open(full_path, 'w') as handle:
                handle.write(compress(source))
        if name.endswith(".js"):
            with open(full_path, 'r') as handle:
                source = handle.read()
            with open(full_path, 'w') as handle:
                handle.write(rjsmin.jsmin(source))
def minify_text(filepath, file_type): print "in minify_text" url = os.path.realpath('.') print "url: " + url print(SITE_URL + filepath) print requests.get(SITE_URL + filepath) text = requests.get(SITE_URL + filepath).content #print text if file_type is "js": minified = minify(text) elif file_type is "css": minified = compress(text) return minified
def minify_stuff(): print "in /minify!" dat = request.json print dat minifiedjs = "" minifiedcss = "" if 'js' in dat: minifiedjs = minify(dat['js']) if 'css' in dat: minifiedcss = compress(dat['css']) toRet = {"js": minifiedjs, "css": minifiedcss} print 'RETURN IN MINIFY: ' print toRet return jsonify(result=toRet)
def minify_css(file_name, root):
    """Compress root/file_name in place and record size stats in dictcss.

    Appends the old size, new size and percentage saved to the module-level
    ``dictcss`` accumulator.
    """
    # Build the path once with os.path.join (the original mixed join()
    # for stat with manual "/" concatenation for open).
    full_path = os.path.join(root, file_name)
    oldsize = os.stat(full_path).st_size
    # with-statements close the handles even on error (the original
    # open/close pairs could leak on exception).
    with open(full_path, "r") as fr:
        contents = fr.read()
    output = compress(contents)
    with open(full_path, "w") as fw:
        fw.write(output)
    newsize = os.stat(full_path).st_size
    percent = (oldsize - newsize) / float(oldsize) * 100
    dictcss['old_size'].append(oldsize)
    dictcss['new_size'].append(newsize)
    dictcss['percentage'].append(percent)
def css_wrapper(css_files, ):
    """ Wrapper for css_compress handling with non-existing files """
    # NOTE(review): `self` is not a parameter — this generator is
    # presumably defined inside a method and closes over `self`
    # (for .logger and .working_dir); confirm against the enclosing scope.
    for item in css_files:
        try:
            self.logger.debug('Loading CSS %s/css/%s', self.working_dir, item)
            with open('{}/css/{}'.format(self.working_dir, item), 'r') as css_file:
                # Yield each sheet compressed, tagged with its source name.
                yield '/* CSS from metadata: {} */ {}'.format(
                    item, compress(css_file.read()))
        except IOError:
            # Missing files are tolerated: yield an empty chunk instead
            # of aborting the whole stream.
            yield ''
def minify_file(path: str) -> str:
    """Minify *path* into a hash-fingerprinted .css file, delete the
    original, and return the new file's basename."""
    with open(path, 'r') as handle:
        source = handle.read()
    minified = compress(source)
    # The content hash of the *original* goes into the new filename.
    digest = str(n_bit_hash(source, 8))
    fingerprinted_path = '%s.%s.css' % (path.replace('.css', ''), digest)
    with open(fingerprinted_path, 'w+') as handle:
        handle.write(minified)
    os.remove(path)
    return ntpath.basename(fingerprinted_path)
def get_css():
    """ collects all styles from the static directory into one string """
    styles = [
        "normalize.css", "milligram.css", "main.css", "nav.css",
        "socialicons.css"
    ]
    # Concatenate in declared order, then compress the combined sheet.
    chunks = []
    for name in styles:
        with open("static/" + name) as handle:
            chunks.append(handle.read())
    return compress("".join(chunks))
def build(): print 'building...' for (dest, srcs) in maps.items(): dest_data = [] for src in srcs: with open('sources/{}'.format(src), 'rb') as srcf: dest_data.append(srcf.read()) dest_data = '\n'.join(dest_data) if dest.endswith('.css'): dest_data = csscompressor.compress(dest_data) with open(dest, 'wb') as destf: destf.write(dest_data)
def compile_css(self, css_filename):
    """Generates CSS files.

    Copies templates/css/<css_filename> into builds/css, compressing it
    first when App.minimize_css is set and the name is not already a
    ".min." build. Returns False when the file was already built during
    this run, True otherwise.
    """
    # Exits if cached.
    if css_filename in self.built_dict:
        return False
    css_fullpath = os.path.join(Dir.templates, Dir.css, css_filename)
    with open(css_fullpath, 'r', encoding='utf8') as f:
        # read() replaces the roundabout ''.join(f.readlines()).
        css_content = f.read()
    # `'.min.' not in name` replaces the clunky .find(...) == -1 test.
    if App.minimize_css and '.min.' not in css_filename:
        css_content = csscompressor.compress(css_content)
    output_filename = os.path.join(Dir.builds, Dir.css, css_filename)
    with open(output_filename, 'w', encoding='utf8') as o:
        o.write(css_content)
    self.built_dict.add(css_filename)
    return True
def embed_css_and_js(html, target):
    """Embed all external css and javascript into an html

    *html* is a pathlib-style path (``html.parent`` resolves relative
    URLs). Every <script src=...> is replaced by an inline <script>
    (remote http(s) sources are downloaded); every stylesheet <link> is
    replaced by an inline, compressed <style>. The link loop opens hrefs
    as local files — remote stylesheet hrefs would fail here.
    The result is written to *target*.
    """
    with open(html, 'r', encoding='utf-8') as sh:
        soup = BeautifulSoup(sh, features='lxml')
    scripts = soup.findAll("script", attrs={"src": True})
    for script in scripts:
        source = script.attrs['src']
        if source.startswith('http'):
            # Remote script: fetch its body over the network.
            infile = urlopen(source)
            content = infile.read().decode('utf-8')
            infile.close()
        else:
            # Local script: resolve relative to the HTML file.
            path = html.parent / source
            with open(path, 'r') as sh:
                content = sh.read()
        # Minification with jsmin didn't work
        tag = soup.new_tag('script')
        tag['type'] = 'text/javascript'
        tag.append(content)
        script.replace_with(tag)
    stylesheets = soup.findAll("link", attrs={"rel": "stylesheet"})
    for stylesheet in stylesheets:
        stylesheet_src = stylesheet.attrs['href']
        tag = soup.new_tag("style")
        tag['type'] = 'text/css'
        path = html.parent / stylesheet_src
        with open(path, 'r') as sh:
            content = sh.read()
        if '@import' in content:
            # Inline one level of @import url("...") references so the
            # resulting <style> block is self-contained.
            import_files = re.findall(r'@import\s+url\("([A-Za-z0-9.]+)"\)', content)
            for name in import_files:
                with open(path.parent / name, 'r') as import_file:
                    import_content = import_file.read()
                content = re.sub(r'@import\s+url\("' + name + r'"\);', import_content, content)
        minified_content = compress(content)
        tag.append(minified_content)
        stylesheet.replace_with(tag)
    with open(target, 'w', encoding='utf-8') as dh:
        dh.write(str(soup))
def _test(self, input, output):
    """Assert our compressor matches the stripped YUI reference output,
    dumping both results on mismatch."""
    result = compress(input)
    if result != output.strip():
        print()
        print('CSM', repr(result))
        print()
        print('YUI', repr(output))
        print()
        assert False
def build(root): print 'building...' for (dest, srcs) in maps.items(): dest_data = [] for src_file in srcs: src_path = os.path.join(root, 'sources/{}'.format(src_file)) with open(src_path, 'rb') as srcf: dest_data.append(srcf.read()) dest_data = '\n'.join(dest_data) if dest.endswith('.css'): dest_data = csscompressor.compress(dest_data) with open(os.path.join(root, dest), 'wb') as destf: destf.write(dest_data)
def main(htmlfile, cssfile, mongo_id, *args):
    """Minify a paper's HTML fragment and CSS, then store both in Mongo."""
    with open(htmlfile) as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    # Only the #page-container subtree is stored, minified.
    page = unicode(soup.find(id='page-container'))
    page = htmlmin.minify(page, remove_comments=True, remove_empty_space=True)
    with open(cssfile) as f:
        css = csscompressor.compress(f.read())
    db.paper.update_one({'_id': ObjectId(mongo_id)}, {'$set': {
        'content.css': css,
        'content.html': page,
    }})
def minify_css(s):
    """
    Minify CSS code.

    @param s: css to minify
    @type s: L{str}
    @return: the minfied css
    @rtype: L{str}
    @raises NotImplementedError: when the optional 'csscompressor'
        dependency is not installed.
    """
    if csscompressor is None:
        raise NotImplementedError(
            "Dependency 'csscompressor' required, but not found!")
    # Bang comments (/*! ... */) are stripped as well.
    return csscompressor.compress(s, preserve_exclamation_comments=False)
def build_site():
    """
    Does all the work to build the production version of the site.

    This includes:

    * Rendering the HTML templates
    * Minifying and setting up the CSS/JS files
    * Copying the favicons into place
    * Setting the site up for pushing to GitHub

    Assumes that the site has not already been built.
    """
    # Create a directory for the output
    os.makedirs(OUTPUT_PATH)

    # The index template is the landing page, so we write it to index.html
    # in the top-level of the output directory.
    Template('index').export(use_subdir=False)

    # Every other page gets its own subdirectory.
    for name in ['about', 'contact', 'privacy', 'results']:
        Template(name).export()

    # Copy the favicons into place
    os.makedirs(os.path.join(OUTPUT_PATH, 'images'))
    for filename in ['favicon.ico', 'favicon.png', 'apple-touch-icon.png']:
        shutil.copyfile(src=os.path.join(ASSET_PATH, filename),
                        dst=os.path.join(OUTPUT_PATH, 'images', filename))

    # Minify the stylesheet into css/style.min.css.
    os.makedirs(os.path.join(OUTPUT_PATH, 'css'))
    with open(os.path.join(ASSET_PATH, 'style.css')) as infile:
        minified_css = csscompressor.compress(infile.read())
    with open(os.path.join(OUTPUT_PATH, 'css', 'style.min.css'), 'w') as outfile:
        outfile.write(minified_css)

    # Minify the JavaScript into javascript/main.min.js.
    os.makedirs(os.path.join(OUTPUT_PATH, 'javascript'))
    with open(os.path.join(ASSET_PATH, 'main.js')) as infile:
        minified_js = slimit.minify(infile.read())
    with open(os.path.join(OUTPUT_PATH, 'javascript', 'main.min.js'), 'w') as outfile:
        outfile.write(minified_js)

    # GitHub Pages requires a CNAME file for custom domains. Create
    # this file in the output path.
    # https://help.github.com/articles/adding-a-cname-file-to-your-repository/
    with open(os.path.join(OUTPUT_PATH, 'CNAME'), 'w') as outfile:
        outfile.write('finduntaggedtumblrposts.com')
def merge(merge_info, base_dir, build_dir, excludes, obfuscate=False):
    """For merging files there are three cases to consider:

    1. path ends with one star(*)
    2. path ends with two star(**)
    3. a file path

    Returns the list of MergeObject records describing each produced
    bundle (used later to keep the order of script/css tags).
    """
    merged_objects = []
    for dest_path in merge_info.keys():
        print("start merging files into: %s..." % dest_path)
        merging_files = []
        ext = ""
        if dest_path.endswith("js"):
            ext = "js"
        elif dest_path.endswith("css"):
            ext = "css"
        merging_files = get_files(base_dir, merge_info[dest_path], ext)
        # Now open, minify and merge files:
        os.makedirs(os.path.join(build_dir, os.path.dirname(dest_path)),
                    exist_ok=True)
        # with-statements close every handle even if minification raises;
        # the original opened sources without ever closing them and closed
        # the destination only on the happy path.
        with open(os.path.join(build_dir, dest_path), "w",
                  encoding="utf-8") as dest_file:
            for f in merging_files:
                if f in excludes:
                    continue
                print(" processing file:", f)
                with open(f, "r", encoding="utf-8") as src:
                    source = src.read()
                if is_min_file(f):
                    print(" already minified:", f)
                    data = source
                elif ext == "js":
                    data = slimit.minify(source, mangle=obfuscate,
                                         mangle_toplevel=False)
                elif ext == "css":
                    data = csscompressor.compress(source)
                dest_file.write(data)
        print("finish merging %s files into %s" % (len(merging_files), dest_path))
        # Creating merge-object. we use this later to keep order of script/css tags:
        mo = MergeObject(dest_path)
        mo.merging_files = merging_files
        merged_objects.append(mo)
    # End of merge_info loop
    return merged_objects
def css(self, path):
    """Return optionally minified CSS for filepath"""
    # Decide up front whether to minify (anything but an explicit False).
    minify = self.config['minify_css'] is not False
    with codecs.open(path, 'r', 'utf8') as fh:
        raw = fh.read()
    data = csscompressor.compress(raw) if minify else raw
    try:
        if self.config['generate'] is True and len(data) > 0:
            outfile = self.config['css_output'] + '/' + os.path.basename(
                path)
            with open(outfile, 'wb') as fh:
                fh.write(str(data).encode('utf8'))
    except (OSError, IOError):
        # Best-effort: failure to persist the file never breaks serving.
        pass
    return data
def process_css(base_dir, fingerprint_nonminimized=True):
    """Searches the base_dir for all CSS files. When found, the file may
    be minimized (if not already), and may be fingerprinted.

    Returns a dict mapping each original URL path to the new file's
    url/size/path; each original file is deleted after its fingerprinted
    replacement is written.
    """
    css_map = {}
    css_files = []
    # Collect every .css file whose name does not contain ".min".
    for root, dirs, files in os.walk(base_dir):
        for fname in [x for x in files if x.endswith(".css") and (".min" not in x)]:
            css_files.append(os.path.join(root, fname))
    for css_file in css_files:
        text = slurp(css_file)
        dirname, fname = os.path.split(css_file)
        fbase, fext = os.path.splitext(fname)
        # NOTE(review): is_minimized presumably inspects the content,
        # since ".min"-named files were already filtered out — confirm.
        if is_minimized(css_file) and fingerprint_nonminimized:
            fprint = fingerprint(text)
        else:
            text = compress(text)
            fprint = fingerprint(text)
        # The fingerprint goes into the filename for cache busting.
        new_fname = "%s-%s%s" % (fbase, fprint, fext)
        new_path = os.path.join(dirname, new_fname)
        with open(new_path, "wb") as fh:
            fh.write(text)
        # URL paths are relative to base_dir, with forward slashes.
        new_url = new_path[len(base_dir):].replace("\\", "/")
        old_url = css_file[len(base_dir):].replace("\\", "/")
        new_size = os.stat(new_path).st_size
        old_size = os.stat(css_file).st_size
        print "%s reduced by %0.2f%%" % (old_url, 100.0 * (old_size - new_size) / old_size)
        css_map[old_url] = {
            "url": new_url,
            "size": new_size,
            "path": new_path
        }
        os.unlink(css_file)
    return css_map
def run():
    """Concatenate the source files listed in `order`, emit minified and
    unminified stylesheets, and bump the build counter in build.dat.

    The counter is only written back after a successful build, so a
    failed run does not consume a build number.
    """
    with open(r'build.dat', 'r+') as build:
        build_ver = int(build.read()) + 1
        print('\nBUILD #' + str(build_ver))

        if not os.path.isdir(src_dir):
            print('\nFailed: the source directory, "' + src_dir + '" was not found')
            return

        # os.path.join keeps this working on non-Windows hosts too
        # (the old '\\' concatenation was Windows-only).
        dist_path = os.path.join(dist_dir, dist_file)

        # Combine files in specified order
        with open(dist_path, 'wb') as outfile:
            for src_file in order:
                path = os.path.join(src_dir, src_file)
                if os.path.isfile(path):
                    with open(path, 'rb') as readfile:
                        shutil.copyfileobj(readfile, outfile)
                    print(' + ' + path)
                else:
                    print('\nFailed: target file not found: ' + path + ';\n' +
                          'if this file is no longer in use then you must remove it from the "order" variable in build.py')
                    return

        # copy an unminified version of the stylesheet to a separate file
        shutil.copyfile(dist_path, os.path.join(dist_dir, unmin_file))

        # Compress in place, prefixing the build header.
        with open(dist_path, 'r+') as outfile:
            raw = outfile.read()
            mini = compress(raw)
            outfile.seek(0)
            outfile.write(top(build_ver) + '\n' + mini)
            outfile.truncate()

        # Increment build version in build.dat afterwards in case of failure
        build.seek(0)
        build.write(str(build_ver))
    print('\nDone!')
def main():
    """Concatenate the CLI's input CSS files, compress, and emit the result."""
    args = _get_args()
    sources = []
    for name in args.input:
        with open(name, 'rt') as f:
            sources.append(f.read())
    combined = '\n\n'.join(sources)
    # 0 disables line wrapping unless the user asked for a limit.
    line_break = args.line_break if args.line_break is not None else 0
    output = csscompressor.compress(combined, max_linelen=line_break)
    if args.output:
        with open(args.output, 'wt') as f:
            f.write(output)
            f.write('\n')
    else:
        print(output)
def build():
    """ Build sprites and stylesheets

    Runs glue to generate sprite CSS under build/css, then splices each
    compressed sprite stylesheet into design/stylesheet.css at its
    "/* --- INSERT BUILD <NAME> --- */" marker, writing the result to
    build/stylesheet.css.
    """
    import glob
    # Build sprites
    source_dir = "design/sprites"
    image_dir = "build/images"
    css_dir = "build/css"
    glue_options = "glue --source=%s --project --css=%s --img=%s" % (source_dir, css_dir, image_dir)
    glue_args = glue_options.split()
    glue(argv=glue_args)
    f = open("design/stylesheet.css", "r")
    stylesheet = f.read()
    f.close()
    # Insert sprites stylesheets
    css_files = glob.glob("build/css/*.css")
    for css_file in css_files:
        with open(css_file, "r") as f:
            css_basename = os.path.splitext(os.path.basename(css_file))[0]
            # Marker in the master stylesheet where this build output
            # is spliced in; skipped when the marker is absent.
            search_string = "/* --- INSERT BUILD %s --- */" % css_basename.upper()
            insert_point = stylesheet.find(search_string)
            if insert_point > -1:
                print "Adding %s to stylesheet" % css_file
                build_style = "/* --- BUILD START %s --- */\n" % css_basename.upper()
                build_style += compress(f.read())
                build_style += "\n/* --- BUILD END %s --- */\n" % css_basename.upper()
                stylesheet = stylesheet.replace(search_string, build_style)
        # NOTE(review): redundant — the with-statement above already
        # closed the file.
        f.close()
    f = open("build/stylesheet.css", "w")
    f.write(stylesheet)
    f.close()
    print "Done."
def save_compiled_css():
    """Compile the site SASS, compress it (except in dev mode), and save it
    under a content-hashed filename; returns the file's URL path."""
    # Compile our SASS
    css_in_fn = make_path([settings.input_dir,
                           settings.templates_dir,
                           settings.css_dir,
                           settings.css_input_file])
    sass_output = sass.compile(filename=css_in_fn)
    # Development builds keep the readable CSS.
    if not args.dev:
        sass_output = compress(sass_output)
    # A content hash in the filename makes the output cache-bustable.
    css_hash = hashlib.sha1(sass_output.encode('utf-8')).hexdigest()
    css_out_name = settings.css_output_file_mask.format(
        hash=css_hash[:settings.filename_hash_length])
    css_file_name = save_to_output(sass_output,
                                   [settings.css_dir, css_out_name])
    return '/' + css_file_name
def _BuildHeader(self):
    """Builds the <head> section of the HTML file.

    The header contains the page title and either embedded or linked CSS
    and JS files.

    Returns:
      A string with <head>...</head> HTML.
    """
    html = ['<head>', '<title>Results</title>']
    # Add Material Design hosted libs.
    html.extend([
        '<link rel="stylesheet" href="http://fonts.googleapis.com/'
        'css?family=Roboto:300,400,500,700" type="text/css">',
        '<link rel="stylesheet" href="https://fonts.googleapis.com/'
        'icon?family=Material+Icons">',
        '<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/'
        'material.indigo-pink.min.css">',
        '<script defer src="https://code.getmdl.io/1.3.0/'
        'material.min.js"></script>',
    ])
    # Embed custom JavaScript and CSS files, minified when configured.
    html.append('<script>')
    with open(self._JS_FILEPATH) as f:
        js_source = f.read()
    html.append(jsmin.jsmin(js_source) if self._JS_MINIFIED
                else js_source.rstrip())
    html.append('</script>')
    html.append('<style>')
    with open(self._CSS_FILEPATH) as f:
        css_source = f.read()
    html.append(csscompressor.compress(css_source) if self._CSS_MINIFIED
                else css_source.rstrip())
    html.append('</style>')
    html.append('</head>')
    return self._NEW_LINE.join(html)
def convert_string(self, ctx, css, path=None):
    """Return *css*, minified when the configuration asks for it."""
    if not self.conf.minify:
        return css
    # Imported lazily so the dependency is only needed when minifying.
    import csscompressor
    return csscompressor.compress(css)
def minify(self, source):
    """Default minifier: CSS compression. Override in subclass."""
    return compress(source)
def css():
    """Return the compressed contents of the main stylesheet.

    The file handle is now closed deterministically via a context manager
    (the original relied on the garbage collector to close it).
    """
    with open(SRC_FILE('main.css'), 'r') as handle:
        return compress(handle.read())
user_agent="script:geo1088/reddit-stylesheet-sync:v1.0 (written by /u/geo1088; run by /u/{})".format(username)) print("Logged into Reddit as /u/{}".format(username)) # Read stylesheet with open(os.path.join(os.getcwd(), "style.css"), "r") as stylesheet_file: stylesheet = stylesheet_file.read() print("Got stylesheet.") # Strip leading @charset (Reddit doesn't allow it but Sass adds it sometimes) stylesheet = re.sub(r"^@charset.*\n", "", stylesheet) # Minify if we should if skip_minify: print("Skipping minification.") else: stylesheet = compress(stylesheet) print("Minified stylesheet.") # Push the stylesheet to the subreddit print("Writing stylesheet to /r/{}".format(sub_name)) sub = r.subreddit(sub_name) try: edit_msg = "https://github.com/{}/compare/{}".format( os.environ['TRAVIS_REPO_SLUG'], os.environ['TRAVIS_COMMIT_RANGE']) sub.wiki['config/stylesheet'].edit(stylesheet, edit_msg) except Exception as e: print("Ran into an error while uploading stylesheet; aborting.") raise e print("That's a wrap!")
## shutil.copytree(os.path.join(STATIC_DIR, fic), os.path.join(dir, blog_dir, fic)) ##""" site = make_site( searchpath=blog_dir, # outpath = os.path.join(dir, blog_dir), outpath=os.path.join(dir), extensions=["jinja2.ext.i18n"], ) translations = gettext.translation(domain="website", localedir=LOCALE_DIR, languages=[lang], codeset="utf-8") site._env.install_gettext_translations(translations) site.render() ### optimize builded data print("Optimizing css...") # minify all the css files mypath = OUT_DIR for root, dirs, files in os.walk(mypath): for fic in files: fic_path = os.path.join(root, fic) if fic[-4:] == ".css": # minify only not already minified files if fic[-8:-4] != ".min": print("- {0}".format(fic_path)) with open(fic_path) as f: new_css = compress(f.read()) with open(fic_path, "w") as f: f.write(new_css)
def test_linelen_2(self):
    """Empty input must compress to the empty string."""
    # Renamed from `input` to avoid shadowing the builtin.
    src = ''
    output = compress(src, max_linelen=2)
    assert output == ""
def output(self, inp, out, **kw):
    """Filter hook: compress the CSS read from *inp* and write it to *out*."""
    minified = compress(inp.read())
    out.write(minified)
def build(input_file, configs):
    """Build a deployable copy of the site rooted at *input_file*.

    Creates a timestamped build folder, merges the bundles declared in
    the config, minifies or copies every remaining js/css file, then
    rewrites the HTML file's <script>/<link> tags to point at the merged
    or ".min" outputs.

    :param input_file: path of the entry HTML file.
    :param configs: parsed config dict ("buildPath", "timeStamp",
        "exclude", "merge", "obfuscate", "file", "minify" keys are used).
    """
    # Making build folder
    build_name = os.path.normpath(configs["buildPath"] + configs["timeStamp"])
    base_dir = os.path.abspath(os.path.dirname(input_file))
    build_dir = os.path.join(base_dir, build_name)
    os.makedirs(build_dir, exist_ok=True)
    # Getting list of excluded files:
    excludes = get_files(base_dir, configs["exclude"])
    # Merging requested files in config-file:
    merged_objects = merge(configs["merge"], base_dir, build_dir, excludes, configs["obfuscate"])
    merged_files = [f for mo in merged_objects for f in mo.merging_files]
    # Traverse directory of the given html file recursively and process unmerged remaining files:
    for root, dirs, files in os.walk(base_dir):
        # Never descend into the build output itself.
        if build_name in dirs:
            dirs.remove(build_name)
        if files:
            # remove config file from files list:
            if configs["file"] in files:
                files.remove(configs["file"])
            dest = os.path.join(build_dir, os.path.relpath(root, base_dir))
            for file in files:
                file_path = os.path.join(root, file)
                # If file was already part of a merge process:
                if file_path in merged_files or file_path in excludes:
                    continue
                if is_min_file_exists(file, file_path):
                    continue
                os.makedirs(dest, exist_ok=True)
                ext = os.path.splitext(file)[1][1:]
                if ext in configs["minify"] and not is_min_file(file):
                    # Processing(minifying) js/css file:
                    print("minifying %s" % file_path)
                    min_file_path = os.path.join(dest, os.path.splitext(os.path.basename(file))[0] + ".min." + ext)
                    if ext == "js":
                        data = slimit.minify(open(file_path, "r", encoding="utf-8").read(), mangle=configs["obfuscate"], mangle_toplevel=False)
                    else:
                        data = csscompressor.compress(open(file_path, "r", encoding="utf-8").read())
                    with open(min_file_path, "w", encoding="utf-8") as min_file:
                        min_file.write(data)
                else:
                    # just copy the file.
                    print("copying %s" % file_path)
                    shutil.copy2(file_path, dest)
    # Now editing given html file <script> and <link> tags:
    print("Updating html file...")
    tree = None
    with open(os.path.join(build_dir, os.path.basename(input_file)), "r", encoding="utf-8") as html_file:
        tree = html.parse(html_file)
    # Updating javascript tags:
    for tag in tree.findall("//script[@src]"):
        # External scripts are left untouched.
        if tag.attrib["src"].startswith("http"):
            continue
        # Get the complete path of source file:
        src_file = os.path.normpath(os.path.join(base_dir, tag.attrib["src"]))
        if src_file in merged_files:
            # Source file is part of a merge:
            mo = get_merge_object(merged_objects, src_file)
            if mo is not None and not mo.added:
                # Replacing new merged file tag with old one:
                new_tag = E.SCRIPT(type="text/javascript", src=mo.into)
                tag.getparent().replace(tag, new_tag)
                mo.added = True
            else:
                # Merged file tag was already added.
                tag.getparent().remove(tag)
        elif not src_file.endswith(".min.js"):
            # replacing source file with minified one:
            tag.attrib["src"] = os.path.relpath(src_file, base_dir)[:-2] + "min.js"
    # Updating stylesheet link tags:
    for tag in tree.xpath('//*[@rel="stylesheet" or @media="all" or @media="screen"]'):
        if tag.attrib["href"].startswith("http"):
            continue
        # Get the complete path of source file:
        href_file = os.path.normpath(os.path.join(base_dir, tag.attrib["href"]))
        if href_file in merged_files:
            # Source file is part of a merge:
            mo = get_merge_object(merged_objects, href_file)
            if mo is not None and not mo.added:
                # Replacing new merged file tag with old one:
                new_tag = E.LINK(rel="stylesheet", type="text/css", href=mo.into)
                tag.getparent().replace(tag, new_tag)
                mo.added = True
            else:
                # Merged file tag was already added.
                tag.getparent().remove(tag)
        elif not href_file.endswith(".min.css"):
            # replacing href file with minified one:
            tag.attrib["href"] = os.path.relpath(href_file, base_dir)[:-3] + "min.css"
    with open(os.path.join(build_dir, os.path.basename(input_file)), "wb") as html_file:
        html_file.write(html.tostring(tree, encoding="utf-8", pretty_print=True))
    print("Done!")
def output(self, _in, out, **kw):
    """Filter hook: compress CSS from *_in* into *out*, wrapping lines
    near 500 characters."""
    compressed = csscompressor.compress(_in.read(), max_linelen=500)
    out.write(compressed)
def do_job(fpath, job, output_suffix, skip=[]):
    """Simple Build Tool for Webdeveloper

    Runs one build *job* (sass/scss, coffeescript, markdown, css, html,
    javascript or plaintext) over its configured files — or just *fpath*
    when the job lists none — writing each result next to its source with
    ``.<output_suffix>.<ext>`` inserted into the name. Returns the job's
    file list, or [] when no eligible file was processed.

    NOTE(review): the mutable default ``skip=[]`` is shared across calls;
    it is only read here, so it is harmless unless a caller mutates it.
    """
    if 'files' not in job:
        files = [fpath]
    else:
        files = job['files']
    for f in files:
        f = os.path.normcase(os.path.normpath(f))
        # Never reprocess our own outputs or explicitly skipped files.
        if output_suffix not in f and f not in skip:
            if os.path.isfile(f):
                click.echo(click.style('==>', fg='green')+'%s' % f)
            else:
                # Missing input aborts the whole job.
                click.echo(click.style('==X', fg='red')+'%s' % f)
                break
            with open(f) as fo:
                data = fo.read()
            # Python-2 style: bytes read from disk, decoded explicitly.
            data = data.decode('UTF-8')
            output = template(os.path.dirname(f), data)
            if job['type'] in ['sass', 'scss']:
                f = re.sub(r"\.scss|\.sass", r".%s.css" % output_suffix, f)
                try:
                    if job['compress'] == True:
                        output = sass.compile(string=output, output_style='compressed')
                    else:
                        output = sass.compile(string=output, output_style='expanded')
                except sass.CompileError as e:
                    click.echo(click.style(' !!! %s' % e.message.replace('stdin:', '').replace('\n',''), fg='red'))
            if job['type'] in ['coffeescript']:
                f = re.sub(r"\.coffee", r".%s.js" % output_suffix, f)
                output = coffeescript.compile(output)
                if job['compress'] == True:
                    output = jsmin.jsmin(output)
            if job['type'] in ['markdown']:
                f = re.sub(r"\.md|\.markdown", r".%s.html" % output_suffix, f)
                output = markdown.markdown(output)
                if job['compress'] == True:
                    output = htmlmin.minify(output)
            if job['type'] in ['css']:
                # Insert the suffix before the final extension.
                f = re.sub(r"\.(?=[^.]*$)", r".%s." % output_suffix, f)
                if job['compress'] == True:
                    output = csscompressor.compress(output)
            if job['type'] in ['html']:
                f = re.sub(r"\.(?=[^.]*$)", r".%s." % output_suffix, f)
                if job['compress'] == True:
                    output = htmlmin.minify(output)
            if job['type'] in ['javascript']:
                f = re.sub(r"\.(?=[^.]*$)", r".%s." % output_suffix, f)
                if job['compress'] == True:
                    output = jsmin.jsmin(output)
            if job['type'] in ['plaintext']:
                f = re.sub(r"\.(?=[^.]*$)", r".%s." % output_suffix, f)
            with open(f, 'w') as fh:
                fh.write(output.encode('UTF-8'))
            click.echo(click.style('<==', fg='blue')+'%s' % f)
            # NOTE(review): returns after the first processed file —
            # confirm whether multi-file jobs are expected to continue.
            return files
    return []
def css_compress(asset):
    """Compress an asset's raw content in place when it targets a .css file."""
    is_stylesheet = asset['destination'].endswith('.css')
    if is_stylesheet:
        asset['raw'] = csscompressor.compress(asset['raw'])
    return asset