def _bundle_v2(tmp_out_dir, in_path, out_path, manifest_out_path, args,
               excludes):
  in_html_args = []
  for f in args.html_in_files:
    in_html_args.append(f)

  exclude_args = []
  for f in excludes:
    exclude_args.append('--exclude')
    exclude_args.append(f)

  node.RunNode(
      [node_modules.PathToBundler()] +
      _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args +
      [
        '--manifest-out', manifest_out_path,
        '--root', in_path,
        '--redirect', 'chrome://%s/|%s' % (args.host, in_path + '/'),
        '--out-dir', os.path.relpath(tmp_out_dir, _CWD).replace('\\', '/'),
        '--shell', args.html_in_files[0],
      ] + in_html_args)

  for index, html_file in enumerate(args.html_in_files):
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'r') as f:
      output = f.read()

      # Grit includes are not supported, use HTML imports instead.
      output = output.replace('<include src="', '<include src-disabled="')

      if args.insert_in_head:
        assert '<head>' in output
        # NOTE(dbeam): polymer-bundler eats <base> tags after processing. This
        # undoes that by adding a <base> tag to the (post-processed) generated
        # output.
        output = output.replace('<head>', '<head>' + args.insert_in_head)

    # Open file again with 'w' such that the previous contents are
    # overwritten.
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'w') as f:
      f.write(output)

  bundled_paths = []
  for index, html_in_file in enumerate(args.html_in_files):
    bundled_paths.append(
        os.path.join(tmp_out_dir, args.html_out_files[index]))
    js_out_file = args.js_out_files[index]

    # Run crisper to separate the JS from the HTML file.
    node.RunNode([node_modules.PathToCrisper(),
                  '--source', os.path.join(tmp_out_dir, html_in_file),
                  '--script-in-head', 'false',
                  '--html', bundled_paths[index],
                  '--js', os.path.join(tmp_out_dir, js_out_file)])

  return bundled_paths
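# A minimal, self-contained sketch (not part of the build script above) of the
# two text rewrites that _bundle_v2 applies to each bundled HTML file: grit
# <include> tags are disabled because grit includes are not supported in the
# bundle, and args.insert_in_head (if any) is spliced in right after <head>.
# The sample HTML and the <base> value in the example are made up for
# illustration.
def _sketch_postprocess_bundled_html(html, insert_in_head=None):
  # Disable grit includes; HTML imports are used instead.
  html = html.replace('<include src="', '<include src-disabled="')
  if insert_in_head:
    assert '<head>' in html
    # Re-add content (e.g. a <base> tag) that the bundler stripped.
    html = html.replace('<head>', '<head>' + insert_in_head)
  return html


# Example (hypothetical input):
#   _sketch_postprocess_bundled_html(
#       '<html><head></head><include src="foo.html"></html>',
#       insert_in_head='<base href="chrome://example/">')
# returns:
#   '<html><head><base href="chrome://example/"></head>'
#   '<include src-disabled="foo.html"></html>'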
def _vulcanize(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder))
  out_path = os.path.join(_CWD, args.out_folder)

  html_out_path = os.path.join(out_path, args.html_out_file)
  js_out_path = os.path.join(out_path, args.js_out_file)

  exclude_args = []
  for f in args.exclude or []:
    exclude_args.append('--exclude')
    exclude_args.append(f)

  output = node.RunNode(
      [node_modules.PathToVulcanize()] +
      _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args +
      [
        '--out-request-list', _request_list_path(out_path,
                                                 args.html_out_file),
        '--redirect', '"/|%s"' % in_path,
        '--redirect', '"chrome://%s/|%s"' % (args.host, in_path),
        # TODO(dpapad): Figure out why vulcanize treats the input path
        # differently on Windows VS Linux/Mac.
        os.path.join(in_path if platform.system() == 'Windows' else os.sep,
                     args.html_in_file),
      ])

  # Grit includes are not supported, use HTML imports instead.
  output = output.replace('<include src="', '<include src-disabled="')

  if args.insert_in_head:
    assert '<head>' in output
    # NOTE(dbeam): Vulcanize eats <base> tags after processing. This undoes
    # that by adding a <base> tag to the (post-processed) generated output.
    output = output.replace('<head>', '<head>' + args.insert_in_head)

  with tempfile.NamedTemporaryFile(mode='wt+', delete=False) as tmp:
    tmp.write(output)

  try:
    node.RunNode([node_modules.PathToCrisper(),
                  '--source', tmp.name,
                  '--script-in-head', 'false',
                  '--html', html_out_path,
                  '--js', js_out_path])

    # Create an empty JS file if crisper did not create one.
    if not os.path.isfile(js_out_path):
      open(js_out_path, 'w').close()

    node.RunNode([node_modules.PathToUglifyJs(), js_out_path,
                  '--comments', '"/Copyright|license|LICENSE|\<\/?if/"',
                  '--output', js_out_path])
  finally:
    os.remove(tmp.name)
def _optimize(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder)).replace('\\', '/')
  out_path = os.path.join(_CWD, args.out_folder).replace('\\', '/')
  manifest_out_path = _request_list_path(out_path, args.host_url)
  tmp_out_dir = tempfile.mkdtemp(dir=out_path).replace('\\', '/')

  excludes = _BASE_EXCLUDES + [
    # This file is dynamically created by C++. Need to specify an exclusion
    # URL for both the relative URL and chrome:// URL syntax.
    'strings.js',
    'strings.m.js',
    '%s/strings.js' % args.host_url,
    '%s/strings.m.js' % args.host_url,
  ]
  excludes.extend(args.exclude or [])
  external_paths = args.external_paths or []

  try:
    if args.js_module_in_files:
      pcb_out_paths = [os.path.join(tmp_out_dir, f) for f in args.js_out_files]
      bundled_paths = _bundle_v3(tmp_out_dir, in_path, out_path,
                                 manifest_out_path, args, excludes,
                                 external_paths)
    else:
      # Ensure Polymer 2 and Polymer 3 request lists don't collide.
      manifest_out_path = _request_list_path(out_path,
                                             args.host_url[:-1] + '-v2/')
      pcb_out_paths = [os.path.join(out_path, f) for f in args.html_out_files]
      bundled_paths = _bundle_v2(tmp_out_dir, in_path, out_path,
                                 manifest_out_path, args, excludes)

    # Run polymer-css-build.
    node.RunNode([node_modules.PathToPolymerCssBuild()] +
                 ['--polymer-version', '2'] +
                 ['--no-inline-includes', '-f'] + bundled_paths +
                 ['-o'] + pcb_out_paths)

    # Pass the JS files through Terser and write the output to its final
    # destination.
    for index, js_out_file in enumerate(args.js_out_files):
      node.RunNode([node_modules.PathToTerser(),
                    os.path.join(tmp_out_dir, js_out_file),
                    '--comments', '/Copyright|license|LICENSE|\<\/?if/',
                    '--output', os.path.join(out_path, js_out_file)])
  finally:
    shutil.rmtree(tmp_out_dir)

  return manifest_out_path
def main():
  polymer_dir = os.path.join(_HERE_PATH, 'components-chromium', 'polymer2')

  # Final HTML bundle.
  polymer_html = os.path.join(polymer_dir, 'polymer.html')
  # Final JS bundle.
  polymer_js = os.path.join(polymer_dir, 'polymer-extracted.js')

  # Move the entire checkout to a temp location.
  tmp_dir = os.path.join(_HERE_PATH, 'components-chromium', 'polymer2temp')
  if os.path.exists(tmp_dir):
    shutil.rmtree(tmp_dir)
  shutil.move(polymer_dir, tmp_dir)

  tmp_out_dir = os.path.join(tmp_dir, 'out')
  os.makedirs(tmp_out_dir)

  try:
    # Combine everything to a single HTML bundle file.
    node.RunNode([
        node_modules.PathToBundler(),
        '--strip-comments',
        '--inline-scripts',
        '--inline-css',
        '--out-file', os.path.join(tmp_out_dir, 'polymer.html'),
        os.path.join(tmp_dir, 'polymer.html'),
    ])

    # Extract the JS to a separate file named polymer-extracted.js.
    extract_inline_scripts.ExtractFrom(
        os.path.join(tmp_out_dir, 'polymer.html'))

    # Minify the JS bundle.
    extracted_js = os.path.join(tmp_out_dir, 'polymer-extracted.js')
    node.RunNode([
        node_modules.PathToTerser(), extracted_js,
        '--comments', '/Copyright|license|LICENSE/',
        '--output', extracted_js,
    ])

    # Copy generated bundled JS/HTML files back to the original location.
    os.makedirs(polymer_dir)
    shutil.move(os.path.join(tmp_out_dir, 'polymer.html'), polymer_html)
    shutil.move(extracted_js, polymer_js)

    # Copy a few more files.
    shutil.move(os.path.join(tmp_dir, 'bower.json'), polymer_dir)
    shutil.move(os.path.join(tmp_dir, 'LICENSE.txt'), polymer_dir)
  finally:
    # Delete components-chromium/shadycss since it ends up in the bundle.
    shutil.rmtree(
        os.path.join(_HERE_PATH, 'components-chromium', 'shadycss'))
    shutil.rmtree(tmp_dir)
def _bundle_v3(tmp_out_dir, in_path, out_path, manifest_out_path, args,
               excludes):
  if not os.path.exists(tmp_out_dir):
    os.makedirs(tmp_out_dir)

  path_to_plugin = os.path.join(
      os.path.abspath(_HERE_PATH), 'tools', 'rollup_plugin.js')
  rollup_config_file = _generate_rollup_config(tmp_out_dir, path_to_plugin,
                                               in_path, args.host, excludes)

  bundled_paths = []
  for index, js_module_in_file in enumerate(args.js_module_in_files):
    js_out_file = args.js_out_files[index]
    rollup_js_out_file = '%s.rollup.js' % js_out_file[:-3]
    rollup_js_out_path = os.path.join(tmp_out_dir, rollup_js_out_file)

    node.RunNode([node_modules.PathToRollup()] + [
        '--format', 'esm',
        '--input', os.path.join(in_path, js_module_in_file),
        '--file', rollup_js_out_path,
        '--sourcemap', '--sourcemapExcludeSources',
        '--config', rollup_config_file,
        '--silent',
    ])

    # Create the manifest file from the sourcemap generated by rollup.
    _generate_manifest_file(rollup_js_out_file, tmp_out_dir, in_path,
                            manifest_out_path)

    bundled_paths.append(rollup_js_out_path)

  return bundled_paths
def RunEsLintChecks(self, affected_js_files, format='stylish'):
  """Runs lint checks using ESLint. The ESLint rules being applied are defined
  in the .eslintrc.js configuration file.
  """
  os_path = self.input_api.os_path

  try:
    # Import ESLint.
    _HERE_PATH = os_path.dirname(os_path.realpath(__file__))
    _SRC_PATH = os_path.normpath(os_path.join(_HERE_PATH, '..', '..'))
    import sys
    old_sys_path = sys.path[:]
    sys.path.append(os_path.join(_SRC_PATH, 'third_party', 'node'))
    import node, node_modules
  finally:
    sys.path = old_sys_path

  # Extract paths to be passed to ESLint.
  affected_js_files_paths = []
  presubmit_path = self.input_api.PresubmitLocalPath()
  for f in affected_js_files:
    affected_js_files_paths.append(
        os_path.relpath(f.AbsoluteLocalPath(), presubmit_path))

  output = node.RunNode([
      node_modules.PathToEsLint(),
      '--color',
      '--format', format,
      '--ignore-pattern \'!.eslintrc.js\'',
      ' '.join(affected_js_files_paths),
  ])

  return [self.output_api.PresubmitError(output)] if output else []
def _css_build(out_folder, input_files, output_files):
  out_path = os.path.join(_CWD, out_folder)
  # Use list comprehensions (rather than map()) so that list concatenation
  # below also works under Python 3.
  in_paths = [os.path.join(out_path, f) for f in input_files]
  out_paths = [os.path.join(out_path, f) for f in output_files]

  args = ['--no-inline-includes', '-f'] + in_paths + ['-o'] + out_paths
  node.RunNode([node_modules.PathToPolymerCssBuild()] + args)
def _bundle_v3(tmp_out_dir, in_path, out_path, manifest_out_path, args,
               excludes, external_paths):
  if not os.path.exists(tmp_out_dir):
    os.makedirs(tmp_out_dir)

  path_to_plugin = os.path.join(
      os.path.abspath(_HERE_PATH), 'tools', 'rollup_plugin.js')
  rollup_config_file = _generate_rollup_config(tmp_out_dir, path_to_plugin,
                                               in_path, args.host, excludes,
                                               external_paths)

  rollup_args = [os.path.join(in_path, f) for f in args.js_module_in_files]

  # Confirm names are as expected. This is necessary to avoid having to
  # replace import statements in the generated output files.
  # TODO(rbpotter): Is it worth adding import statement replacement to support
  # arbitrary names?
  bundled_paths = []
  for index, js_file in enumerate(args.js_module_in_files):
    base_file_name = os.path.basename(js_file)
    expected_name = '%s.rollup.js' % base_file_name[:-len('.js')]
    assert args.js_out_files[index] == expected_name, \
        'Output file corresponding to %s should be named %s' % \
        (js_file, expected_name)
    bundled_paths.append(os.path.join(tmp_out_dir, expected_name))

  # This indicates that rollup is expected to generate a shared chunk file as
  # well as one file per module. Set its name using --chunkFileNames. Note:
  # Currently, this only supports 2 entry points, which generate 2
  # corresponding outputs and 1 shared output.
  if len(args.js_out_files) == 3:
    assert len(args.js_module_in_files) == 2, \
        'Expect 2 module entry points for generating 3 outputs'
    shared_file_name = args.js_out_files[2]
    rollup_args += ['--chunkFileNames', shared_file_name]
    bundled_paths.append(os.path.join(tmp_out_dir, shared_file_name))

  node.RunNode([node_modules.PathToRollup()] + rollup_args + [
      '--format', 'esm',
      '--dir', tmp_out_dir,
      '--entryFileNames', '[name].rollup.js',
      '--sourcemap', '--sourcemapExcludeSources',
      '--config', rollup_config_file,
      '--silent',
  ])

  # Create the manifest file from the sourcemaps generated by rollup.
  generated_paths = _generate_manifest_file(tmp_out_dir, in_path,
                                            manifest_out_path)
  assert len(generated_paths) == len(bundled_paths), \
      'unexpected number of bundles - %s - generated by rollup' % \
      len(generated_paths)

  for bundled_file in bundled_paths:
    with open(bundled_file, 'r') as f:
      output = f.read()
      assert '<if expr' not in output, \
          'Unexpected <if expr> found in bundled output. Check that all ' + \
          'input files using such expressions are preprocessed.'

  return bundled_paths
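# Hedged helper sketch mirroring the naming rule that _bundle_v3 asserts
# above: for an input module 'foo.js', rollup's output (produced via
# --entryFileNames '[name].rollup.js') must appear in js_out_files as
# 'foo.rollup.js'. The file name in the example is illustrative only.
import os


def _sketch_expected_rollup_name(js_module_in_file):
  base_file_name = os.path.basename(js_module_in_file)
  return '%s.rollup.js' % base_file_name[:-len('.js')]


# Example: _sketch_expected_rollup_name('pages/print_preview.js')
# returns 'print_preview.rollup.js'.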
def _extract_imports(input_file):
  path_to_acorn = path.join('node_modules', 'acorn', 'bin', 'acorn')
  ast = node.RunNode([path_to_acorn, '--module', input_file])
  imports = map(
      lambda n: n['source']['raw'][1:-1],
      filter(lambda n: n['type'] == 'ImportDeclaration',
             json.loads(ast)['body']))
  return set(imports)
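# Minimal sketch of the AST shape that _extract_imports above relies on
# (inferred from the code, not from real acorn output): each ImportDeclaration
# node carries the quoted module specifier in node['source']['raw'], and
# [1:-1] strips the surrounding quotes. The AST fragment below is hand-written
# for illustration.
import json

_SKETCH_AST = json.dumps({
    'type': 'Program',
    'body': [
        {'type': 'ImportDeclaration',
         'source': {'raw': "'./foo.js'", 'value': './foo.js'}},
        {'type': 'ExpressionStatement'},
    ],
})


def _sketch_extract_imports(ast_json):
  body = json.loads(ast_json)['body']
  return set(n['source']['raw'][1:-1]
             for n in body if n['type'] == 'ImportDeclaration')


# _sketch_extract_imports(_SKETCH_AST) returns {'./foo.js'}.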
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--deps', nargs='*')
  parser.add_argument('--gen_dir', required=True)
  parser.add_argument('--path_mappings', nargs='*')
  parser.add_argument('--root_dir', required=True)
  parser.add_argument('--out_dir', required=True)
  parser.add_argument('--in_files', nargs='*')
  parser.add_argument('--definitions', nargs='*')
  args = parser.parse_args(argv)

  root_dir = os.path.relpath(args.root_dir, args.gen_dir)
  out_dir = os.path.relpath(args.out_dir, args.gen_dir)

  with open(os.path.join(_HERE_DIR, 'tsconfig_base.json')) as root_tsconfig:
    tsconfig = json.loads(root_tsconfig.read())

  tsconfig['files'] = []
  if args.in_files is not None:
    # Source .ts files are always resolved as being relative to |root_dir|.
    tsconfig['files'].extend(
        [os.path.join(root_dir, f) for f in args.in_files])

  if args.definitions is not None:
    tsconfig['files'].extend(args.definitions)

  tsconfig['compilerOptions']['rootDir'] = root_dir
  tsconfig['compilerOptions']['outDir'] = out_dir

  # Handle custom path mappings, for example chrome://resources/ URLs.
  if args.path_mappings is not None:
    path_mappings = {}
    for m in args.path_mappings:
      mapping = m.split('|')
      if mapping[0] not in path_mappings:
        path_mappings[mapping[0]] = []
      path_mappings[mapping[0]].append(os.path.join('./', mapping[1]))
    tsconfig['compilerOptions']['paths'] = path_mappings

  if args.deps is not None:
    tsconfig['references'] = [{'path': dep} for dep in args.deps]
  _write_tsconfig_json(args.gen_dir, tsconfig)

  node.RunNode([
      node_modules.PathToTypescript(), '--project',
      os.path.join(args.gen_dir, 'tsconfig.json')
  ])

  if args.in_files is not None:
    with open(os.path.join(args.gen_dir, 'tsconfig.manifest'), 'w') \
        as manifest_file:
      manifest_data = {}
      manifest_data['base_dir'] = args.out_dir
      manifest_data['files'] = \
          [re.sub(r'\.ts$', '.js', f) for f in args.in_files]
      json.dump(manifest_data, manifest_file)
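# Hedged sketch of the '--path_mappings' encoding assumed by the script above:
# each entry is a single 'prefix|replacement' string, split on '|' and
# accumulated into the tsconfig "paths" compiler option. The mapping values in
# the example are made up for illustration.
import os


def _sketch_build_path_mappings(raw_mappings):
  path_mappings = {}
  for m in raw_mappings:
    prefix, replacement = m.split('|')
    path_mappings.setdefault(prefix, []).append(
        os.path.join('./', replacement))
  return path_mappings


# Example (hypothetical values):
#   _sketch_build_path_mappings(
#       ['chrome://resources/*|generated/resources/*'])
# returns {'chrome://resources/*': ['./generated/resources/*']}.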
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--tsconfig_path', required=True)
  args = parser.parse_args(argv)

  result = node.RunNode([node_modules.PathToTypescript()] +
                        ['--project', args.tsconfig_path])
  if len(result) != 0:
    raise RuntimeError('Failed to compile Typescript: \n%s' % result)
def _optimize(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder)).replace('\\', '/')
  out_path = os.path.join(_CWD, args.out_folder).replace('\\', '/')
  manifest_out_path = _request_list_path(out_path, args.target_name)
  tmp_out_dir = tempfile.mkdtemp(dir=out_path).replace('\\', '/')

  excludes = _BASE_EXCLUDES + [
    # This file is dynamically created by C++. Should always be imported with
    # a relative path.
    'strings.m.js',
  ]
  excludes.extend(args.exclude or [])
  external_paths = args.external_paths or []

  try:
    pcb_out_paths = [os.path.join(tmp_out_dir, f) for f in args.js_out_files]
    bundled_paths = _bundle_v3(tmp_out_dir, in_path, out_path,
                               manifest_out_path, args, excludes,
                               external_paths)

    # Run polymer-css-build.
    node.RunNode([node_modules.PathToPolymerCssBuild()] +
                 ['--polymer-version', '2'] +
                 ['--no-inline-includes', '-f'] + bundled_paths +
                 ['-o'] + pcb_out_paths)

    # Pass the JS files through Terser and write the output to its final
    # destination.
    for index, js_out_file in enumerate(args.js_out_files):
      node.RunNode([node_modules.PathToTerser(),
                    os.path.join(tmp_out_dir, js_out_file),
                    '--comments', '/Copyright|license|LICENSE|\<\/?if/',
                    '--output', os.path.join(out_path, js_out_file)])
  finally:
    shutil.rmtree(tmp_out_dir)

  return manifest_out_path
def main(original_html):
  name = os_path.splitext(os_path.basename(original_html))[0]
  dst_dir = os_path.dirname(original_html)
  extracted_html = os_path.join(dst_dir, name + '-extracted.html')
  extracted_js = os_path.join(dst_dir, name + '-extracted.js')

  node.RunNode([
      node_modules.PathToCrisper(),
      '--script-in-head', 'false',
      '--source', original_html,
      '--html', extracted_html,
      '--js', extracted_js,
  ])

  shutil.move(extracted_html, original_html)
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--out_folder', required=True)
  args = parser.parse_args(argv)

  node.RunNode([
      node_modules.PathToTerser(),
      os.path.join(_HERE_PATH, 'lottie_worker.js'),
      '--ascii_only=true',
      '-b', 'beautify=false',
      '--compress',
      '--mangle', 'reserved=[\'$\',\'onmessage\',\'postMessage\']',
      '--output', os.path.join(args.out_folder, 'lottie_worker.min.js'),
  ])
def Minify(source):
  # Open two temporary files, so that Terser can read the input from one and
  # write its output to the other.
  with tempfile.NamedTemporaryFile(suffix='.js') as infile, \
       tempfile.NamedTemporaryFile(suffix='.js') as outfile:
    infile.write(source)
    infile.flush()
    node.RunNode([
        node_modules.PathToTerser(), infile.name,
        '--output', outfile.name,
    ])
    result = outfile.read()
  return result
def Run(os_path=None, args=None):
  try:
    _HERE_PATH = os_path.dirname(os_path.realpath(__file__))
    _SRC_PATH = os_path.normpath(os_path.join(_HERE_PATH, '..', '..'))
    import sys
    old_sys_path = sys.path[:]
    sys.path.append(os_path.join(_SRC_PATH, 'third_party', 'node'))
    import node, node_modules
  finally:
    sys.path = old_sys_path

  return node.RunNode([node_modules.PathToEsLint()] + args)
def __StripJsComments(filename):
  """Returns the minified contents of a JavaScript file with comments and grit
  directives removed.

  Args:
    filename: The name of the file to read.

  Returns:
    A string consisting of the minified file contents with comments and grit
    directives removed.
  """
  with open(filename) as f:
    text = f.read()
  text = re.sub('<if .*?>', '', text, flags=re.IGNORECASE)
  text = re.sub('</if>', '', text, flags=re.IGNORECASE)
  return node.RunNode([node_modules.PathToTerser(), filename])
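# Small self-contained sketch of the grit-directive stripping performed by
# __StripJsComments above, applied to an in-memory string. The JS sample in
# the example is made up; only the two regular expressions come from the
# function above.
import re


def _sketch_strip_grit_directives(text):
  text = re.sub('<if .*?>', '', text, flags=re.IGNORECASE)
  text = re.sub('</if>', '', text, flags=re.IGNORECASE)
  return text


# Example:
#   _sketch_strip_grit_directives(
#       '// <if expr="chromeos">\nvar isCros = true;\n// </if>\n')
# returns '// \nvar isCros = true;\n// \n'.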
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--gen_dir', required=True)
  parser.add_argument('--out_dir', required=True)
  parser.add_argument('--root_dir', required=True)
  parser.add_argument('--js_files', nargs='*', required=True)
  parser.add_argument('--path_mappings', nargs='*')
  args = parser.parse_args(argv)

  with open(os.path.join(_HERE_DIR, _TSCONFIG_BASE)) as root_tsconfig:
    tsconfig = json.loads(root_tsconfig.read())

  root_dir = os.path.relpath(args.root_dir, args.gen_dir)
  out_dir = os.path.relpath(args.out_dir, args.gen_dir)

  tsconfig['files'] = [os.path.join(root_dir, f) for f in args.js_files]
  tsconfig['compilerOptions']['rootDir'] = root_dir
  tsconfig['compilerOptions']['outDir'] = out_dir

  # Handle custom path mappings, for example chrome://resources/ URLs.
  if args.path_mappings is not None:
    path_mappings = {}
    for m in args.path_mappings:
      mapping = m.split('|')
      if mapping[0] not in path_mappings:
        path_mappings[mapping[0]] = []
      path_mappings[mapping[0]].append(os.path.join('./', mapping[1]))
    tsconfig['compilerOptions']['paths'] = path_mappings

  _write_tsconfig_json(args.gen_dir, tsconfig)

  if args.root_dir == args.out_dir:
    # Delete .d.ts files if they already exist, otherwise the TypeScript
    # compiler throws "error TS5055: Cannot write file ... because it would
    # overwrite input file" errors.
    for f in args.js_files:
      to_delete = os.path.join(args.out_dir, re.sub(r'\.js$', '.d.ts', f))
      if os.path.exists(to_delete):
        os.remove(to_delete)

  node.RunNode([
      node_modules.PathToTypescript(), '--project',
      os.path.join(args.gen_dir, _TSCONFIG_GEN)
  ])
def Run(os_path=None, args=None):
  _HERE_PATH = os_path.dirname(os_path.realpath(__file__))
  _SRC_PATH = os_path.normpath(os_path.join(_HERE_PATH, '..', '..'))

  import sys
  old_sys_path = sys.path[:]
  sys.path.append(os_path.join(_SRC_PATH, 'third_party', 'node'))
  try:
    import node, node_modules
  finally:
    sys.path = old_sys_path

  # Removing viewBox is not always safe, since it assumes that width/height
  # are not overridden in all usages of an SVG file. Feel free to remove
  # viewBox manually from a certain SVG if you have audited all its usages.
  default_args = ['--disable=removeViewBox']

  return node.RunNode([node_modules.PathToSvgo()] + default_args + args)
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--filelist', required=True)
  args = parser.parse_args(argv)

  files = []
  with open(args.filelist) as filelist_file:
    for line in filelist_file:
      for f in line.split():
        files.append(os.path.join(os.getcwd(), f))
  file_paths = ' '.join(files)

  result = node.RunNode([node_modules.PathToTypescript()] + [
      "--target 'es6'",
      "--module 'es6'",
      "--lib 'es6, esnext.bigint'",
      "--strict",
      file_paths,
  ])
  if len(result) != 0:
    raise RuntimeError('Failed to compile Typescript: \n%s' % result)
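# Sketch of the '--filelist' format assumed by the script above: a text file
# whose whitespace-separated tokens are paths, each resolved against the
# current working directory before being handed to the TypeScript compiler.
# The file contents in the example are illustrative.
import os


def _sketch_read_filelist(lines, cwd):
  files = []
  for line in lines:
    for f in line.split():
      files.append(os.path.join(cwd, f))
  return files


# Example: _sketch_read_filelist(['a.ts b.ts\n', 'c.ts\n'], '/work')
# returns ['/work/a.ts', '/work/b.ts', '/work/c.ts'].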
def merge_istanbul_reports(istanbul_coverage_dir, source_dir, output_file):
  """Merges all disparate istanbul reports into a single report.

  Args:
    istanbul_coverage_dir (str): Directory containing separate coverage files.
    source_dir (str): Directory containing instrumented source code.
    output_file (str): File path to output merged coverage.

  Raises:
    RuntimeError: If the underlying node command fails.
  """
  return node.RunNode([
      coverage_modules.PathToNyc(),
      'merge',
      istanbul_coverage_dir,
      output_file,
      '--cwd',
      source_dir,
  ])
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--deps', nargs='*')
  parser.add_argument('--gen_dir', required=True)
  parser.add_argument('--path_mappings', nargs='*')
  parser.add_argument('--root_dir', required=True)
  parser.add_argument('--sources', nargs='*', required=True)
  args = parser.parse_args(argv)

  root_dir = os.path.relpath(args.root_dir, args.gen_dir)
  sources = [os.path.join(root_dir, f) for f in args.sources]

  with open(os.path.join(_HERE_DIR, 'tsconfig_base.json')) as root_tsconfig:
    tsconfig = json.loads(root_tsconfig.read())

  tsconfig['files'] = sources
  tsconfig['compilerOptions']['rootDir'] = root_dir

  # Handle custom path mappings, for example chrome://resources/ URLs.
  if args.path_mappings is not None:
    path_mappings = {}
    for m in args.path_mappings:
      mapping = m.split('|')
      path_mappings[mapping[0]] = [os.path.join('./', mapping[1])]
    tsconfig['compilerOptions']['paths'] = path_mappings

  if args.deps is not None:
    tsconfig['references'] = [{'path': dep} for dep in args.deps]
  _write_tsconfig_json(args.gen_dir, tsconfig)

  node.RunNode([
      node_modules.PathToTypescript(), '--project',
      os.path.join(args.gen_dir, 'tsconfig.json')
  ])

  with open(os.path.join(args.gen_dir, 'tsconfig.manifest'), 'w') \
      as manifest_file:
    manifest_data = {}
    manifest_data['base_dir'] = args.gen_dir
    manifest_data['files'] = \
        [re.sub(r'\.ts$', '.js', f) for f in args.sources]
    json.dump(manifest_data, manifest_file)
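# Hedged illustration of the tsconfig.manifest file written above: a small
# JSON object listing the output directory and the expected .js outputs, with
# each source '.ts' extension rewritten to '.js'. File names in the example
# are hypothetical.
import json
import re


def _sketch_manifest(base_dir, sources):
  return json.dumps({
      'base_dir': base_dir,
      'files': [re.sub(r'\.ts$', '.js', f) for f in sources],
  })


# Example: _sketch_manifest('gen/ui', ['app.ts', 'util.js'])
# returns JSON equivalent to
# {"base_dir": "gen/ui", "files": ["app.js", "util.js"]}.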
def convert_raw_coverage_to_istanbul(raw_coverage_dirs, source_dir,
                                     task_output_dir):
  """Calls the node helper script convert_to_istanbul.js

  Args:
    raw_coverage_dirs (list): Directories that contain raw v8 code coverage.
    source_dir (str): Root directory containing the instrumented source.
    task_output_dir (str): Directory to which the converted istanbul coverage
        is written.

  Raises:
    RuntimeError: If the underlying node command fails.
  """
  return node.RunNode([
      os.path.join(_HERE_PATH, 'convert_to_istanbul.js'),
      '--source-dir',
      source_dir,
      '--output-dir',
      task_output_dir,
      '--raw-coverage-dirs',
      ' '.join(raw_coverage_dirs),
  ])
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--gen_dir', required=True)
  parser.add_argument('--root_dir', required=True)
  parser.add_argument('--js_files', nargs='*', required=True)
  args = parser.parse_args(argv)

  js_files = [os.path.join(args.root_dir, f) for f in args.js_files]

  node.RunNode([
      node_modules.PathToTypescript(),
      '--declaration',
      '--allowJs',
      '--emitDeclarationOnly',
      '--removeComments',
      '--noResolve',
      '--rootDir', args.root_dir,
      '--outDir', args.gen_dir,
  ] + js_files)
def _bundle_v3(tmp_out_dir, in_path, out_path, manifest_out_path, args,
               excludes):
  if not os.path.exists(tmp_out_dir):
    os.makedirs(tmp_out_dir)

  path_to_plugin = os.path.join(
      os.path.abspath(_HERE_PATH), 'tools', 'rollup_plugin.js')
  rollup_config_file = _generate_rollup_config(tmp_out_dir, path_to_plugin,
                                               in_path, args.host, excludes)

  bundled_paths = []
  for index, js_module_in_file in enumerate(args.js_module_in_files):
    js_out_file = args.js_out_files[index]
    rollup_js_out_file = '%s.rollup.js' % js_out_file[:-3]
    rollup_js_out_path = os.path.join(tmp_out_dir, rollup_js_out_file)

    node.RunNode([node_modules.PathToRollup()] + [
        '--format', 'esm',
        '--input', os.path.join(in_path, js_module_in_file),
        '--file', rollup_js_out_path,
        '--sourcemap', '--sourcemapExcludeSources',
        '--config', rollup_config_file,
        '--silent',
    ])

    # Copy the HTML file and replace the script name.
    html_file = args.html_in_files[index]
    html_out_file = args.html_out_files[index]
    with open(os.path.join(in_path, html_file), 'r') as f:
      output = f.read()
    output = output.replace(js_module_in_file, js_out_file)
    with open(os.path.join(out_path, html_out_file), 'w') as f:
      f.write(output)

    # Create the manifest file from the sourcemap generated by rollup.
    _generate_manifest_file(rollup_js_out_file, tmp_out_dir, in_path,
                            manifest_out_path)

    bundled_paths.append(rollup_js_out_path)

  return bundled_paths
def _build(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder)).replace('\\', '/')
  out_path = os.path.join(_CWD, args.out_folder).replace('\\', '/')
  request_list_path = _request_list_path(out_path, args.host_url)
  tmp_out_dir = tempfile.mkdtemp(dir=out_path).replace('\\', '/')

  excludes = _BASE_EXCLUDES + [
    # This file is dynamically created by C++. Need to specify an exclusion
    # URL for both the relative URL and chrome:// URL syntax.
    'strings.js',
    'strings.m.js',
    '%s/strings.js' % args.host_url,
    '%s/strings.m.js' % args.host_url,
  ]
  excludes.extend(args.exclude or [])
  external_paths = args.external_paths or []

  try:
    if args.js_module_in_files:
      bundled_paths = build(tmp_out_dir, in_path, out_path, request_list_path,
                            args, excludes, external_paths)

    # Pass the JS files through Terser and write the output to its final
    # destination.
    for index, js_out_file in enumerate(args.js_out_files):
      node.RunNode([node_modules.PathToTerser(),
                    os.path.join(tmp_out_dir, js_out_file),
                    '--comments', '/Copyright|license|LICENSE|\<\/?if/',
                    '--output', os.path.join(out_path, js_out_file)])
  finally:
    shutil.rmtree(tmp_out_dir)

  return request_list_path
def _optimize(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder)).replace('\\', '/')
  out_path = os.path.join(_CWD, args.out_folder).replace('\\', '/')
  manifest_out_path = _request_list_path(out_path, args.host)

  exclude_args = []
  for f in args.exclude or []:
    exclude_args.append('--exclude')
    exclude_args.append(f)

  in_html_args = []
  for f in args.html_in_files:
    in_html_args.append(f)

  tmp_out_dir = os.path.join(out_path, 'bundled').replace('\\', '/')

  node.RunNode(
      [node_modules.PathToBundler()] +
      _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args +
      [
        # This file is dynamically created by C++. Need to specify an
        # exclusion URL for both the relative URL and chrome:// URL syntax.
        '--exclude', 'strings.js',
        '--exclude', 'chrome://%s/strings.js' % args.host,

        '--manifest-out', manifest_out_path,
        '--root', in_path,
        '--redirect', '"chrome://%s/|%s"' % (args.host, in_path + '/'),
        '--out-dir', os.path.relpath(tmp_out_dir, _CWD).replace('\\', '/'),
        '--shell', args.html_in_files[0],
      ] + in_html_args)

  for index, html_file in enumerate(args.html_in_files):
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'r') as f:
      output = f.read()

      # Grit includes are not supported, use HTML imports instead.
      output = output.replace('<include src="', '<include src-disabled="')

      if args.insert_in_head:
        assert '<head>' in output
        # NOTE(dbeam): polymer-bundler eats <base> tags after processing. This
        # undoes that by adding a <base> tag to the (post-processed) generated
        # output.
        output = output.replace('<head>', '<head>' + args.insert_in_head)

    # Open file again with 'w' such that the previous contents are
    # overwritten.
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'w') as f:
      f.write(output)

  try:
    crisper_html_out_paths = []
    for index, html_in_file in enumerate(args.html_in_files):
      crisper_html_out_paths.append(
          os.path.join(tmp_out_dir, args.html_out_files[index]))
      js_out_file = args.js_out_files[index]

      # Run crisper to separate the JS from the HTML file.
      node.RunNode([node_modules.PathToCrisper(),
                    '--source', os.path.join(tmp_out_dir, html_in_file),
                    '--script-in-head', 'false',
                    '--html', crisper_html_out_paths[index],
                    '--js', os.path.join(tmp_out_dir, js_out_file)])

      if args.replace_for_html_imports_polyfill == js_out_file:
        # Replace the output file with a loader script, to wait until HTML
        # imports are ready before loading.
        with open(crisper_html_out_paths[index], 'r') as f:
          output = f.read()
          output = output.replace(
              js_out_file + '"',
              'chrome://resources/js/crisper_loader.js"' +
              ' data-script-name="' + js_out_file + '"')
          # Preload the final script, even though it will not be evaluated
          # until after crisper_loader.js executes.
          output = output.replace(
              '<head>',
              '<head><link rel="preload" href="' + js_out_file +
              '" as="script">')

        # Open file again with 'w' such that the previous contents are
        # overwritten.
        with open(crisper_html_out_paths[index], 'w') as f:
          f.write(output)

      # Pass the JS file through Uglify and write the output to its final
      # destination.
      node.RunNode([node_modules.PathToUglify(),
                    os.path.join(tmp_out_dir, js_out_file),
                    '--comments', '"/Copyright|license|LICENSE|\<\/?if/"',
                    '--output', os.path.join(out_path, js_out_file)])

    # Run polymer-css-build and write the output HTML files to their final
    # destination.
    html_out_paths = [os.path.join(out_path, f) for f in args.html_out_files]
    node.RunNode([node_modules.PathToPolymerCssBuild()] +
                 ['--polymer-version', '2'] +
                 ['--no-inline-includes', '-f'] + crisper_html_out_paths +
                 ['-o'] + html_out_paths)
  finally:
    shutil.rmtree(tmp_out_dir)

  return manifest_out_path
#!/usr/bin/env python

import os.path as os_path
import sys

_HERE_PATH = os_path.dirname(os_path.realpath(__file__))
_SRC_PATH = os_path.normpath(
    os_path.join(_HERE_PATH, '..', '..', '..', '..'))

old_sys_path = sys.path[:]
sys.path.append(os_path.join(_SRC_PATH, 'third_party', 'node'))
import node, node_modules
# Restore sys.path now that the node wrappers have been imported.
sys.path = old_sys_path

SERVER_PATH = os_path.normpath(
    os_path.join(_HERE_PATH, '..', 'node_modules', 'http-server', 'bin',
                 'http-server'))

if len(sys.argv) < 2:
  SERVER_PATH = os_path.normpath(os_path.join(_HERE_PATH, 'server.js'))
  print(SERVER_PATH)
  node.RunNode([SERVER_PATH])
elif sys.argv[1].isdigit():
  print(SERVER_PATH)
  PORT = sys.argv[1]
  node.RunNode([SERVER_PATH, '-p', PORT, '.'])
else:
  SERVER_PATH = sys.argv[1]
  print(SERVER_PATH)
  node.RunNode([SERVER_PATH])
def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--deps', nargs='*')
  parser.add_argument('--gen_dir', required=True)
  parser.add_argument('--path_mappings', nargs='*')
  parser.add_argument('--root_dir', required=True)
  parser.add_argument('--out_dir', required=True)
  parser.add_argument('--tsconfig_base')
  parser.add_argument('--in_files', nargs='*')
  parser.add_argument('--definitions', nargs='*')
  parser.add_argument('--composite', action='store_true')
  args = parser.parse_args(argv)

  root_dir = os.path.relpath(args.root_dir, args.gen_dir)
  out_dir = os.path.relpath(args.out_dir, args.gen_dir)

  TSCONFIG_BASE_PATH = os.path.join(_HERE_DIR, 'tsconfig_base.json')

  tsconfig = collections.OrderedDict()
  tsconfig['extends'] = args.tsconfig_base \
      if args.tsconfig_base is not None \
      else os.path.relpath(TSCONFIG_BASE_PATH, args.gen_dir)
  tsconfig['compilerOptions'] = collections.OrderedDict()
  tsconfig['compilerOptions']['rootDir'] = root_dir
  tsconfig['compilerOptions']['outDir'] = out_dir

  if args.composite:
    tsbuildinfo_name = 'tsconfig.tsbuildinfo'
    tsconfig['compilerOptions']['composite'] = True
    tsconfig['compilerOptions']['declaration'] = True
    tsconfig['compilerOptions']['tsBuildInfoFile'] = tsbuildinfo_name

  tsconfig['files'] = []
  if args.in_files is not None:
    # Source .ts files are always resolved as being relative to |root_dir|.
    tsconfig['files'].extend(
        [os.path.join(root_dir, f) for f in args.in_files])

  if args.definitions is not None:
    tsconfig['files'].extend(args.definitions)

  # Handle custom path mappings, for example chrome://resources/ URLs.
  if args.path_mappings is not None:
    path_mappings = collections.defaultdict(list)
    for m in args.path_mappings:
      mapping = m.split('|')
      path_mappings[mapping[0]].append(os.path.join('./', mapping[1]))
    tsconfig['compilerOptions']['paths'] = path_mappings

  if args.deps is not None:
    tsconfig['references'] = [{'path': dep} for dep in args.deps]
  _write_tsconfig_json(args.gen_dir, tsconfig)

  # Delete any obsolete .ts files (from previous builds) corresponding to .js
  # |in_files| in the |root_dir| folder, as they would cause the following
  # error to be thrown:
  #
  # "error TS5056: Cannot write file '...' because it would be overwritten by
  # multiple input files."
  #
  # This can happen when a ts_library() is migrating JS to TS one file at a
  # time and a bot is switched from building a later CL to building an earlier
  # CL.
  if args.in_files is not None:
    for f in args.in_files:
      pathname, extension = os.path.splitext(f)
      if extension == '.js':
        to_check = os.path.join(args.root_dir, pathname + '.ts')
        if os.path.exists(to_check):
          os.remove(to_check)

  node.RunNode([
      node_modules.PathToTypescript(), '--project',
      os.path.join(args.gen_dir, 'tsconfig.json')
  ])

  if args.composite:
    # `.tsbuildinfo` is generated by TypeScript for incremental compilation
    # freshness checks. Since GN already decides which ts_library() targets
    # are dirty, `.tsbuildinfo` is not needed for our purposes and is deleted.
    #
    # Moreover, `.tsbuildinfo` can cause flakily failing builds, since the TS
    # compiler checks the `.tsbuildinfo` file, sees that none of the source
    # files have changed, and does not regenerate any output, without checking
    # whether output files have been modified/deleted. This can lead to bad
    # builds (missing files or picking up obsolete generated files).
    os.remove(os.path.join(args.gen_dir, tsbuildinfo_name))

  if args.in_files is not None:
    with open(os.path.join(args.gen_dir, 'tsconfig.manifest'), 'w') \
        as manifest_file:
      manifest_data = {}
      manifest_data['base_dir'] = args.out_dir
      manifest_data['files'] = \
          [re.sub(r'\.ts$', '.js', f) for f in args.in_files]
      json.dump(manifest_data, manifest_file)
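# Hedged sketch of the tsconfig.json that the function above writes for a
# composite target. Only keys set by the code above are shown; everything
# else comes from the extended base config. The 'extends' path, directories
# and deps below are made-up examples, not real build inputs.
import collections
import json


def _sketch_composite_tsconfig(root_dir, out_dir, deps):
  tsconfig = collections.OrderedDict()
  tsconfig['extends'] = '../tsconfig_base.json'  # example relative path
  tsconfig['compilerOptions'] = collections.OrderedDict([
      ('rootDir', root_dir),
      ('outDir', out_dir),
      ('composite', True),
      ('declaration', True),
      ('tsBuildInfoFile', 'tsconfig.tsbuildinfo'),
  ])
  tsconfig['files'] = []
  tsconfig['references'] = [{'path': dep} for dep in deps]
  return json.dumps(tsconfig, indent=2)


# Example (hypothetical values):
#   print(_sketch_composite_tsconfig('../../ui/app', './',
#                                    ['../shared/tsconfig.json']))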
def _vulcanize(in_folder, args):
  in_path = os.path.normpath(os.path.join(_CWD, in_folder))
  out_path = os.path.join(_CWD, args.out_folder)
  manifest_out_path = _request_list_path(out_path, args.host)

  exclude_args = []
  for f in args.exclude or []:
    exclude_args.append('--exclude')
    exclude_args.append(f)

  in_html_args = []
  for f in args.html_in_files:
    in_html_args.append('--in-html')
    in_html_args.append(f)

  tmp_out_dir = os.path.join(out_path, 'bundled')

  node.RunNode(
      [node_modules.PathToBundler()] +
      _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args +
      [
        # This file is dynamically created by C++. Need to specify an
        # exclusion URL for both the relative URL and chrome:// URL syntax.
        '--exclude', 'strings.js',
        '--exclude', 'chrome://%s/strings.js' % args.host,

        '--manifest-out', manifest_out_path,
        '--root', in_path,
        '--redirect', '"chrome://%s/|%s"' % (args.host, in_path),
        '--out-dir', os.path.relpath(tmp_out_dir, _CWD),
        '--shell', args.html_in_files[0],
      ] + in_html_args)

  for index, html_file in enumerate(args.html_in_files):
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'r') as f:
      output = f.read()

      # Grit includes are not supported, use HTML imports instead.
      output = output.replace('<include src="', '<include src-disabled="')

      if args.insert_in_head:
        assert '<head>' in output
        # NOTE(dbeam): polymer-bundler eats <base> tags after processing. This
        # undoes that by adding a <base> tag to the (post-processed) generated
        # output.
        output = output.replace('<head>', '<head>' + args.insert_in_head)

    # Open file again with 'w' such that the previous contents are
    # overwritten.
    with open(
        os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file),
        'w') as f:
      f.write(output)

  try:
    for index, html_in_file in enumerate(args.html_in_files):
      html_out_file = args.html_out_files[index]
      js_out_file = args.js_out_files[index]

      # Run crisper to separate the JS from the HTML file.
      node.RunNode([node_modules.PathToCrisper(),
                    '--source', os.path.join(tmp_out_dir, html_in_file),
                    '--script-in-head', 'false',
                    '--html', os.path.join(tmp_out_dir, html_out_file),
                    '--js', os.path.join(tmp_out_dir, js_out_file)])

      # Copy the HTML file to its final destination.
      shutil.copy(os.path.join(tmp_out_dir, html_out_file), out_path)

      # Pass the JS file through Uglify and write the output to its final
      # destination.
      node.RunNode([node_modules.PathToUglify(),
                    os.path.join(tmp_out_dir, js_out_file),
                    '--comments', '"/Copyright|license|LICENSE|\<\/?if/"',
                    '--output', os.path.join(out_path, js_out_file)])
  finally:
    shutil.rmtree(tmp_out_dir)

  return manifest_out_path