def run(self):
    need_normal_clean = True
    exclude_files = []
    remove_files = []
    remove_dirs = []

    # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
    # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
    if self.all:
        need_normal_clean = True
        for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
                remove_dirs.append(dir_path)
        for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs:
                if '_pyxbld' in dir_ or 'egg-info' in dir_:
                    remove_dirs.append(path_join(root, dir_))

        # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        for root, dirs, files in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.so', '.c'}:
                        remove_files.append(path_join(root, file_))
                    tmp_name, tmp_ext = path_splitext(file_)
                    if tmp_ext == '.pyx':
                        # Check if we have a html with the same name
                        check_html_path = path_join(root, tmp_name + '.html')
                        if isfile(check_html_path):
                            remove_files.append(check_html_path)

    # do the general clean
    if need_normal_clean:
        for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
                remove_files.append(file_)

        for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                        remove_files.append(path_join(root, file_))
            for dir_ in dirs:
                if '__pycache__' in dir_:
                    remove_dirs.append(path_join(root, dir_))

    # REMOVE ALL SELECTED
    # noinspection PyBroadException
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                rmtree(dir_)
    except Exception:
        pass
def getfilepathextlist(_pathname: str) -> list:
    """Return the path, filename, and extension as a list

    >>> getfilepathextlist('/etc/mime.types')
    ['/etc', 'mime', '.types']
    >>> getfilepathextlist('/bin/sh')
    ['/bin', 'sh', '']
    """
    return [
        path_split(_pathname)[0],
        path_splitext(path_split(_pathname)[1])[0],
        path_splitext(path_split(_pathname)[1])[1],
    ]
def do_vcs_install(manifest_in, versionfile_source_, ipy):
    # noinspection PyPep8Naming
    GITS = ['git']
    files = [manifest_in, versionfile_source_, ipy]
    try:
        me = __file__
        if me.endswith('.pyc') or me.endswith('.pyo'):
            me = path_splitext(me)[0] + '.py'
        versioneer_file = path_relpath(me)
    except NameError:
        versioneer_file = 'versioneer.py'
    files.append(versioneer_file)
    present = False
    try:
        f = open('.gitattributes', 'r')
        for line in f.readlines():
            if line.strip().startswith(versionfile_source_):
                if 'export-subst' in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        pass
    if not present:
        f = open('.gitattributes', 'a+')
        f.write('{} export-subst\n'.format(versionfile_source_))
        f.close()
        files.append('.gitattributes')
    run_command(GITS, ['add', '--'] + files)
def test_all_imports_pyx():
    """ Tests: test_all_imports_pyx: for rebuild, syntax correctness and internal imports
    """
    print('::: TEST: test_all_imports_pyx()')

    remove_files = []
    remove_dirs = []
    all_modules_path = []
    for root, dirnames, filenames in walk(ROOT_PACKAGE_PATH):
        all_modules_path.extend(glob(root + '/*.pyx'))
    for pyx_module_file_path in all_modules_path:
        module_filename = path_basename(pyx_module_file_path)
        module_filename_no_ext = path_splitext(module_filename)[0]
        cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path = build_cython_extension(
            pyx_module_file_path,
            cython_force_rebuild=True
        )
        so_loader = ExtensionFileLoader(module_filename_no_ext, cython_extension_module_path)
        so_loader.load_module(module_filename_no_ext)
        # add for cleanup
        remove_files.append(cython_module_c_file_path)
        remove_dirs.append(cython_build_dir_path)

    # Cleanup
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                rmtree(dir_)
    except Exception as err:
        raise Exception('test_all_imports_pyx', 'Could not cython_clean_up: Exception: <{}>'.format(err))
def getfilenameexttup(_pathname: str) -> tuple:
    """Return the filename (as a tuple) without path

    >>> getfilenameexttup('/etc/mime.types')
    ('mime', '.types')
    >>> getfilenameexttup('/bin/sh')
    ('sh', '')
    """
    return path_splitext(path_split(_pathname)[1])
def convert_all(filelist, header, outdir):
    """Convert a list of Excel files into .csv files in the specified outdir"""
    for xlfile in filelist:
        print(xlfile, end='')
        pre, ext = path_splitext(xlfile)
        if ext not in (".xlsx", ".xls"):
            print("invalid filetype: " + ext)
            continue
        ooconv(xlfile, header, outdir)
def getfilename(_pathname: str) -> str:
    """Return the filename without path or extension

    The path and extension are removed from the given string

    >>> getfilename('/etc/mime.types')
    'mime'
    >>> getfilename('/bin/sh')
    'sh'
    """
    return path_splitext(path_split(_pathname)[1])[0]
def build_cython_extension(py_or_pyx_file_path, cython_force_rebuild=True):
    """ Build a cython extension from a `.py` or `.pyx` file

    - the build is done in a sub-folder named `_pyxbld` in the directory of `py_or_pyx_file_path`

    :param py_or_pyx_file_path: (str) path to a `.py` or `.pyx` file
    :param cython_force_rebuild: (bool) If True the cython extension is rebuilt even if it was already built
    :return: (tuple) cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
    """
    module_dir = path_dirname(py_or_pyx_file_path)
    module__cython_name = path_splitext(path_basename(py_or_pyx_file_path))[0]
    cython_module_c_file_path = path_join(module_dir, module__cython_name + '.c')
    cython_build_dir_path = path_join(module_dir, '_pyxbld')

    args = ['--quiet', 'build_ext', '--build-lib', module_dir]
    if cython_force_rebuild:
        args.append('--force')
    dist = Distribution({'script_name': None, 'script_args': args})
    dist.ext_modules = [Extension(name=module__cython_name, sources=[py_or_pyx_file_path])]
    dist.cmdclass = {'build_ext': cython_build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cython_build_dir_path

    try:
        dist.parse_command_line()
    except DistutilsArgError as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            '  DistutilsArgError: <{}>'.format(err)
        ])

    try:
        obj_build_ext = dist.get_command_obj('build_ext')
        dist.run_commands()
        cython_extension_module_path = obj_build_ext.get_outputs()[0]
        if path_dirname(py_or_pyx_file_path) != module_dir:
            raise Err('utils.build_cython_extension', [
                'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
                '  <module_dir> differs from final <cython_module_dir>',
                '  module_dir: <{}>'.format(module_dir),
                '  cython_module_dir: <{}>'.format(path_dirname(py_or_pyx_file_path))
            ])
    except Exception as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            '  Exception: <{}>'.format(err)
        ])

    return cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
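A minimal usage sketch for the helper above (the `.pyx` path and module name are hypothetical; `ExtensionFileLoader` comes from `importlib.machinery`, as the test helpers elsewhere in this collection also assume):

from importlib.machinery import ExtensionFileLoader

# Hypothetical module path -- adjust to a real `.py`/`.pyx` file in your package.
ext_path, c_file_path, build_dir = build_cython_extension(
    'my_package/fast_ops.pyx', cython_force_rebuild=True
)
# Import the freshly built extension under its bare module name.
fast_ops = ExtensionFileLoader('fast_ops', ext_path).load_module('fast_ops')
# `c_file_path` (generated C source) and `build_dir` (`_pyxbld`) can be removed afterwards.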
def _get_cache_name(filename):
    """
    @param filename: filename to get cache name for
    @type filename: str
    @return: the name of the corresponding pickle cache
    @rtype: str
    """
    _, tail = path_split(filename)
    name, _ = path_splitext(tail)
    cache = '\\'.join((pickle_cache, name + '.pickle'))
    return cache
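A quick worked example, assuming `pickle_cache` is a module-level cache directory string such as 'cache' (it is not shown in the snippet above):

# Assuming pickle_cache = 'cache':
print(_get_cache_name('data/measurements.csv'))  # -> cache\measurements.pickle
print(_get_cache_name('/tmp/run.log'))           # -> cache\run.pickle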
def test_all_imports_py():
    """ Tests: test_all_imports_py: for syntax correctness and internal imports
    """
    print('::: TEST: test_all_imports_py()')

    all_modules_path = []
    for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
        all_modules_path.extend(glob(root + '/*.py'))
    for py_module_file_path in all_modules_path:
        module_filename = path_basename(py_module_file_path)
        module_filename_no_ext = path_splitext(module_filename)[0]
        py_loader = SourceFileLoader(module_filename_no_ext, py_module_file_path)
        py_loader.load_module(module_filename_no_ext)
def clean(env):
    try:
        rmdir(env['APP_STATICMAX'])
        rm(env['APP_MAPPING_TABLE'])
        # Aggressive root level cleaning
        for f in os.listdir(env['APP_ROOT']):
            (filename, ext) = path_splitext(f)
            # Also cleans previous SDK content e.g. .jsinc
            if ext in ['.jsinc', '.tzjs', '.html']:
                rm(f)
            if ext == '.js':
                # Only remove canvas js files, might have js in root folder
                (appname, target) = path_splitext(filename)
                if target == '.canvas':
                    rm(f)
                else:
                    warning('[Warning] target %s unknown, ignoring. Not cleaned: %s' % (target, f))
    except OSError as e:
        error('Failed to remove: %s' % str(e))
        return False
    return True
def import_py_vars(pyfile):
    """
    @param pyfile: pyfile to import
    @type pyfile: str
    @return: dict
    @rtype: dict[str, RecipeVariable]
    """
    from os.path import split as path_split, splitext as path_splitext
    from sys import path as sys_path
    from importlib import import_module

    py_vars_dir, py_vars_name = path_split(pyfile)
    py_vars_name, ext = path_splitext(py_vars_name)
    sys_path.append(py_vars_dir)
    var_module = import_module(py_vars_name)
    return {k: v for k, v in var_module.__dict__.items() if not k.startswith("_")}
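A small usage sketch (the file name and its contents are hypothetical; any module-level name not starting with an underscore is returned):

# Given /path/to/recipe_vars.py containing, e.g.:
#     oven_temp = 180
#     _internal_note = 'not exported'
# the call returns {'oven_temp': 180}; underscore-prefixed names (including
# dunder attributes such as __name__) are filtered out by the startswith check.
variables = import_py_vars('/path/to/recipe_vars.py')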
def build_asset(src, dest, env, options):
    (_, ext) = path_splitext(src)
    try:
        tool = env['TOOLS'].get(ext, None)
        if tool:
            tool.build(env, options, src, dest)
        elif ext in env['COPY_EXTENSIONS']:
            copyfile(src, dest)
        else:
            warning('No tool for: %s (skipping)' % src)
            return False
    except CalledProcessError as e:
        error('Command failed: %s' % e)
        return False
    else:
        return True
def no_cythonize(extensions):
    """ Helper: based on https://github.com/ryanvolz/radarmodel (Copyright (c) 2014, Ryan Volz) """
    dupextensions = deepcopy(extensions)
    for extension in dupextensions:
        sources = []
        for sfile in extension.sources:
            path, ext = path_splitext(sfile)
            if ext in ('.pyx', '.py'):
                if extension.language == 'c++':
                    ext = '.cpp'
                else:
                    ext = '.c'
                sfile = path + ext
            sources.append(sfile)
        extension.sources[:] = sources
    return dupextensions
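A short sketch of what the helper does to an extension's source list (the module name and paths are placeholders):

from setuptools import Extension

exts = [Extension('pkg.fast_math', sources=['pkg/fast_math.pyx'])]
plain = no_cythonize(exts)
print(plain[0].sources)  # ['pkg/fast_math.c'] -- .pyx swapped for the pre-generated .c file
print(exts[0].sources)   # ['pkg/fast_math.pyx'] -- the original extension list is left untouched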
def _path_splitter(s, _d_match=re.compile(r'\.\d').match):
    """Split a string into its path components. Assumes a string is a path."""
    # If a PathLib Object, use its functionality to perform the split.
    if has_pathlib and isinstance(s, PurePath):
        s = py23_str(s)
    path_parts = deque()
    p_appendleft = path_parts.appendleft
    # Continue splitting the path from the back until we have reached
    # '..' or '.', or until there is nothing left to split.
    path_location = s
    while path_location != os_curdir and path_location != os_pardir:
        parent_path = path_location
        path_location, child_path = path_split(parent_path)
        if path_location == parent_path:
            break
        p_appendleft(child_path)

    # This last append is the base path.
    # Only append if the string is non-empty.
    if path_location:
        p_appendleft(path_location)

    # Now, split off the file extensions using a similar method to above.
    # Continue splitting off file extensions until we reach a decimal number
    # or there are no more extensions.
    # We are not using built-in functionality of PathLib here because of
    # the recursive splitting up to a decimal.
    base = path_parts.pop()
    base_parts = deque()
    b_appendleft = base_parts.appendleft
    while True:
        front = base
        base, ext = path_splitext(front)
        if _d_match(ext) or not ext:
            # Reset base to before the split if the split is invalid.
            base = front
            break
        b_appendleft(ext)
    b_appendleft(base)

    # Return the split parent paths and then the split basename.
    return ichain(path_parts, base_parts)
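For illustration, roughly what the split yields for a plain string on a POSIX-style path (the exact element types depend on the py23_str shim in use):

print(list(_path_splitter('/usr/share/doc/archive.tar.gz')))
# ['/', 'usr', 'share', 'doc', 'archive', '.tar', '.gz']
print(list(_path_splitter('backup.2')))
# ['backup.2'] -- '.2' looks like a decimal suffix, so it is not split off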
def test_all_py_to_cython_compiled():
    """ Tests: test_all_py_to_cython_compiled: for syntax correctness and internal imports:
        all .py files compiled with cython: except '__init__'
    """
    print('::: TEST: test_all_py_to_cython_compiled()')

    remove_files = []
    remove_dirs = []
    all_modules_path = []
    for root, dirnames, filenames in walk(ROOT_PACKAGE_PATH):
        all_modules_path.extend(glob(root + '/*.py'))
    for py_module_file_path in all_modules_path:
        module_filename = path_basename(py_module_file_path)
        module_filename_no_ext = path_splitext(module_filename)[0]
        if '__init__' in module_filename:
            continue
        cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path = build_cython_extension(
            py_module_file_path,
            cython_force_rebuild=True
        )
        # noinspection PyUnusedLocal
        so_loader = ExtensionFileLoader(module_filename_no_ext, cython_extension_module_path)
        # sometimes (if an extension module was built previously) the loading does not work with 'nose tests'
        # so_loader.load_module(module_filename_no_ext)
        # add for cleanup: including the .so extension file
        remove_files.extend([cython_module_c_file_path, cython_extension_module_path])
        remove_dirs.append(cython_build_dir_path)

    # Cleanup
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                rmtree(dir_)
    except Exception as err:
        raise Exception('test_all_py_to_cython_compiled', 'Could not cython_clean_up: Exception: <{}>'.format(err))
def run(self):
    need_normal_clean = True
    exclude_files = [
        'lconf_classes.c',
        'structure_classes.c',
        'main_code.c',
        'transform.c',
        'utilities.c',
        'validator.c',
        '_version.c',
    ]
    remove_files = []
    remove_dirs = []

    # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
    # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
    if self.all:
        need_normal_clean = True
        for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
                remove_dirs.append(dir_path)
        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs_w:
                if '_pyxbld' in dir_ or 'egg-info' in dir_:
                    remove_dirs.append(path_join(root, dir_))

        # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.so', '.c'}:
                        remove_files.append(path_join(root, file_))
                    tmp_name, tmp_ext = path_splitext(file_)
                    if tmp_ext == '.pyx':
                        # Check if we have a html with the same name
                        check_html_path = path_join(root, tmp_name + '.html')
                        if path_isfile(check_html_path):
                            remove_files.append(check_html_path)

    # remove also: all files defined in exclude_files
    if self.excludefiles:
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ in exclude_files:
                    remove_files.append(path_join(root, file_))

    # do the general clean
    if need_normal_clean:
        for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
                remove_files.append(file_)

        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                        remove_files.append(path_join(root, file_))
            for dir_ in dirs_w:
                if '__pycache__' in dir_:
                    remove_dirs.append(path_join(root, dir_))

    # REMOVE ALL SELECTED
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                shutil_rmtree(dir_)
    except Exception:
        pass
def run(self):
    need_normal_clean = True
    exclude_files = [
        'benchmark_it.c',
        'disassemble_it.c',
        'line_memory_profile_it.c',
        'profile_it.c',
        'speed_it.c',
        'utils.c',
        '_version.c',
    ]
    remove_files = []
    remove_dirs = []

    # remove ONLY: `build/sphinx`
    if self.onlydocs:
        need_normal_clean = False
        dir_path = path_join(ROOT_PACKAGE_PATH, 'build', 'sphinx')
        if path_exists(dir_path):
            remove_dirs.append(dir_path)

    # remove also: DIRS: `build, dist, cover, *.egg-info, *._pyxbld`
    # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
    if self.all:
        need_normal_clean = True
        for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
                remove_dirs.append(dir_path)
        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs_w:
                if '_pyxbld' in dir_ or 'egg-info' in dir_:
                    remove_dirs.append(path_join(root, dir_))

        # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.so', '.c'}:
                        remove_files.append(path_join(root, file_))
                    tmp_name, tmp_ext = path_splitext(file_)
                    if tmp_ext == '.pyx':
                        # Check if we have a html with the same name
                        check_html_path = path_join(root, tmp_name + '.html')
                        if path_isfile(check_html_path):
                            remove_files.append(check_html_path)

    # remove also: all files defined in exclude_files
    if self.excludefiles:
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ in exclude_files:
                    remove_files.append(path_join(root, file_))

    # do the general clean
    if need_normal_clean:
        for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
                remove_files.append(file_)

        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                        remove_files.append(path_join(root, file_))
            for dir_ in dirs_w:
                if '__pycache__' in dir_:
                    remove_dirs.append(path_join(root, dir_))

    # REMOVE ALL SELECTED
    # noinspection PyBroadException
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                shutil_rmtree(dir_)
    except Exception:
        pass
def build_code(src, dst, env, options):
    input = path_basename(src)
    appname, _ = path_splitext(input)
    code = '%s.canvas.js' % appname
    tzjs = '%s.tzjs' % appname
    dependency_file = '%s.deps' % src

    templates_dirs = [env['APP_ROOT'], env['APP_TEMPLATES'], env['APP_JSLIB']]
    for t in templates_dirs:
        template = path_join(t, '%s.html' % appname)
        if path_exists(template):
            template = path_basename(template)
            break
    else:
        template = None

    if dst.endswith('.canvas.debug.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='canvas-debug',
                              templates=templates_dirs, template=template)
    elif dst.endswith('.canvas.release.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='canvas', code=code,
                              templates=templates_dirs, template=template)
    elif dst.endswith('.canvas.default.debug.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='canvas-debug',
                              templates=templates_dirs)
    elif dst.endswith('.canvas.default.release.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='canvas', code=code,
                              templates=templates_dirs)
    elif dst.endswith('.canvas.js'):
        if options.closure:
            env['MAKETZJS'].build(env, options, input=input, output=dst, mode='canvas',
                                  MF=dependency_file, templates=templates_dirs)
            google_compile(dependency_file, dst, options.closure)
        else:
            env['MAKETZJS'].build(env, options, input=input, output=dst, mode='canvas',
                                  templates=templates_dirs)
    elif dst.endswith('.debug.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='plugin-debug',
                              templates=templates_dirs, template=template)
    elif dst.endswith('.release.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='plugin', code=tzjs,
                              templates=templates_dirs, template=template)
    elif dst.endswith('.default.debug.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='plugin-debug',
                              templates=templates_dirs)
    elif dst.endswith('.default.release.html'):
        env['MAKEHTML'].build(env, options, input=input, output=dst, mode='plugin', code=tzjs,
                              templates=templates_dirs)
    elif dst.endswith('.tzjs'):
        if env['SDK_VERSION'] < StrictVersion('0.19.0'):
            run_js2tzjs({'inputs': [src], 'outputs': [dst], 'env': env, 'options': options})
        else:
            env['MAKETZJS'].build(env, options, input=input, output=dst, mode='plugin',
                                  yui=options.yui, templates=templates_dirs)
    elif dst.endswith('.jsinc'):
        run_js2tzjs_jsinc({'inputs': [src], 'outputs': [dst], 'env': env, 'options': options})
    else:
        return False

    return True
def main():
    parser = OptionParser()
    parser.add_option('--clean', action='store_true', default=False, help="Clean build output")
    parser.add_option('--assets', action='store_true', default=False, help="Build assets")
    parser.add_option('--code', action='store_true', default=False, help="Build code")
    parser.add_option('--all', action='store_true', default=False, help="Build everything")
    parser.add_option('--find-non-ascii', action='store_true', default=False,
                      help="Searches for non ascii characters in the scripts")
    parser.add_option('--template', dest='templateName', help="Specify the template to build")
    parser.add_option('--closure', default=None, help="Path to Closure")
    parser.add_option('--yui', default=None, help="Path to YUI")
    parser.add_option('--threads', default=4, help="Number of threads to use")
    parser.add_option('--verbose', action='store_true', help="Prints additional information about the build process")

    (options, args) = parser.parse_args()

    if options.verbose:
        logging_config(level='INFO', format='[%(levelname)s %(module)s@%(lineno)d] %(message)s')
    else:
        logging_config(format='[%(levelname)s] %(message)s')

    env = {}

    _log_stage('CONFIGURING')
    if not configure(env, options):
        error('Failed to configure build')
        return 1

    if options.find_non_ascii:
        _log_stage('NON-ASCII CHARACTERS')
        count = find_non_ascii(env['APP_SCRIPTS'], env)
        if count > 0:
            error("Found non-ascii character in script")
        else:
            info("Only ASCII found!")
        return count

    if options.clean:
        _log_stage('CLEANING')
        success = clean(env)
        if not success:
            error('Failed to clean build')
            return 1
        else:
            info('Cleaned')

    if options.assets or options.all:
        _log_stage("ASSET BUILD (may be slow - only build code with --code)")

        # Mapping table
        mkdir('staticmax')
        (mapping_table_obj, build_deps) = gen_mapping('assets', 'staticmax',
                                                      ['.pdf', '.mtl', '.otf', '.txt', '.cgh', '.mb'])
        debug('assets:src:%s' % build_deps)
        urn_mapping = mapping_table_obj['urnmapping']

        def _write_mapping_table():
            print '%i assets -> %s' % (len(urn_mapping), env['MAPPING_TABLE'])
            with open(env['APP_MAPPING_TABLE'], 'w') as f:
                json_dump(mapping_table_obj, f, separators=(',', ':'))

        # Write mapping table
        _write_mapping_table()

        longest = len(max(build_deps, key=len)) + 2

        def _log(src, dest, skipping=False):
            msg = '(skipping) ' if skipping else ''
            print '{0:-<{longest}}> {2}{1}'.format(src + ' ', dest, msg, longest=longest)

        metrics = dict(built=0, skipped=0, failed=0)

        def build(src):
            dest = build_deps[src]
            if path_exists(dest):
                _log(src, dest, True)
                metrics['skipped'] += 1
            else:
                _log(src, dest)
                success = build_asset(src, dest, env, options)
                if not success:
                    # Bit of a hack to remove the failed asset from the mapping table.
                    asset = src[len('assets/'):]
                    del urn_mapping[asset]
                    info('Removing asset from mapping table: %s' % asset)
                    metrics['failed'] += 1
                else:
                    metrics['built'] += 1

        class Builder(Thread):
            def __init__(self, assets):
                Thread.__init__(self)
                self.assets = assets

            def run(self):
                map(build, self.assets)

        assets = build_deps.keys()
        num_threads = int(options.threads)
        threads = [Builder(assets[i::num_threads]) for i in range(num_threads)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        else:
            del threads

        # Write mapping table
        _write_mapping_table()

        _log_stage("BUILT: %i - SKIPPED: %i - FAILED: %i" %
                   (metrics['built'], metrics['skipped'], metrics['failed']))

    if options.code or options.all:
        _log_stage('CODE BUILD')

        if options.templateName:
            code_files = ['%s.js' % path_join('templates', options.templateName)]
        else:
            code_files = glob('templates/*.js')
        debug("code:src:%s" % code_files)

        for src in code_files:
            (code_base, code_ext) = path_splitext(path_split(src)[1])
            code_dests = [code_base + ".canvas.debug.html",
                          code_base + ".canvas.release.html",
                          code_base + ".canvas.default.debug.html",
                          code_base + ".canvas.default.release.html",
                          code_base + ".canvas.js",
                          code_base + ".debug.html",
                          code_base + ".release.html",
                          code_base + ".default.debug.html",
                          code_base + ".default.release.html",
                          code_base + ".tzjs"]
            debug("code:dest:%s" % code_dests)

            for dest in code_dests:
                print '%s -> %s' % (src, dest)
                success = build_code(src, dest, env, options)
                if not success:
                    warning('failed')

    _log_stage('END')
    return 0
def get_ext_filename(self, ext_name):
    return path_splitext(build_ext.get_ext_filename(self, ext_name))[0]
def path_splitter(s, _d_match=re.compile(r"\.\d").match):
    """
    Split a string into its path components.

    Assumes a string is a path or is path-like.

    Parameters
    ----------
    s : str | pathlib.Path

    Returns
    -------
    split : tuple
        The path split by directory components and extensions.

    Examples
    --------
    >>> tuple(path_splitter("this/thing.ext"))
    ({u}'this', {u}'thing', {u}'.ext')

    """
    if has_pathlib and isinstance(s, PurePath):
        s = py23_str(s)
    path_parts = deque()
    p_appendleft = path_parts.appendleft
    # Continue splitting the path from the back until we have reached
    # '..' or '.', or until there is nothing left to split.
    path_location = s
    while path_location != os_curdir and path_location != os_pardir:
        parent_path = path_location
        path_location, child_path = path_split(parent_path)
        if path_location == parent_path:
            break
        p_appendleft(child_path)

    # This last append is the base path.
    # Only append if the string is non-empty.
    # Make sure the proper path separator for this OS is used
    # no matter what was actually given.
    if path_location:
        p_appendleft(py23_str(os_sep))

    # Now, split off the file extensions using a similar method to above.
    # Continue splitting off file extensions until we reach a decimal number
    # or there are no more extensions.
    # We are not using built-in functionality of PathLib here because of
    # the recursive splitting up to a decimal.
    base = path_parts.pop()
    base_parts = deque()
    b_appendleft = base_parts.appendleft
    while True:
        front = base
        base, ext = path_splitext(front)
        if _d_match(ext) or not ext:
            # Reset base to before the split if the split is invalid.
            base = front
            break
        b_appendleft(ext)
    b_appendleft(base)

    # Return the split parent paths and then the split basename.
    return ichain(path_parts, base_parts)
def run(self):
    need_normal_clean = True
    exclude_files = [
        'lconf_classes.c',
        'lconf_structure_classes.c',
        'main_code.c',
        'transform.c',
        'utils.c',
        'validator.c',
        '_version.c',
    ]
    remove_files = []
    remove_dirs = []

    # remove ONLY: `build/sphinx`
    if self.onlydocs:
        need_normal_clean = False
        dir_path = path_join(ROOT_PACKAGE_PATH, 'build', 'sphinx')
        if path_exists(dir_path):
            remove_dirs.append(dir_path)

    # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
    # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
    if self.all:
        need_normal_clean = True
        for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
                remove_dirs.append(dir_path)
        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs_w:
                if '_pyxbld' in dir_ or 'egg-info' in dir_:
                    remove_dirs.append(path_join(root, dir_))

        # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.so', '.c'}:
                        remove_files.append(path_join(root, file_))
                    tmp_name, tmp_ext = path_splitext(file_)
                    if tmp_ext == '.pyx':
                        # Check if we have a html with the same name
                        check_html_path = path_join(root, tmp_name + '.html')
                        if path_isfile(check_html_path):
                            remove_files.append(check_html_path)

    # remove also: all files defined in exclude_files
    if self.excludefiles:
        for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
                if file_ in exclude_files:
                    remove_files.append(path_join(root, file_))

    # do the general clean
    if need_normal_clean:
        for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
                remove_files.append(file_)

        for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files_w:
                if file_ not in exclude_files:
                    if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                        remove_files.append(path_join(root, file_))
            for dir_ in dirs_w:
                if '__pycache__' in dir_:
                    remove_dirs.append(path_join(root, dir_))

    # REMOVE ALL SELECTED
    # noinspection PyBroadException
    try:
        for file_ in remove_files:
            if path_exists(file_):
                os_remove(file_)
        for dir_ in remove_dirs:
            if path_exists(dir_):
                shutil_rmtree(dir_)
    except Exception:
        pass