def __init__(self, path, recurse_filter=skip_vcs, visit_filter=only_cxx_files):
    """Index the codebase rooted at (or above) *path*.

    The root is located with find_root() and normalized; the tree is then
    walked once to build per-folder and per-extension file listings.

    @param path            Any path inside the codebase.
    @param recurse_filter  Callable deciding which subfolders to descend into
                           (default skips VCS folders).
    @param visit_filter    Callable deciding which files to record
                           (default keeps C/C++ files).
    """
    # Resolve the true codebase root before normalizing the folder path.
    self.root = ioutil.norm_folder(find_root(path))
    # Listings populated by _discover(): relative folder -> items,
    # and extension -> relative file paths.
    self.by_folder = {}
    self.by_ext = {}
    self._discover(recurse_filter, visit_filter)
def main(argv):
    """Command-line entry point: copy depended-on python modules to a dest folder.

    Parses options, validates required args (dest/src/dep), runs transitive
    dependency analysis via pydep.get_deps, then copies each depended-on
    module into the destination (optionally flattening the layout).
    Returns 1 on argument errors, otherwise falls through (implicit None).
    """
    parser = _define_options()
    options, args = parser.parse_args(argv)
    errors = []
    # Positional args are not accepted; everything comes in via options.
    if args:
        errors.append('Unrecognized args: ' + ' '.join(args))
    # dest, src, and dep are all mandatory.
    for x in 'dest|src|dep'.split('|'):
        err = _require(options, x)
        if err:
            errors.append(err)
    if errors:
        for err in errors:
            print(err)
        print('\nTry --help.')
        return 1
    # Promote the raw regex strings into filter callables expected by pydep.
    if options.skipdep:
        options.skipdep = pydep.RegexFilter(options.skipdep)
    if options.skipsrc:
        options.skipsrc = pydep.RegexFilter(options.skipsrc)
    sources = [ioutil.norm_folder(x) for x in options.src.split(',')]
    options.dest = ioutil.norm_folder(options.dest)
    depinfo = pydep.get_deps(sources, options.dep.split(','), options.skipsrc, options.skipdep)
    #depinfo.show()
    for module in depinfo.get_all_module_names():
        path = depinfo.path_for_module(module)
        # Default (flattened) layout: module name directly under dest.
        relpath = module + '.py'
        if not options.flatten:
            # Preserve the module's path relative to whichever source
            # folder contains it.
            for src in sources:
                if path.startswith(src):
                    relpath = path[len(src):]
                    break
        target = os.path.join(options.dest, relpath)
        folder = os.path.dirname(target)
        if options.dry_run:
            # Dry run: report what would be copied, touch nothing.
            print('%s --> %s' % (path, target))
        else:
            if not os.path.isdir(folder):
                os.makedirs(folder)
            shutil.copy2(path, target)
            # Flattened copies may need their import statements rewritten.
            if options.flatten:
                flatten(target, depinfo.get_all_module_names())
def __init__(self, path, norecurse=None, novisit=None):
    """Index the codebase rooted at *path*.

    @param path      Root folder of the codebase.
    @param norecurse Optional pattern; folders matching it are not descended
                     into. When omitted, VCS/test folders are skipped.
    @param novisit   Optional regex (case-insensitive); matching file names
                     are excluded from the listing.
    """
    # A pattern string becomes a RecurseFilter; otherwise fall back to the
    # stock VCS/test-folder filter.
    norecurse = RecurseFilter(norecurse) if norecurse else is_vcs_or_test_folder
    if novisit:
        novisit_regex = re.compile(novisit, re.IGNORECASE)
        def novisit(fname, _pat=novisit_regex):
            return bool(_pat.match(fname))
    # Traverse codebase to enumerate files that need processing.
    self.root = ioutil.norm_folder(path)
    self.by_folder = {}
    self.by_ext = {}
    self._discover(norecurse, novisit)
def remove(self):
    """Delete this assembly's folder, unless it is flagged to persist."""
    if self.persist:
        print('Leaving %s behind for analysis.' % self.path)
        return
    if self.debug:
        print('Removing %s' % self.path)
    # Make sure we're nowhere inside the runnable assembly's path when we
    # remove it: step up to the assembly's parent first.
    current = ioutil.norm_folder(os.getcwd())
    if current.startswith(self.path):
        parent = os.path.abspath(os.path.join(self.path, '..'))
        os.chdir(parent)
    self._rmdir(self.path)
def __init__(self, root, testing=False):
    """Build the localization index rooted at *root*.

    @param root     Folder containing the localized resources.
    @param testing  When True, skip the (expensive) filesystem walk so tests
                    can populate the maps directly.
    """
    self._svnInfo = None
    self.trans = {}                 # translated strings
    self.src = {}                   # source (English) strings
    self.byLocale = {}
    self.pathsByComponent = {}
    self.possibleMisses = {}
    self.formatErrors = {}
    root = ioutil.norm_folder(root)
    self.conf = metadata.Conf(root, report=False)
    # isinstance is the idiomatic type check (original used type() ==,
    # which fails for subclasses). Decode byte paths to unicode (py2).
    if isinstance(root, _STR_TYPE):
        root = unicode(root, "utf-8")
    self.root = root
    if not testing:
        fileCount, folderCount = metadata.visit(self.root, visitor=self, recurser=self, report=False)
    # NOTE(review): _connect is called unconditionally here; it appears to
    # link the data gathered above (a no-op on empty maps) — confirm it is
    # also wanted in testing mode.
    self._connect()
def __init__(self, root, testing=False):
    """Build the localization index rooted at *root*.

    @param root     Folder containing the localized resources.
    @param testing  When True, skip the (expensive) filesystem walk so tests
                    can populate the maps directly.
    """
    self._svnInfo = None
    self.trans = {}                 # translated strings
    self.src = {}                   # source (English) strings
    self.byLocale = {}
    self.pathsByComponent = {}
    self.possibleMisses = {}
    self.formatErrors = {}
    root = ioutil.norm_folder(root)
    self.conf = metadata.Conf(root, report=False)
    # isinstance is the idiomatic type check (original used type() ==,
    # which fails for subclasses). Decode byte paths to unicode (py2).
    if isinstance(root, _STR_TYPE):
        root = unicode(root, 'utf-8')
    self.root = root
    if not testing:
        fileCount, folderCount = metadata.visit(self.root, visitor=self, recurser=self, report=False)
    # NOTE(review): _connect is called unconditionally here; it appears to
    # link the data gathered above (a no-op on empty maps) — confirm it is
    # also wanted in testing mode.
    self._connect()
def _discover(self, recurse_filter, visit_filter):
    """Walk self.root, filling self.by_folder and self.by_ext.

    @param recurse_filter  Callable(relative folder path) -> bool; folders
                           for which it returns falsy are pruned from the
                           walk. May be None (recurse everywhere).
    @param visit_filter    Callable(file name) -> bool; files failing it are
                           skipped. May be None (visit all matching files).
    """
    for root, dirs, files in os.walk(self.root):
        root = ioutil.norm_folder(root)
        # Path of this folder relative to the codebase root.
        relative_root = root[len(self.root):]
        items = []
        # Iterate a copy so we can prune `dirs` in place — os.walk honors
        # in-place removal and will not descend into pruned folders.
        for d in dirs[:]:
            if recurse_filter and not recurse_filter(relative_root + d):
                dirs.remove(d)
            else:
                items.append(ioutil.norm_seps(relative_root + d, trailing=True))
        for f in files:
            # Only index C/C++-style files that also pass the visit filter.
            if C_EXTS_PAT.match(f) and ((not visit_filter) or visit_filter(f)):
                _, ext = os.path.splitext(f)
                # setdefault replaces the manual not-in/init dance.
                self.by_ext.setdefault(ext, []).append(relative_root + f)
                items.append(relative_root + f)
        self.by_folder[relative_root] = items
def __init__(self, comp, path, sb=None, debug=False):
    """Bind a runnable assembly for component *comp* at *path*.

    @param comp   Component name; must exist on disk in the sandbox.
    @param path   Folder where the assembly lives (assembled if missing).
    @param sb     Sandbox to use; defaults to sandbox.current.
    @param debug  Enable verbose output.
    @raises Exception if *comp* is not a component of the sandbox.
    """
    self.debug = debug
    if sb is None:
        sb = sandbox.current
    # `x not in y` is the idiomatic spelling (original: `not x in y`).
    if comp not in sb.get_on_disk_components():
        raise Exception('%s is not a component in %s.' % (comp, sb.get_name()))
    path = ioutil.norm_folder(path)
    self.comp = comp
    self.sb = sb
    self.path = path
    # Build the assembly on first use.
    if not os.path.isdir(path):
        self.assemble()
    self.persist = False
    self.lockdir = os.path.join(tempfile.gettempdir(), 'sadm_lock')
    if not os.path.isdir(self.lockdir):
        try:
            os.makedirs(self.lockdir)
        except OSError:
            # Deliberate best-effort: another process may have created the
            # lock folder between the isdir check and makedirs.
            pass
    self.locks = {}
def export(self, folder):
    """Write all non-English localized files for this batch under *folder*,
    then zip the result."""
    if os.path.exists(folder):
        assert (os.path.isdir(folder))
    batch_root = ioutil.norm_folder(folder) + self.getBatchName() + '/'
    print('exporting to %s' % batch_root)
    # Always start from a clean batch folder.
    if os.path.exists(batch_root):
        ioutil.nuke(batch_root)
    os.makedirs(batch_root)
    for component, path_patterns in self.pathsByComponent.items():
        locales = self.getTargetedLocales(component)
        for entry in path_patterns:
            pattern = entry[1]
            for loc in locales:
                # English is the source language; it is never exported.
                if loc == 'en':
                    continue
                relpath = pattern % loc
                fullpath = batch_root + relpath
                parent = os.path.dirname(fullpath)
                if not os.path.exists(parent):
                    os.makedirs(parent)
                self.exportFile(fullpath, relpath)
    self.zip(folder)
def export(self, folder):
    """Write all non-English localized files for this batch under *folder*,
    then zip the result."""
    if os.path.exists(folder):
        assert os.path.isdir(folder)
    batch_root = ioutil.norm_folder(folder) + self.getBatchName() + "/"
    print("exporting to %s" % batch_root)
    # Always start from a clean batch folder.
    if os.path.exists(batch_root):
        ioutil.nuke(batch_root)
    os.makedirs(batch_root)
    for component, path_patterns in self.pathsByComponent.items():
        locales = self.getTargetedLocales(component)
        for entry in path_patterns:
            pattern = entry[1]
            for loc in locales:
                # English is the source language; it is never exported.
                if loc == "en":
                    continue
                relpath = pattern % loc
                fullpath = batch_root + relpath
                parent = os.path.dirname(fullpath)
                if not os.path.exists(parent):
                    os.makedirs(parent)
                self.exportFile(fullpath, relpath)
    self.zip(folder)
def get_container(self): """ Returns a fully qualified path to the folder that contains this sandbox, and possibly others that are siblings. """ return ioutil.norm_folder(os.path.abspath(self.get_root() + ".."))
def get_container(self):
    '''Return the normalized, absolute path of the folder holding this
    sandbox (its siblings, if any, live there too).'''
    parent = os.path.abspath(self.get_root() + '..')
    return ioutil.norm_folder(parent)
def get_deps(rough_sources, rough_dependent_items, rough_source_filter=None, rough_dependent_filter=None):
    '''
    Get direct and indirect dependencies of all enumerated items on python
    modules in sources folders.

    @param rough_sources A folder, or a list of folders, that contain modules
        of interest (that might or might not be depended *on*).
    @param rough_dependent_items One or more items that have dependencies on
        the source. These items can be specific files, folders, or python
        module names within the source. They serve as the starting point of
        the dependency analysis. Can be either a sequence or a string.
    @param rough_source_filter A callable that takes a path to a python
        module. The module is a potential source module, and will be included
        in our analysis (though not necessarily in the final dependency
        graph) unless the callable returns False.
    @param rough_dependent_filter A callable that takes a path to a python
        module. The module is a candidate for analysis to decide if it
        depends on any source modules. The callable returns True if the file
        should be included in our analysis and False if not.

    Return a DependencyInfo named tuple:
        .module_info_by_module_name = dict of depended-on-module-name -->
            ModuleInfo named tuple:
                .full_path = path to depended-on-module
                .dependent_files = full paths of files dependent on the module
        .module_names_by_importer = dict of full-path-of-dependent-file -->
            depended-on module name
    Note that although DependencyInfo is a read-only, named tuple, it has a
    number of useful methods; it is not just a raw data container. See top of
    module for details.
    '''
    sources = rough_sources
    # Allow a single string/unicode as sources as well as a list.
    if hasattr(sources, 'lower'):  # string or unicode
        sources = [sources]
    if not sources:
        raise Exception('Must specify at least one folder of python source.')
    dependent_items = rough_dependent_items
    # Allow a single string/unicode as dependent_items as well as a list.
    # BUG FIX: the original tested hasattr(sources, 'lower') here (copy-paste
    # error), so a bare-string dependent_items was never wrapped in a list.
    if hasattr(dependent_items, 'lower'):  # string or unicode
        dependent_items = [dependent_items]
    if rough_dependent_filter:
        dependent_items = [di for di in dependent_items if rough_dependent_filter(di)]
    if not dependent_items:
        raise Exception('Must specify at least one folder, file, or module name to start the dependency analysis.')
    if _debug:
        print('sources = %s\n' % str(sources))
        print('dependent_items = %s\n' % str(dependent_items))
    sources = [os.path.abspath(src) for src in sources]
    # Every source must be an existing folder.
    bad = [src for src in sources if not os.path.isdir(src)]
    if bad:
        raise Exception('The following source items are not folders:\n ' + '\n '.join(bad))
    sources = [ioutil.norm_seps(src, trailing=True) for src in sources]
    possible_modules = find_all_modules(sources)
    if _debug:
        print('possible modules = %s\n' % ', '.join(sorted(possible_modules.keys())))
    if rough_source_filter:
        # list() snapshots the keys so we can delete while iterating; the
        # original sliced .keys(), which fails on python 3 (views don't slice).
        for key in list(possible_modules.keys()):
            if not rough_source_filter(possible_modules[key]):
                del possible_modules[key]
    # Partition start items into folders, files, and bare module names.
    start_folders = [di for di in dependent_items if os.path.isdir(di)]
    start_files = [di for di in dependent_items if os.path.isfile(di)]
    start_modules = [di for di in dependent_items
                     if di not in start_folders and di not in start_files]
    bad = [x for x in start_modules if x not in possible_modules]
    if bad:
        raise Exception('The following start dependencies are neither folders, files, nor python modules:\n ' + '\n '.join(bad))
    start_folders = [ioutil.norm_folder(sf) for sf in start_folders]
    start_files = [ioutil.norm_seps(os.path.abspath(sf)) for sf in start_files]
    # Guarantee uniqueness. Shouldn't be a problem unless someone was careless
    # on cmdline -- but just in case...
    modules = list(set(start_modules))
    module_names_by_importer = {}
    dependent_files_by_module_name = {}
    for m in start_modules:
        expand(modules, possible_modules[m], possible_modules, module_names_by_importer, dependent_files_by_module_name)
    # Expand start folders into the .py files they contain.
    for sf in start_folders:
        for folder, dirs, files in os.walk(sf):
            for f in files:
                if f.endswith('.py'):
                    start_files.append(ioutil.norm_seps(os.path.abspath(os.path.join(folder, f))))
    for sf in start_files:
        if (not rough_dependent_filter) or rough_dependent_filter(sf):
            # If we haven't already analyzed a particular start file because we
            # saw it while expanding dependencies of something in start_modules...
            if sf not in module_names_by_importer:
                expand(modules, sf, possible_modules, module_names_by_importer, dependent_files_by_module_name)
            # If this file is in one of the sources directories, then include it
            # as a depended-on file. Otherwise, we just treat the file as a
            # source of dependencies, but not a depended on file itself.
            folder, fname = os.path.split(sf)
            if is_in_sources(folder, sources):
                module, ext = os.path.splitext(fname)
                if module not in dependent_files_by_module_name:
                    modules.append(module)
                    dependent_files_by_module_name[module] = []
                    # '' is the pseudo-importer bucket for start files that
                    # are themselves depended-on modules.
                    if '' not in module_names_by_importer:
                        module_names_by_importer[''] = []
                    module_names_by_importer[''].append(module)
    # Convert data to output format.
    mibmn = {}
    for name in modules:
        mi = ModuleInfo(possible_modules[name], dependent_files_by_module_name.get(name, []))
        mibmn[name] = mi
    return DependencyInfo(mibmn, module_names_by_importer)
def is_in_sources(folder, sources):
    """Return True when the normalized *folder* lies under any source folder."""
    normalized = ioutil.norm_folder(folder)
    return any(normalized.startswith(src) for src in sources)