def add(self, import_):
    try:
        raise NotImplementedError()
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def _initialize(self, fnp_config):
    """--init option handling"""
    try:
        fnp_config = fnp_config or os.path.join(self.workdir, self.FN_CONFIG)
        if os.path.isfile(fnp_config):
            print(
                "pip-stripper configuration file already exists @ %s. leaving it alone"
                % (fnp_config)
            )
            return fnp_config

        # load the template file and fill it in to seed the configuration
        fnp_template = os.path.join(self.DN, "templates/pip-stripper.yaml")
        with open(fnp_template) as fi:
            tmpl = fi.read()

        seed = fill_template(tmpl, self)
        with open(fnp_config, "w") as fo:
            fo.write(seed)

        print("pip-stripper configuration generated @ %s" % (fnp_config))
        return fnp_config
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def run(self):
    try:
        self.s_stdlib = self.mgr.s_stdlib

        for line in self.mgr.raw_imports:
            filebucket, packagename = self.parse(line)

            if packagename in self.s_stdlib:
                if self.mgr.options.verbose:
                    logger.info("%s in std lib" % (packagename))
                continue

            if packagename in self.s_untracked:
                if self.mgr.options.verbose:
                    logger.info("%s in untracked" % (packagename))
                continue

            self.packagetracker.add_import(packagename, filebucket.bucket.name)
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def __init__(self, mgr):
    try:
        self.mgr = mgr
        self.config = self.mgr.config.get(self.__class__.__name__)
        self.buckets = []
        self.di_bucket = {}
        self.s_untracked = set(self.mgr.config.get("untracked", []))

        bucketnames_tracker = self.config["buckets"]["precedence"]
        self.packagetracker = PackageBucketTracker(self, bucketnames_tracker)

        self.workdir = self.mgr.workdir
        self.patre_splitline = re.compile(self.config["pattern_splitline"])

        for key, li_pattern in self.config["regex_dirs"].items():
            bucket = DirectoryPartitionerBucket(key)
            for pattern in li_pattern:
                bucket.add(pattern)
            self.di_bucket[key] = bucket
            self.buckets.append(bucket)

        default_bucket_name = self.config["default_bucket"]
        self.default_bucket = self.di_bucket[default_bucket_name]
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def add_import(self, packagename, bucketname):
    try:
        bucketname_prev = self.di_packagename.get(packagename)
        if not bucketname_prev:
            # first time, just put it in
            self.di_packagename[packagename] = bucketname
            return

        if bucketname == bucketname_prev:
            # same as before, nothing to do
            return

        index_new = self.di_bucketindex[bucketname]
        index_old = self.di_bucketindex[bucketname_prev]
        if index_new < index_old:
            # higher precedence for new (ex: prod beats dev)
            self.di_packagename[packagename] = bucketname
            if rpdb():
                pdb.set_trace()
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
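# A minimal standalone sketch of the precedence rule above, with hypothetical
# bucket names (the real order comes from the "precedence" config list): a
# lower index wins, so a package imported from both prod and test code is
# recorded under "prod".
def _demo_bucket_precedence():
    precedence = ["prod", "dev", "tests"]  # assumed order, highest first
    di_bucketindex = {name: i for i, name in enumerate(precedence)}
    di_packagename = {}

    def add_import(packagename, bucketname):
        prev = di_packagename.get(packagename)
        if prev is None or di_bucketindex[bucketname] < di_bucketindex[prev]:
            di_packagename[packagename] = bucketname

    add_import("requests", "tests")
    add_import("requests", "prod")
    assert di_packagename["requests"] == "prod"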
def process(self):
    try:
        if self.scan:
            self.scanner = Scanner(self)
            self.scanner.run()

            self.import_classifier.run()
            for name in self.import_classifier.packagetracker.di_packagename:
                self.matcher.imp.feed(name)

            pips = self.pip_classifier = ClassifierPip(self)
            for set_ in pips.di_bucket.values():
                for name in set_:
                    self.matcher.pip.feed(name)

            pips.run(self.import_classifier.packagetracker)
            self.scanwriter.write()

        if self.options.build:
            self.builder.process()
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def __init__(self, mgr):
    try:
        self.mgr = mgr
        self.config = self.mgr.config.get(self.__class__.__name__)
        self.patre_splitline = re.compile(self.config["pattern_splitline"])

        self.di_bucket_association = self.config["buckets"]
        self.di_bucket = {}
        for k, v in self.di_bucket_association.items():
            self.di_bucket[k] = set()
            s_ = self.di_bucket_association[k] = set(v)
            try:
                s_.remove("pass")
            except (KeyError,) as e:
                pass

        self.di_freeze = {}
        self.bucket_precedence = self.config["bucket_precedence"]

        # remove entries in lower precedence positions that
        # exist higher up
        li = []
        for bucketname in self.bucket_precedence:
            li.append(self.di_bucket_association[bucketname])
        enforce_set_precedence(li)

        self.warnings = self.config["warnings"]
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
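# enforce_set_precedence() is defined elsewhere in the project. A plausible
# sketch of the behavior the comment above describes, assuming the helper
# mutates the sets in place (ordered highest precedence first) so that a
# name claimed by a higher-precedence bucket is dropped from all later ones:
def enforce_set_precedence_sketch(li_sets):
    seen = set()
    for s_ in li_sets:
        s_ -= seen   # drop names already claimed by a higher-precedence set
        seen |= s_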
def __init__(self, options):
    try:
        self.options = options
        pwd = os.getcwd()
        self.workdir = self.options.workdir or pwd

        self.config = None
        fnp_config = self.options.config
        if self.options.init:
            fnp_config = self._initialize(fnp_config)

        if not fnp_config:
            # no explicit config given: look for one in workdir, then cwd
            for dn in [self.workdir, pwd]:
                fnp_config = os.path.join(dn, self.FN_CONFIG)
                try:
                    with open(fnp_config) as fi:
                        self.config = yload(fi)
                    break
                except (IOError,) as e:
                    pass
            else:
                msg = (
                    "missing configuration file. perhaps you wanted "
                    "to use the --init option to create one?"
                )
                print(msg)
                sys.exit(1)
        else:
            with open(fnp_config) as fi:
                self.config = yload(fi)

        self.scan = not self.options.noscan

        # self.vars = dict()
        self.vars["scandir"] = self.workdir

        sectionname = "filenames"
        section = self.config["vars"][sectionname]
        for k, v in section.items():
            self.vars["%s_%s" % (sectionname, k)] = v

        self.import_classifier = ClassifierImport(self)
        self.scanwriter = ScanWriter(self)
        self.matcher = Matcher()
        self.builder = Builder(self)
    except (ValueError,) as e:
        raise
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def parse_requirement_line(self, line):
    try:
        packagename, version = self.patre_splitline.split(line)
        return packagename
    except ValueError:
        raise
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
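# Example of the split above, assuming pattern_splitline is effectively "=="
# (the real regex comes from configuration): a `pip freeze` line like
# "click==8.1.7" unpacks into ("click", "8.1.7") and only the package name
# is returned. A line that does not split into exactly two parts raises
# ValueError, which is deliberately re-raised for the caller to handle.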
def _get_fnp(self, subject):
    try:
        if subject == "templatedir":
            return os.path.join(dn_script, "templates")
        else:
            fn = self.config["vars"]["filenames"][subject]
            return os.path.join(self.workdir, fn)
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def is_match(self, filename):
    try:
        for patre in self.li_pattern:
            hit = patre.search(filename)
            if hit:
                return True
        return False
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def classify_filename(self, filename_import):
    try:
        for bucket in self.buckets:
            found = bucket.is_match(filename_import)
            if found:
                return bucket
        return self.default_bucket
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
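# Rough illustration of the regex_dirs-driven matching used by the two
# methods above, with made-up patterns (the real ones come from the
# pip-stripper.yaml configuration):
import re

def _demo_classify(filename):
    buckets = {"tests": [r"/tests/", r"test_"], "prod": [r"/lib/"]}
    for name, li_pattern in buckets.items():
        if any(re.search(pattern, filename) for pattern in li_pattern):
            return name
    return "prod"  # assumed default_bucket

assert _demo_classify("/tests/test_api.py") == "tests"
assert _demo_classify("/scripts/run.py") == "prod"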
def write(self):
    try:
        pips_buckets = self.mgr.pip_classifier.di_bucket.copy()
        li_items = list(pips_buckets.items())
        for k, v in li_items:
            pips_buckets[k] = sorted(v)
            comment_key = k[:-1] + "_"
            comment_lookup = "comment_lookup_%s" % (k)
            pips_buckets[comment_key] = comment_lookup

        pips = dict(buckets=pips_buckets, freeze=self.mgr.all_freezes)
        warnings = sorted(self.mgr.pip_classifier.warnings)

        di = self.di = dict(
            import_="comment_lookup_imports",
            imports=self.mgr.import_classifier.packagetracker.classify(),
            pips=pips,
            aliase_="comment_lookup_aliases",
            aliases=self.mgr.aliases,
            warnings=warnings,
        )

        if self.mgr.options.verbose:
            di_debug = dict(
                stdlib=sorted(self.mgr.s_stdlib),
                all_imports=sorted(self.mgr.all_imports),
                all_pips=sorted(self.mgr.all_pips),
                unknown_pip_aliases=self.mgr.matcher._debug,
            )
            di["zzz_debug"] = di_debug

        fnp_tmp = self.mgr._get_fnp("tmp")
        with open(fnp_tmp, "w", encoding="utf-8") as fo:
            dump(self.di, fo, default_flow_style=False)

        commenter = Commenter(self.fnp_yaml_comments)
        commenter.comment(fnp_tmp, self.fnp_yaml)

        print("\n\nscan written to %s" % (self.fnp_yaml))
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
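# Note on the k[:-1] + "_" trick above: for a bucket key like "prod" it adds
# a sibling key "pro_" whose value is the marker string "comment_lookup_prod".
# Because "_" sorts before lowercase letters, yaml's sorted dump places the
# marker line just above its bucket, where Commenter.comment() later replaces
# it with the looked-up comment text. The "import_" and "aliase_" keys in the
# dict built above use the same scheme for "imports" and "aliases".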
def run(self, packagetracker):
    packagename = packagename_ = None
    try:
        di_packagename2pip = self.mgr.imp2pip
        all_imports = self.mgr.all_imports
        self.s_missing_imports = all_imports.copy()
        pip2imp = self.mgr.pip2imp

        for pipname in self.mgr.all_pips:
            packagename = pip2imp.get(pipname, pipname)

            # 1. pre-assigned in the config, checked in precedence order
            found = False
            for bucketname in self.bucket_precedence:
                if pipname in self.di_bucket_association[bucketname]:
                    self.di_bucket[bucketname].add(pipname)
                    found = True
                    try:
                        self.s_missing_imports.remove(packagename)
                    except (KeyError,) as e:
                        pass
                    break
            if found:
                continue

            # 2. bucket observed for the matching import
            bucketname = packagetracker.get_package(packagename)
            if bucketname:
                self.di_bucket[bucketname].add(pipname)
                try:
                    self.s_missing_imports.remove(packagename)
                except (KeyError,) as e:
                    pass
                continue

            # 3. fallback
            self.di_bucket["unknown"].add(pipname)

        for import_ in self.s_missing_imports:
            self.warnings.append("missing import:%s" % (import_))
    except (Exception,) as e:
        if cpdb():
            ppp(dict(packagename=packagename, packagename_=packagename_))
            pdb.set_trace()
        raise
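# Walk-through of the lookup order above, with illustrative data: pip2imp
# maps a pip distribution name to its import name (e.g. "PyYAML" -> "yaml").
# For pipname "PyYAML", packagename becomes "yaml". If "PyYAML" was
# pre-assigned to a bucket in bucket_precedence it stays there; otherwise
# the bucket the packagetracker recorded for the "yaml" import wins;
# failing both, the pip lands in the "unknown" bucket.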
@classmethod
def get(cls, mgr, directorypartitioner, filename):
    try:
        # memoized per filename
        res = cls.di_filename.get(filename)
        if res:
            return res
        bucket = directorypartitioner.classify_filename(filename)
        res = cls.di_filename[filename] = cls(filename, bucket)
        return res
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def run(self):
    try:
        for taskname in self.tasknames:
            config = self.mgr.config.get("Command")["tasks"][taskname]
            command = Command(self.mgr, taskname, config)
            command.run()

        fnp_out = os.path.join(
            self.mgr.workdir,
            self.mgr.config["vars"]["filenames"]["liststdlib"],
        )
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def parse_import(self, import_: str) -> str:
    try:
        import_ = import_.strip()
        if import_.startswith("from ") or import_.startswith("import "):
            _packagename = import_.split()[1]
            packagename = _packagename.split(".")[0]
            return packagename
        raise NotImplementedError("parse_import(%s)" % (locals()))
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
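# Worked examples for parse_import(): only the token right after
# "from"/"import" matters, and only its top-level package:
#
#   parse_import("from functools import partial, wraps")  # -> "functools"
#   parse_import("import xml.etree.ElementTree")          # -> "xml"
#
# Any other statement shape raises NotImplementedError.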
def parse(self, line_in):
    """parse one raw import line of the form <filepath>:<lineno>:<import statement>

    ex: /Users/jluc/kds2/py2/bemyerp/lib/_baseutils.py:6:from functools import partial, wraps
    """
    try:
        line = line_in.replace(self.workdir, "").rstrip()
        filename, import_ = self.patre_splitline.split(line)
        packagename = self.parse_import(import_)
        pythonfile = PythonFile.get(self, self, filename)
        return pythonfile, packagename
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
def comment(self, fnp_in, fnp_out, mode="w"):
    try:
        with open(fnp_in) as fi, open(fnp_out, mode) as fo:
            for line in fi:
                hit = self.patre.search(line)
                if hit:
                    indent = " " * (len(line) - len(line.lstrip()))
                    key = hit.groups()[0]
                    comment = self.lookup(key)
                    fo.write("\n%s#%s\n" % (indent, comment))
                else:
                    fo.write(line)
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
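# Illustration of the rewrite above, assuming self.patre captures the lookup
# key from markers like "comment_lookup_prod": an input line such as
#
#     pro_: comment_lookup_prod
#
# is not copied through; instead a blank line plus a comment at the same
# indentation is written, e.g.
#
#     # <whatever self.lookup("prod") returned>
#
# All non-matching lines pass through unchanged.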
def process(self):
    try:
        with open(self.fnp_input_classifier) as fi:
            di_classifier = yload(fi)

        di_bucket = di_classifier["pips"]["buckets"]
        di_freeze = di_classifier["pips"]["freeze"]

        t_fn = self.config["t_filename_out"]

        print("\n\nbuild phase - generating requirements at:")
        for req, di in self.config["req_mapper"].items():
            # pick up naming scheme from req, if present
            t_fn = di.get("t_filename_out") or t_fn
            fn_o = sub_template(t_fn, dict(req=req))
            fnp_o = os.path.join(self.mgr.workdir, fn_o)

            requirements = []
            for bucketname in di["buckets"]:
                for pipname in di_bucket[bucketname]:
                    freeze_line = di_freeze[pipname]
                    requirements.append(freeze_line)
            requirements.sort()

            with open(fnp_o, "w") as fo:
                for line in requirements:
                    fo.write("%s\n" % (line))
                fo.write("\n")
            print("  %s" % (fnp_o))
    except (Exception,) as e:
        if cpdb():
            pdb.set_trace()
        raise
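# Sketch of the req_mapper flow above with a made-up config fragment
# (hypothetical keys and placeholder syntax; the real ones live in
# pip-stripper.yaml and sub_template()):
#
#   Builder:
#     t_filename_out: requirements.{req}.txt
#     req_mapper:
#       prod: {buckets: [prod]}
#       dev:  {buckets: [prod, dev]}
#
# With the "prod" bucket holding ["click"] and a freeze entry
# "click==8.1.7", the "prod" mapping writes requirements.prod.txt
# containing that pinned line.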