def _rel_path(path1, path2):
    """Return the relative path from path2 to path1."""
    uncommon1, uncommon2 = _common_base(path1, path2)
    path = ['..'] * (len(uncommon2) - 1)
    path.extend(uncommon1)
    return os.sep.join(path)
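# Hedged sketch (an assumption, not taken from the original module) of the
# _common_base() helper that _rel_path() relies on: split both paths and
# strip their shared leading components, returning the leftover parts.
import os

def _common_base(path1, path2):
    parts1 = path1.split(os.sep)
    parts2 = path2.split(os.sep)
    while parts1 and parts2 and parts1[0] == parts2[0]:
        parts1.pop(0)
        parts2.pop(0)
    return parts1, parts2

# With that helper, _rel_path('/a/b/c.txt', '/a/d/e.txt') -> '../b/c.txt':
# one '..' to leave the directory of e.txt, then down into b/.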
def transfer_path(self, with_cwd=False):
    if with_cwd:
        path = ['']
        path.extend(sys.path)
    else:
        path = sys.path
    self.runcommand(
        'if 1:\n import sys as _sys\n _sys.path = %r\n del _sys\n \n' % (path,))
def default_lib_path(data_dir: str, target: int, pyversion: int,
                     python_path: bool) -> List[str]:
    """Return default standard library search paths."""
    # IDEA: Make this more portable.
    path = []  # type: List[str]

    # Add MYPYPATH environment variable to library path, if defined.
    path_env = os.getenv('MYPYPATH')
    if path_env is not None:
        path[:0] = path_env.split(os.pathsep)

    # Add library stubs directory. By convention, they are stored in the
    # stubs/x.y directory of the mypy installation.
    version_dir = '3.2'
    if pyversion < 3:
        version_dir = '2.7'
    path.append(os.path.join(data_dir, 'stubs', version_dir))
    path.append(os.path.join(data_dir, 'stubs-auto', version_dir))

    if sys.version_info.major == 3:
        # Add additional stub directories.
        versions = ['3.3', '3.4', '3.5', '3.6']
        for v in versions:
            stubdir = os.path.join(data_dir, 'stubs', v)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    # Add fallback path that can be used if we have a broken installation.
    if sys.platform != 'win32':
        path.append('/usr/local/lib/mypy')

    # Contents of Python's sys.path go last, to prefer the stubs
    if python_path:
        path.extend(sys.path)

    return path
def pythonpath(dir, nodup=False, debug=False):
    new_dir = satkdir(dir, debug=debug)
    if nodup:
        for d in sys.path:
            if __debug__:
                if debug:
                    print("satkutil.py - pythonpath() - PYTHONPATH dir checked "
                          "for being duplicate of '%s': '%s'" % (new_dir, d))
            # sys.path strings and strings generated here are not the same string
            # objects, so have to use string comparison
            if d == new_dir:
                # Duplicate found, do not add the directory a second time
                if __debug__:
                    if debug:
                        print("satkutil.py - pythonpath() - directory already in "
                              "PYTHONPATH, ignoring: %s" % new_dir)
                return
    # Add the new directory to PYTHONPATH
    path = [new_dir, ]
    if __debug__:
        if debug:
            print("satkutil.py - pythonpath() - adding path: '%s'" % path[0])
    path.extend(sys.path)
    sys.path = path
    if __debug__:
        if debug:
            print("satkutil.py - pythonpath() - sys.path=%s" % sys.path)
def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
                     python_path: bool) -> List[str]:
    """Return default standard library search paths."""
    # IDEA: Make this more portable.
    path = []  # type: List[str]

    # Add MYPYPATH environment variable to library path, if defined.
    path.extend(mypy_path())

    auto = os.path.join(data_dir, 'stubs-auto')
    if os.path.isdir(auto):
        data_dir = auto

    # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
    # is that a module added with 3.4 will still be present in Python 3.5.
    versions = ["%d.%d" % (pyversion[0], minor)
                for minor in reversed(range(pyversion[1] + 1))]
    # E.g. for Python 3.5, try 2and3/, then 3/, then 3.5/, then 3.4/, 3.3/, ...
    for v in ['2and3', str(pyversion[0])] + versions:
        for lib_type in ['stdlib', 'builtins', 'third_party']:
            stubdir = os.path.join(data_dir, 'typeshed', lib_type, v)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    # Add fallback path that can be used if we have a broken installation.
    if sys.platform != 'win32':
        path.append('/usr/local/lib/mypy')

    # Contents of Python's sys.path go last, to prefer the stubs
    # TODO: To more closely model what Python actually does, builtins should
    #       go first, then sys.path, then anything in stdlib and third_party.
    if python_path:
        path.extend(sys.path)

    return path
def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
                     python_path: bool) -> List[str]:
    """Return default standard library search paths."""
    # IDEA: Make this more portable.
    path = []  # type: List[str]

    auto = os.path.join(data_dir, 'stubs-auto')
    if os.path.isdir(auto):
        data_dir = auto

    # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
    # is that a module added with 3.4 will still be present in Python 3.5.
    versions = ["%d.%d" % (pyversion[0], minor)
                for minor in reversed(range(pyversion[1] + 1))]
    # E.g. for Python 3.5, try 2and3/, then 3/, then 3.5/, then 3.4/, 3.3/, ...
    for v in ['2and3', str(pyversion[0])] + versions:
        for lib_type in ['stdlib', 'builtins', 'third_party']:
            stubdir = os.path.join(data_dir, 'typeshed', lib_type, v)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    # Add fallback path that can be used if we have a broken installation.
    if sys.platform != 'win32':
        path.append('/usr/local/lib/mypy')

    # Contents of Python's sys.path go last, to prefer the stubs
    # TODO: To more closely model what Python actually does, builtins should
    #       go first, then sys.path, then anything in stdlib and third_party.
    if python_path:
        path.extend(sys.path)

    return path
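# Isolated illustration of the version fan-out computed above: for Python 3.5
# the typeshed directories are searched from the shared 2and3/ tree, through
# the bare major version, then by decreasing minor version.
pyversion = (3, 5)
versions = ["%d.%d" % (pyversion[0], minor)
            for minor in reversed(range(pyversion[1] + 1))]
print(['2and3', str(pyversion[0])] + versions)
# -> ['2and3', '3', '3.5', '3.4', '3.3', '3.2', '3.1', '3.0']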
def pythonpath(dir, debug=False):
    path = [satkdir(dir, debug=debug), ]
    if debug:
        print("satkutil.py - pythonpath() - adding path: '%s'" % path[0])
    path.extend(sys.path)
    sys.path = path
    if debug:
        print("satkutil.py - pythonpath() - sys.path=%s" % sys.path)
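# Hypothetical usage sketch: the net effect of pythonpath() is to push one
# resolved directory onto the front of sys.path so its modules win the import
# search. satkdir() is assumed to map a repo-relative name to an absolute path.
import sys

def prepend_to_sys_path(directory):
    # Same pattern as pythonpath(), minus the satkdir()/debug plumbing.
    sys.path = [directory] + sys.path

prepend_to_sys_path("/opt/satk/tools")  # example directory, not from the source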
def onShuttlingRoute(self):
    self.synchronize()
    if self.settings.shuttlingRoute:
        path = list()
        for start, transition, stop in triplet_iterator(self.settings.shuttlingRoute):
            if transition == "-":
                path.extend(self.shuttlingGraph.shuttlePath(start, stop))
        if path:
            self.shuttleOutput.emit(path * self.settings.shuttlingRepetitions, False)
def slice_at_z(self, z, layer_h):
    """Get paths outlines of where this model intersects the given Z level."""

    def ptkey(pt):
        return "{0:.3f}, {1:.3f}".format(pt[0], pt[1])

    layer = math.floor(z / layer_h + 0.5)
    paths = {}
    for facet in self.get_layer_facets(layer):
        line = facet.slice_at_z(z)
        if line is None:
            continue
        path = list(line)
        key1 = ptkey(path[0])
        key2 = ptkey(path[-1])
        if key2 in paths and paths[key2][-1] == path[0]:
            continue
        if key1 not in paths:
            paths[key1] = []
        paths[key1].append(path)

    outpaths = []
    deadpaths = []
    while paths:
        path = paths[next(iter(paths))][0]
        key1 = ptkey(path[0])
        key2 = ptkey(path[-1])
        del paths[key1][0]
        if not paths[key1]:
            del paths[key1]
        if key1 == key2:
            outpaths.append(path)
            continue
        elif key2 in paths:
            opath = paths[key2][0]
            del paths[key2][0]
            if not paths[key2]:
                del paths[key2]
            path.extend(opath[1:])
        elif key1 in paths:
            opath = paths[key1][0]
            del paths[key1][0]
            if not paths[key1]:
                del paths[key1]
            opath = list(reversed(opath))
            opath.extend(path[1:])
            path = opath
        else:
            deadpaths.append(path)
            continue
        key1 = ptkey(path[0])
        if key1 not in paths:
            paths[key1] = []
        paths[key1].append(path)
    if deadpaths:
        print("\nIncomplete Polygon at z=%s" % z)
    return (outpaths, deadpaths)
def __str__(self):
    """
    Calculates, stores in cache and returns string representation of path.
    """
    if self.string is None:
        path = [self.prefix]
        path.extend(self.segments)
        self.string = "/".join(path)
    return self.string
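# Minimal illustration of the join performed above, using made-up values for
# prefix and segments (the real ones come from the enclosing path object).
prefix = "/api/v1"
segments = ["users", "42", "posts"]
print("/".join([prefix] + segments))  # -> /api/v1/users/42/posts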
def __str__(self):
    path = [
        '/'.join([self.url, self.handler_path, self.core, self.handler_name]),
    ]
    params = []
    for k, v in self.data.items():
        kv_lst = zip([k] * len(v), v)
        params.extend(l for l in kv_lst)
    params.append(('wt', 'json'))
    if params:
        path.extend(['?', urlencode(params)])
    return ''.join(path)
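# Rough sketch of the URL shape produced above, with stand-in values; the real
# url/handler_path/core/handler_name and data come from the request object, and
# urlencode is imported here under its Python 3 name.
from urllib.parse import urlencode

base = "/".join(["http://localhost:8983/solr", "admin", "core0", "select"])
params = [("q", "id:1"), ("q", "id:2"), ("wt", "json")]
print("".join([base, "?", urlencode(params)]))
# -> http://localhost:8983/solr/admin/core0/select?q=id%3A1&q=id%3A2&wt=json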
def pythonpath(dir, debug=False):
    path = [
        satkdir(dir, debug=debug),
    ]
    if debug:
        print("satkutil.py - pythonpath() - adding path: '%s'" % path[0])
    path.extend(sys.path)
    sys.path = path
    if debug:
        print("satkutil.py - pythonpath() - sys.path=%s" % sys.path)
def onShuttlingRoute(self):
    self.synchronize()
    if self.settings.shuttlingRoute:
        path = list()
        for start, transition, stop in triplet_iterator(
                self.settings.shuttlingRoute):
            if transition == "-":
                path.extend(self.shuttlingGraph.shuttlePath(start, stop))
        if path:
            self.shuttleOutput.emit(
                path * self.settings.shuttlingRepetitions, False)
def default_lib_path(data_dir: str, target: int, pyversion: Tuple[int, int],
                     python_path: bool) -> List[str]:
    """Return default standard library search paths."""
    # IDEA: Make this more portable.
    path = []  # type: List[str]

    # Add MYPYPATH environment variable to library path, if defined.
    path_env = os.getenv('MYPYPATH')
    if path_env is not None:
        path[:0] = path_env.split(os.pathsep)

    # Add library stubs directory. By convention, they are stored in the
    # stubs/x.y directory of the mypy installation. Additionally, stubs
    # for earlier versions in the same major version will be added, and
    # as a last resort, third-party stubs will be added.
    if pyversion[0] == 2:
        major, minor = 2, 7
    else:
        # See bug #886
        major, minor = sys.version_info[0], sys.version_info[1]
    version_dir = '3.2'
    third_party_dir = 'third-party-3.2'
    if pyversion[0] < 3:
        version_dir = '2.7'
        third_party_dir = 'third-party-2.7'
    path.append(os.path.join(data_dir, 'stubs', version_dir))
    path.append(os.path.join(data_dir, 'stubs', third_party_dir))
    path.append(os.path.join(data_dir, 'stubs-auto', version_dir))
    if major == 3:
        # Add additional stub directories.
        versions = ['3.3', '3.4', '3.5', '3.6']
        if False:
            # Ick, we really should figure out how to use this again.
            versions = ['3.%d' % i for i in range(minor, -1, -1)]
        for v in versions:
            stubdir = os.path.join(data_dir, 'stubs', v)
            if os.path.isdir(stubdir):
                path.append(stubdir)
            third_party_stubdir = os.path.join(data_dir, 'stubs', 'third-party-' + v)
            if os.path.isdir(third_party_stubdir):
                path.append(third_party_stubdir)

    # Add fallback path that can be used if we have a broken installation.
    if sys.platform != 'win32':
        path.append('/usr/local/lib/mypy')

    # Contents of Python's sys.path go last, to prefer the stubs
    if python_path:
        path.extend(sys.path)

    return path
def transfer_path(self, with_cwd=False):
    if with_cwd:
        path = ['']
        path.extend(sys.path)
    else:
        path = sys.path

    self.runcommand("""if 1:
    import sys as _sys
    _sys.path = %r
    del _sys
    """ % (path,))
def findpath(self, knot):
    if self.visited.count(knot.pos) > 0:
        return list()
    path = list()
    self.visited.append(knot.pos)
    path.append(knot.pos)
    knot.gen(self.start, SEED, SIZE)
    for i in knot.getNeightbor():
        if self.visited.count(i.pos) == 0:
            path.extend(self.findpath(i))
            path.append(knot.pos)
    return path
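# Generic sketch of the pattern findpath() uses: a depth-first walk over a
# plain dict graph (standing in for the Knot objects) that records each node
# on entry and again on the way back, so the result retraces its steps.
def trace(graph, node, visited=None):
    if visited is None:
        visited = set()
    if node in visited:
        return []
    visited.add(node)
    path = [node]
    for nxt in graph.get(node, ()):
        if nxt not in visited:
            path.extend(trace(graph, nxt, visited))
            path.append(node)
    return path

print(trace({"a": ["b", "c"], "b": ["c"]}, "a"))  # -> ['a', 'b', 'c', 'b', 'a']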
def get_path_to_node(target_node, path=[]):
    for c in self._children:
        next_step = get_path_to_node(c, path)
        if next_step is not None:
            path.extend(next_step)
            break
    if target_node == self:
        return [self]
    elif len(path) > 0:
        return path
    return None
def traversal_tree(self, tree):
    """Traverse the multi-way tree and collect the template list."""
    _nodes, path, sub_path = [], [], []
    path.append(tree._head.get_data())
    self._traversal(tree._head, path, sub_path)
    path.extend(sub_path)
    _path = [tuple(x) for x in path[1:]]
    return [path[0], list(set(_path))]
def make_process(*args, **kwargs):
    """Choose whether to use python built in process or jasper."""
    process_cls = process.Process
    if config.SPAWN_USING == "jasper":
        process_cls = jasper_process.Process

    # Add the current working directory and /data/multiversion to the PATH.
    process_kwargs = kwargs.get("process_kwargs", {}).copy()
    env_vars = process_kwargs.get("env_vars", {}).copy()
    path = [env_vars.get("PATH", os.environ.get("PATH", ""))]
    path.extend([os.getcwd(), config.DEFAULT_MULTIVERSION_DIR])

    env_vars["PATH"] = os.pathsep.join(path)
    kwargs["env_vars"] = env_vars
    return process_cls(*args, **kwargs)
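# Small illustration of the PATH assembly done above, with a placeholder
# multiversion directory; in the original that value comes from
# config.DEFAULT_MULTIVERSION_DIR.
import os

path = [os.environ.get("PATH", "")]
path.extend([os.getcwd(), "/data/multiversion"])
print(os.pathsep.join(path))  # existing PATH, then cwd, then the extra directory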
def _resolve(self, ref) -> t.Tuple[dict, str, t.Callable[[], None]]:
    subresolver, query = self.resolver.resolve(ref)
    self.resolvers.append(subresolver)

    d = subresolver.doc
    if query:
        d = subresolver.access_by_json_pointer(query)

    path = [subresolver.name]
    if query:
        path.extend(query.lstrip("#/").split("/"))
    self.history.append(path)
    return d, query, self._resolve_teardown
def path(path1, path2, stop=False):
    path = []
    for s in path1:
        path.append(s)
        for j, n in enumerate(path2):
            if stop and len(path) + j > stop:
                return False
            if n == s:
                p2 = path2[:j]
                p2.reverse()
                path.extend(p2)
                return path
    raise Exception("Path not found")
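# Worked example of the merge above (assuming the function is in scope as
# `path`): both lists are node sequences; the result walks path1 until it
# reaches a node shared with path2, then follows path2 back to its head.
p1 = ["a", "b", "c", "d"]
p2 = ["x", "y", "c"]
# path(p1, p2) -> ['a', 'b', 'c', 'y', 'x']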
def __init__(self, gpg_binary=None, keyring=None):
    """Initialize an object instance.  Options are:

    gpg_binary -- full pathname for GPG binary.  If not supplied,
    the current value of PATH will be searched, falling back to the
    DEFAULT_PATH class variable if PATH isn't available.

    keyring -- full pathname to the public keyring to use in place of
    the default "~/.gnupg/pubring.gpg".
    """
    # If needed, look for the gpg binary along the path
    if gpg_binary is None or gpg_binary == "":
        # Copy DEFAULT_PATH so extending it doesn't mutate the shared default.
        path = list(DEFAULT_PATH)
        if "PATH" in os.environ:
            temppath = os.environ["PATH"]
            path.extend(temppath.split(os.pathsep))
        # else:
        #     path = self.DEFAULT_PATH
        for pathdir in path:
            pathdir = os.path.expandvars(pathdir)
            fullname = os.path.join(pathdir, "gpg")
            if self._check_file(fullname):
                gpg_binary = fullname
                break
            if self._check_file(fullname + ".exe"):
                gpg_binary = fullname + ".exe"
                break
            # gpg2 support
            fullname += "2"
            if self._check_file(fullname):
                gpg_binary = fullname
                break
            if self._check_file(fullname + ".exe"):
                gpg_binary = fullname + ".exe"
                break
        else:
            raise ValueError(
                _("Couldn't find 'gpg' binary on path %s.") % repr(path)
            )
    self.gpg_binary = '"' + gpg_binary + '"'
    self.keyring = keyring
def find_spec(self, fullname, path, target=None):
    if path is None or path == "":
        path = [os.getcwd()]  # top level import --
        path.extend(sys.path)
    if "." in fullname:
        *parents, name = fullname.split(".")
    else:
        name = fullname
    for entry in path:
        filename = os.path.join(entry, name + ".wasm")
        if not os.path.exists(filename):
            continue
        return spec_from_file_location(fullname, filename,
                                       loader=MyLoader(filename))
    return None
def __init__(self, gpg_binary=None, keyring=None):
    """Initialize an object instance.  Options are:

    gpg_binary -- full pathname for GPG binary.  If not supplied,
    the current value of PATH will be searched, falling back to the
    DEFAULT_PATH class variable if PATH isn't available.

    keyring -- full pathname to the public keyring to use in place of
    the default "~/.gnupg/pubring.gpg".
    """
    # If needed, look for the gpg binary along the path
    if gpg_binary is None or gpg_binary == "":
        # Copy DEFAULT_PATH so extending it doesn't mutate the shared default.
        path = list(DEFAULT_PATH)
        if 'PATH' in os.environ:
            temppath = os.environ['PATH']
            path.extend(temppath.split(os.pathsep))
        #else:
        #    path = self.DEFAULT_PATH
        for pathdir in path:
            pathdir = os.path.expandvars(pathdir)
            fullname = os.path.join(pathdir, 'gpg')
            if self._check_file(fullname):
                gpg_binary = fullname
                break
            if self._check_file(fullname + ".exe"):
                gpg_binary = fullname + ".exe"
                break
            # gpg2 support
            fullname += '2'
            if self._check_file(fullname):
                gpg_binary = fullname
                break
            if self._check_file(fullname + ".exe"):
                gpg_binary = fullname + ".exe"
                break
        else:
            raise ValueError(_("Couldn't find 'gpg' binary on path %s.")
                             % repr(path))
    self.gpg_binary = "\"" + gpg_binary + "\""
    self.keyring = keyring
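# Side note, not what the original does: on Python 3.3+ the "search PATH for
# an executable" step above can be handled by shutil.which, which also covers
# the .exe suffix on Windows.
import shutil

gpg_binary = shutil.which("gpg") or shutil.which("gpg2")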
def set_query_path(path):
    global __query_paths
    if isinstance(path, list):
        # list.extend() returns None, so build the new list explicitly
        # instead of assigning its return value.
        __query_paths = path + __query_paths
    elif isinstance(path, str) or isinstance(path, unicode):
        __query_paths.insert(0, path)
    __query_paths.append(os.path.join(base_path, 'queries'))
def standard_generator(n=0):
    """Produces the standard generator :math:`X_n` of :math:`F` as described in [CFP96]_.
    For instance, :math:`X_0` is the following:

        >>> print(standard_generator())
        InfiniteAut: V(2, 1) -> V(2, 1) specified by 3 generators (after expansion and reduction).
        x1 a1 -> x1 a1 a1
        x1 a2 a1 -> x1 a1 a2
        x1 a2 a2 -> x1 a2

    For :math:`n > 0` the element :math:`X_n` is a :class:`Mixed automorphism
    <thompson.mixed.MixedAut>`, consisting of a large fixed part and a smaller
    part which looks like :math:`X_0`.

        >>> from random import randint
        >>> n = randint(1, 20)
        >>> x_n = standard_generator(n)
        >>> type(x_n)
        <class 'thompson.mixed.MixedAut'>

    The :math:`X_n` generate :math:`F`; in fact just :math:`X_0` and :math:`X_1`
    are sufficient, due to the relation :math:`X_k^{-1} X_n X_k = X_{n+1}` for
    :math:`k < n`. See [CFP96]_ for more details.

        >>> x_k = standard_generator(randint(0, n-1))
        >>> x_k * x_n * ~x_k == standard_generator(n+1) #operation is the other way round in Python
        True
    """
    domain_basis = Generators((2, 1))
    path = Word('x', (2, 1))
    for i in range(n):
        domain_basis.append(path.alpha(1))
        path = path.alpha(2)
    # At this stage, domain_basis is equal to the intersection of domain and range
    range_basis = domain_basis.copy()

    domain_basis.append(path.alpha(1))
    domain_basis.append(path.extend('a2 a1'))
    domain_basis.append(path.extend('a2 a2'))

    range_basis.append(path.extend('a1 a1'))
    range_basis.append(path.extend('a1 a2'))
    range_basis.append(path.alpha(2))

    return Automorphism(domain_basis, range_basis)
def get_bck_path(pp, dp):
    path = []
    idx_in = pp.index(dp[0])
    idx_out = pp.index(dp[-1])
    path.extend(pp[0:idx_in])
    path.extend(dp)
    path.extend(pp[idx_out+1:])
    return path
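# Worked example of the splice above, with toy lists: dp is a detour whose
# first and last nodes both occur in pp, and the result replaces the pp
# segment between those nodes with dp.
pp = [1, 2, 3, 4, 5]
dp = [2, 9, 4]
# get_bck_path(pp, dp) -> [1, 2, 9, 4, 5]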
def default_lib_path(data_dir: str,
                     implementation: Implementation,
                     python_path: bool) -> List[str]:
    """Return default standard library search paths."""
    path = []  # type: List[str]

    # Add MYPYPATH environment variable to library path, if defined.
    path_env = os.getenv('MYPYPATH')
    if path_env is not None:
        path += path_env.split(os.pathsep)

    for component in [
            'stubs-override',
            'typeshed/builtins',
            'typeshed/stdlib',
            'typeshed/third_party',
    ]:
        for version in get_versions(implementation.base_dialect):
            stubdir = os.path.join(data_dir, component, version)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    path_env = os.getenv('MYPYPATH_APPEND')
    if path_env is not None:
        path += path_env.split(os.pathsep)

    for component in [
            'stubs-auto',
    ]:
        for version in get_versions(implementation.base_dialect):
            stubdir = os.path.join(data_dir, component, version)
            if os.path.isdir(stubdir):
                path.append(stubdir)

    # Contents of Python's sys.path go last, to prefer the stubs
    if python_path:
        path.extend(implementation.python_path)

    return path
def parse(self, args=None):
    # Parse arguments.
    args = Namespace(self.parser.parse_args(args))

    # Get subparser configuration.
    path = []
    for (arg, value) in sorted(args):
        if arg.startswith(self.keyword):
            path.extend([value, 'subparsers'])
    if path:
        path = ['subparsers'] + path[:-1]
    config = self.__get_config(path)
    parser = self.__parsers['/'.join(path)]

    # Post checks.
    for option, option_config in config['options'].items():
        if args[option] is None:
            if 'type' in option_config and option_config['type'] == 'list':
                args[option] = []
            continue
        for keyword in POST_KEYWORDS:
            if keyword in option_config:
                {
                    'need': lambda: self.__check_dependency(
                        args, config, option, parser, True),
                    'conflict': lambda: self.__check_dependency(
                        args, config, option, parser, False)
                }.get(keyword)()

    # Execute.
    if 'execute' in config:
        self.__execute(config['execute'], args)

    return args
def find_spec(self, fullname, path, target=None):  # type: ignore
    if path is None or path == "":
        path = [os.getcwd()]  # top level import --
        path.extend(sys.path)
    if "." in fullname:
        *parents, name = fullname.split(".")
    else:
        name = fullname
    for entry in path:
        py = os.path.join(str(entry), name + ".py")
        if os.path.exists(py):
            continue
        wasm = os.path.join(str(entry), name + ".wasm")
        if os.path.exists(wasm):
            return spec_from_file_location(fullname, wasm,
                                           loader=_WasmtimeLoader(wasm))
        wat = os.path.join(str(entry), name + ".wat")
        if os.path.exists(wat):
            return spec_from_file_location(fullname, wat,
                                           loader=_WasmtimeLoader(wat))
    return None
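# Hedged usage sketch (class name hypothetical): a find_spec() like the one
# above belongs to a meta-path finder, which only takes effect once an
# instance is appended to sys.meta_path; Python then consults it on every
# import and falls back to the normal machinery when it returns None.
import sys

class _NullFinder:
    # Stand-in with the same find_spec() signature; always defers to the
    # default import machinery by returning None.
    def find_spec(self, fullname, path, target=None):
        return None

sys.meta_path.append(_NullFinder())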
def create_app_search_path(basedir=_f.LIBEXEC_DIR, syspath=False):
    """Create a search path for FreeFOAM applications.

    First, directories in the list given by the environment variable
    'FREEFOAM_PATH' (if it is defined) are used. The elements of
    'FREEFOAM_PATH' must be separated by ':' (or optionally by ';' on
    Windows). Next, the directory '<basedir>' is appended, where '<basedir>'
    defaults to 'LIBEXEC_DIR'. This can be overridden using the 'basedir'
    argument. If the argument 'syspath' is set to True, the directories in
    the system 'PATH' variable will be appended to the list.

    Parameters
    ----------
    basedir : Directory in which the FreeFOAM applications are located.
    syspath : If True, the system 'PATH' environment variable will also be
              searched.

    Returns
    -------
    path : A list of directories comprising the search path.

    """
    import os
    import os.path
    # set up the search path
    path = []
    # start with FREEFOAM_PATH elements if present
    if 'FREEFOAM_PATH' in os.environ:
        path.extend(split_search_path(os.environ['FREEFOAM_PATH']))
    # append with basedir
    path.append(os.path.abspath(basedir))
    # append system PATH elements if present and desired by the user
    if syspath and 'PATH' in os.environ:
        path.extend(split_search_path(os.environ['PATH']))
    # done
    return path
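# Hypothetical call, assuming the module (and its split_search_path() helper)
# is importable; the FREEFOAM_PATH value and basedir are illustrative only.
import os

os.environ["FREEFOAM_PATH"] = "/opt/freefoam/bin:/usr/local/freefoam/bin"
search_path = create_app_search_path(basedir="/usr/libexec/freefoam", syspath=True)
# -> the FREEFOAM_PATH entries, then the basedir, then each directory in $PATH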
class StageScan(object):
    '''
    This class generates a grid given the position, origin, %overlap and the
    dimensions of the grid.
    '''

    def __init__(self, **kwargs):
        # All four grid parameters are required keyword arguments.
        try:
            self.ORIGIN = kwargs['ORIGIN']
            self.POS = kwargs['POS']
            self.OVERLAP = kwargs['OVERLAP']
            self.DIM = kwargs['DIM']
        except KeyError:
            raise SystemExit(
                "Please define all: ORIGIN=(0,0),POS='D',OVERLAP=10,DIM=(200,20)"
            )
        self.fovSize = 200  # Distance as 200 nm
        self.signX, self.signY = self.getSign()
        self.stagePath = list()
        self.indexPathDICT = dict()

    def getSign(self):
        return {
            'D': (+1, +1),
            'F': (-1, +1),
            'A': (-1, -1),
            'C': (+1, -1)
        }[self.POS]

    def stepsize(self):
        return self.fovSize - ((self.fovSize * self.OVERLAP) / 100)

    def getPath(self, (XTILES, YTILES)):
        # Build a serpentine path over the XTILES x YTILES grid: every odd row
        # is traversed in reverse so the stage never jumps back to column 0.
        xylist = [(x, y) for x in range(XTILES) for y in range(YTILES)]
        path = list()
        for j in range(YTILES):
            row = [xy for xy in xylist if xy[1] == j]
            if j % 2 == 1:
                row = sorted(row, key=itemgetter(0), reverse=True)
            path.extend(row)
        return path
parser.add_argument('target', help='Name of class or jar file to decompile')
args = parser.parse_args()

plugins = []
if args.plugin is not None:
    for name in args.plugin:
        mod = __import__('Krakatau.plugins.user' + name, globals(), locals(),
                         ['create'], -1)
        plugins.append(mod.create())

path = []
if not args.nauto:
    print 'Attempting to automatically locate the standard library...'
    found = findJRE()
    if found:
        print 'Found at ', found
        path.append(found)
    else:
        print 'Unable to find the standard library'

if args.path:
    for part in args.path:
        path.extend(part.split(';'))

if args.target.endswith('.jar'):
    path.append(args.target)

targets = script_util.findFiles(args.target, args.r, '.class')
targets = map(script_util.normalizeClassname, targets)
decompileClass(path, targets, args.out, plugins)
                  action="store_true", dest="profile", default=False,
                  help="Run in profile mode (only loads the data)")
parser.add_option("-l", "--lazy",
                  action="store_true", dest="lazy", default=False,
                  help="Defers part of the CSV data processing to run time")
parser.add_option("-w", "--warnings",
                  action="store_true", dest="warnings", default=False,
                  help="Continue loading invalid data, generating warnings")

(options, args) = parser.parse_args()
options.warnings = dict() if options.warnings else None
if options.debug:
    loglevel = logging.DEBUG
if options.path:
    path.extend(options.path.split(os.pathsep))
geometry = "800x600" if not options.geometry else options.geometry

logging.basicConfig(level=loglevel,
                    format='%(asctime)s %(levelname)s %(message)s',
                    stream=sys.stderr)

# Expand the names; on Windows they may come in with wildcards.
inputfiles = []
for name in args:
    globbed = glob.glob(name)
    if globbed:
        inputfiles.extend(globbed)
    else:
        inputfiles.append(name)
parser = argparse.ArgumentParser(description='Krakatau decompiler and bytecode analysis tool')
parser.add_argument('-path', action='append',
                    help='Semicolon separated paths or jars to search when loading classes')
parser.add_argument('-out', help='Path to generate source files in')
parser.add_argument('-rename-classes', type=int, metavar='N',
                    help='Rename classes with name of less than N characters (0 for all classes)')
parser.add_argument('-nauto', action='store_true',
                    help="Don't attempt to automatically locate the Java standard library. "
                         "If enabled, you must specify the path explicitly.")
parser.add_argument('-r', action='store_true',
                    help="Process all files in the directory target and subdirectories")
parser.add_argument('-skip', action='store_true',
                    help="Upon errors, skip class or method and continue decompiling")
parser.add_argument('target', help='Name of class or jar file to decompile')
args = parser.parse_args()

path = []
if not args.nauto:
    print 'Attempting to automatically locate the standard library...'
    found = findJRE()
    if found:
        print 'Found at ', found
        path.append(found)
    else:
        print 'Unable to find the standard library'

if args.path:
    for part in args.path:
        path.extend(part.split(';'))

if args.target.endswith('.jar'):
    path.append(args.target)

targets = script_util.findFiles(args.target, args.r, '.class')
targets = map(script_util.normalizeClassname, targets)
decompileClass(path, targets, args.out, args.skip, args.rename_classes)
parser.add_argument(
    "-nauto",
    action="store_true",
    help="Don't attempt to automatically locate the Java standard library. If enabled, you must specify the path explicitly.",
)
parser.add_argument("-r", action="store_true",
                    help="Process all files in the directory target and subdirectories")
parser.add_argument("-skip", action="store_true",
                    help="Upon errors, skip class or method and continue decompiling")
parser.add_argument("target", help="Name of class or jar file to decompile")
args = parser.parse_args()

path = []
if not args.nauto:
    print "Attempting to automatically locate the standard library..."
    found = findJRE()
    if found:
        print "Found at ", found
        path.append(found)
    else:
        print "Unable to find the standard library"

if args.path:
    for part in args.path:
        path.extend(part.split(";"))

if args.target.endswith(".jar"):
    path.append(args.target)

targets = script_util.findFiles(args.target, args.r, ".class")
targets = map(script_util.normalizeClassname, targets)
decompileClass(path, targets, args.out, args.skip)
def _path_to_host(prefix, hostname):
    path = [prefix]
    path.extend(part for part in reversed(hostname.split('.')))
    return '/'.join(path).replace('//', '/')
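# Example of the reversed-hostname node path produced above (the prefix value
# is illustrative).
print(_path_to_host("/hosts", "web01.example.com"))  # -> /hosts/com/example/web01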
def main():
    loglevel = logging.INFO
    path = ['.']

    parser = OptionParser(usage=USAGE, version="%%prog %s" % VERSION)
    parser.add_option("-p", "--path", dest="path", metavar="PATH",
        help="""Path where data and pattern files are searched for.
            The path is given in the operating system's style, e.g.:
            C:\\ruta1;C:\\ruta2 (on Windows)
            /ruta1:/ruta2 (on Linux)""")
    parser.add_option("-o", "--output", dest="outpath", metavar="OUTPATH",
        help="""Path where the result files are saved.
            For each pattern, a file with the same name ending in .cfg
            is created in the given directory.""")
    parser.add_option("-O", "--onefile",
        action="store_true", dest="collapse", default=False,
        help="""Dumps the result of all patterns into a single file.
            If this option is used, -o must also be given to specify
            the name of the output file.""")
    parser.add_option("-D", "--define",
        action="append", dest="definitions", metavar="VAR=EXPR",
        help="""Defines a variable for the execution of the pattern""")
    parser.add_option("-d", "--debug",
        action="store_true", dest="debug", default=False,
        help="Dumps debug messages to stderr")
    parser.add_option("-s", "--shell",
        action="store_true", dest="shell", default=False,
        help="Loads the data and enters an interactive command shell")
    parser.add_option("-t", "--test-mode",
        action="store_true", dest="test", default=False,
        help="Iterates over all possible values of the 'SELECT's")
    parser.add_option("-x", "--ext", dest="ext", metavar=".EXT", default=".cfg",
        help="Extension of the result file (by default, .cfg)")
    parser.add_option("-l", "--lazy",
        action="store_true", dest="lazy", default=False,
        help="Defers part of the CSV data processing to run time")
    parser.add_option("-w", "--warnings",
        action="store_true", dest="warnings", default=False,
        help="Continue loading invalid data, generating warnings")

    (options, args) = parser.parse_args()
    if len(args) < 1 and not options.shell:
        parser.print_help(sys.stderr)
        sys.exit(OPTIONS_ERRNO)
    if options.debug:
        loglevel = logging.DEBUG
    if options.path:
        path.extend(options.path.split(os.pathsep))
    if options.collapse and not options.outpath:
        parser.print_help(sys.stderr)
        sys.exit(OPTIONS_ERRNO)

    logging.basicConfig(level=loglevel,
                        format='%(asctime)s %(levelname)s %(message)s',
                        stream=sys.stderr)

    # Expand the names; on Windows they may come in with wildcards.
    inputfiles = []
    for name in args:
        globbed = glob.glob(name)
        if globbed:
            inputfiles.extend(globbed)
        else:
            inputfiles.append(name)

    plantillator = Consumer()
    plantillator.path = path
    plantillator.outpath = options.outpath
    plantillator.collapse = options.collapse
    plantillator.definitions = options.definitions or []
    plantillator.inputfiles = inputfiles
    plantillator.ext = options.ext
    plantillator.overwrite = True
    plantillator.lazy = options.lazy
    plantillator.test = options.test
    plantillator.warnings = options.warnings

    try:
        plantillator.prepare()
        plantillator.dump_warnings()
        if options.shell:
            local = dict(plantillator.loader.data)
            code.interact("Test shell", local=local)
            exit(0)
        if not options.test:
            plantillator.render()
        else:
            while True:
                plantillator.render()
                if plantillator.actor.exhausted:
                    break
    except ParseError as details:
        exit_with_errors(details)
    except TemplateError as details:
        exit_with_errors(details)
    except DataError as details:
        exit_with_errors(details)
    except Exception as detail:
        for detail in format_exception_only(sys.exc_type, sys.exc_value):
            sys.stderr.write(str(detail))
        if options.debug:
            print_exc(file=sys.stderr)
        sys.exit(UNKNOWN_ERRNO)
def Main(script, sys_argv):
    global flags, version, frozen_main, compress_hook
    opts, args = GetMyOpts(sys_argv)
    OperateWithOpts(opts, args, flags)
    script_dir = os.path.dirname(os.path.abspath(script))
    if not script_dir:
        script_dir = sys.prefix
    if not flags.get('no-logo'):
        print "py2cc " + version + " - .Py to .C & Compile"
        print "(c)2008 Alexey Sudachen, [email protected]"
        print "http://www.ethical-hacker.com/"
        print "~\n"
    if not args:
        print "error: there is no input file"
        print "~\n"
        print usage_msg
        sys.exit(-1)

    path = [os.path.normpath(os.path.dirname(args[0])),
            os.path.normpath(os.path.dirname(script))]
    path.extend(sys.path)
    path = flags['addpath'] + path
    path.extend(AppendPythonPath(flags.get('no-python')))
    sys.path = path + sys.path
    #sys.path.extend(AppendPythonPath(False))
    #print sys.path

    if flags.get('compress'):
        try:
            import _lzss
            compress_hook = _lzss.compress
        except:
            print 'failed to load _lzss extension, compression is disabled'

    output_name = flags.get('output-name', None)
    if not output_name:
        output_name = os.path.splitext(os.path.basename(args[0]))[0]
    if sys.platform == 'win32':
        if not output_name.lower().endswith('.exe'):
            output_name = output_name + '.exe'

    C_file_name = output_name + '.c'
    C_file = open(C_file_name, "w+t")
    C_file.write(frozen_header)
    compiled = Compile(args, path, flags['exclude'], flags.get('debug'),
                       flags.get('no-main'), flags.get('depth'), C_file)
    FreezeTable(compiled, C_file)
    C_file.write(frozen_main)
    if flags['subsystem'] == 'windows' and sys.platform == 'win32':
        C_file.write(build_argv)
    C_file.close()

    if not flags.get('c-only') and not flags.get('no-main'):
        if sys.platform == 'win32':
            flags['c-flags'] = flags.get('c-flags', '') + ' -D_PY2CC_ON_WINDOWS'
        else:
            flags['c-flags'] = flags.get('c-flags', '') + ' -D_PY2CC_ON_POSIX'

        if flags.get('python'):
            if flags['cc'] == 'msc':
                python_lib = flags['python'] + '.lib'
                os.putenv('LIB', script_dir + '\\lib;' + os.environ['LIB'])
            else:
                python_lib = '-l' + flags['python'] + ' -L' + script_dir + '/lib' + ' -L' + script_dir
                if flags.get('static'):
                    python_lib = python_lib + ' -lm -lpthread -ldl -lc -lutil'
        else:
            if sys.platform == 'win32':
                if flags['cc'] == 'msc':
                    python_lib = os.path.join(sys.exec_prefix, 'libs',
                                              'python%d%d.lib' % sys.version_info[:2])
                else:
                    python_lib = '-L"' + os.path.join(sys.exec_prefix, 'libs') + \
                                 '" -lpython%d%d' % sys.version_info[:2]
            else:
                if os.path.exists(os.path.join(sys.exec_prefix, 'lib',
                                               'libpython%d.%d.so' % sys.version_info[:2])):
                    python_lib = '-lpython%d.%d' % sys.version_info[:2]
                else:
                    python_lib = '-L' + os.path.join(sys.exec_prefix, 'lib',
                                                     'python%d.%d' % sys.version_info[:2], 'config') + \
                                 ' -lpython%d.%d' % sys.version_info[:2] + \
                                 ' -lpthread -lutil -lm -lc -ldl'
                #python_lib = '-lpython%d.%d'%sys.version_info[:2]

        if flags['subsystem'] == 'windows' and sys.platform == 'win32':
            flags['c-flags'] = flags['c-flags'] + ' -D_PY2CC_WINDOWS_SUBSYSTEM'
        else:
            flags['c-flags'] = flags['c-flags'] + ' -D_PY2CC_CONSOLE_SUBSYSTEM'

        if flags['cc'] == 'msc' and sys.platform == 'win32':
            CC = flags.get('cc-path', 'cl')
            libs = ['user32.lib', 'advapi32.lib', 'shell32.lib', 'ole32.lib',
                    'oleaut32.lib', 'gdi32.lib', 'ws2_32.lib']
            c_flags = ' -nologo -MD' + flags['c-flags']
            if flags.get('static'):
                c_flags = c_flags + ' -D_PY2CC_STATIC'
            cmd_S = '%s %s "%s" -o "%s" "%s" ' % (CC, c_flags, C_file_name,
                                                  output_name, python_lib) + ' '.join(libs)
        else:
            CC = flags.get('cc-path', 'gcc')
            cmd_S = '%s %s "%s" -s -o "%s" %s' % (CC, flags['c-flags'], C_file_name,
                                                  output_name, python_lib)
        print cmd_S
        if os.system(cmd_S) != 0:
            raise ErrorMessageException('failed to compile .C code to binary image')