def parse_url(url, pathkey, config, opts):
    """Split *url* into the parts needed to download it.

    Returns a dict with 'url', 'path', 'file', 'name', 'ext', 'local',
    'type' and 'script' keys. The matching protocol parser from the
    module-level ``parsers`` table is invoked to locate the file,
    checking the local source directory first.

    Raises error.general if the URL has no '<proto>://' prefix.
    """
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # fixed typo in the error message: 'malforned' -> 'malformed'
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    # Keep compound '.tar.*' extensions together, e.g. 'foo.tar.gz'
    # becomes name 'foo' and ext '.tar.gz'.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def _check_exe(_opts, macro, value, constraint, silent = False):
    # Check that the executable a macro refers to exists.
    # Returns True when found or when the check can be skipped,
    # False when a required executable is missing.
    #  - an empty value or a 'none' constraint means nothing to check
    #  - absolute paths are tried directly (with '.exe' appended on
    #    Windows), then fall back to a PATH search on the basename
    #  - a missing 'optional' executable only produces a trace warning
    if len(value) == 0 or constraint == 'none':
        return True
    orig_value = value
    if path.isabspath(value):
        if path.isfile(value):
            return True
        if os.name == 'nt':
            # Windows: the configured name may omit the '.exe' suffix.
            if path.isfile('%s.exe' % (value)):
                return True
        # Absolute path not found; retry as a plain name on PATH.
        value = path.basename(value)
        absexe = True
    else:
        absexe = False
    paths = os.environ['PATH'].split(os.pathsep)
    if _check_paths(value, paths):
        if absexe:
            # Found on PATH even though an absolute path was configured;
            # warn so the user knows a different binary may be used.
            if not silent:
                log.notice('warning: exe: absolute exe found in path: (%s) %s' % (macro, orig_value))
        return True
    if constraint == 'optional':
        if not silent:
            log.trace('warning: exe: optional exe not found: (%s) %s' % (macro, orig_value))
        return True
    if not silent:
        log.notice('error: exe: not found: (%s) %s' % (macro, orig_value))
    return False
def get_config_files(self, config):
    """Expand a, possibly wildcarded, config file name.

    Converts *config* to a shell path and returns a list of shell
    paths. A '*' or '?' pattern is globbed against every directory on
    the ':' separated config path; the base pattern defaults to '*'
    and a '.cfg' extension is appended when missing.

    @fixme should this use a passed in set of defaults and not
           not the initial set of values ?
    """
    config = path.shell(config)
    if '*' in config or '?' in config:
        # Fixed: was the Python 2 statement 'print config'; the
        # function form works under both Python 2 and 3 and matches
        # the other version of this method.
        print(config)
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                # NOTE(review): if path.shell() returns a plain string
                # this extends configs character by character; confirm
                # whether it should be 'configs += [path.shell(f)]'.
                configs += path.shell(f)
    else:
        configs = [config]
    return configs
def config_start(self, name, _config):
    # Record the config file and every config it includes, then pass
    # the start event on to the formatter at the next nesting level.
    self.files['configs'].append(name)
    for include in _config.includes():
        base = path.basename(include)
        if base not in self.files['configs']:
            self.files['configs'].append(base)
    self.formatter.config_start(self.bset_nesting + 1, name)
def parse_url(url, pathkey, config, opts, file_override = None):
    """Split *url* into the parts needed to download it.

    *file_override*, when given, replaces the file name taken from the
    URL; it must not contain path separators or glob characters.

    Returns a dict with 'url', 'options', 'path', 'file', 'name',
    'ext', 'local', 'type' and 'script' keys. The matching protocol
    parser from the module-level ``parsers`` table is invoked to
    locate the file, checking the local source directory first.

    Raises error.general on a malformed URL or a bad override name.
    """
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # fixed typo in the error message: 'malforned' -> 'malformed'
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        # Reject anything that could escape the download directory or
        # glob unexpectedly.
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Keep compound '.tar.*' extensions together, e.g. 'foo.tar.gz'
    # becomes name 'foo' and ext '.tar.gz'.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def __init__(self, argv, argopts, defaults):
    # Build the option set for the set-builder front end.
    # NOTE(review): the command path is derived from argv[1], not
    # argv[0] — confirm this matches how the tool is invoked.
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    # Options always appended to the command line passed down to the
    # underlying build.
    extras = ['--dry-run', '--with-download', '--quiet',
              '--without-log', '--without-error-report',
              '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.load_overrides()
    self.opts = { 'params' : extras }
    self.sb_git()
    self.rtems_bsp()
    if argopts.download_dir is not None:
        # Route both source and patch downloads to the user supplied
        # directory.
        self.defaults['_sourcedir'] = ('dir',
                                       'optional',
                                       path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir',
                                      'optional',
                                      path.abspath(argopts.download_dir))
def parse_url(url, pathkey, config, opts):
    # Split the source URL into the parts we need.
    # Returns a dict with 'url', 'path', 'file', 'name', 'ext',
    # 'local'/'local_prefix', 'script' and (when matched) 'type' keys.
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    # Walk each directory on the ':' separated search path; remember
    # the first as the default location and stop at the first that
    # already contains the file.
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    # Hand off to the protocol specific parser, e.g. git or cvs.
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
def parse_url(url, pathkey, config, opts):
    """Split *url* into the parts needed to download it.

    Returns a dict with 'url', 'path', 'file', 'name', 'ext',
    'local'/'local_prefix', 'script' and (when matched) 'type' keys.
    The local source directories on the *pathkey* search path are
    checked before any download is attempted.

    Raises error.general if the URL has no '<proto>://' prefix.
    """
    source = {}
    source["url"] = url
    colon = url.find(":")
    if url[colon + 1 : colon + 3] != "//":
        # fixed typo in the error message: 'malforned' -> 'malformed'
        raise error.general("malformed URL: %s" % (url))
    source["path"] = url[: colon + 3] + path.dirname(url[colon + 3 :])
    source["file"] = path.basename(url)
    source["name"], source["ext"] = path.splitext(source["file"])
    # Keep compound '.tar.*' extensions together, e.g. 'foo.tar.gz'
    # becomes name 'foo' and ext '.tar.gz'.
    if source["name"].endswith(".tar"):
        source["name"] = source["name"][:-4]
        source["ext"] = ".tar" + source["ext"]
    #
    # Get the file. Checks the local source directory first.
    #
    source["local"] = None
    for p in config.define(pathkey).split(":"):
        local = path.join(path.abspath(p), source["file"])
        if source["local"] is None:
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
        if path.exists(local):
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
            break
    source["script"] = ""
    # Hand off to the protocol specific parser, e.g. git or cvs.
    for p in parsers:
        if url.startswith(p):
            source["type"] = p
            if parsers[p](source, config, opts):
                break
    return source
def get_config_files(self, config):
    #
    # Convert to shell paths and return shell paths.
    #
    # A '*' or '?' pattern is globbed against every directory on the
    # ':' separated config path; the base pattern defaults to '*' and
    # a '.cfg' extension is appended when missing.
    #
    # @fixme should this use a passed in set of defaults and not
    # not the initial set of values ?
    #
    config = path.shell(config)
    if '*' in config or '?' in config:
        # NOTE(review): looks like a debug print left in — confirm.
        print(config)
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                # NOTE(review): if path.shell() returns a plain string
                # this extends configs character by character; confirm
                # whether it should be 'configs += [path.shell(f)]'.
                configs += path.shell(f)
    else:
        configs = [config]
    return configs
def result(self, id):
    # Render the results page for run *id* (Python 2 web handler).
    # Gathers progress, settings, log and result files from the run's
    # directory under results_dir and feeds them to the Mako templates.
    # NOTE(review): the visible span ends at 'rend_template = ...'
    # without a return — the function appears truncated here.
    global results_dir, uploads_dir, sett
    # NOTE: 'path' shadows any module named path; here it is the run's
    # results directory.
    path = os.path.join(results_dir, id)
    params = {}
    params["run_id"] = id
    params["detailed"] = "Results not yet available"
    params["matrix"] = "Results not yet available"
    tmpl = lookup.get_template("master.mako")
    # Loads the progress file if it exists
    p = {"status":"","curprog":0,"progmax":0}
    progress_path = os.path.join(path,".prog")
    if os.path.exists(progress_path):
        with open(progress_path) as f:
            p = json.loads(f.read() )
    params["log"] = "###Run Settings###\n"
    sett_path = os.path.join(path,".settings")
    organism = ""
    if os.path.exists(sett_path):
        with open(sett_path) as f:
            tmp = f.read()
            params["log"] = params["log"]+ tmp
            # The settings file is tab separated; pick the value of
            # the 'Organism:' row.
            organism = [x.split("\t")[1] for x in tmp.split("\n") if x.split("\t")[0] == "Organism:"][0]
    params["organism"] = organism
    params["log"] = params["log"] + "\n###Run Log###\n"
    debug_path = os.path.join(path,".log")
    if os.path.exists(debug_path):
        with open(debug_path) as f:
            params["log"] = params["log"] + f.read()
    # loads results from results file
    detailed_path = os.path.join(path,"detailed.gr")
    if os.path.exists(detailed_path):
        with open(detailed_path) as f:
            params["detailed"] = f.read()
    foi_names_path = os.path.join(os.path.join(results_dir, id),".fois")
    if os.path.exists(foi_names_path):
        with open(foi_names_path) as f:
            params["fois"] = [basename(x).split(".")[0] for x in f.read().split("\n") if x != ""]
    else:
        params["fois"] = ""
    params["zipfile"] = os.path.join("results",id,"GR_{}.tar.gz").format(id)
    params["run_annotation"] = True if os.path.exists(os.path.join(results_dir,id,"annotations")) else False
    params.update(p)
    try:
        rend_template = tmpl.render(body=lookup.get_template("results.mako").render(**params),script= lookup.get_template("results.js").render(**params))
        print "LOADED TEMPLATE"
    except Exception, e:
        # Build a readable traceback string from the Mako traceback.
        traceback = MakoTraceback()
        str_error = ""
        for (filename, lineno, function, line) in traceback.traceback:
            str_error += "File %s, line %s, in %s" % (os.path.split(filename)[-1], lineno, function)
            str_error += "\n"
            str_error += line + "\n"
            # NOTE(review): appending the error inside the loop repeats
            # it per frame — original indentation is ambiguous here.
            str_error += "%s: %s" % (str(traceback.error.__class__.__name__), traceback.error)
        print str_error
        rend_template = str_error
def config_start(self, name, _config):
    # Record the config file and every config it includes, and mark
    # the config reporting as active.
    self.files['configs'] += [name]
    for cf in _config.includes():
        cfbn = path.basename(cf)
        if cfbn not in self.files['configs']:
            self.files['configs'] += [cfbn]
    # NOTE(review): 'first' is unused in the visible span — the
    # function may continue beyond this fragment.
    first = not self.configs_active
    self.configs_active = True
def __init__(self, argv, optargs, _defaults, command_path):
    # Command-line option table and initial option/macro state.
    # Each entry maps a long option to:
    #   (macro name, handler, takes-a-parameter, default, seed-macro)
    self._long_opts = {
        # key macro handler param defs init
        '--prefix': ('_prefix', self._lo_path, True, None, False),
        '--topdir': ('_topdir', self._lo_path, True, None, False),
        '--configdir': ('_configdir', self._lo_path, True, None, False),
        '--builddir': ('_builddir', self._lo_path, True, None, False),
        '--sourcedir': ('_sourcedir', self._lo_path, True, None, False),
        '--patchdir': ('_patchdir', self._lo_path, True, None, False),
        '--tmppath': ('_tmppath', self._lo_path, True, None, False),
        '--jobs': ('_jobs', self._lo_jobs, True, 'max', True),
        '--log': ('_logfile', self._lo_string, True, None, False),
        '--url': ('_url_base', self._lo_string, True, None, False),
        '--no-download': ('_disable_download', self._lo_bool, False, '0', True),
        '--macros': ('_macros', self._lo_string, True, None, False),
        '--source-only-download': ('_source_download', self._lo_bool, False, '0', True),
        '--targetcflags': ('_targetcflags', self._lo_string, True, None, False),
        '--targetcxxflags': ('_targetcxxflags', self._lo_string, True, None, False),
        '--libstdcxxflags': ('_libstdcxxflags', self._lo_string, True, None, False),
        '--force': ('_force', self._lo_bool, False, '0', True),
        '--quiet': ('_quiet', self._lo_bool, False, '0', True),
        '--trace': ('_trace', self._lo_bool, False, '0', True),
        '--dry-run': ('_dry_run', self._lo_bool, False, '0', True),
        '--warn-all': ('_warn_all', self._lo_bool, False, '0', True),
        '--no-clean': ('_no_clean', self._lo_bool, False, '0', True),
        '--keep-going': ('_keep_going', self._lo_bool, False, '0', True),
        '--always-clean': ('_always_clean', self._lo_bool, False, '0', True),
        '--no-install': ('_no_install', self._lo_bool, False, '0', True),
        '--regression': ('_regression', self._lo_bool, False, '0', True),
        '--host': ('_host', self._lo_triplets, True, None, False),
        '--build': ('_build', self._lo_triplets, True, None, False),
        '--target': ('_target', self._lo_triplets, True, None, False),
        '--rtems-tools': ('_rtems_tools', self._lo_string, True, None, False),
        '--rtems-bsp': ('_rtems_bsp', self._lo_string, True, None, False),
        '--rtems-version': ('_rtems_version', self._lo_string, True, None, False),
        '--help': (None, self._lo_help, False, None, False)
    }
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    self.argv = argv
    self.args = argv[1:]
    self.optargs = optargs
    self.defaults = _defaults
    self.opts = {'params': []}
    for lo in self._long_opts:
        # Seed each option (keyed without the leading '--') with its
        # default parameter value.
        self.opts[lo[2:]] = self._long_opts[lo][3]
        if self._long_opts[lo][4]:
            # Options flagged 'init' also seed the macro defaults.
            self.defaults[self._long_opts[lo][0]] = (
                'none', 'none', self._long_opts[lo][3])
def usage(exc, msg): if sys.argv: progname = path.basename(sys.argv[0]) else: progname = 'mclock' # print progname + ':', if exc = string.atoi_error: print 'non-numeric argument:',
def _git_parser(source, config, opts):
    # Split any '?option' arguments off the git URL and fill in the
    # source dict; the checkout is kept under a 'git' subdirectory of
    # the local prefix and symlinked into the build tree.
    parts = source["url"].split("?")
    base_url = parts[0]
    source["path"] = path.dirname(base_url)
    source["file"] = path.basename(base_url)
    source["name"], source["ext"] = path.splitext(source["file"])
    if len(parts) > 1:
        source["args"] = parts[1:]
    local = path.join(source["local_prefix"], "git", source["file"])
    source["local"] = local
    source["symlink"] = local
def cp(src_hdfs_path, dest_hdfs_path, **kwargs):
    """ Copy the contents of ``src_hdfs_path`` to ``dest_hdfs_path``.

    Additional keyword arguments, if any, are handled like in
    :func:`open`. If ``src_hdfs_path`` is a directory, its contents
    will be copied recursively.
    """
    src, dest = {}, {}
    try:
        # Resolve host/port/path for both endpoints and open a
        # filesystem handle for each.
        for d, p in ((src, src_hdfs_path), (dest, dest_hdfs_path)):
            d["host"], d["port"], d["path"] = path.split(p)
            d["fs"] = hdfs(d["host"], d["port"])
        #--- does src exist? ---
        try:
            src["info"] = src["fs"].get_path_info(src["path"])
        except IOError:
            raise IOError("no such file or directory: %r" % (src["path"]))
        #--- src exists. Does dest exist? ---
        try:
            dest["info"] = dest["fs"].get_path_info(dest["path"])
        except IOError:
            # Destination does not exist: copy the file directly, or
            # create the directory and recurse into the source.
            if src["info"]["kind"] == "file":
                _cp_file(src["fs"], src["path"], dest["fs"], dest["path"], **kwargs)
                return
            else:
                dest["fs"].create_directory(dest["path"])
                dest_hdfs_path = dest["fs"].get_path_info(dest["path"])["name"]
                for item in src["fs"].list_directory(src["path"]):
                    cp(item["name"], dest_hdfs_path, **kwargs)
                return
        #--- dest exists. Is it a file? ---
        if dest["info"]["kind"] == "file":
            raise IOError("%r already exists" % (dest["path"]))
        #--- dest is a directory ---
        # Copy into the directory under the source's base name.
        dest["path"] = path.join(dest["path"], path.basename(src["path"]))
        if dest["fs"].exists(dest["path"]):
            raise IOError("%r already exists" % (dest["path"]))
        if src["info"]["kind"] == "file":
            _cp_file(src["fs"], src["path"], dest["fs"], dest["path"], **kwargs)
        else:
            dest["fs"].create_directory(dest["path"])
            dest_hdfs_path = dest["fs"].get_path_info(dest["path"])["name"]
            for item in src["fs"].list_directory(src["path"]):
                cp(item["name"], dest_hdfs_path, **kwargs)
    finally:
        # Close whichever filesystem handles were opened; a KeyError
        # means the handle was never created.
        for d in src, dest:
            try:
                d["fs"].close()
            except KeyError:
                pass
def _find(name, opts):
    # Locate a config file either on the path embedded in the expanded
    # name or on the default '_configdir' search path. Returns the
    # first existing candidate, or None when nothing matches.
    expanded = opts.defaults.expand(name)
    if ':' in expanded:
        search_dirs = path.dirname(expanded).split(':')
        name = path.basename(name)
    else:
        search_dirs = opts.defaults.get_value('_configdir').split(':')
    for search_dir in search_dirs:
        candidate = path.join(opts.defaults.expand(search_dir), name)
        if path.exists(candidate):
            return candidate
    return None
def patch(self, macros):
    """Return, for every patch group with a 'setup' key, the list of
    (patch, hash) pairs; the hash may be None when not declared."""
    def err(msg):
        raise error.general('%s' % (msg))
    collected = {}
    for group in sources.get_patch_names(macros, err):
        if 'setup' not in sources.get_patch_keys(group, macros, err):
            continue
        names = []
        for entry in sources.get_patches(group, macros, err):
            if entry.startswith('%setup'):
                continue
            # The patch file is the last word of the directive.
            names.append(macros.expand(entry.split()[-1]))
        collected[group] = names
    return {group: [(p, sources.get_hash(path.basename(p).lower(), macros))
                    for p in names]
            for group, names in collected.items()}
def _git_parser(source, config, opts):
    # Fill in the source dict for a git URL. Anything after a '?' is
    # treated as a list of checkout arguments; the repository is kept
    # under the 'git' subdirectory of the local prefix and symlinked.
    url_and_args = source['url'].split('?')
    repo_url = url_and_args[0]
    source['path'] = path.dirname(repo_url)
    source['file'] = path.basename(repo_url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(url_and_args) > 1:
        source['args'] = url_and_args[1:]
    checkout = path.join(source['local_prefix'], 'git', source['file'])
    source['local'] = checkout
    source['symlink'] = checkout
def source(self, macros):
    """Return, for every source group with a 'setup' key, the list of
    (source, hash) pairs; the hash may be None when not declared."""
    def err(msg):
        raise error.general('%s' % (msg))
    collected = {}
    for group in sources.get_source_names(macros, err):
        if 'setup' not in sources.get_source_keys(group, macros, err):
            continue
        expanded = []
        for entry in sources.get_sources(group, macros, err):
            if not entry.startswith('%setup'):
                expanded.append(macros.expand(entry))
        collected[group] = expanded
    return {group: [(s, sources.get_hash(path.basename(s).lower(), macros))
                    for s in expanded]
            for group, expanded in collected.items()}
def treat_file(file): try: fp = open(file, 'r') except: print 'Cannot open', file return base = path.basename(file) if base[-3:] = '.py': base = base[:-3] s = base + '\t' + file + '\t' + '1\n' tags.append(s) while 1: line = fp.readline() if not line: break maketag(line, file)
def __init__(self, argv, optargs, _defaults, command_path):
    # Command-line option table and initial option/macro state.
    # Each entry maps a long option to:
    #   (macro name, handler, takes-a-parameter, default, seed-macro)
    self._long_opts = {
        # key macro handler param defs init
        '--prefix' : ('_prefix', self._lo_path, True, None, False),
        '--topdir' : ('_topdir', self._lo_path, True, None, False),
        '--configdir' : ('_configdir', self._lo_path, True, None, False),
        '--builddir' : ('_builddir', self._lo_path, True, None, False),
        '--sourcedir' : ('_sourcedir', self._lo_path, True, None, False),
        '--patchdir' : ('_patchdir', self._lo_path, True, None, False),
        '--tmppath' : ('_tmppath', self._lo_path, True, None, False),
        '--jobs' : ('_jobs', self._lo_jobs, True, 'max', True),
        '--log' : ('_logfile', self._lo_string, True, None, False),
        '--url' : ('_url_base', self._lo_string, True, None, False),
        '--no-download' : ('_disable_download', self._lo_bool, False, '0', True),
        '--macros' : ('_macros', self._lo_string, True, None, False),
        '--source-only-download' : ('_source_download', self._lo_bool, False, '0', True),
        '--targetcflags' : ('_targetcflags', self._lo_string, True, None, False),
        '--targetcxxflags' : ('_targetcxxflags', self._lo_string, True, None, False),
        '--libstdcxxflags' : ('_libstdcxxflags', self._lo_string, True, None, False),
        '--force' : ('_force', self._lo_bool, False, '0', True),
        '--quiet' : ('_quiet', self._lo_bool, False, '0', True),
        '--trace' : ('_trace', self._lo_bool, False, '0', True),
        '--dry-run' : ('_dry_run', self._lo_bool, False, '0', True),
        '--warn-all' : ('_warn_all', self._lo_bool, False, '0', True),
        '--no-clean' : ('_no_clean', self._lo_bool, False, '0', True),
        '--keep-going' : ('_keep_going', self._lo_bool, False, '0', True),
        '--always-clean' : ('_always_clean', self._lo_bool, False, '0', True),
        '--no-install' : ('_no_install', self._lo_bool, False, '0', True),
        '--regression' : ('_regression', self._lo_bool, False, '0', True),
        '--host' : ('_host', self._lo_triplets, True, None, False),
        '--build' : ('_build', self._lo_triplets, True, None, False),
        '--target' : ('_target', self._lo_triplets, True, None, False),
        '--rtems-tools' : ('_rtems_tools', self._lo_string, True, None, False),
        '--rtems-bsp' : ('_rtems_bsp', self._lo_string, True, None, False),
        '--rtems-version' : ('_rtems_version', self._lo_string, True, None, False),
        '--help' : (None, self._lo_help, False, None, False)
    }
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    self.argv = argv
    self.args = argv[1:]
    self.optargs = optargs
    self.defaults = _defaults
    self.opts = { 'params' : [] }
    for lo in self._long_opts:
        # Seed each option (keyed without the leading '--') with its
        # default parameter value.
        self.opts[lo[2:]] = self._long_opts[lo][3]
        if self._long_opts[lo][4]:
            # Options flagged 'init' also seed the macro defaults.
            self.defaults[self._long_opts[lo][0]] = ('none',
                                                     'none',
                                                     self._long_opts[lo][3])
def tree_sources(self, name, tree, sources = None):
    """Collect the downloadable sources and patches in *tree*.

    Appends (basename, url, hash) tuples for every non-VCS source or
    patch in the tree's config, recursing into nested build sets, and
    returns the accumulated list.
    """
    # Fixed: the default was a mutable list ('sources = []') which is
    # created once and shared between calls, so repeated top-level
    # calls accumulated results from earlier calls.
    if sources is None:
        sources = []
    if 'cfg' in tree:
        packages = {}
        if 'sources' in tree['cfg']:
            _merge(packages, tree['cfg']['sources'])
        if 'patches' in tree['cfg']:
            _merge(packages, tree['cfg']['patches'])
        for package in packages:
            for source in packages[package]:
                # git and cvs sources are checkouts, not plain files.
                if not source[0].startswith('git') and not source[0].startswith('cvs'):
                    sources += [(path.basename(source[0]), source[0], source[1])]
    if 'bset' in tree:
        for node in sorted(tree['bset'].keys()):
            self.tree_sources(_tree_name(node), tree['bset'][node], sources)
    return sources
def main():
    # Replace each symlink argument with a real copy of its target
    # (Python 2 script). Messages go to stderr; exits with status 2 on
    # bad usage and 1 when any argument was not a symlink.
    sys.stdout = sys.stderr
    progname = path.basename(sys.argv[0])
    args = sys.argv[1:]
    if not args:
        print 'usage:', progname, 'path ...'
        sys.exit(2)
    status = 0
    for name in args:
        if not path.islink(name):
            print progname+':', name+':', 'not a symlink'
            status = 1
        else:
            if path.isdir(name):
                mkrealdir(name)
            else:
                mkrealfile(name)
    sys.exit(status)
def main():
    # Replace each symlink argument with a real copy of its target
    # (Python 2 script). Messages go to stderr; exits with status 2 on
    # bad usage and 1 when any argument was not a symlink.
    sys.stdout = sys.stderr
    progname = path.basename(sys.argv[0])
    args = sys.argv[1:]
    if not args:
        print 'usage:', progname, 'path ...'
        sys.exit(2)
    status = 0
    for name in args:
        if not path.islink(name):
            print progname + ':', name + ':', 'not a symlink'
            status = 1
        else:
            if path.isdir(name):
                mkrealdir(name)
            else:
                mkrealfile(name)
    sys.exit(status)
def __init__(self, argv, argopts, defaults):
    # Build the option set for the set-builder front end.
    # NOTE(review): the command path is derived from argv[1], not
    # argv[0] — confirm this matches how the tool is invoked.
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    # Options always appended to the command line passed down to the
    # underlying build.
    extras = ['--dry-run', '--with-download', '--quiet',
              '--without-log', '--without-error-report',
              '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.opts = { 'params' : extras }
    self.sb_git()
    if argopts.download_dir is not None:
        # Route both source and patch downloads to the user supplied
        # directory.
        self.defaults['_sourcedir'] = ('dir',
                                       'optional',
                                       path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir',
                                      'optional',
                                      path.abspath(argopts.download_dir))
def parse_url(url, pathkey, config, opts, file_override=None):
    """Split *url* into the parts needed to download it.

    *file_override*, when given, replaces the file name taken from the
    URL; it must not contain path separators or glob characters.

    Returns a dict with 'url', 'options', 'path', 'file', 'name',
    'ext', 'local', 'type' and 'script' keys. The matching protocol
    parser from the module-level ``parsers`` table is invoked to
    locate the file, checking the local source directory first.

    Raises error.general on a malformed URL or a bad override name.
    """
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # fixed typo in the error message: 'malforned' -> 'malformed'
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        # Reject anything that could escape the download directory or
        # glob unexpectedly.
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Keep compound '.tar.*' extensions together, e.g. 'foo.tar.gz'
    # becomes name 'foo' and ext '.tar.gz'.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def parse_url(url, pathkey, config, opts):
    """Split *url* into the parts needed to download it.

    Returns a dict with 'url', 'path', 'file', 'name', 'ext',
    'local'/'local_prefix', 'script' and (when matched) 'type' keys.
    The local source directories on the *pathkey* search path are
    checked first and an existing local file is hash checked.

    Raises error.general if the URL has no '<proto>://' prefix.
    """
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # fixed typo in the error message: 'malforned' -> 'malformed'
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    # Keep compound '.tar.*' extensions together, e.g. 'foo.tar.gz'
    # becomes name 'foo' and ext '.tar.gz'.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            # Verify an already present file against its declared hash.
            _hash_check(source['file'], local, config.macros)
            break
    source['script'] = ''
    # Hand off to the protocol specific parser, e.g. git or cvs.
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
def _name_line_msg(self, msg):
    # Prefix *msg* with the config file's base name and the current
    # line count, e.g. 'foo.cfg:12: message'.
    where = '%s:%d' % (path.basename(self.name), self.lc)
    return '%s: %s' % (where, msg)
def load(self, name):
    # Load and parse a config file, recursing into %include files.
    # The loader saves/restores the current file name and line count
    # so nested loads report errors against the right file.

    def common_end(left, right):
        # Return the longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top-level load: reset the parser state.
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
    self.load_depth += 1
    save_name = self.name
    save_lc = self.lc
    self.name = name
    self.lc = 0
    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)
    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)
    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)
    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
    if configname is None:
        raise error.general('no config file found: %s' % (cfgname))
    try:
        log.trace('config: %s: _open: %s' % (self.init_name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))
    self.configpath += [configname]
    self._includes += [configname]
    try:
        # Parse records until the '%end' control is seen; directives
        # accumulate data which is attached when the directive changes.
        dir = None
        info = None
        data = []
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    # Recurse; the nested load restores our state.
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(
                    r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        if dir is not None:
            # Flush the data gathered for the last open directive.
            self._directive_extend(dir, data)
    except:
        config.close()
        raise
    config.close()
    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1
def _http_downloader(url, local, config, opts):
    # Download *url* to the *local* path over HTTP(S), showing
    # progress on stdout. Returns True on success (or when the file is
    # already present), False when the download failed. Raises
    # error.general when the result is not a file or fails its hash
    # check.
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.output('download: %s -> %s' % (url, dst))
    log.notice('download: %s -> %s' % (_sensible_url(url, len(dst)), dst))
    failed = False
    if _do_download(opts):
        _in = None
        _out = None
        _length = None
        _have = 0
        _chunk_size = 256 * 1024
        _chunk = None
        # Start above 100% so the first progress update always prints.
        _last_percent = 200.0
        _last_msg = ''
        _have_status_output = False
        _url = url
        try:
            try:
                _in = None
                _ssl_context = None
                # See #2656
                _req = urllib_request.Request(_url)
                _req.add_header('User-Agent', 'Wget/1.16.3 (freebsd10.1)')
                try:
                    # Prefer an unverified SSL context when available;
                    # fall back to the default opener otherwise.
                    import ssl
                    _ssl_context = ssl._create_unverified_context()
                    _in = urllib_request.urlopen(_req, context = _ssl_context)
                except:
                    _ssl_context = None
                if _ssl_context is None:
                    _in = urllib_request.urlopen(_req)
                if _url != _in.geturl():
                    _url = _in.geturl()
                    log.output(' redirect: %s' % (_url))
                    log.notice(' redirect: %s' % (_sensible_url(_url)))
                _out = open(path.host(local), 'wb')
                try:
                    _length = int(_in.info()['Content-Length'].strip())
                except:
                    pass
                # Copy in chunks, updating a single status line only
                # when the message actually changes.
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                        _have_status_output = True
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                log.stdout_raw('\n\r')
            except:
                # Finish the status line before propagating the error.
                if _have_status_output:
                    log.stdout_raw('\n\r')
                raise
        except IOError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            # Remove any partial download.
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (_sensible_url(_url))
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
    if not failed:
        if not path.isfile(local):
            raise error.general('source is not a file: %s' % (path.host(local)))
        # Verify the download against any declared hash.
        if not _hash_check(path.basename(local), local, config.macros, False):
            raise error.general('checksum failure file: %s' % (dst))
    return not failed
def load(self, name):
    # Load and parse a config file (older Python 2 variant that also
    # resets the package state on a top-level load).
    # NOTE(review): this fragment is truncated — the visible span ends
    # just after the config file is opened.

    def common_end(left, right):
        # Return the longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top-level load: reset the parser and package state.
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
        self._packages = {}
        self.package = 'main'
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)
    self.load_depth += 1
    save_name = self.name
    save_lc = self.lc
    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)
    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)
    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)
    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
    if configname is None:
        raise error.general('no config file found: %s' % (cfgname))
    try:
        log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError, err:
        raise error.general('error opening config file: %s' % (path.host(configname)))
            # NOTE(review): this span starts mid-function — it is the
            # tail of a downloader's final 'except' cleanup.
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
    if not failed:
        if not path.isfile(local):
            raise error.general('source is not a file: %s' % (path.host(local)))
        # Verify the download against any declared hash.
        if not _hash_check(path.basename(local), local, config.macros, False):
            raise error.general('checksum failure file: %s' % (dst))
    return not failed

def _git_downloader(url, local, config, opts):
    # Fetch a git repository described by a 'git://...?protocol=...'
    # style URL into the local clone directory.
    # NOTE(review): truncated — the visible span ends after stripping
    # the 'git' prefix from the URL.
    repo = git.repo(local, opts, config.macros)
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    #
    # Handle the various git protocols.
    #
    # remove 'git' from 'git://xxxx/xxxx?protocol=...'
    #
    url_base = us[0][len('git'):]
def _tree_name(path_):
    # Base name of the path with its extension stripped.
    base = path.basename(path_)
    return path.splitext(base)[0]
# Rename the mp3 files under .\music to the titles listed, one per
# line, in music\music.txt (Windows-specific: uses '\\' separators).
# NOTE(review): 'import path' — presumably a local wrapper around
# os.path; confirm the module exists, otherwise this should be os.path.
import os
import path

# Collect every .mp3 below the music directory, in os.walk order.
files = [
    src + '\\' + file_
    for src, dir_, files in os.walk(os.getcwd() + '\\music')
    for file_ in files
    if file_.endswith('.mp3')
]
print(files, '\n')
f = open('music\\music.txt', 'r', encoding='utf-8')
txt = [line.rstrip('\n') for line in f]
index = 0
for line in txt:
    name = path.splitext(path.basename(files[index]))[0]
    # NOTE(review): 'index' only advances on a match, so a single
    # mismatched line desynchronises every later rename — confirm the
    # list file is guaranteed to match the directory contents in order.
    if name in line:
        os.rename(f'music\\{name}.mp3', f'music\\{line}.mp3')
        index += 1
# Re-scan to show the renamed files.
files = [
    src + '\\' + file_
    for src, dir_, files in os.walk(os.getcwd() + '\\music')
    for file_ in files
    if file_.endswith('.mp3')
]
print(files)
f.close()
def result(self, id):
    """Render the results page for run 'id'.

    Gathers progress, settings, log and result files from the run's
    directory under results_dir and renders them through the Mako
    templates; on a template error the Mako traceback is rendered
    instead.
    """
    global results_dir, uploads_dir, sett
    # NOTE(review): shadows any module-level 'path' name for this method.
    path = os.path.join(results_dir, id)
    params = {}
    params["run_id"] = id
    params["detailed"] = "Results not yet available"
    params["matrix"] = "Results not yet available"
    tmpl = lookup.get_template("master.mako")
    # Loads the progress file if it exists
    p = {"status": "", "curprog": 0, "progmax": 0}
    progress_path = os.path.join(path, ".prog")
    if os.path.exists(progress_path):
        with open(progress_path) as f:
            p = json.loads(f.read())
    params["log"] = "###Run Settings###\n"
    sett_path = os.path.join(path, ".settings")
    organism = ""
    if os.path.exists(sett_path):
        with open(sett_path) as f:
            tmp = f.read()
            params["log"] = params["log"] + tmp
            # Settings are tab-separated 'key\tvalue' lines; pick the
            # 'Organism:' row. NOTE(review): [0] raises IndexError when
            # the settings file has no Organism line — confirm intended.
            organism = [
                x.split("\t")[1] for x in tmp.split("\n")
                if x.split("\t")[0] == "Organism:"
            ][0]
    params["organism"] = organism
    params["log"] = params["log"] + "\n###Run Log###\n"
    debug_path = os.path.join(path, ".log")
    if os.path.exists(debug_path):
        with open(debug_path) as f:
            params["log"] = params["log"] + f.read()
    # loads results from results file
    detailed_path = os.path.join(path, "detailed.gr")
    if os.path.exists(detailed_path):
        with open(detailed_path) as f:
            params["detailed"] = f.read()
    foi_names_path = os.path.join(os.path.join(results_dir, id), ".fois")
    if os.path.exists(foi_names_path):
        with open(foi_names_path) as f:
            # One FOI path per line; keep the bare name before the first dot.
            params["fois"] = [
                basename(x).split(".")[0] for x in f.read().split("\n")
                if x != ""
            ]
    else:
        params["fois"] = ""
    params["zipfile"] = os.path.join("results", id, "GR_{}.tar.gz").format(id)
    params["run_annotation"] = True if os.path.exists(
        os.path.join(results_dir, id, "annotations")) else False
    params.update(p)
    try:
        rend_template = tmpl.render(
            body=lookup.get_template("results.mako").render(**params),
            script=lookup.get_template("results.js").render(**params))
        print "LOADED TEMPLATE"
    except Exception, e:
        # Build a readable error page from the Mako traceback.
        traceback = MakoTraceback()
        str_error = ""
        for (filename, lineno, function, line) in traceback.traceback:
            str_error += "File %s, line %s, in %s" % (
                os.path.split(filename)[-1], lineno, function)
            str_error += "\n"
            str_error += line + "\n"
        str_error += "%s: %s" % (str(
            traceback.error.__class__.__name__), traceback.error)
        print str_error
        rend_template = str_error
    # NOTE(review): fragment truncated here; the return of rend_template
    # is not visible in this copy.
def __init__(self,
             base_path=None,
             argv=None,
             optargs=None,
             defaults=None,
             long_opts=None,
             long_opts_help=None,
             command_path='',
             log_default=None):
    """Build the command line option tables and parse state.

    base_path      : tool base path, stored in the module global 'basepath'.
    argv           : full command line; when None this is a placeholder
                     instance and nothing is initialised.
    optargs        : extra option arguments accepted by the caller.
    defaults       : macros table; a fresh macros.macros() when None.
    long_opts      : caller options merged into the standard table as
                     { '--opt': (macro, handler-name, param, default, init) }.
    long_opts_help : help text for the caller options (required per option
                     when provided).
    command_path   : directory of the running command.
    log_default    : default log file list; must be a list when given.
    """
    if argv is None:
        return
    global basepath
    if long_opts is None:
        long_opts = {}
    basepath = base_path
    if log_default is not None and type(log_default) is not list:
        # Fixed message: this branch rejects values that are NOT a list
        # (previously read 'log default is a list').
        raise error.general('log default must be a list')
    self.log_default = log_default
    if defaults is None:
        defaults = macros.macros()
    self.long_opts = {
        # key                macro             handler           param  defs       init
        '--jobs':           ('_jobs',          self._lo_jobs,    True,  'default', True),
        '--log':            ('_logfile',       self._lo_string,  True,  None,      False),
        '--macros':         ('_macros',        self._lo_string,  True,  None,      False),
        '--force':          ('_force',         self._lo_bool,    False, '0',       True),
        '--quiet':          ('_quiet',         self._lo_bool,    False, '0',       True),
        '--trace':          ('_trace',         self._lo_bool,    False, '0',       True),
        '--dry-run':        ('_dry_run',       self._lo_bool,    False, '0',       True),
        '--warn-all':       ('_warn_all',      self._lo_bool,    False, '0',       True),
        '--no-clean':       ('_no_clean',      self._lo_bool,    False, '0',       True),
        '--keep-going':     ('_keep_going',    self._lo_bool,    False, '0',       True),
        '--always-clean':   ('_always_clean',  self._lo_bool,    False, '0',       True),
        '--no-install':     ('_no_install',    self._lo_bool,    False, '0',       True),
        '--help':           (None,             self._lo_help,    False, None,      False)
    }
    self.long_opts_help = {
        '--force': 'Force the build to proceed',
        '--quiet': 'Quiet output (not used)',
        '--trace': 'Trace the execution',
        '--dry-run': 'Do everything but actually run the build',
        '--warn-all': 'Generate warnings',
        '--no-clean': 'Do not clean up the build tree',
        '--always-clean': 'Always clean the build tree, even with an error',
        '--keep-going': 'Do not stop on an error.',
        # '--no-install' was defined above but had no help entry.
        '--no-install': 'Do not install the packages to the prefix',
        '--jobs=[0..n,none,half,full]':
            'Run with specified number of jobs, default: num CPUs.',
        '--macros file[,file]':
            'Macro format files to load after the defaults',
        '--log file': 'Log file where all build output is written to',
    }
    self.opts = {'params': []}
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    self.argv = argv
    self.args = argv[1:]
    self.optargs = optargs
    self.defaults = defaults
    # Seed the option values (and optionally the macro defaults) from the
    # standard table.
    for lo in self.long_opts:
        self.opts[lo[2:]] = self.long_opts[lo][3]
        if self.long_opts[lo][4]:
            self.defaults[self.long_opts[lo][0]] = ('none',
                                                    'none',
                                                    self.long_opts[lo][3])
    # Map handler names to bound methods once; replaces an if/elif chain.
    handlers = {'int':     self._lo_int,
                'string':  self._lo_string,
                'path':    self._lo_path,
                'jobs':    self._lo_jobs,
                'bool':    self._lo_bool,
                'triplet': self._lo_triplets}
    for lo in long_opts:
        if lo in self.long_opts:
            # Fixed typo: previously 'suplicate option'.
            raise error.general('duplicate option: %s' % (lo))
        self.opts[lo[2:]] = long_opts[lo][3]
        if long_opts[lo][4]:
            self.defaults[long_opts[lo][0]] = ('none',
                                               'none',
                                               long_opts[lo][3])
        if long_opts[lo][1] not in handlers:
            raise error.general('invalid option handler: %s: %s' % (lo, long_opts[lo][1]))
        handler = handlers[long_opts[lo][1]]
        self.long_opts[lo] = (long_opts[lo][0], handler, long_opts[lo][2],
                              long_opts[lo][3], long_opts[lo][4])
        if long_opts_help is not None:
            if lo not in long_opts_help:
                raise error.general('no help for option: %s' % (lo))
            self.long_opts_help[lo] = long_opts_help[lo]
def load(self, name):
    """Load the configuration file 'name' into this config object.

    Locates the file by expanding macros, appending a '.cfg' extension
    when needed and searching the configured directories, then opens it.
    Raises error.general when no file can be found or opened.
    """

    def common_end(left, right):
        # Longest common trailing substring of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top level load: reset the parse state via the helper and start
        # the package table for the current package.
        self._reset(name)
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)
    self.load_depth += 1

    # Saved so a nested %include can restore the outer file's position.
    save_name = self.name
    save_lc = self.lc

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            # Stays None when the last candidate also does not exist.
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError, err:
        raise error.general('error opening config file: %s' % (path.host(configname)))
    # NOTE(review): this copy of the function is truncated here; the parse
    # loop that consumes 'config' is not visible in this fragment.
def get_file(url, local, opts, config):
    """Fetch 'url' into the local source/patch file 'local'.

    Builds the candidate URL list from the command line URL bases and,
    for released versions, the RTEMS release path, then tries each
    registered downloader in turn. Raises error.general when downloading
    is enabled and every candidate fails.
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        # A dry run must not create directories.
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released push to the start the RTEMS URL unless overrided by the command
    # line option --with-release-url. The variant --without-release-url can
    # override the released check.
    #
    url_bases = opts.urls()
    try:
        rtems_release_url_value = config.macros.expand('%{release_path}')
    except:
        # %{release_path} is optional; no macro means no release URL.
        rtems_release_url_value = None
    rtems_release_url = None
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            # --with-release-url=<url> supplies an explicit release URL.
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' % (url_file))
        for base in url_bases:
            #
            # Hack to fix #3064 where --rsb-file is being used. This code is a
            # mess and should be refactored.
            #
            if version.released() and base == rtems_release_url:
                url_file = path.basename(local)
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' % (next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme prefix matches and succeeds wins.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general(
            'downloading %s: all paths have failed, giving up' % (url))
def __init__(self,
             base_path = None,
             argv = None,
             optargs = None,
             defaults = None,
             long_opts = None,
             long_opts_help = None,
             command_path = '',
             log_default = None):
    """Build the command line option tables and parse state.

    base_path      : tool base path, stored in the module global 'basepath'.
    argv           : full command line; when None this is a placeholder
                     instance and nothing is initialised.
    optargs        : extra option arguments accepted by the caller.
    defaults       : macros table; a fresh macros.macros() when None.
    long_opts      : caller options merged into the standard table as
                     { '--opt': (macro, handler-name, param, default, init) }.
    long_opts_help : help text for the caller options (required per option
                     when provided).
    command_path   : directory of the running command.
    log_default    : default log file list; must be a list when given.
    """
    if argv is None:
        return
    global basepath
    if long_opts is None:
        long_opts = {}
    basepath = base_path
    if log_default is not None and type(log_default) is not list:
        # Fixed message: this branch rejects values that are NOT a list
        # (previously read 'log default is a list').
        raise error.general('log default must be a list')
    self.log_default = log_default
    if defaults is None:
        defaults = macros.macros()
    self.long_opts = {
        # key                macro             handler           param  defs       init
        '--jobs'          : ('_jobs',          self._lo_jobs,    True,  'default', True),
        '--log'           : ('_logfile',       self._lo_string,  True,  None,      False),
        '--macros'        : ('_macros',        self._lo_string,  True,  None,      False),
        '--force'         : ('_force',         self._lo_bool,    False, '0',       True),
        '--quiet'         : ('_quiet',         self._lo_bool,    False, '0',       True),
        '--trace'         : ('_trace',         self._lo_bool,    False, '0',       True),
        '--dry-run'       : ('_dry_run',       self._lo_bool,    False, '0',       True),
        '--warn-all'      : ('_warn_all',      self._lo_bool,    False, '0',       True),
        '--no-clean'      : ('_no_clean',      self._lo_bool,    False, '0',       True),
        '--keep-going'    : ('_keep_going',    self._lo_bool,    False, '0',       True),
        '--always-clean'  : ('_always_clean',  self._lo_bool,    False, '0',       True),
        '--no-install'    : ('_no_install',    self._lo_bool,    False, '0',       True),
        '--help'          : (None,             self._lo_help,    False, None,      False)
    }
    self.long_opts_help = {
        '--force': 'Force the build to proceed',
        '--quiet': 'Quiet output (not used)',
        '--trace': 'Trace the execution',
        '--dry-run': 'Do everything but actually run the build',
        '--warn-all': 'Generate warnings',
        '--no-clean': 'Do not clean up the build tree',
        '--always-clean': 'Always clean the build tree, even with an error',
        '--keep-going': 'Do not stop on an error.',
        # '--no-install' was defined above but had no help entry.
        '--no-install': 'Do not install the packages to the prefix',
        '--jobs=[0..n,none,half,full]':
            'Run with specified number of jobs, default: num CPUs.',
        '--macros file[,file]':
            'Macro format files to load after the defaults',
        '--log file': 'Log file where all build output is written to',
    }
    self.opts = { 'params' : [] }
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    self.argv = argv
    self.args = argv[1:]
    self.optargs = optargs
    self.defaults = defaults
    # Seed the option values (and optionally the macro defaults) from the
    # standard table.
    for lo in self.long_opts:
        self.opts[lo[2:]] = self.long_opts[lo][3]
        if self.long_opts[lo][4]:
            self.defaults[self.long_opts[lo][0]] = ('none',
                                                    'none',
                                                    self.long_opts[lo][3])
    # Map handler names to bound methods once; replaces an if/elif chain.
    handlers = { 'int'     : self._lo_int,
                 'string'  : self._lo_string,
                 'path'    : self._lo_path,
                 'jobs'    : self._lo_jobs,
                 'bool'    : self._lo_bool,
                 'triplet' : self._lo_triplets }
    for lo in long_opts:
        if lo in self.long_opts:
            # Fixed typo: previously 'suplicate option'.
            raise error.general('duplicate option: %s' % (lo))
        self.opts[lo[2:]] = long_opts[lo][3]
        if long_opts[lo][4]:
            self.defaults[long_opts[lo][0]] = ('none',
                                               'none',
                                               long_opts[lo][3])
        if long_opts[lo][1] not in handlers:
            raise error.general('invalid option handler: %s: %s' % (lo, long_opts[lo][1]))
        handler = handlers[long_opts[lo][1]]
        self.long_opts[lo] = (long_opts[lo][0], handler, long_opts[lo][2],
                              long_opts[lo][3], long_opts[lo][4])
        if long_opts_help is not None:
            if lo not in long_opts_help:
                raise error.general('no help for option: %s' % (lo))
            self.long_opts_help[lo] = long_opts_help[lo]
def buildset_start(self, nest_level, name):
    # A build set report begins: key the report on the config file's base
    # name with the '.cfg' extension stripped.
    if name.endswith('.cfg'):
        self.name = path.basename(name[:-4])
        # First time this build set is seen: start an empty package table.
        if self.name not in self.ini_pkg:
            self.ini_pkg[self.name] = {}
def interfaceBridge(ifname):
    """Return the bridge 'ifname' is enslaved to, or None when not bridged."""
    link = os.path.join(_ifacePath(ifname), "brport/bridge")
    if path.exists(link):
        # The brport/bridge symlink points at the bridge's sysfs directory;
        # its basename is the bridge interface name.
        return path.basename(path.readlink(link))
    return None
log.stderr(msg) log.notice(msg) if _in is not None: _in.close() if _out is not None: _out.close() raise if _out is not None: _out.close() if _in is not None: _in.close() del _in if not failed: if not path.isfile(local): raise error.general('source is not a file: %s' % (path.host(local))) if not _hash_check(path.basename(local), local, config.macros, False): raise error.general('checksum failure file: %s' % (dst)) return not failed def _git_downloader(url, local, config, opts): repo = git.repo(local, opts, config.macros) rlp = os.path.relpath(path.host(local)) us = url.split('?') # # Handle the various git protocols. # # remove 'git' from 'git://xxxx/xxxx?protocol=...' # url_base = us[0][len('git'):] for a in us[1:]: _as = a.split('=')
def get_file(url, local, opts, config):
    """Fetch 'url' into the local source/patch file 'local'.

    Builds the candidate URL list from the command line URL bases and,
    for released versions, the RTEMS release path, then tries each
    registered downloader in turn. Raises error.general when downloading
    is enabled and every candidate fails.
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        # A dry run must not create directories.
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released push to the start the RTEMS URL unless overrided by the command
    # line option --with-release-url. The variant --without-release-url can
    # override the released check.
    #
    url_bases = opts.urls()
    try:
        rtems_release_url_value = config.macros.expand('%{release_path}')
    except:
        # %{release_path} is optional; no macro means no release URL.
        rtems_release_url_value = None
    rtems_release_url = None
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            # --with-release-url=<url> supplies an explicit release URL.
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            #
            # Hack to fix #3064 where --rsb-file is being used. This code is a
            # mess and should be refactored.
            #
            if version.released() and base == rtems_release_url:
                url_file = path.basename(local)
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme prefix matches and succeeds wins.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def load(self, name):
    """Load and parse the configuration file 'name'.

    Locates the file by expanding macros, appending a '.cfg' extension
    when needed and searching the configured directories, then parses it
    record by record, recursing for %include directives. Raises
    error.general when the file cannot be found or opened.
    """

    def common_end(left, right):
        # Longest common trailing substring of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # Top level load: reset the parse state.
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
    self.load_depth += 1

    # Saved so a nested %include can restore the outer file's position.
    save_name = self.name
    save_lc = self.lc

    self.name = name
    self.lc = 0

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            # Stays None when the last candidate also does not exist.
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.init_name,
                                             path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))

    self.configpath += [configname]
    self._includes += [configname]

    try:
        dir = None
        info = None
        data = []
        # Consume the file record by record until the implicit/explicit
        # %end control is reached.
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    # Recurse into the included config file.
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                # NOTE(review): message has an unbalanced quote ('%s) —
                # looks like a typo in the original format string.
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        # Flush any data still pending for the last directive.
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        # Close the file on any error, then re-raise.
        config.close()
        raise

    config.close()

    # Restore the including file's position for the caller.
    self.name = save_name
    self.lc = save_lc

    self.load_depth -= 1