def __init__(self, topdir, makefile):
    self.topdir = topdir
    self.makefile = makefile
    self.preinstall = path.join(path.dirname(makefile), 'preinstall.am')
    self.command = command([path.join(topdir, 'ampolish3'), makefile],
                           self.topdir)
    self.command.run()
def post_process(self, logfile=True):
    # Handle the log first.
    logctrl = self.parse_args('--without-log')
    if logctrl is None:
        if logfile:
            logfiles = self.logfiles()
        else:
            logfiles = None
        log.default = log.log(streams=logfiles)
    if self.trace():
        log.tracing = True
    if self.quiet():
        log.quiet = True
    # Must have a host
    if self.defaults['_host'] == self.defaults['nil']:
        raise error.general('--host not set')
    # Must have a build
    if self.defaults['_build'] == self.defaults['nil']:
        raise error.general('--build not set')
    # Default prefix
    prefix = self.parse_args('--prefix')
    if prefix is None:
        value = path.join(self.defaults['_prefix'], 'rtems',
                          str(self.defaults['rtems_version']))
        self.opts['prefix'] = value
        self.defaults['_prefix'] = value
    # Manage the regression option
    if self.opts['regression'] != '0':
        self.opts['no-install'] = '1'
        self.defaults['_no_install'] = '1'
        self.opts['keep-going'] = '1'
        self.defaults['_keep_going'] = '1'
        self.opts['always-clean'] = '1'
        self.defaults['_always_clean'] = '1'
    # Handle the jobs for make
    if '_ncpus' not in self.defaults:
        raise error.general('host number of CPUs not set')
    ncpus = self.jobs(self.defaults['_ncpus'])
    if ncpus > 1:
        self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
    else:
        self.defaults['_smp_mflags'] = self.defaults['nil']
    # Load user macro files
    um = self.user_macros()
    if um:
        checked = path.exists(um)
        if False in checked:
            raise error.general('macro file not found: %s' % \
                                (um[checked.index(False)]))
        for m in um:
            self.defaults.load(m)
    # Check if the user has a private set of macros to load
    if 'RSB_MACROS' in os.environ:
        if path.exists(os.environ['RSB_MACROS']):
            self.defaults.load(os.environ['RSB_MACROS'])
    if 'HOME' in os.environ:
        rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
        if path.exists(rsb_macros):
            self.defaults.load(rsb_macros)
def post_process(self, logfile = True):
    # Handle the log first.
    logctrl = self.parse_args('--without-log')
    if logctrl is None:
        if logfile:
            logfiles = self.logfiles()
        else:
            logfiles = None
        log.default = log.log(streams = logfiles)
    if self.trace():
        log.tracing = True
    if self.quiet():
        log.quiet = True
    # Must have a host
    if self.defaults['_host'] == self.defaults['nil']:
        raise error.general('--host not set')
    # Must have a build
    if self.defaults['_build'] == self.defaults['nil']:
        raise error.general('--build not set')
    # Default prefix
    prefix = self.parse_args('--prefix')
    if prefix is None:
        value = path.join(self.defaults['_prefix'], 'rtems',
                          str(self.defaults['rtems_version']))
        self.opts['prefix'] = value
        self.defaults['_prefix'] = value
    # Manage the regression option
    if self.opts['regression'] != '0':
        self.opts['no-install'] = '1'
        self.defaults['_no_install'] = '1'
        self.opts['keep-going'] = '1'
        self.defaults['_keep_going'] = '1'
        self.opts['always-clean'] = '1'
        self.defaults['_always_clean'] = '1'
    # Handle the jobs for make
    if '_ncpus' not in self.defaults:
        raise error.general('host number of CPUs not set')
    ncpus = self.jobs(self.defaults['_ncpus'])
    if ncpus > 1:
        self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
    else:
        self.defaults['_smp_mflags'] = self.defaults['nil']
    # Load user macro files
    um = self.user_macros()
    if um:
        checked = path.exists(um)
        if False in checked:
            raise error.general('macro file not found: %s' % (um[checked.index(False)]))
        for m in um:
            self.defaults.load(m)
    # Check if the user has a private set of macros to load
    if 'RSB_MACROS' in os.environ:
        if path.exists(os.environ['RSB_MACROS']):
            self.defaults.load(os.environ['RSB_MACROS'])
    if 'HOME' in os.environ:
        rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
        if path.exists(rsb_macros):
            self.defaults.load(rsb_macros)
def _load_released_version_config():
    top = _top()
    for ver in [top, '..']:
        if path.exists(path.join(ver, 'VERSION')):
            import ConfigParser
            v = ConfigParser.SafeConfigParser()
            v.read(path.join(ver, 'VERSION'))
            return v
    return None
def report(self, _config, _build, opts, macros, format=None):
    if len(_build.main_package().name()) > 0 \
       and not _build.macros.get('%{_disable_reporting}') \
       and (not _build.opts.get_arg('--no-report') \
            or _build.opts.get_arg('--mail')):
        if format is None:
            format = _build.opts.get_arg('--report-format')
            if format is not None:
                if len(format) != 2:
                    raise error.general(
                        'invalid report format option: %s' % ('='.join(format)))
                format = format[1]
        if format is None:
            format = 'text'
        if format == 'text':
            ext = '.txt'
        elif format == 'asciidoc':
            ext = '.txt'
        elif format == 'html':
            ext = '.html'
        elif format == 'xml':
            ext = '.xml'
        elif format == 'ini':
            ext = '.ini'
        else:
            raise error.general('invalid report format: %s' % (format))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(
                path.join(buildroot, prefix, 'share', 'rtems', 'rsb'))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            r = reports.report(format, self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if _build.opts.get_arg('--mail'):
            r = reports.report('text', self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.out)
            del r
def report(self, _config, _build, opts, macros, format = None, mail = None):
    if len(_build.main_package().name()) > 0 \
       and not _build.macros.get('%{_disable_reporting}') \
       and (not _build.opts.get_arg('--no-report') \
            or _build.opts.get_arg('--mail')):
        if format is None:
            format = _build.opts.get_arg('--report-format')
            if format is not None:
                if len(format) != 2:
                    raise error.general('invalid report format option: %s' % \
                                        ('='.join(format)))
                format = format[1]
        if format is None:
            format = 'text'
        if format == 'text':
            ext = '.txt'
        elif format == 'asciidoc':
            ext = '.txt'
        elif format == 'html':
            ext = '.html'
        elif format == 'xml':
            ext = '.xml'
        elif format == 'ini':
            ext = '.ini'
        else:
            raise error.general('invalid report format: %s' % (format))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(path.join(buildroot, prefix,
                                          'share', 'rtems', 'rsb'))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            r = reports.report(format, self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if mail:
            r = reports.report('text', self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.get_output())
            del r
def _cvs_parser(source, pathkey, config, opts):
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Do here to ensure an ordered path, the URL can include options in any order
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']
def report(self, _config, _build):
    if not _build.opts.get_arg('--no-report') \
       and not _build.macros.get('%{_disable_reporting}') \
       and _build.opts.get_arg('--mail'):
        format = _build.opts.get_arg('--report-format')
        if format is None:
            format = 'html'
            ext = '.html'
        else:
            if len(format) != 2:
                raise error.general('invalid report format option: %s' % ('='.join(format)))
            if format[1] == 'text':
                format = 'text'
                ext = '.txt'
            elif format[1] == 'asciidoc':
                format = 'asciidoc'
                ext = '.txt'
            elif format[1] == 'html':
                format = 'html'
                ext = '.html'
            else:
                raise error.general('invalid report format: %s' % (format[1]))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(
                path.join(buildroot, prefix, 'share', 'rtems-source-builder'))
            outname = path.host(path.join(outpath, name))
            r = reports.report(format, self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if not _build.macros.get('%{_disable_reporting}') \
           and _build.opts.get_arg('--mail'):
            r = reports.report('text', self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            self.write_mail_report(r.out)
            del r
def report(self, _config, _build, opts, macros, format=None):
    if (
        len(_build.main_package().name()) > 0
        and not _build.macros.get("%{_disable_reporting}")
        and (not _build.opts.get_arg("--no-report") or _build.opts.get_arg("--mail"))
    ):
        if format is None:
            format = _build.opts.get_arg("--report-format")
            if format is not None:
                if len(format) != 2:
                    raise error.general("invalid report format option: %s" % ("=".join(format)))
                format = format[1]
        if format is None:
            format = "text"
        if format == "text":
            ext = ".txt"
        elif format == "asciidoc":
            ext = ".txt"
        elif format == "html":
            ext = ".html"
        elif format == "xml":
            ext = ".xml"
        elif format == "ini":
            ext = ".ini"
        else:
            raise error.general("invalid report format: %s" % (format))
        buildroot = _build.config.abspath("%{buildroot}")
        prefix = _build.macros.expand("%{_prefix}")
        name = _build.main_package().name() + ext
        log.notice("reporting: %s -> %s" % (_config, name))
        if not _build.opts.get_arg("--no-report"):
            outpath = path.host(path.join(buildroot, prefix, "share", "rtems", "rsb"))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            r = reports.report(format, self.configs, copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if _build.opts.get_arg("--mail"):
            r = reports.report("text", self.configs, copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.out)
            del r
def make(self):
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(
                self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                build_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
        if self.opts.dry_run():
            self._generate_report_('Build: dry run (no actual error)',
                                   'Build: dry run (no actual error)')
def post_process(self):
    if self.command is not None:
        if self.command.exit_code != 0:
            raise error.general('error: autoreconf: %s' % (' '.join(self.command.cmd)))
        makefile = path.join(self.cwd, 'Makefile.am')
        if path.exists(makefile):
            if _grep(makefile, r'stamp-h\.in'):
                stamp_h = path.join(self.cwd, 'stamp-h.in')
                try:
                    t = open(path.host(stamp_h), 'w')
                    t.write('timestamp')
                    t.close()
                except IOError as err:
                    raise error.general('error writing: %s' % (stamp_h))
def _load_released_version_config():
    top = _top()
    for ver in [top, '..']:
        if path.exists(path.join(ver, 'VERSION')):
            try:
                import configparser
            except ImportError:
                import ConfigParser as configparser
            v = configparser.SafeConfigParser()
            try:
                v.read(path.join(ver, 'VERSION'))
            except:
                raise error.general('Invalid VERSION file')
            return v
    return None
def _load_released_version():
    global _released
    global _version_str
    at = _at()
    for ver in [at, path.join(at, '..')]:
        if path.exists(path.join(ver, 'VERSION')):
            try:
                import configparser
            except ImportError:
                import ConfigParser as configparser
            v = configparser.SafeConfigParser()
            v.read(path.join(ver, 'VERSION'))
            _version_str = v.get('version', 'release')
            _released = True
    return _released
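# Hedged illustration (not from the original sources): a VERSION file layout
# the loaders above would accept, inferred from the v.get('version', 'release')
# call; the section name is taken from that call and the release number shown
# here is made up.
_example_version_file = '''\
[version]
release = 5.1
'''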
def make(self):
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                build_sn = path.join(self.config.expand('%{_builddir}'), 'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'), 'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
        if self.opts.dry_run():
            self._generate_report_('Build: dry run (no actual error)',
                                   'Build: dry run (no actual error)')
def from_address(self):

    def _clean(l):
        if '#' in l:
            l = l[:l.index('#')]
        if '\r' in l:
            l = l[:l.index('\r')]
        if '\n' in l:
            l = l[:l.index('\n')]
        return l.strip()

    addr = self.opts.get_arg('--mail-from')
    if addr is not None:
        return addr[1]
    mailrc = None
    if 'MAILRC' in os.environ:
        mailrc = os.environ['MAILRC']
    if mailrc is None and 'HOME' in os.environ:
        mailrc = path.join(os.environ['HOME'], '.mailrc')
    if mailrc is not None and path.exists(mailrc):
        # set from="Joe Blow <*****@*****.**>"
        try:
            mrc = open(mailrc, 'r')
            lines = mrc.readlines()
            mrc.close()
        except IOError as err:
            raise error.general('error reading: %s' % (mailrc))
        for l in lines:
            l = _clean(l)
            if 'from' in l:
                fa = l[l.index('from') + len('from'):]
                if '=' in fa:
                    addr = fa[fa.index('=') + 1:].replace('"', ' ').strip()
        if addr is not None:
            return addr
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
def pre_process_categories():
    global out_path
    global raw_path
    # Truncate the output files before appending to them.
    with open(os.path.join(out_path, "test_features.csv"), "w", newline=''):
        pass
    with open(os.path.join(out_path, "test_labels.csv"), "w", newline=''):
        pass
    pd.DataFrame(device_categories).to_csv("device_categories.csv",
                                            header=False, index=False)
    for name in [
            'train_features', 'train_labels', 'valid_labels', 'valid_features'
    ]:
        for dc in device_categories:
            with open(os.path.join(out_path, f"{name}_{dc}.csv"), "w", newline=''):
                pass
    for partition_suffix in files_name:
        if "test" in partition_suffix:
            filename = os.path.join(raw_path, partition_suffix)
            chunk_size = 10**7
            check_num = 1
            for chunk in pd.read_csv(filename, chunksize=chunk_size):
                print(check_num)
                check_num = check_num + 1
                chunk = chunk.drop(columns=['start', 'start_original'])
                chunk = chunk.loc[:, ~chunk.columns.duplicated()]
                pre_process_categories_file(chunk, partition_suffix)
    return None
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source["url"] = url
    colon = url.find(":")
    if url[colon + 1:colon + 3] != "//":
        raise error.general("malformed URL: %s" % (url))
    source["path"] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source["file"] = path.basename(url)
    source["name"], source["ext"] = path.splitext(source["file"])
    if source["name"].endswith(".tar"):
        source["name"] = source["name"][:-4]
        source["ext"] = ".tar" + source["ext"]
    #
    # Get the file. Checks the local source directory first.
    #
    source["local"] = None
    for p in config.define(pathkey).split(":"):
        local = path.join(path.abspath(p), source["file"])
        if source["local"] is None:
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
        if path.exists(local):
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
            break
    source["script"] = ""
    for p in parsers:
        if url.startswith(p):
            source["type"] = p
            if parsers[p](source, config, opts):
                break
    return source
def openlistwindow(dirname):
    list = posix.listdir(dirname)
    list.sort()
    i = 0
    while i < len(list):
        if list[i] == '.' or list[i] == '..':
            del list[i]
        else:
            i = i+1
    for i in range(len(list)):
        name = list[i]
        if path.isdir(path.join(dirname, name)):
            list[i] = list[i] + '/'
    width = maxwidth(list)
    width = width + stdwin.textwidth(' ')  # XXX X11 stdwin bug workaround
    height = len(list) * stdwin.lineheight()
    stdwin.setdefwinsize(width, min(height, 500))
    w = stdwin.open(dirname)
    stdwin.setdefwinsize(0, 0)
    w.setdocsize(width, height)
    w.drawproc = drawlistwindow
    w.mouse = mouselistwindow
    w.close = closelistwindow
    w.dirname = dirname
    w.list = list
    w.selected = -1
    return w
def _lo_triplets(self, opt, macro, value):
    #
    # This is a target triplet. Run it past config.sub to make sure it
    # is ok. The target triplet is 'cpu-vendor-os'.
    #
    e = execute.capture_execution()
    config_sub = path.join(self.command_path,
                           basepath, 'config.sub')
    exit_code, proc, output = e.shell(config_sub + ' ' + value)
    if exit_code == 0:
        value = output
    self.defaults[macro] = ('triplet', 'none', value)
    self.opts[opt[2:]] = value
    _cpu = macro + '_cpu'
    _arch = macro + '_arch'
    _vendor = macro + '_vendor'
    _os = macro + '_os'
    _arch_value = ''
    _vendor_value = ''
    _os_value = ''
    dash = value.find('-')
    if dash >= 0:
        _arch_value = value[:dash]
        value = value[dash + 1:]
    dash = value.find('-')
    if dash >= 0:
        _vendor_value = value[:dash]
        value = value[dash + 1:]
    if len(value):
        _os_value = value
    self.defaults[_cpu] = _arch_value
    self.defaults[_arch] = _arch_value
    self.defaults[_vendor] = _vendor_value
    self.defaults[_os] = _os_value
def make(self):
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        name = package.name()
        if self.canadian_cross():
            log.notice('package: (Cxc) %s' % (name))
        else:
            log.notice('package: %s' % (name))
        log.trace('---- macro maps %s' % ('-' * 55))
        log.trace('%s' % (str(self.config.macros)))
        log.trace('-' * 70)
        self.script.reset()
        self.script.append(self.config.expand('%{___build_template}'))
        self.script.append('echo "=> ' + name + ':"')
        self.prep(package)
        self.build_package(package)
        if not self.opts.dry_run():
            self.builddir()
            sn = path.join(self.config.expand('%{_builddir}'), 'doit')
            log.output('write script: ' + sn)
            self.script.write(sn)
            if self.canadian_cross():
                log.notice('building: (Cxc) %s' % (name))
            else:
                log.notice('building: %s' % (name))
            self.run(sn)
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
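# A minimal sketch (not part of the original sources) of the two-step tarball
# name/extension split that parse_url() above performs; the file name
# 'foo-1.0.tar.gz' is a hypothetical value used only for illustration.
def _split_tarball_name(file):
    import os.path
    # os.path.splitext() alone gives ('foo-1.0.tar', '.gz'); the extra check
    # folds the '.tar' back into the extension.
    name, ext = os.path.splitext(file)
    if name.endswith('.tar'):
        name = name[:-4]
        ext = '.tar' + ext
    return name, ext

# _split_tarball_name('foo-1.0.tar.gz') -> ('foo-1.0', '.tar.gz')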
def _collect(path_, file):
    confs = []
    for root, dirs, files in os.walk(path.host(path_), topdown=True):
        for f in files:
            if f == file:
                confs += [path.shell(path.join(root, f))]
    return confs
def parse(self, bset):

    def _clean(line):
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            line = line[1:b]
        return line.strip()

    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
        if bsetname is None:
            raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))
def _execvpe(file, args, env=None):
    if env is not None:
        func = execve
        argrest = (args, env)
    else:
        func = execv
        argrest = (args,)
        env = environ

    head, tail = path.split(file)
    if head:
        func(file, *argrest)
        return
    if 'PATH' in env:
        envpath = env['PATH']
    else:
        envpath = defpath
    PATH = envpath.split(pathsep)
    saved_exc = None
    saved_tb = None
    for dir in PATH:
        fullname = path.join(dir, file)
        try:
            func(fullname, *argrest)
        except error as e:
            tb = sys.exc_info()[2]
            if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR
                    and saved_exc is None):
                saved_exc = e
                saved_tb = tb
def post_process(self):
    # Handle the log first.
    log.default = log.log(self.logfiles())
    if self.trace():
        log.tracing = True
    if self.quiet():
        log.quiet = True
    # Handle the jobs for make
    if '_ncpus' not in self.defaults:
        raise error.general('host number of CPUs not set')
    ncpus = self.jobs(self.defaults['_ncpus'])
    if ncpus > 1:
        self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
    else:
        self.defaults['_smp_mflags'] = self.defaults['nil']
    # Load user macro files
    um = self.user_macros()
    if um:
        checked = path.exists(um)
        if False in checked:
            raise error.general('macro file not found: %s' % (um[checked.index(False)]))
        for m in um:
            self.defaults.load(m)
    # Check if the user has a private set of macros to load
    if 'RSB_MACROS' in os.environ:
        if path.exists(os.environ['RSB_MACROS']):
            self.defaults.load(os.environ['RSB_MACROS'])
    if 'HOME' in os.environ:
        rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
        if path.exists(rsb_macros):
            self.defaults.load(rsb_macros)
def _collect(path_, file):
    confs = []
    for root, dirs, files in os.walk(path.host(path_), topdown = True):
        for f in files:
            if f == file:
                confs += [path.shell(path.join(root, f))]
    return confs
def _cvs_parser(source, config, opts):
    #
    # Symlink.
    #
    if not source["url"].startswith("cvs://"):
        raise error.general("invalid cvs path: %s" % (source["url"]))
    us = source["url"].split("?")
    try:
        url = us[0]
        source["file"] = url[url[6:].index(":") + 7:]
        source["cvsroot"] = ":%s:" % (url[6:url[6:].index("/") + 6:])
    except:
        raise error.general("invalid cvs path: %s" % (source["url"]))
    for a in us[1:]:
        _as = a.split("=")
        if _as[0] == "module":
            if len(_as) != 2:
                raise error.general("invalid cvs module: %s" % (a))
            source["module"] = _as[1]
        elif _as[0] == "src-prefix":
            if len(_as) != 2:
                raise error.general("invalid cvs src-prefix: %s" % (a))
            source["src_prefix"] = _as[1]
        elif _as[0] == "tag":
            if len(_as) != 2:
                raise error.general("invalid cvs tag: %s" % (a))
            source["tag"] = _as[1]
        elif _as[0] == "date":
            if len(_as) != 2:
                raise error.general("invalid cvs date: %s" % (a))
            source["date"] = _as[1]
    if "date" in source and "tag" in source:
        raise error.general("cvs URL cannot have a date and tag: %s" % (source["url"]))
    # Do here to ensure an ordered path, the URL can include options in any order
    if "module" in source:
        source["file"] += "_%s" % (source["module"])
    if "tag" in source:
        source["file"] += "_%s" % (source["tag"])
    if "date" in source:
        source["file"] += "_%s" % (source["date"])
    for c in "/@#%.-":
        source["file"] = source["file"].replace(c, "_")
    source["local"] = path.join(source["local_prefix"], "cvs", source["file"])
    if "src_prefix" in source:
        source["symlink"] = path.join(source["local"], source["src_prefix"])
    else:
        source["symlink"] = source["local"]
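# Hedged illustration (not from the original sources) of the cvs:// URL form
# the parsers above accept: '?'-separated key=value options, where 'date' and
# 'tag' are mutually exclusive. The host, module and tag values below are made
# up for the example.
_example_cvs_url = "cvs://:pserver:anoncvs@example.org/cvs?module=mymod?tag=rel-1-0"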
def report(self, _config, _build):
    if not _build.opts.get_arg('--no-report') \
       and not _build.macros.get('%{_disable_reporting}') \
       and _build.opts.get_arg('--mail'):
        format = _build.opts.get_arg('--report-format')
        if format is None:
            format = 'html'
            ext = '.html'
        else:
            if len(format) != 2:
                raise error.general('invalid report format option: %s' % ('='.join(format)))
            if format[1] == 'text':
                format = 'text'
                ext = '.txt'
            elif format[1] == 'asciidoc':
                format = 'asciidoc'
                ext = '.txt'
            elif format[1] == 'html':
                format = 'html'
                ext = '.html'
            else:
                raise error.general('invalid report format: %s' % (format[1]))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(path.join(buildroot, prefix, 'share', 'rtems-source-builder'))
            outname = path.host(path.join(outpath, name))
            r = reports.report(format, self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if not _build.macros.get('%{_disable_reporting}') \
           and _build.opts.get_arg('--mail'):
            r = reports.report('text', self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            self.write_mail_report(r.out)
            del r
def _check_paths(name, paths):
    for p in paths:
        exe = path.join(p, name)
        if path.isfile(exe):
            return True
        if os.name == 'nt':
            if path.isfile('%s.exe' % (exe)):
                return True
    return False
def _scan(_path, ext):
    configs = []
    for root, dirs, files in os.walk(_path):
        prefix = root[len(_path) + 1:]
        for file in files:
            for e in ext:
                if file.endswith(e):
                    configs += [path.join(prefix, file)]
    return configs
def _load_released_version_config():
    '''Local worker to load a configuration file.'''
    top = _top()
    for ver in [path.join(top, 'VERSION'), path.join('..', 'VERSION')]:
        if path.exists(path.join(ver)):
            try:
                import configparser
            except ImportError:
                import ConfigParser as configparser
            v = configparser.SafeConfigParser()
            try:
                v.read(path.host(ver))
            except Exception as e:
                raise error.general('Invalid version config format: %s: %s' % (ver, e))
            return ver, v
    return None, None
def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
def read_file(filename):
    # Build the absolute path to the file.
    filepath = path.join(path.dirname(__file__), filename)
    # Check that the file exists.
    assert_msg(path.exists(filepath), 'file does not exist')
    # Read the CSV file and return it.
    return pd.read_csv(filepath,
                       index_col=0,
                       parse_dates=True,
                       infer_datetime_format=True)
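# Hedged usage sketch for read_file() above; 'ohlc.csv' is a hypothetical file
# name, and because of index_col=0 and parse_dates=True its first column is
# assumed to hold parseable dates.
# prices = read_file('ohlc.csv')
# print(prices.head())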
def _find(name, opts):
    ename = opts.defaults.expand(name)
    if ':' in ename:
        paths = path.dirname(ename).split(':')
        name = path.basename(name)
    else:
        paths = opts.defaults.get_value('_configdir').split(':')
    for p in paths:
        n = path.join(opts.defaults.expand(p), name)
        if path.exists(n):
            return n
    return None
def bset_tar(self, _build):
    tardir = _build.config.expand('%{_tardir}')
    if (self.opts.get_arg('--bset-tar-file') or self.opts.canadian_cross()) \
       and not _build.macros.get('%{_disable_packaging}'):
        path.mkdir(tardir)
        tar = path.join(tardir,
                        _build.config.expand('%s.tar.bz2' % (_build.main_package().name())))
        log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
        if not self.opts.dry_run():
            tmproot = _build.config.expand('%{_tmproot}')
            cmd = _build.config.expand('"cd ' + tmproot + \
                                       ' && %{__tar} -cf - . | %{__bzip2} > ' + tar + '"')
            _build.run(cmd, shell_opts = '-c', cwd = tmproot)
def bset_tar(self, _build):
    tardir = _build.config.expand('%{_tardir}')
    if self.opts.get_arg('--bset-tar-file') \
       and not _build.macros.get('%{_disable_packaging}'):
        path.mkdir(tardir)
        tar = path.join(tardir,
                        _build.config.expand('%s.tar.bz2' % (self.bset_pkg)))
        log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
        if not self.opts.dry_run():
            tmproot = _build.config.expand('%{_tmproot}')
            cmd = _build.config.expand("'cd " + tmproot + \
                                       " && %{__tar} -cf - . | %{__bzip2} > " + tar + "'")
            _build.run(cmd, shell_opts = '-c', cwd = tmproot)
def cp(src_hdfs_path, dest_hdfs_path, **kwargs):
    """
    Copy the contents of ``src_hdfs_path`` to ``dest_hdfs_path``.

    Additional keyword arguments, if any, are handled like in
    :func:`open`.  If ``src_hdfs_path`` is a directory, its contents
    will be copied recursively.
    """
    src, dest = {}, {}
    try:
        for d, p in ((src, src_hdfs_path), (dest, dest_hdfs_path)):
            d["host"], d["port"], d["path"] = path.split(p)
            d["fs"] = hdfs(d["host"], d["port"])
        #--- does src exist? ---
        try:
            src["info"] = src["fs"].get_path_info(src["path"])
        except IOError:
            raise IOError("no such file or directory: %r" % (src["path"]))
        #--- src exists.  Does dest exist? ---
        try:
            dest["info"] = dest["fs"].get_path_info(dest["path"])
        except IOError:
            if src["info"]["kind"] == "file":
                _cp_file(src["fs"], src["path"], dest["fs"], dest["path"], **kwargs)
                return
            else:
                dest["fs"].create_directory(dest["path"])
                dest_hdfs_path = dest["fs"].get_path_info(dest["path"])["name"]
                for item in src["fs"].list_directory(src["path"]):
                    cp(item["name"], dest_hdfs_path, **kwargs)
                return
        #--- dest exists.  Is it a file? ---
        if dest["info"]["kind"] == "file":
            raise IOError("%r already exists" % (dest["path"]))
        #--- dest is a directory ---
        dest["path"] = path.join(dest["path"], path.basename(src["path"]))
        if dest["fs"].exists(dest["path"]):
            raise IOError("%r already exists" % (dest["path"]))
        if src["info"]["kind"] == "file":
            _cp_file(src["fs"], src["path"], dest["fs"], dest["path"], **kwargs)
        else:
            dest["fs"].create_directory(dest["path"])
            dest_hdfs_path = dest["fs"].get_path_info(dest["path"])["name"]
            for item in src["fs"].list_directory(src["path"]):
                cp(item["name"], dest_hdfs_path, **kwargs)
    finally:
        for d in src, dest:
            try:
                d["fs"].close()
            except KeyError:
                pass
def bset_tar(self, _build):
    tardir = _build.config.expand("%{_tardir}")
    if (self.opts.get_arg("--bset-tar-file") or self.opts.canadian_cross()) and not _build.macros.get(
        "%{_disable_packaging}"
    ):
        path.mkdir(tardir)
        tar = path.join(tardir, _build.config.expand("%s.tar.bz2" % (self.bset_pkg)))
        log.notice("tarball: %s" % (os.path.relpath(path.host(tar))))
        if not self.opts.dry_run():
            tmproot = _build.config.expand("%{_tmproot}")
            cmd = _build.config.expand("'cd " + tmproot + " && %{__tar} -cf - . | %{__bzip2} > " + tar + "'")
            _build.run(cmd, shell_opts="-c", cwd=tmproot)
def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    us = source["url"].split("?")
    source["path"] = path.dirname(us[0])
    source["file"] = path.basename(us[0])
    source["name"], source["ext"] = path.splitext(source["file"])
    if len(us) > 1:
        source["args"] = us[1:]
    source["local"] = path.join(source["local_prefix"], "git", source["file"])
    source["symlink"] = source["local"]
def _run(self, args, check = False, cwd = None):
    e = execute.capture_execution()
    if cwd is None:
        cwd = path.join(self.path, self.prefix)
    if not path.exists(cwd):
        raise error.general('cvs path needs to exist: %s' % (cwd))
    cmd = [self.cvs, '-z', '9', '-q'] + args
    log.output('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
    exit_code, proc, output = e.spawn(cmd, cwd = path.host(cwd))
    log.trace(output)
    if check:
        self._cvs_exit_code(cmd, exit_code, output)
    return exit_code, output
def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    us = source['url'].split('?')
    source['path'] = path.dirname(us[0])
    source['file'] = path.basename(us[0])
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(us) > 1:
        source['args'] = us[1:]
    source['local'] = \
        path.join(source['local_prefix'], 'git', source['file'])
    source['symlink'] = source['local']
def bset_tar(self, _build):
    tardir = _build.config.expand('%{_tardir}')
    if self.opts.get_arg('--bset-tar-file') \
       and not _build.macros.get('%{_disable_packaging}'):
        path.mkdir(tardir)
        tar = path.join(
            tardir, _build.config.expand('%s.tar.bz2' % (self.bset_pkg)))
        log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
        if not self.opts.dry_run():
            tmproot = _build.config.expand('%{_tmproot}')
            cmd = _build.config.expand("'cd " + tmproot + \
                                       " && %{__tar} -cf - . | %{__bzip2} > " + tar + "'")
            _build.run(cmd, shell_opts='-c', cwd=tmproot)