def make(self):
    """Build the main package: generate the 'doit' build script and run it."""
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
        return
    name = package.name()
    # Canadian-cross builds are labelled so the log shows the build kind.
    if self.canadian_cross():
        log.notice('package: (Cxc) %s' % (name))
    else:
        log.notice('package: %s' % (name))
    log.trace('---- macro maps %s' % ('-' * 55))
    log.trace('%s' % (str(self.config.macros)))
    log.trace('-' * 70)
    self.script.reset()
    self.script.append(self.config.expand('%{___build_template}'))
    self.script.append('echo "=> ' + name + ':"')
    self.prep(package)
    self.build_package(package)
    if self.opts.dry_run():
        return
    self.builddir()
    script_name = path.join(self.config.expand('%{_builddir}'), 'doit')
    log.output('write script: ' + script_name)
    self.script.write(script_name)
    if self.canadian_cross():
        log.notice('building: (Cxc) %s' % (name))
    else:
        log.notice('building: %s' % (name))
    self.run(script_name)
def run(self, command, shell_opts = '', cwd = None):
    """Run a command under the configured build shell; raise on failure."""
    executor = execute.capture_execution(log = log.default, dump = self.opts.quiet())
    shell_cmd = self.config.expand('%{___build_shell} -ex ' + shell_opts + ' ' + command)
    log.output('run: ' + shell_cmd)
    exit_code, proc, output = executor.shell(shell_cmd, cwd = path.host(cwd))
    if exit_code != 0:
        raise error.general('shell cmd failed: %s' % (shell_cmd))
def _process_data(self, results, directive, info, data):
    # Process one parsed result set: handle %error/%log/%warning control
    # lines, then either record 'tag: value' info pairs (when outside a
    # directive) or collect the expanded line as directive data.
    log.trace('config: %s: %3d: _process_data: result=#%r# directive=#%s# info=#%r# data=#%r#' % \
              (self.name, self.lc, results, directive, info, data))
    new_data = []
    for l in results[1]:
        if l.startswith('%error'):
            # %error aborts processing with the expanded message text.
            l = self._expand(l)
            raise error.general('config error: %s' % (l[7:]))
        elif l.startswith('%log'):
            l = self._expand(l)
            log.output(l[4:])
        elif l.startswith('%warning'):
            l = self._expand(l)
            log.warning(l[9:])
        if not directive:
            # Outside a directive, lines are 'tag: value' info settings.
            l = self._expand(l)
            ls = self.tags.split(l, 1)
            log.trace('config: %s: %3d: _tag: %s %s' % (self.name, self.lc, l, ls))
            if len(ls) > 1:
                info = ls[0].lower()
                if info[-1] == ':':
                    # Strip the trailing ':' from the tag name.
                    info = info[:-1]
                    info_data = ls[1].strip()
            else:
                info_data = ls[0].strip()
            if info is not None:
                self._info_append(info, info_data)
            else:
                log.warning("invalid format: '%s'" % (info_data[:-1]))
        else:
            # Inside a directive: accumulate the expanded line as data.
            l = self._expand(l)
            log.trace('config: %s: %3d: _data: %s %s' % (self.name, self.lc, l, new_data))
            new_data.append(l)
    return (directive, info, data + new_data)
def __init__(self):
    """Construct MainFileClassA and exercise every logging level once."""
    # print() call syntax so this also parses under Python 3 (the
    # original used the Python-2-only print statement).
    print("------ MainFileClassA __init__")
    error( ' -- MainFileClassA __init__: Error logging\n' )
    warn( ' -- MainFileClassA __init__: warn loggging\n' )
    debug( ' -- MainFileClassA __init__: debug logging\n' )
    info( ' -- MainFileClassA __init__: info logging\n' )
    output( ' -- MainFileClassA __init__: output logging\n' )
def userFunction(self):
    """Demonstrate each logging level from MainFileClassA."""
    # print() call syntax for Python 3 compatibility.
    print("------ MainFileClassA: userFunction")
    error( ' -- MainFileClass userFunction: Error logging\n' )
    warn( ' -- MainFileClass userFunction: warn loggging\n' )
    debug( ' -- MainFileClass userFunction: debug logging\n' )
    info( ' -- MainFileClass userFunction: info logging\n' )
    output( ' -- MainFileClass userFunction: output logging\n' )
def userFunction(self):
    """Demonstrate each logging level from Helper2FileClass."""
    # print() call syntax for Python 3 compatibility.
    print("++++++ Helper2FileClass: userFunction")
    error( ' ++ Helper2FileClass userFunction: Error logging\n' )
    warn( ' ++ Helper2FileClass userFunction: warn loggging\n' )
    debug( ' ++ Helper2FileClass userFunction: debug logging\n' )
    info( ' ++ Helper2FileClass userFunction: info logging\n' )
    output( ' ++ Helper2FileClas userFunction: output logging\n' )
def parse_url(url, pathkey, config, opts, file_override = None):
    """Split a source URL into its parts and locate/describe the file.

    Returns a source dict with 'url', 'path', 'file', 'name', 'ext',
    'local', 'type', 'options' and 'script' keys; registered parsers are
    given a chance to resolve the file (local source directory first).
    """
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # Fix: error message previously read 'malforned'.
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        # An override must be a plain file name: no separators or globs.
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        # Keep compound extensions such as '.tar.gz' together.
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def get_file(url, local, opts, config):
    """Download url to the local source path, trying user URL bases first."""
    if local is None:
        raise error.general("source/patch path invalid")
    local_dir = path.dirname(local)
    if not path.isdir(local_dir) and not opts.download_disabled():
        log.notice("Creating source directory: %s" % (os.path.relpath(path.host(local_dir))))
        log.output("making dir: %s" % (path.host(local_dir)))
        if not opts.dry_run():
            path.mkdir(local_dir)
    if not path.exists(local) and opts.download_disabled():
        raise error.general("source not found: %s" % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    urls = []
    url_bases = opts.urls()
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != "/":
                base += "/"
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind("/")
            url_file = url_path if slash < 0 else url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace("_url: %s -> %s" % (",".join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general("downloading %s: all paths have failed, giving up" % (url))
def __init__(self):
    """Construct Helper2FileClass and exercise every logging level once."""
    # print() call syntax for Python 3 compatibility.
    print("++++++ Helper2FileClass __init__")
    error( ' ++ Helper2FileClass __init__: Error logging\n' )
    warn( ' ++ Helper2FileClass __init__: warn loggging\n' )
    debug( ' ++ Helper2FileClass __init__: debug logging\n' )
    info( ' ++ HelperFileClass __init__: info logging\n' )
    output( ' ++ Helper2FileClass __init__: output logging\n' )
def runCpuLimitTest( self, cpu, duration=5 ):
    """run CPU limit test with 'while true' processes.
    cpu: desired CPU fraction of each host
    duration: test duration in seconds
    returns a single list of measured CPU fractions as floats."""
    pct = cpu * 100
    info('*** Testing CPU %.0f%% bandwidth limit\n' % pct)
    hosts = self.hosts
    # Start a busy loop on every host, then capture each shell's last
    # background pid so 'ps' can sample them together.
    for h in hosts:
        h.cmd( 'while true; do a=1; done &' )
    pids = [h.cmd( 'echo $!' ).strip() for h in hosts]
    pids_str = ",".join(["%s" % pid for pid in pids])
    cmd = 'ps -p %s -o pid,%%cpu,args' % pids_str
    # It's a shame that this is what pylint prefers
    outputs = []
    # Sample the per-process CPU usage once a second for the duration.
    for _ in range( duration ):
        sleep( 1 )
        outputs.append( quietRun( cmd ).strip() )
    # Stop the busy loops (job %1 on each host shell).
    for h in hosts:
        h.cmd( 'kill %1' )
    cpu_fractions = []
    for test_output in outputs:
        # Split by line.  Ignore first line, which looks like this:
        # PID %CPU COMMAND\n
        for line in test_output.split('\n')[1:]:
            r = r'\d+\s*(\d+\.\d+)'
            m = re.search( r, line )
            if m is None:
                error( '*** Error: could not extract CPU fraction: %s\n' % line )
                return None
            cpu_fractions.append( float( m.group( 1 ) ) )
    output( '*** Results: %s\n' % cpu_fractions )
    return cpu_fractions
def __init__(self):
    """Construct HelperFileClass, log at every level and build its helper."""
    # print() call syntax for Python 3 compatibility.
    print("***** HelperFileClass __init__")
    error( ' ** HelperFileClass __init__: Error logging\n' )
    warn( ' ** HelperFileClass __init__: warn loggging\n' )
    debug( ' ** HelperFileClass __init__: debug logging\n' )
    info( ' ** HelperFileClass __init__: info logging\n' )
    output( ' ** MainFileClassA __init__: output logging\n' )
    # Construct the nested helper so its logging fires too.
    obj_help = Helper2FileClass()
def run(self, command, shell_opts="", cwd=None):
    """Run command via the build shell; log and raise on non-zero exit."""
    capture = execute.capture_execution(log=log.default, dump=self.opts.quiet())
    full_cmd = self.config.expand("%{___build_shell} -ex " + shell_opts + " " + command)
    log.output("run: " + full_cmd)
    exit_code, proc, output = capture.shell(full_cmd, cwd=path.host(cwd))
    if exit_code == 0:
        return
    log.output("shell cmd failed: %s" % (full_cmd))
    raise error.general("building %s" % (self.macros["buildname"]))
def source_setup(self, package, args):
    """Parse the %setup options for a package source.

    args[1] is the setup tag name; the remaining arguments are parsed
    with getopt ('qDcn:ba').  Raises error.general on a bad option.
    """
    log.output('source setup: %s: %s' % (package.name(), ' '.join(args)))
    setup_name = args[1]
    args = args[1:]
    try:
        opts, args = getopt.getopt(args[1:], 'qDcn:ba')
    except getopt.GetoptError as ge:
        # 'except X as e' replaces the Python-2-only 'except X, e' form
        # used elsewhere in this file.
        raise error.general('source setup error: %s' % str(ge))
def _error(self, msg):
    """Record an error and switch the build into dry-run mode."""
    err = 'error: %s' % (self._name_line_msg(msg))
    log.stderr(err)
    log.output(err)
    self.in_error = True
    if self.opts.dry_run():
        return
    log.stderr('warning: switched to dry run due to errors')
    self.opts.set_dry_run()
def run(self, command, shell_opts='', cwd=None):
    """Execute command in the configured build shell, raising on failure."""
    runner = execute.capture_execution(log=log.default, dump=self.opts.quiet())
    expanded = self.config.expand('%{___build_shell} -ex ' + shell_opts + ' ' + command)
    log.output('run: ' + expanded)
    exit_code, proc, output = runner.shell(expanded, cwd=path.host(cwd))
    if exit_code != 0:
        log.output('shell cmd failed: %s' % (expanded))
        raise error.general('building %s' % (self.macros['buildname']))
def source_setup(self, package, args):
    """Emit the build-script commands that unpack a package source.

    %setup options: -q quiet, -D do not delete an existing directory,
    -c create the directory first, -n override the directory name,
    -b unpack before chdir, -a unpack after chdir.
    """
    log.output('source setup: %s: %s' % (package.name(), ' '.join(args)))
    setup_name = args[1]
    args = args[1:]
    try:
        opts, args = getopt.getopt(args[1:], 'qDcn:ba')
    except getopt.GetoptError as ge:
        raise error.general('source setup error: %s' % str(ge))
    quiet = False
    unpack_before_chdir = True
    delete_before_unpack = True
    create_dir = False
    deleted_dir = False
    created_dir = False
    changed_dir = False
    opt_name = None
    for o in opts:
        if o[0] == '-q':
            quiet = True
        elif o[0] == '-D':
            delete_before_unpack = False
        elif o[0] == '-c':
            create_dir = True
        elif o[0] == '-n':
            opt_name = o[1]
        elif o[0] == '-b':
            unpack_before_chdir = True
        elif o[0] == '-a':
            unpack_before_chdir = False
    name = None
    for source in self.source(setup_name):
        if name is None:
            if opt_name is None:
                if source:
                    opt_name = source['name']
                else:
                    # Fix: previously referenced the undefined name
                    # 'source_tag' which raised a NameError instead of
                    # the intended error.
                    raise error.general('setup source tag not found: %s' % (setup_name))
            # Fix: 'name' was only assigned when -n was given, leaving it
            # None and crashing the 'rm -rf' string concatenation below.
            name = opt_name
        self.script_build.append(self.config.expand('cd %{_builddir}'))
        if not deleted_dir and delete_before_unpack:
            self.script_build.append(
                self.config.expand('%{__rm} -rf ' + name))
            deleted_dir = True
        if not created_dir and create_dir:
            self.script_build.append(
                self.config.expand('%{__mkdir_p} ' + name))
            created_dir = True
        if not changed_dir and (not unpack_before_chdir or create_dir):
            self.script_build.append(self.config.expand('cd ' + name))
            changed_dir = True
        self.script_build.append(self.config.expand(source['script']))
    if not changed_dir and (unpack_before_chdir and not create_dir):
        self.script_build.append(self.config.expand('cd ' + name))
        changed_dir = True
    self.script_build.append(self.config.expand('%{__setup_post}'))
def _hash_check(file_, absfile, macros, remove=True):
    # Verify absfile against the '<algorithm> <digest>' hash recorded for
    # file_.  On a checksum failure the file is optionally removed.
    # Returns True when the check passes (or when no hash exists and this
    # is not a released RSB).
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            # Some Python versions do not provide hashlib.algorithms;
            # fall back to the guaranteed set.
            hashlib_algorithms = [ 'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected failure: close the file and re-raise.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # A corrupt download is removed so a retry can re-fetch it.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # Released RSBs must carry hashes for every source file.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def _output_line(line, exe, prefix, out, count):
    """Forward one captured line to the handler, flushing the log on bursts."""
    # Locking disabled here; kept for reference.
    #exe.lock.acquire()
    #exe.outputting = True
    #exe.lock.release()
    if not out:
        log.output(prefix + line)
    else:
        out(prefix + line)
    if count > 10:
        log.flush()
def append(self, text):
    """Append text (a string or a list of lines) to the script body."""
    if type(text) is str:
        text = text.splitlines()
    if not log.quiet:
        # Echo each new line with its final script line number.
        for offset, line in enumerate(text, 1):
            log.output('script:%3d: %s' % (self.lc + offset, line))
    self.lc += len(text)
    self.body.extend(text)
async def on_message_edit(before, after):
    """Post a before/after notice to #modlog when a user edits a message."""
    # Ignore the bot's own edits and edits that change nothing visible.
    if before.author == bot.user or before.content == after.content:
        return
    msg = '{0} edit the following message: \nBefore: {1}\n After: {2}'.format(
        before.author.name, before.content, after.content)
    channel = find(lambda c: c.name == "modlog", before.server.channels)
    await bot.send_message(channel, msg)
    log.output(msg)
def _error(self, msg):
    """Report an error; with --keep-going also switch to dry-run mode first."""
    if self.opts.dry_run():
        return
    if self.opts.keep_going():
        err = 'error: %s' % (self._name_line_msg(msg))
        log.stderr(err)
        log.output(err)
        self.in_error = True
        log.stderr('warning: switched to dry run due to errors')
        self.opts.set_dry_run()
    raise error.general(self._name_line_msg(msg))
def get_file(url, local, opts, config):
    # Download url into the local source path.  Command-line URL bases
    # are tried first and, for a released RSB, the RTEMS release sources
    # URL is appended as a fallback.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                   (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # release push to the start the RTEMS URL.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # Try each candidate URL with the first downloader that claims it.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def jaccard_preprocess(self, threshold):
    """Prune and augment graph edges by Jaccard similarity.

    BFS from an arbitrary seed vertex; edges scoring below threshold are
    removed, and two-hop pairs scoring above threshold are added.
    Finally, isolated vertices are dropped.
    """
    # track which two pairs have been tested
    test_edge_list = set([])
    test_vertices_list = set([])
    queue = deque([])
    # Seed the BFS with a single arbitrary vertex.
    for x in self.graph.vertices:
        queue.append(x)
        break
    while len(queue) != 0:
        cur = queue.popleft()
        if cur in test_vertices_list:
            continue
        test_vertices_list.add(cur)
        # Fix: iterate a copy — the loop body removes entries from
        # vertices_matrix[cur], the very list being iterated.
        neighbors = list(self.graph.vertices_matrix[cur])
        for node in neighbors:
            if (node, cur) in test_edge_list or (cur, node) in test_edge_list:
                continue
            test_edge_list.add((cur, node))
            if node not in test_vertices_list:
                queue.append(node)
            score = dis.jaccard(self.graph.vertices_matrix[cur],
                                self.graph.vertices_matrix[node])
            if score < threshold:
                # Low similarity: drop the edge in both directions.
                self.graph.vertices_matrix[cur].remove(node)
                self.graph.vertices_matrix[node].remove(cur)
                if (cur, node) in self.graph.edges:
                    self.graph.edges.remove((cur, node))
                else:
                    self.graph.edges.remove((node, cur))
            # Fix: copy here too — appends below mutate the adjacency list.
            second_neighbors = list(self.graph.vertices_matrix[node])
            for second_node in second_neighbors:
                if (cur, second_node) in test_edge_list or (
                        second_node, cur) in test_edge_list or (
                        cur, second_node) in self.graph.edges or (
                        second_node, cur) in self.graph.edges:
                    continue
                test_edge_list.add((cur, second_node))
                second_score = dis.jaccard(
                    self.graph.vertices_matrix[cur],
                    self.graph.vertices_matrix[second_node])
                if second_score > threshold:
                    # Similar two-hop pair: connect them directly.
                    self.graph.vertices_matrix[cur].append(second_node)
                    self.graph.vertices_matrix[second_node].append(cur)
                    self.graph.edges.add((cur, second_node))
    # Fix: iterate a snapshot while removing, and remove from
    # 'vertices' — the original 'self.graph.vertice' raised AttributeError.
    for vertex in list(self.graph.vertices):
        if len(self.graph.vertices_matrix[vertex]) < 1:
            self.graph.vertices.remove(vertex)
    output('after jaccard', len(self.graph.edges))
def source_setup(self, package, args):
    """Emit the script commands that unpack a package source (%setup).

    Options: -q quiet, -D keep an existing directory, -c create the
    directory, -n override its name, -b/-a unpack before/after chdir.
    """
    log.output('source setup: %s: %s' % (package.name(), ' '.join(args)))
    setup_name = args[1]
    args = args[1:]
    try:
        opts, args = getopt.getopt(args[1:], 'qDcn:ba')
    except getopt.GetoptError as ge:
        raise error.general('source setup error: %s' % str(ge))
    quiet = False
    unpack_before_chdir = True
    delete_before_unpack = True
    create_dir = False
    deleted_dir = False
    created_dir = False
    changed_dir = False
    opt_name = None
    for o in opts:
        if o[0] == '-q':
            quiet = True
        elif o[0] == '-D':
            delete_before_unpack = False
        elif o[0] == '-c':
            create_dir = True
        elif o[0] == '-n':
            opt_name = o[1]
        elif o[0] == '-b':
            unpack_before_chdir = True
        elif o[0] == '-a':
            unpack_before_chdir = False
    name = None
    for source in self.source(setup_name):
        if name is None:
            if opt_name is None:
                if source:
                    opt_name = source['name']
                else:
                    # Fix: 'source_tag' was an undefined name here, so the
                    # failure path raised NameError instead of this error.
                    raise error.general('setup source tag not found: %s' % (setup_name))
            # Fix: assign unconditionally — 'name' previously stayed None
            # without -n, crashing the string concatenations below.
            name = opt_name
        self.script.append(self.config.expand('cd %{_builddir}'))
        if not deleted_dir and delete_before_unpack:
            self.script.append(self.config.expand('%{__rm} -rf ' + name))
            deleted_dir = True
        if not created_dir and create_dir:
            self.script.append(self.config.expand('%{__mkdir_p} ' + name))
            created_dir = True
        if not changed_dir and (not unpack_before_chdir or create_dir):
            self.script.append(self.config.expand('cd ' + name))
            changed_dir = True
        self.script.append(self.config.expand(source['script']))
    if not changed_dir and (unpack_before_chdir and not create_dir):
        self.script.append(self.config.expand('cd ' + name))
        changed_dir = True
    self.script.append(self.config.expand('%{__setup_post}'))
def _output_line(line, exe, prefix, out, count):
    """Hand one captured line to the out handler (or the log), flushing
    the log after a burst of lines."""
    #print 'LINE:%d: %s' % (count, line)
    exe.lock.acquire()
    #exe.outputting = True
    exe.lock.release()
    if not out:
        log.output(prefix + line)
    else:
        out(prefix + line)
    if count > 10:
        log.flush()
def _hash_check(file_, absfile, macros, remove = True):
    # Verify absfile against its recorded '<algorithm> <digest>' hash,
    # rejecting insecure algorithms and optionally removing the file on a
    # checksum failure.  Returns True when the check passes (or when no
    # hash exists and this is not a released RSB).
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            # Some Python versions do not expose hashlib.algorithms.
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        if hash[0] in ['md5', 'sha1']:
            # md5 and sha1 are considered broken; refuse to trust them.
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected failure: close the file and re-raise.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # Remove the corrupt download so a retry can re-fetch it.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # Released RSBs must carry hashes for every source file.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def _run(self, args, check=False, cwd=None):
    """Run a cvs command (compressed, quiet) and capture its output."""
    capture = execute.capture_execution()
    if cwd is None:
        cwd = path.join(self.path, self.prefix)
    if not path.exists(cwd):
        raise error.general('cvs path needs to exist: %s' % (cwd))
    cmd = [self.cvs, '-z', '9', '-q'] + args
    log.output('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
    exit_code, proc, output = capture.spawn(cmd, cwd=path.host(cwd))
    log.trace(output)
    if check:
        # Optionally translate a bad exit code into an error.
        self._cvs_exit_code(cmd, exit_code, output)
    return exit_code, output
def make(self):
    # Build the main package: write the do-build and do-clean scripts,
    # run them, and report any failure before re-raising.
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            # Label Canadian-cross builds in the log.
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(
                self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                build_sn = path.join(self.config.expand('%{_builddir}'), 'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'), 'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
    # A dry run still produces a report so the result is recorded.
    if self.opts.dry_run():
        self._generate_report_('Build: dry run (no actual error)',
                               'Build: dry run (no actual error)')
def process(label, args, macros, error):
    """Dispatch a %source/%patch directive to its set/add/setup handler."""
    if label not in ['source', 'patch']:
        error('invalid source type: %s' % (label))
    args = _args(args)
    log.output('sources: %s' % (' '.join(args)))
    if len(args) < 3:
        error('%%%s requires at least 3 arguments: %s' % (label, ' '.join(args)))
    command = args[0]
    if command == 'set':
        return set(label, args[1:], macros, error)
    if command == 'add':
        return add(label, args[1:], macros, error)
    if command == 'setup':
        return setup(label, args[1:], macros, error)
    error('invalid %%%s command: %s' % (label, args[0]))
def process(label, args, macros, error):
    """Route a %source or %patch command to the matching handler."""
    if not (label == "source" or label == "patch"):
        error("invalid source type: %s" % (label))
    args = _args(args)
    log.output("sources: %s" % (" ".join(args)))
    if len(args) < 3:
        error("%%%s requires at least 3 arguments: %s" % (label, " ".join(args)))
    # Table dispatch keeps the command handling in one place.
    handlers = {"set": set, "add": add, "setup": setup}
    if args[0] in handlers:
        return handlers[args[0]](label, args[1:], macros, error)
    error("invalid %%%s command: %s" % (label, args[0]))
def make(self):
    # Drive a full package build: emit the do-build/do-clean scripts,
    # execute them, and surface failures via the build report.
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            # Canadian-cross builds get a '(Cxc)' label in the log.
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                build_sn = path.join(self.config.expand('%{_builddir}'), 'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'), 'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
    # Record dry runs in the report as well.
    if self.opts.dry_run():
        self._generate_report_('Build: dry run (no actual error)',
                               'Build: dry run (no actual error)')
def _readthread(fh, out, prefix=''):
    """Read from a file handle and write to the output handler until the
    file closes."""
    count = 0
    while True:
        line = fh.readline()
        count += 1
        if len(line) == 0:
            break
        if not out:
            log.output(prefix + line)
        else:
            out(prefix + line)
        # Flush periodically so output is not held in the log buffer.
        if count > 10:
            log.flush()
            count = 0
def connectDevice(self, guiObject):
    """Open the serial connection to the scanned OBD-II adapter.

    Returns (True, port_name) on success, (False, '') when the port
    scan did not find a device.
    """
    found, portPath = self.scanSerialPorts(guiObject)
    # Debug override, kept for reference:
    #found = True
    #portPath = '/dev/tty.SLAB_USBtoUART'
    # Fix: was 'if i == True' — compare truthiness, and use a
    # descriptive name for the scan result.
    if not found:
        return False, ""
    log.output("Connecting")
    guiObject.writeLogText("Connecting...\n")
    self.serialPort = serial.Serial(portPath, baudrate=self.baudRate, timeout=1)
    guiObject.writeLogText("Connected to " + self.serialPort.name + "\n")
    log.output("Connected to: " + self.serialPort.name)
    return True, self.serialPort.name
def append(self, text):
    # Append a block of text to the script.  Strings (including Python 2
    # unicode strings) are split into lines; lists are used as is.  Each
    # appended line is echoed to the log unless quiet.
    is_str = False
    if type(text) is str:
        is_str = True
    try:
        # Python 2 only: 'unicode' is a distinct string type.
        if type(text) is unicode:
            is_str = True
    except:
        # Python 3: the name 'unicode' does not exist; str covers it.
        pass
    if is_str:
        text = text.splitlines()
    if not log.quiet:
        i = 0
        for l in text:
            i += 1
            log.output('script:%3d: %s' % (self.lc + i, l))
    self.lc += len(text)
    self.body.extend(text)
def cmdSetByPID(self, mode=SET):
    """Apply the PID value from the entry widgets to the device.

    INC/DEC mode bumps the value by one and writes it back to the
    entry before sending it.
    """
    pid = self.entPID.get()
    pidValue = int(self.entPIDValue.get(), base=10)
    if mode == self.INC:
        pidValue += 1
        self.entPIDValue.delete(0, END)
        self.entPIDValue.insert(0, pidValue)
    elif mode == self.DEC:
        pidValue -= 1
        self.entPIDValue.delete(0, END)
        self.entPIDValue.insert(0, pidValue)
    self.device.setPIDValue(self, pid, pidValue)
    log.output("PID = " + pid)
    log.output("PID Value= " + str(pidValue))
async def on_ready():
    """Log the bot identity and make sure the Opus voice codec is loaded."""
    log.output('Logged in as')
    log.output("Username " + bot.user.name)
    log.output("ID: " + bot.user.id)
    # Load the platform-specific Opus library if it is not already in.
    if not discord.opus.is_loaded() and os.name == 'nt':
        discord.opus.load_opus("opus.dll")
    if not discord.opus.is_loaded() and os.name == 'posix':
        discord.opus.load_opus("/usr/local/lib/libopus.so")
    log.output("Loaded Opus Library")
def _readthread(fh, out, prefix=''):
    """Read lines from fh and pass them to the output handler until EOF,
    decoding bytes on Python 3 and flushing the log periodically."""
    count = 0
    while True:
        line = fh.readline()
        # On Python 3 a binary pipe read yields bytes; decode to str.
        # (str and bytes are the same type on Python 2.)
        if type(line) is not str and type(line) is bytes:
            line = line.decode(sys.stdout.encoding)
        count += 1
        if len(line) == 0:
            break
        if not out:
            log.output(prefix + line)
        else:
            out(prefix + line)
        if count > 10:
            log.flush()
            count = 0
def paramControl(self, index, label, callback, callbackDecParameter,
                 callbackIncParameter, row, column):
    """Create one parameter row: label, -/+ buttons, entry and Apply.

    index selects the slot in the pre-allocated widget arrays; row and
    column position the widgets inside the parameters frame.  callback
    applies the entry value; the Dec/Inc callbacks step it.
    """
    log.output("Label = " + label)
    self.label = label
    self.callback = callback
    # Parameter Label
    self.lbl[index] = ttk.Label(self.frmParameters, style="Bold.TLabel")
    self.lbl[index]["text"] = self.label
    self.lbl[index].grid(sticky="W", row=row, column=column)
    # Parameter Decrement Button
    self.btnDec[index] = ttk.Button(self.frmParameters, style="Bold1.TButton")
    self.btnDec[index]["text"] = "-"
    self.btnDec[index]["command"] = callbackDecParameter
    self.btnDec[index].grid(sticky="E", row=row, column=column + 2)
    # Parameter Entry: applies via callback when focus leaves the field.
    self.ent[index] = ttk.Entry(self.frmParameters, validate="focusout",
                                validatecommand=callback)
    self.ent[index].grid(sticky="NSWE", row=row, column=column + 3, columnspan=2)
    # Parameter Increment Button
    self.btnInc[index] = ttk.Button(self.frmParameters, style="Bold1.TButton")
    self.btnInc[index]["text"] = "+"
    self.btnInc[index]["command"] = callbackIncParameter
    self.btnInc[index].grid(sticky="W", row=row, column=column + 5)
    # Parameter Setting Apply Button
    self.btnSet[index] = ttk.Button(self.frmParameters)
    self.btnSet[index]["text"] = "Apply"
    self.btnSet[index]["command"] = callback
    self.btnSet[index].grid(row=row, column=column + 6, sticky=N + S + E + W)
def scanSerialPorts(self, guiObject):
    """Scan serial ports for an OBD-II adapter matching portIdString.

    Returns (True, port_path) when exactly one matching device is
    found, otherwise (False, "").
    """
    log.output("Scanning serial ports")
    # Fix: dropped the unused 'portlist' local and the dead no-op
    # triple-quoted override block from the original.
    iterator = serial.tools.list_ports.grep(self.portIdString)
    devicesFound = 0
    portPath = ""  # defined even when no device is found
    for c, (port, desc, hwid) in enumerate(iterator):
        devicesFound += 1
        portPath = format(port)
    if devicesFound == 1:
        log.output("Found OBD-II device {}".format(portPath))
        return True, portPath
    log.output("{} OBD-II devices found".format(devicesFound))
    return False, ""
def open(self, command, capture = True, shell = False,
         cwd = None, env = None,
         stdin = None, stdout = None, stderr = None,
         timeout = None):
    """Open a command with arguments. Provide the arguments as a list or a
    string."""
    # Capturing needs an output handler; fail before spawning anything.
    if self.output is None:
        raise error.general('capture needs an output handler')
    cs = command
    if type(command) is list:
        def add(x, y):
            return x + ' ' + str(y)
        cs = functools.reduce(add, command, '')[1:]
    what = 'spawn'
    if shell:
        what = 'shell'
    cs = what + ': ' + cs
    if self.verbose:
        # NOTE(review): 'what' is prepended twice here (e.g.
        # 'shell: shell: ...'); looks unintended but is preserved as is.
        log.output(what + ': ' + cs)
    log.trace('exe: %s' % (cs))
    if shell and self.shell_exe:
        command = arg_list(command)
        command[:0] = self.shell_exe
    if not stdin and self.input:
        stdin = subprocess.PIPE
    if not stdout:
        stdout = subprocess.PIPE
    if not stderr:
        stderr = subprocess.PIPE
    proc = None
    if cwd is None:
        cwd = self.path
    if env is None:
        env = self.environment
    try:
        # Work around a problem on Windows with commands that
        # have a '.' and no extension. Windows needs the full
        # command name.
        if sys.platform == "win32" and type(command) is list:
            if command[0].find('.') >= 0:
                r, e = os.path.splitext(command[0])
                if e not in ['.exe', '.com', '.bat']:
                    command[0] = command[0] + '.exe'
        log.trace('exe: %s' % (command))
        proc = subprocess.Popen(command, shell = shell,
                                cwd = cwd, env = env,
                                stdin = stdin, stdout = stdout,
                                stderr = stderr,
                                close_fds = False)
        if not capture:
            return (0, proc)
        if self.output is None:
            raise error.general('capture needs an output handler')
        exit_code = self.capture(proc, command, timeout)
        if self.verbose:
            log.output('exit: ' + str(exit_code))
    except OSError as ose:
        # Report the spawn failure through the exit code.
        exit_code = ose.errno
        if self.verbose:
            log.output('exit: ' + str(ose))
    return (exit_code, proc)
def make(self):
    """Build the main package, generating and running the 'doit' script.

    Build failures are reported via _generate_report_ and re-raised.
    """
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            if self.canadian_cross():
                log.notice('package: (Cxc) %s' % (name))
            else:
                log.notice('package: %s' % (name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script.reset()
            self.script.append(self.config.expand('%{___build_template}'))
            self.script.append('echo "=> ' + name + ':"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                sn = path.join(self.config.expand('%{_builddir}'), 'doit')
                log.output('write script: ' + sn)
                self.script.write(sn)
                if self.canadian_cross():
                    log.notice('building: (Cxc) %s' % (name))
                else:
                    log.notice('building: %s' % (name))
                self.run(sn)
        # Fix: 'except X as e' replaces the Python-2-only 'except X, e'
        # syntax, which is a SyntaxError on Python 3.
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
def open(self, command, capture = True, shell = False,
         cwd = None, env = None,
         stdin = None, stdout = None, stderr = None,
         timeout = None):
    """Open a command with arguments. Provide the arguments as a list or a
    string."""
    if self.verbose:
        # Render a list command as one string purely for logging.
        s = command
        if type(command) is list:
            def add(x, y):
                return x + ' ' + str(y)
            s = functools.reduce(add, command, '')[1:]
        what = 'spawn'
        if shell:
            what = 'shell'
        log.output(what + ': ' + s)
    # Capturing needs an output handler; fail before spawning anything.
    if self.output is None:
        raise error.general('capture needs an output handler')
    if shell and self.shell_exe:
        command = arg_list(command)
        command[:0] = self.shell_exe
    if not stdin and self.input:
        stdin = subprocess.PIPE
    if not stdout:
        stdout = subprocess.PIPE
    if not stderr:
        stderr = subprocess.PIPE
    proc = None
    if cwd is None:
        cwd = self.path
    if env is None:
        env = self.environment
    try:
        # Work around a problem on Windows with commands that
        # have a '.' and no extension. Windows needs the full
        # command name.
        if sys.platform == "win32" and type(command) is list:
            if command[0].find('.') >= 0:
                r, e = os.path.splitext(command[0])
                if e not in ['.exe', '.com', '.bat']:
                    command[0] = command[0] + '.exe'
        log.trace('exe: %s' % (command))
        proc = subprocess.Popen(command, shell = shell,
                                cwd = cwd, env = env,
                                stdin = stdin, stdout = stdout,
                                stderr = stderr,
                                close_fds = False)
        if not capture:
            return (0, proc)
        if self.output is None:
            raise error.general('capture needs an output handler')
        exit_code = self.capture(proc, command, timeout)
        if self.verbose:
            log.output('exit: ' + str(exit_code))
    except OSError as ose:
        # Report the spawn failure through the exit code.
        exit_code = ose.errno
        if self.verbose:
            log.output('exit: ' + str(ose))
    return (exit_code, proc)
def parse_url(url, pathkey, config, opts, file_override=None):
    """Split a source/patch URL into the parts the downloaders need.

    url:           the URL to parse; must contain a '<proto>://' prefix.
    pathkey:       macro key of the local download directory, passed to
                   the protocol parser.
    config/opts:   build configuration and command line options.
    file_override: optional replacement for the file name taken from the
                   URL; must not contain path or glob characters.

    Returns a dict with 'url', 'path', 'file', 'name', 'ext', 'options',
    'local', 'type' and 'script' entries.  Raises error.general on a
    malformed URL or a bad override file name.
    """
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # Fix: corrected typo in the error message ('malforned').
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.<z>' as a single compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def get_file(url, local, opts, config):
    """Fetch a source/patch file to the local path.

    Tries each command-line URL base (opts.urls()) with the file name
    taken from the URL, then the URL itself, handing each candidate to
    the first matching protocol downloader.  Creates the destination
    directory when downloading is enabled.  Raises error.general when
    the path is invalid, downloading is disabled and the file is
    missing, or every candidate URL fails.
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            # Use only the file-name component of the requested URL.
            # NOTE(review): 'urlparse' is the Python 2 module name —
            # presumably aliased to urllib.parse elsewhere; confirm import.
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose protocol prefix matches and succeeds wins.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general(
            'downloading %s: all paths have failed, giving up' % (url))
def iperf( self, hosts=None, l4Type='TCP', udpBw='10M' ):
    """Run iperf between two hosts.
       hosts: list of hosts; if None, uses opposite hosts
       l4Type: string, one of [ TCP, UDP ]
       udpBw: iperf bandwidth argument used for the UDP test
       returns: results two-element array of server and client speeds
       (for UDP the requested bandwidth is inserted at the front)"""
    # telnet is used below to probe that the server is accepting
    # TCP connections before starting the client.
    if not quietRun( 'which telnet' ):
        error( 'Cannot find telnet in $PATH - required for iperf test' )
        return
    if not hosts:
        hosts = [ self.hosts[ 0 ], self.hosts[ -1 ] ]
    else:
        assert len( hosts ) == 2
    client, server = hosts
    output( '*** Iperf: testing ' + l4Type + ' bandwidth between ' )
    output( "%s and %s\n" % ( client.name, server.name ) )
    # Kill any stale iperf instances on the server host.
    server.cmd( 'killall -9 iperf' )
    iperfArgs = 'iperf '
    bwArgs = ''
    if l4Type == 'UDP':
        iperfArgs += '-u '
        bwArgs = '-b ' + udpBw + ' '
    elif l4Type != 'TCP':
        raise Exception( 'Unexpected l4 type: %s' % l4Type )
    # Start the server in the background and wait for its PID.
    server.sendCmd( iperfArgs + '-s', printPid=True )
    servout = ''
    while server.lastPid is None:
        servout += server.monitor()
    if l4Type == 'TCP':
        # Poll port 5001 with telnet until the server accepts connections.
        while 'Connected' not in client.cmd(
                'sh -c "echo A | telnet -e A %s 5001"' % server.IP()):
            output('waiting for iperf to start up...')
            sleep(.5)
    # Run the client for 5 seconds against the server.
    cliout = client.cmd( iperfArgs + '-t 5 -c ' + server.IP() + ' ' +
                         bwArgs )
    debug( 'Client output: %s\n' % cliout )
    # Interrupt the server and collect the rest of its output.
    server.sendInt()
    servout += server.waitOutput()
    debug( 'Server output: %s\n' % servout )
    result = [ self._parseIperf( servout ), self._parseIperf( cliout ) ]
    if l4Type == 'UDP':
        result.insert( 0, udpBw )
    output( '*** Results: %s\n' % result )
    return result
def iperf(self, hosts=None, l4Type='TCP', udpBw='10M'):
    """Run iperf between two hosts.
       hosts: list of hosts; if None, uses opposite hosts
       l4Type: string, one of [ TCP, UDP ]
       returns: results two-element array of server and client speeds"""
    # telnet is required to probe that the server is up.
    if not quietRun('which telnet'):
        error('Cannot find telnet in $PATH - required for iperf test')
        return
    if not hosts:
        hosts = [self.hosts[0], self.hosts[-1]]
    else:
        assert len(hosts) == 2
    client, server = hosts[0], hosts[1]
    output('*** Iperf: testing ' + l4Type + ' bandwidth between ')
    output("%s and %s\n" % (client.name, server.name))
    # Remove any stale iperf processes on the server host.
    server.cmd('killall -9 iperf')
    if l4Type == 'UDP':
        baseCmd = 'iperf ' + '-u '
        bwArgs = '-b ' + udpBw + ' '
    elif l4Type == 'TCP':
        baseCmd = 'iperf '
        bwArgs = ''
    else:
        raise Exception('Unexpected l4 type: %s' % l4Type)
    # Launch the server and wait until its PID is known.
    server.sendCmd(baseCmd + '-s', printPid=True)
    servout = ''
    while server.lastPid is None:
        servout += server.monitor()
    if l4Type == 'TCP':
        # Probe port 5001 until the server accepts connections.
        probe = 'sh -c "echo A | telnet -e A %s 5001"' % server.IP()
        while 'Connected' not in client.cmd(probe):
            output('waiting for iperf to start up...')
            sleep(.5)
    # Run the client for 5 seconds.
    cliout = client.cmd(baseCmd + '-t 5 -c ' + server.IP() + ' ' + bwArgs)
    debug('Client output: %s\n' % cliout)
    # Stop the server and gather the remainder of its output.
    server.sendInt()
    servout += server.waitOutput()
    debug('Server output: %s\n' % servout)
    result = [self._parseIperf(servout), self._parseIperf(cliout)]
    if l4Type == 'UDP':
        result.insert(0, udpBw)
    output('*** Results: %s\n' % result)
    return result
def log_info(self):
    """Write this object's info text to the log output."""
    text = self.info()
    log.output(text)
hasher = hashlib.new(hash[0]) _in = open(path.host(absfile), 'rb') hasher.update(_in.read()) except IOError, err: log.notice('hash: %s: read error: %s' % (file_, str(err))) failed = True except: msg = 'hash: %s: error' % (file_) log.stderr(msg) log.notice(msg) if _in is not None: _in.close() raise if _in is not None: _in.close() log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1])) if hasher.hexdigest() != hash[1]: log.warning('checksum error: %s' % (file_)) failed = True if failed and remove: log.warning('removing: %s' % (file_)) if path.exists(absfile): try: os.remove(path.host(absfile)) except IOError, err: raise error.general('hash: %s: remove: %s' % (absfile, str(err))) except: raise error.general('hash: %s: remove error' % (file_)) if hasher is not None: del hasher else:
def copy(self, src, dst):
    """Copy the src tree to dst; logged but skipped on a dry run."""
    log.output('copy: %s => %s' % (path.host(src), path.host(dst)))
    if self.opts.dry_run():
        return
    path.copy_tree(src, dst)
def get_file(url, local, opts, config):
    """Fetch a source/patch file to the local path.

    Builds the list of candidate URLs from the command-line URL bases,
    the RTEMS release URL (when the build is a release or the
    --with-release-url option selects one) and the requested URL
    itself, then hands each candidate to the first matching protocol
    downloader.  Raises error.general when the path is invalid,
    downloading is disabled and the file is missing, or every
    candidate fails.
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released push to the start the RTEMS URL unless overrided by the command
    # line option --with-release-url. The variant --without-release-url can
    # override the released check.
    #
    url_bases = opts.urls()
    try:
        rtems_release_url_value = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
    except:
        # Macro expansion is best effort; fall back to no release URL.
        rtems_release_url_value = None
        log.output('RTEMS release URL could not be expanded')
    rtems_release_url = None
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            # --with-release-url=<url> names the release URL directly.
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose protocol prefix matches and succeeds wins.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def _http_downloader(url, local, config, opts):
    """Download url to the local file over HTTP(S), with progress output.

    Returns True when the file exists locally (already present or
    downloaded and checksum-verified), False when the download failed
    with an IOError/ValueError.  Other errors are re-raised after
    closing the streams.  Raises error.general when the result is not
    a file or fails its checksum.
    """
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.output('download: %s -> %s' % (url, dst))
    log.notice('download: %s -> %s' % (_sensible_url(url, len(dst)), dst))
    failed = False
    if _do_download(opts):
        _in = None
        _out = None
        _length = None          # Content-Length when the server provides it
        _have = 0               # bytes received so far
        _chunk_size = 256 * 1024
        _chunk = None
        _last_percent = 200.0
        _last_msg = ''
        _have_status_output = False
        _url = url
        try:
            try:
                _in = None
                _ssl_context = None
                # See #2656
                _req = urllib_request.Request(_url)
                _req.add_header('User-Agent', 'Wget/1.16.3 (freebsd10.1)')
                try:
                    # Try an unverified SSL context first; fall back to the
                    # default opener when ssl is unavailable or it fails.
                    import ssl
                    _ssl_context = ssl._create_unverified_context()
                    _in = urllib_request.urlopen(_req, context = _ssl_context)
                except:
                    _ssl_context = None
                if _ssl_context is None:
                    _in = urllib_request.urlopen(_req)
                if _url != _in.geturl():
                    _url = _in.geturl()
                    log.output(' redirect: %s' % (_url))
                    log.notice(' redirect: %s' % (_sensible_url(_url)))
                _out = open(path.host(local), 'wb')
                try:
                    _length = int(_in.info()['Content-Length'].strip())
                except:
                    pass
                # Copy chunks while updating a single-line progress message.
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        # Pad and backspace to erase a longer previous message.
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                        _have_status_output = True
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                log.stdout_raw('\n\r')
            except:
                # Terminate the progress line before propagating the error.
                if _have_status_output:
                    log.stdout_raw('\n\r')
                raise
        except IOError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            # Drop any partial download.
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            # Unexpected error: close the streams and re-raise.
            msg = 'download: %s: error' % (_sensible_url(_url))
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
    if not failed:
        if not path.isfile(local):
            raise error.general('source is not a file: %s' % (path.host(local)))
        # Verify the download against the configured checksum.
        if not _hash_check(path.basename(local), local, config.macros, False):
            raise error.general('checksum failure file: %s' % (dst))
    return not failed
def rmdir(self, rmpath):
    """Remove rmpath and its contents; logged but skipped on a dry run."""
    log.output('removing: %s' % (path.host(rmpath)))
    if self.opts.dry_run():
        return
    if path.exists(rmpath):
        path.removeall(rmpath)
def mkdir(self, mkpath):
    """Create the directory mkpath; logged but skipped on a dry run."""
    log.output('making dir: %s' % (path.host(mkpath)))
    if self.opts.dry_run():
        return
    path.mkdir(mkpath)