@contextmanager
def checkout(package):
    # Used as a context manager (iterate() does `with checkout(package) as target:`).
    proxy = get_proxy()
    _type = package['_type']
    if _type not in ['binaries', 'sources']:
        raise ValueError("Unknown package _type: %s" % _type)

    def source():
        # Grab the .dsc with dget and yield its filename.
        url = proxy.get_dsc(package['_id'])
        dsc = os.path.basename(url)
        dget(url)
        yield dsc

    def binary():
        # Download every .deb listed for this package with wget.
        info = proxy.get_deb_info(package['_id'])
        url_base = info['root']
        out, err, ret = run_command(['wget'] + [
            os.path.join(url_base, x) for x in info['packages']])
        if ret != 0:
            raise Exception("Failed to download the binary packages")
        yield package['binaries']

    with tdir() as where:
        with cd(where):
            for x in {"sources": source, "binaries": binary}[_type]():
                yield x
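checkout() and the functions below lean on a few small helpers (tdir, cd, run_command) that are not shown here. A minimal sketch of what they might look like, assuming tdir yields a throwaway temp directory, cd is a chdir context manager, and run_command returns an (stdout, stderr, returncode) triple; the names match the calls above, but the bodies are guesses rather than the project's actual implementation.

import os
import shutil
import subprocess
import tempfile
from contextlib import contextmanager


@contextmanager
def tdir():
    # Hypothetical: create a temp dir, hand it to the caller, clean it up after.
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)


@contextmanager
def cd(where):
    # Hypothetical: chdir into `where` for the duration of the block.
    old = os.getcwd()
    os.chdir(where)
    try:
        yield where
    finally:
        os.chdir(old)


def run_command(command):
    # Hypothetical: run a command and return (stdout, stderr, returncode),
    # matching the `out, err, ret = run_command(...)` unpacking above.
    # Callers pass either an argv list or (in iterate()) a formatted string.
    proc = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
        shell=not isinstance(command, (list, tuple)),
    )
    out, err = proc.communicate()
    return out, err, proc.returncode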
def pep8(dsc, analysis):
    run_command(["dpkg-source", "-x", dsc, "source"])
    with cd('source'):
        out, err, ret = run_command(['pep8', '.'])
        failed = ret != 0
        for issue in parse_pep8(out.splitlines()):
            analysis.results.append(issue)
        return (analysis, out, failed)
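parse_pep8 is not shown either. pep8 reports one violation per line in the form path:line:col: CODE message, so a parser sketch along those lines; the real plugin presumably yields firehose result objects, and the dicts here are only a stand-in.

import re

# Hypothetical stand-in for the project's parse_pep8.
PEP8_LINE = re.compile(
    r'^(?P<path>.+?):(?P<line>\d+):(?P<col>\d+):\s+'
    r'(?P<code>[EW]\d+)\s+(?P<message>.*)$')


def parse_pep8(lines):
    for line in lines:
        match = PEP8_LINE.match(line)
        if match is None:
            continue
        yield {
            'path': match.group('path'),
            'line': int(match.group('line')),
            'column': int(match.group('col')),
            'testid': match.group('code'),
            'message': match.group('message'),
        }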
def iterate():
    suites = listize(config['suites'])
    arches = listize(config['arches'])

    with workon(suites, arches, list(PLUGINS.keys())) as job:
        if job is None:
            raise IDidNothingError("No more jobs")

        package_id = job['package']
        type_ = job['package_type']

        logging.debug("Fetching the %s package, id=%s", type_, package_id)
        package = None
        if type_ == 'binary':
            package = proxy.get_binary_package(package_id)
        elif type_ == 'source':
            package = proxy.get_source_package(package_id)
        else:
            raise IDidNothingError("Unknown package type: %s" % type_)

        handler, version_getter = load_module(job['type'])
        firehose = create_firehose(package, version_getter)

        # Run the plugin's handler against a fresh checkout in a temp dir.
        with tdir() as fd:
            with cd(fd):
                with checkout(package) as target:
                    firehose, log, err = handler(target, package, job, firehose)
                    type_ = {"sources": "source",
                             "binaries": "binary"}[package['_type']]

        logging.info("Job worker returned, filing reports")
        report = proxy.submit_report(firehose.to_json(), job['_id'], err)

        logging.info("Sending the XML firehose report to the pool")
        # to_xml_bytes() returns bytes, so the report file is opened in binary mode.
        open('firehose.xml', 'wb').write(firehose.to_xml_bytes())
        remote_firehose_path = proxy.get_firehose_write_location(report)
        cmd = config['copy'].format(src='firehose.xml',
                                    dest=remote_firehose_path)
        out, err, ret = run_command(cmd)

        logging.info("Sending the logs to the pool")
        remote_path = proxy.get_log_write_location(report)
        open('ethel-log', 'wb').write(log.encode('utf-8'))
        cmd = config['copy'].format(src='ethel-log', dest=remote_path)
        out, err, ret = run_command(cmd)

        if ret != 0:
            print(out)
            raise Exception("Failed to copy the log to the pool")
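iterate() also depends on listize and load_module, which are not shown. A rough sketch of plausible implementations; the module.run / module.version attributes are an assumption about how PLUGINS is laid out, not something the code above confirms.

def listize(entry):
    # Hypothetical: accept either a real list or a comma-separated string
    # from the config file and always hand back a list.
    if isinstance(entry, list):
        return entry
    return [x.strip() for x in entry.split(',') if x.strip()]


def load_module(name):
    # Hypothetical: PLUGINS maps a check name ("pep8", "cppcheck", ...) to a
    # module exposing `run` (the handler) and `version` (the version getter).
    module = PLUGINS[name]
    return module.run, module.version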
def perlcritic(dsc, analysis):
    run_command(["dpkg-source", "-x", dsc, "source"])
    with cd('source'):
        out, err, ret = run_command([
            'perlcritic', '--brutal', '.',
            '--verbose', '%f:%l:%c %s %p %m\n',
        ])
        if ret == 1:
            raise Exception("Perlcritic had an internal error")
        failed = ret == 2
        for issue in parse_perlcritic(out.splitlines()):
            analysis.results.append(issue)
        return (analysis, out, failed)
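The --verbose '%f:%l:%c %s %p %m\n' template above fixes the line format parse_perlcritic has to read: file:line:column, then the numeric severity, the policy name, and the message. A sketch of a matching parser, again with plain dicts standing in for whatever result objects the real code builds.

import re

# Hypothetical stand-in for parse_perlcritic, matching the
# '%f:%l:%c %s %p %m' verbose template used above.
PERLCRITIC_LINE = re.compile(
    r'^(?P<path>.+?):(?P<line>\d+):(?P<col>\d+)\s+'
    r'(?P<severity>\d+)\s+(?P<policy>\S+)\s+(?P<message>.*)$')


def parse_perlcritic(lines):
    for line in lines:
        match = PERLCRITIC_LINE.match(line)
        if match is None:
            continue
        yield {
            'path': match.group('path'),
            'line': int(match.group('line')),
            'column': int(match.group('col')),
            'severity': int(match.group('severity')),
            'policy': match.group('policy'),
            'message': match.group('message'),
        }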
def iterate(): suites = listize(config["suites"]) arches = listize(config["arches"]) with workon(suites, arches, list(PLUGINS.keys())) as job: if job is None: raise IDidNothingError("No more jobs") package_id = job["package"] type_ = job["package_type"] logging.debug("Fetching the %s package, id=%s", type_, package_id) package = None if type_ == "binary": package = proxy.get_binary_package(package_id) elif type_ == "source": package = proxy.get_source_package(package_id) else: raise IDidNothingError("SHIT") handler, version_getter = load_module(job["type"]) firehose = create_firehose(package, version_getter) with tdir() as fd: with cd(fd): with checkout(package) as target: firehose, log, err = handler(target, package, job, firehose) type_ = {"sources": "source", "binaries": "binary"}[package["_type"]] logging.info("Job worker returned, filing reports") report = proxy.submit_report(firehose.to_json(), job["_id"], err) logging.info("Sending the XML firehose report to the pool") open("firehose.xml", "w").write(firehose.to_xml_bytes()) remote_firehose_path = proxy.get_firehose_write_location(report) cmd = config["copy"].format(src="firehose.xml", dest=remote_firehose_path) out, err, ret = run_command(cmd) logging.info("Sending the logs to the pool") remote_path = proxy.get_log_write_location(report) open("ethel-log", "wb").write(log.encode("utf-8")) cmd = config["copy"].format(src="ethel-log", dest=remote_path) out, err, ret = run_command(cmd) if ret != 0: print(out) raise Exception("SHIT.")
def cppcheck(dsc, analysis):
    run_command(["dpkg-source", "-x", dsc, "source"])
    with cd('source'):
        out, err, ret = run_command(['cppcheck', '--enable=all', '.', '--xml'])
        xmlbytes = err.encode()
        failed = False

        if err.strip() == '':
            return (analysis, err, failed)

        for issue in parse_cppcheck(xmlbytes):
            analysis.results.append(issue)
            if not failed and issue.severity in [
                'performance', 'portability', 'error', 'warning'
            ]:
                failed = True

        return (analysis, err, failed)
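cppcheck writes its --xml report to stderr, which is why cppcheck() encodes err and hands it to parse_cppcheck. A sketch of that parser against cppcheck's old version-1 XML (<error file=... line=... id=... severity=... msg=.../>); the attribute names are an assumption about the cppcheck version in use, and the namedtuple exists only so the issue.severity check above has something to read.

import xml.etree.ElementTree as ET
from collections import namedtuple

# Hypothetical stand-in for parse_cppcheck.
CppcheckIssue = namedtuple(
    'CppcheckIssue', ['path', 'line', 'testid', 'severity', 'message'])


def parse_cppcheck(xmlbytes):
    root = ET.fromstring(xmlbytes)
    for error in root.iter('error'):
        yield CppcheckIssue(
            path=error.get('file'),
            line=int(error.get('line', 0)),
            testid=error.get('id'),
            severity=error.get('severity'),
            message=error.get('msg'),
        )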
def run(dsc, source, job, firehose):
    run_command(["dpkg-source", "-x", dsc, "source"])
    with cd('source'):
        return desktop_file_validate('source', firehose)
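Finally, a sketch of what desktop_file_validate might do, assuming it shells out to the desktop-file-validate tool from desktop-file-utils and returns the same (firehose, log, failed) triple that iterate() expects from every handler; the real plugin presumably also turns the tool's output into firehose results. It reuses the run_command helper sketched earlier.

import os

# Hypothetical sketch of desktop_file_validate: walk the tree for .desktop
# files, run desktop-file-validate over them, and report whether it objected.
def desktop_file_validate(path, firehose):
    desktop_files = []
    for root, _, files in os.walk(path):
        desktop_files.extend(
            os.path.join(root, f) for f in files if f.endswith('.desktop'))

    if not desktop_files:
        return (firehose, '', False)

    out, err, ret = run_command(['desktop-file-validate'] + desktop_files)
    log = out + err
    failed = ret != 0
    return (firehose, log, failed)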