def do_pkg(upload_id): cnf = Config() session = DBConn().session() upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one() queue = upload.policy_queue changes = upload.changes origchanges = os.path.join(queue.path, changes.changesname) print origchanges htmlname = "{0}_{1}.html".format(changes.source, changes.version) htmlfile = os.path.join(cnf['Show-New::HTMLPath'], htmlname) # Have we already processed this? if os.path.exists(htmlfile) and \ os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()): with open(htmlfile, "r") as fd: if fd.read() != timeout_str: sources.append(htmlname) return (PROC_STATUS_SUCCESS, '%s already up-to-date' % htmlfile) # Go, process it... Now! htmlfiles_to_process.append(htmlfile) sources.append(htmlname) group = cnf.get('Dinstall::UnprivGroup') or None with open(htmlfile, 'w') as outfile: with policy.UploadCopy(upload, group=group) as upload_copy: handler = policy.PolicyQueueUploadHandler(upload, session) missing = [(o['type'], o['package']) for o in handler.missing_overrides()] distribution = changes.distribution print >> outfile, html_header(changes.source, missing) print >> outfile, examine_package.display_changes( distribution, origchanges) if upload.source is not None and ('dsc', upload.source.source) in missing: fn = os.path.join(upload_copy.directory, upload.source.poolfile.basename) print >> outfile, examine_package.check_dsc( distribution, fn, session) for binary in upload.binaries: if (binary.binarytype, binary.package) not in missing: continue fn = os.path.join(upload_copy.directory, binary.poolfile.basename) print >> outfile, examine_package.check_deb( distribution, fn, session) print >> outfile, html_footer() session.close() htmlfiles_to_process.remove(htmlfile) return (PROC_STATUS_SUCCESS, '{0} already updated'.format(htmlfile))
def check_pkg(upload, upload_copy, session):
    """Interactively examine an upload by paging its output through less.

    Redirects sys.stdout into an unbuffered `less -R -` pipe, prints the
    .changes examination, the .dsc examination (if the upload carries a
    source package), and the package-relations summary; a binary's full
    examination is printed only when it would add a new override entry.
    stdout is always restored and the pager reaped before returning.
    """
    missing = []
    save_stdout = sys.stdout
    changes = os.path.join(upload_copy.directory, upload.changes.changesname)
    suite_name = upload.target_suite.suite_name
    handler = PolicyQueueUploadHandler(upload, session)
    # (type, package) pairs this upload would add overrides for; `missing`
    # is still empty at this point, so no hints are passed on this pass.
    missing = [(m['type'], m["package"]) for m in handler.missing_overrides(hints=missing)]
    less_cmd = ("less", "-R", "-")
    # bufsize=0: unbuffered, so examined output reaches the pager immediately.
    less_process = daklib.daksubprocess.Popen(less_cmd, bufsize=0, stdin=subprocess.PIPE)
    try:
        # Everything printed below goes to the pager, not the terminal.
        sys.stdout = less_process.stdin
        print examine_package.display_changes(suite_name, changes)
        source = upload.source
        if source is not None:
            source_file = os.path.join(
                upload_copy.directory, os.path.basename(source.poolfile.filename))
            print examine_package.check_dsc(suite_name, source_file)
        for binary in upload.binaries:
            binary_file = os.path.join(
                upload_copy.directory, os.path.basename(binary.poolfile.filename))
            examined = examine_package.check_deb(suite_name, binary_file)
            # We always need to call check_deb to display package relations for every binary,
            # but we print its output only if new overrides are being added.
            if ("deb", binary.package) in missing:
                print examined
        print examine_package.output_package_relations()
        # Close the pipe so less sees EOF and lets the user page freely.
        less_process.stdin.close()
    except IOError as e:
        if e.errno == errno.EPIPE:
            # The user quit the pager before we finished writing; harmless.
            utils.warn("[examine_package] Caught EPIPE; skipping.")
        else:
            raise
    except KeyboardInterrupt:
        utils.warn("[examine_package] Caught C-c; skipping.")
    finally:
        # Reap the pager and restore stdout no matter what happened above.
        less_process.wait()
        sys.stdout = save_stdout
def check_pkg (upload, upload_copy, session): missing = [] save_stdout = sys.stdout changes = os.path.join(upload_copy.directory, upload.changes.changesname) suite_name = upload.target_suite.suite_name handler = PolicyQueueUploadHandler(upload, session) missing = [(m['type'], m["package"]) for m in handler.missing_overrides(hints=missing)] less_cmd = ("less", "-R", "-") less_process = daklib.daksubprocess.Popen(less_cmd, bufsize=0, stdin=subprocess.PIPE) try: sys.stdout = less_process.stdin print examine_package.display_changes(suite_name, changes) source = upload.source if source is not None: source_file = os.path.join(upload_copy.directory, os.path.basename(source.poolfile.filename)) print examine_package.check_dsc(suite_name, source_file) for binary in upload.binaries: binary_file = os.path.join(upload_copy.directory, os.path.basename(binary.poolfile.filename)) examined = examine_package.check_deb(suite_name, binary_file) # We always need to call check_deb to display package relations for every binary, # but we print its output only if new overrides are being added. if ("deb", binary.package) in missing: print examined print examine_package.output_package_relations() less_process.stdin.close() except IOError as e: if e.errno == errno.EPIPE: utils.warn("[examine_package] Caught EPIPE; skipping.") else: raise except KeyboardInterrupt: utils.warn("[examine_package] Caught C-c; skipping.") finally: less_process.wait() sys.stdout = save_stdout
def do_pkg(upload_id): cnf = Config() session = DBConn().session() upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one() queue = upload.policy_queue changes = upload.changes origchanges = os.path.join(queue.path, changes.changesname) print origchanges htmlname = "{0}_{1}.html".format(changes.source, changes.version) htmlfile = os.path.join(cnf["Show-New::HTMLPath"], htmlname) # Have we already processed this? if os.path.exists(htmlfile) and os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()): with open(htmlfile, "r") as fd: if fd.read() != timeout_str: sources.append(htmlname) return (PROC_STATUS_SUCCESS, "%s already up-to-date" % htmlfile) # Go, process it... Now! htmlfiles_to_process.append(htmlfile) sources.append(htmlname) group = cnf.get("Dinstall::UnprivGroup") or None with open(htmlfile, "w") as outfile: with policy.UploadCopy(upload, group=group) as upload_copy: handler = policy.PolicyQueueUploadHandler(upload, session) missing = [(o["type"], o["package"]) for o in handler.missing_overrides()] distribution = changes.distribution print >> outfile, html_header(changes.source, missing) print >> outfile, examine_package.display_changes(distribution, origchanges) if upload.source is not None and ("dsc", upload.source.source) in missing: fn = os.path.join(upload_copy.directory, upload.source.poolfile.basename) print >> outfile, examine_package.check_dsc(distribution, fn, session) for binary in upload.binaries: if (binary.binarytype, binary.package) not in missing: continue fn = os.path.join(upload_copy.directory, binary.poolfile.basename) print >> outfile, examine_package.check_deb(distribution, fn, session) print >> outfile, html_footer() session.close() htmlfiles_to_process.remove(htmlfile) return (PROC_STATUS_SUCCESS, "{0} already updated".format(htmlfile))
def check_pkg (upload): save_stdout = sys.stdout try: sys.stdout = os.popen("less -R -", 'w', 0) changes = utils.parse_changes (upload.pkg.changes_file) print examine_package.display_changes(changes['distribution'], upload.pkg.changes_file) files = upload.pkg.files for f in files.keys(): if files[f].has_key("new"): ftype = files[f]["type"] if ftype == "deb": print examine_package.check_deb(changes['distribution'], f) elif ftype == "dsc": print examine_package.check_dsc(changes['distribution'], f) print examine_package.output_package_relations() except IOError as e: if e.errno == errno.EPIPE: utils.warn("[examine_package] Caught EPIPE; skipping.") else: sys.stdout = save_stdout raise except KeyboardInterrupt: utils.warn("[examine_package] Caught C-c; skipping.") sys.stdout = save_stdout
def do_pkg(changes_file):
    """Build the HTML examination page for one .changes file in NEW.

    Loads and sanity-checks the changes file, skips the work if an
    up-to-date HTML page already exists, otherwise runs the binary/source
    checks, determines which files are NEW, and writes the examination
    output to Show-New::HTMLPath.  Appends to the module-level `sources`
    and `htmlfiles_to_process` lists as a side effect.

    Returns a (PROC_STATUS_SUCCESS, message) tuple, or None when the
    changes file is invalid.
    """
    changes_file = utils.validate_changes_file_arg(changes_file, 0)
    if not changes_file:
        return
    print "\n" + changes_file

    u = Upload()
    u.pkg.changes_file = changes_file
    # We can afford not to check the signature before loading the changes file
    # as we've validated it already (otherwise it couldn't be in new)
    # and we can more quickly skip over already processed files this way
    u.load_changes(changes_file)

    origchanges = os.path.abspath(u.pkg.changes_file)

    # Still be cautious in case parsing the changes file went badly
    if u.pkg.changes.has_key('source') and u.pkg.changes.has_key('version'):
        htmlname = u.pkg.changes["source"] + "_" + u.pkg.changes["version"] + ".html"
        htmlfile = os.path.join(cnf["Show-New::HTMLPath"], htmlname)
    else:
        # Changes file was bad
        print "Changes file %s missing source or version field" % changes_file
        return

    # Have we already processed this?
    if os.path.exists(htmlfile) and \
       os.stat(htmlfile).st_mtime > os.stat(origchanges).st_mtime:
        with open(htmlfile, "r") as fd:
            # A file containing only timeout_str marks a failed earlier run;
            # anything else is a valid, up-to-date page.
            if fd.read() != timeout_str:
                sources.append(htmlname)
                return (PROC_STATUS_SUCCESS, '%s already up-to-date' % htmlfile)

    # Now we'll load the fingerprint
    session = DBConn().session()

    htmlfiles_to_process.append(htmlfile)
    (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file, session=session)
    new_queue = get_policy_queue('new', session );
    u.pkg.directory = new_queue.path
    u.update_subst()
    files = u.pkg.files
    changes = u.pkg.changes
    sources.append(htmlname)

    # Run the per-file checks so determine_new below sees fully
    # populated file entries.
    for deb_filename, f in files.items():
        if deb_filename.endswith(".udeb") or deb_filename.endswith(".deb"):
            u.binary_file_checks(deb_filename, session)
            u.check_binary_against_db(deb_filename, session)
        else:
            u.source_file_checks(deb_filename, session)
            u.check_source_against_db(deb_filename, session)
    u.pkg.changes["suite"] = u.pkg.changes["distribution"]

    new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, dsc=u.pkg.dsc, session=session)

    outfile = open(os.path.join(cnf["Show-New::HTMLPath"], htmlname), "w")

    # Collect every file belonging to a NEW package for examination.
    filestoexamine = []
    for pkg in new.keys():
        for fn in new[pkg]["files"]:
            filestoexamine.append(fn)

    print >> outfile, html_header(changes["source"], filestoexamine)

    check_valid(new, session)
    distribution = changes["distribution"].keys()[0]
    print >> outfile, examine_package.display_changes(distribution, changes_file)

    # Examine sources first, then binaries.
    for fn in filter(lambda fn: fn.endswith(".dsc"), filestoexamine):
        print >> outfile, examine_package.check_dsc(distribution, fn, session)
    for fn in filter(lambda fn: fn.endswith(".deb") or fn.endswith(".udeb"), filestoexamine):
        print >> outfile, examine_package.check_deb(distribution, fn, session)

    print >> outfile, html_footer()

    outfile.close()
    session.close()
    htmlfiles_to_process.remove(htmlfile)
    return (PROC_STATUS_SUCCESS, '%s already updated' % htmlfile)