def accept(directory, upload):
    """Install an accepted upload and handle the associated logging,
    announcement and bookkeeping."""
    cnf = Config()

    Logger.log(['ACCEPT', upload.changes.filename])
    print("ACCEPT")

    upload.install()
    utils.process_buildinfos(upload.directory, upload.changes.buildinfo_files,
                             upload.transaction.fs, Logger)

    accepted_to_real_suite = any(suite.policy_queue is None for suite in upload.final_suites)
    sourceful_upload = upload.changes.sourceful

    control = upload.changes.changes
    if sourceful_upload and not Options['No-Action']:
        urgency = control.get('Urgency')
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment.  Extract only the urgency from the string.
        if ' ' in urgency:
            urgency, comment = urgency.split(' ', 1)
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(control['Source'], control['Version'], urgency)

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # Move .changes to done, but only for uploads that were accepted to a
    # real suite.  process-policy will handle this for uploads to queues.
    if accepted_to_real_suite:
        src = os.path.join(upload.directory, upload.changes.filename)

        now = datetime.datetime.now()
        donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
        dst = os.path.join(donedir, upload.changes.filename)
        dst = utils.find_next_free(dst)

        upload.transaction.fs.copy(src, dst, mode=0o644)

    SummaryStats().accept_count += 1
    SummaryStats().accept_bytes += upload.changes.bytes
def comment_accept(upload, srcqueue, comments, transaction):
    """Accept an upload from a policy queue, copying it to its target
    suite(s) or to the next policy queue."""
    for byhand in upload.byhand:
        path = os.path.join(srcqueue.path, byhand.filename)
        if os.path.exists(path):
            raise Exception('E: cannot ACCEPT upload with unprocessed byhand file {0}'.format(byhand.filename))

    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    allow_tainted = srcqueue.suite.archive.tainted

    # We need overrides to get the target component
    overridesuite = upload.target_suite
    if overridesuite.overridesuite is not None:
        overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()

    def binary_component_func(db_binary):
        section = db_binary.proxy['Section']
        component_name = 'main'
        if section.find('/') != -1:
            component_name = section.split('/', 1)[0]
        return get_mapped_component(component_name, session=session)

    def is_debug_binary(db_binary):
        return daklib.utils.is_in_debug_section(db_binary.proxy)

    def has_debug_binaries(upload):
        return any((is_debug_binary(x) for x in upload.binaries))

    def source_component_func(db_source):
        package_list = PackageList(db_source.proxy)
        component = source_component_from_package_list(package_list, upload.target_suite)
        if component is not None:
            return get_mapped_component(component.component_name, session=session)

        # Fallback for packages without Package-List field
        query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
            .join(Component)
        return query.one().component

    policy_queue = upload.target_suite.policy_queue
    if policy_queue == srcqueue:
        policy_queue = None

    all_target_suites = [upload.target_suite if policy_queue is None else policy_queue.suite]
    if policy_queue is None or policy_queue.send_to_build_queues:
        all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])

    throw_away_binaries = False
    if upload.source is not None:
        source_component = source_component_func(upload.source)
        if upload.target_suite.suite_name in cnf.value_list('Dinstall::ThrowAwayNewBinarySuites') and \
           source_component.component_name in cnf.value_list('Dinstall::ThrowAwayNewBinaryComponents'):
            throw_away_binaries = True

    for suite in all_target_suites:
        debug_suite = suite.debug_suite

        if upload.source is not None:
            # If we have Source in this upload, let's include it in the
            # upload suite.
            transaction.copy_source(
                upload.source,
                suite,
                source_component,
                allow_tainted=allow_tainted,
            )

            if not throw_away_binaries:
                if debug_suite is not None and has_debug_binaries(upload):
                    # If we're handling a debug package, we also need to include
                    # the source in the debug suite as well.
                    transaction.copy_source(
                        upload.source,
                        debug_suite,
                        source_component_func(upload.source),
                        allow_tainted=allow_tainted,
                    )

        if not throw_away_binaries:
            for db_binary in upload.binaries:
                # Now, let's work out where to copy this guy to -- if it's
                # a debug binary, and the suite has a debug suite, let's go
                # ahead and target the debug suite rather than the stock
                # suite.
                copy_to_suite = suite
                if debug_suite is not None and is_debug_binary(db_binary):
                    copy_to_suite = debug_suite

                # build queues and debug suites may miss the source package
                # if this is a binary-only upload.
                if copy_to_suite != upload.target_suite:
                    transaction.copy_source(
                        db_binary.source,
                        copy_to_suite,
                        source_component_func(db_binary.source),
                        allow_tainted=allow_tainted,
                    )

                transaction.copy_binary(
                    db_binary,
                    copy_to_suite,
                    binary_component_func(db_binary),
                    allow_tainted=allow_tainted,
                    extra_archives=[upload.target_suite.archive],
                )

                check_upload_for_external_signature_request(session, suite, copy_to_suite, db_binary)

        suite.update_last_changed()

    # Copy .changes if needed
    if policy_queue is None and upload.target_suite.copychanges:
        src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
        dst = os.path.join(upload.target_suite.path, upload.changes.changesname)
        fs.copy(src, dst, mode=upload.target_suite.archive.mode)

    # List of files in the queue directory
    queue_files = [changesname]
    chg = daklib.upload.Changes(upload.policy_queue.path, changesname, keyrings=[], require_signature=False)
    queue_files.extend(f.filename for f in chg.buildinfo_files)

    # TODO: similar code exists in archive.py's `ArchiveUpload._install_policy`
    if policy_queue is not None:
        # register upload in policy queue
        new_upload = PolicyQueueUpload()
        new_upload.policy_queue = policy_queue
        new_upload.target_suite = upload.target_suite
        new_upload.changes = upload.changes
        new_upload.source = upload.source
        new_upload.binaries = upload.binaries
        session.add(new_upload)
        session.flush()

        # copy .changes & similar to policy queue
        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            dst = os.path.join(policy_queue.path, fn)
            transaction.fs.copy(src, dst, mode=policy_queue.change_perms)

    # Copy upload to Process-Policy::CopyDir
    # Used on security.d.o to sync accepted packages to ftp-master, but this
    # should eventually be replaced by something else.
    copydir = cnf.get('Process-Policy::CopyDir') or None
    if policy_queue is None and copydir is not None:
        mode = upload.target_suite.archive.mode
        if upload.source is not None:
            for f in [df.poolfile for df in upload.source.srcfiles]:
                dst = os.path.join(copydir, f.basename)
                if not os.path.exists(dst):
                    fs.copy(f.fullpath, dst, mode=mode)

        for db_binary in upload.binaries:
            f = db_binary.poolfile
            dst = os.path.join(copydir, f.basename)
            if not os.path.exists(dst):
                fs.copy(f.fullpath, dst, mode=mode)

        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            dst = os.path.join(copydir, fn)
            # We check for `src` to exist as old uploads in policy queues
            # might still miss the `.buildinfo` files.
            if os.path.exists(src) and not os.path.exists(dst):
                fs.copy(src, dst, mode=mode)

    if policy_queue is None:
        utils.process_buildinfos(upload.policy_queue.path, chg.buildinfo_files, fs, Logger)

    if policy_queue is None and upload.source is not None and not Options['No-Action']:
        urgency = upload.changes.urgency
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment.  Extract only the urgency from the string.
        if ' ' in urgency:
            urgency, comment = urgency.split(' ', 1)
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(upload.source.source, upload.source.version, urgency)

    if policy_queue is None:
        print(" ACCEPT")
    else:
        print(" ACCEPT-TO-QUEUE")
    if not Options['No-Action']:
        Logger.log(["Policy Queue ACCEPT", srcqueue.queue_name, changesname])

    if policy_queue is None:
        pu = get_processed_upload(upload)
        daklib.announce.announce_accept(pu)

    # TODO: code duplication. Similar code is in process-upload.
    # Move .changes to done
    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    if policy_queue is None:
        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            if os.path.exists(src):
                dst = os.path.join(donedir, fn)
                dst = utils.find_next_free(dst)
                fs.copy(src, dst, mode=0o644)

    if throw_away_binaries and upload.target_suite.archive.use_morgue:
        morguesubdir = cnf.get("New::MorgueSubDir", 'new')
        utils.move_to_morgue(morguesubdir,
                             [db_binary.poolfile.fullpath for db_binary in upload.binaries],
                             fs, Logger)

    remove_upload(upload, transaction)