def test_upload_new(self):
    """Walk an upload through the NEW queue in three passes.

    1. No overrides exist: the upload is accepted but flagged NEW.
    2. Only the source override exists: still NEW (binaries missing).
    3. Source and binary overrides exist: accepted and no longer NEW.

    Each pass must produce no reject reasons.
    """
    # Parse the changes file
    changes = upload.Changes(fixture('packages'),
                             'linux_42.0-1_amd64.changes',
                             [fixture('packages/gpg/pubring.gpg')],
                             True)

    # Do all the setup needed for the upload to be accepted
    dbconn.get_or_set_fingerprint(changes.primary_fingerprint, self.session)
    self.setup_keys()
    self.setup_suites([('unstable', 'sid')])
    self.setup_srcformats()
    self.setup_architectures()
    self.session.commit()

    # First attempt, it should succeed and be a NEW package
    first_upload = self.attempt_upload(changes)
    with first_upload as attempt:
        result = attempt.check()
        self.assertTrue(result)
        self.assertTrue(attempt.new)
        # assertEqual, not the deprecated assertEquals alias
        self.assertEqual(attempt.reject_reasons, [])

    # Add the override for the source package
    override = {
        'package': changes.source_name,
        'priority': 'optional',
        'section': 'kernel',
        'component': changes.source.component,
        'type': 'dsc',
    }
    queue = policy.PolicyQueueUploadHandler(changes, self.session)
    queue.add_overrides([override], self.suite['unstable'])
    self.session.commit()

    # Second attempt, it should succeed but be NEW because of new binaries
    with self.attempt_upload(changes) as attempt:
        result = attempt.check()
        self.assertTrue(result)
        self.assertTrue(attempt.new)
        self.assertEqual(attempt.reject_reasons, [])

    # Add the binary overrides
    overrides = [{
        'package': binary.name,
        'priority': 'optional',
        'section': 'kernel',
        'component': binary.component,
        'type': binary.type,
    } for binary in changes.binaries]
    queue.add_overrides(overrides, self.suite['unstable'])
    self.session.commit()

    # Third attempt, this time it should succeed and not be NEW
    with self.attempt_upload(changes) as attempt:
        result = attempt.check()
        self.assertTrue(result)
        self.assertFalse(attempt.new)
        self.assertEqual(attempt.reject_reasons, [])
def do_pkg(upload_id): cnf = Config() session = DBConn().session() upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one() queue = upload.policy_queue changes = upload.changes origchanges = os.path.join(queue.path, changes.changesname) print origchanges htmlname = "{0}_{1}.html".format(changes.source, changes.version) htmlfile = os.path.join(cnf['Show-New::HTMLPath'], htmlname) # Have we already processed this? if os.path.exists(htmlfile) and \ os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()): with open(htmlfile, "r") as fd: if fd.read() != timeout_str: sources.append(htmlname) return (PROC_STATUS_SUCCESS, '%s already up-to-date' % htmlfile) # Go, process it... Now! htmlfiles_to_process.append(htmlfile) sources.append(htmlname) group = cnf.get('Dinstall::UnprivGroup') or None with open(htmlfile, 'w') as outfile: with policy.UploadCopy(upload, group=group) as upload_copy: handler = policy.PolicyQueueUploadHandler(upload, session) missing = [(o['type'], o['package']) for o in handler.missing_overrides()] distribution = changes.distribution print >> outfile, html_header(changes.source, missing) print >> outfile, examine_package.display_changes( distribution, origchanges) if upload.source is not None and ('dsc', upload.source.source) in missing: fn = os.path.join(upload_copy.directory, upload.source.poolfile.basename) print >> outfile, examine_package.check_dsc( distribution, fn, session) for binary in upload.binaries: if (binary.binarytype, binary.package) not in missing: continue fn = os.path.join(upload_copy.directory, binary.poolfile.basename) print >> outfile, examine_package.check_deb( distribution, fn, session) print >> outfile, html_footer() session.close() htmlfiles_to_process.remove(htmlfile) return (PROC_STATUS_SUCCESS, '{0} already updated'.format(htmlfile))