def test_hourly_script(self):
    """Ensure garbo-hourly.py actually runs.

    The script must exit 0 and produce no output on either stream.
    """
    rv, out, err = run_script(
        "cronscripts/garbo-hourly.py", ["-q"], expect_returncode=0)
    # failIf is a deprecated unittest alias; assertFalse has
    # identical semantics and is the supported spelling.
    self.assertFalse(out.strip(), "Output to stdout: %s" % out)
    self.assertFalse(err.strip(), "Output to stderr: %s" % err)
    # The script committed from a subprocess, so the test database
    # must be torn down and rebuilt for subsequent tests.
    DatabaseLayer.force_dirty_database()
def test_run_reprocessing_script_no_params(self):
    """reprocess-hwdb-submissions.py fails without --start-file."""
    # The script requires at least the --start-file parameter.
    returncode, out, err = run_script(
        'cronscripts/reprocess-hwdb-submissions.py', [])
    self.assertThat(err, Contains('Option --start-file not specified.'))
    # A subprocess touched the database; mark it for rebuild.
    DatabaseLayer.force_dirty_database()
def test_run_reprocessing_script_no_params(self):
    # reprocess-hwdb-submissions.py refuses to run unless the
    # mandatory --start-file option is supplied.
    script_path = 'cronscripts/reprocess-hwdb-submissions.py'
    rc, out_text, err_text = run_script(script_path, [])
    self.assertThat(
        err_text, Contains('Option --start-file not specified.'))
    # The script committed from another process, so the test
    # database needs a full rebuild.
    DatabaseLayer.force_dirty_database()
def test_remoteAddFileDoesntSendDatabaseName(self):
    """remoteAddFile sends the Database-Name header.

    The instrumented client records whether the header was sent.
    """
    client = InstrumentedLibrarianClient()
    # Because the remoteAddFile call commits to the database in a
    # different process, we need to explicitly tell the DatabaseLayer to
    # fully tear down and set up the database.
    DatabaseLayer.force_dirty_database()
    client.remoteAddFile("sample.txt", 6, StringIO("sample"), "text/plain")
    # failUnless is a deprecated unittest alias; assertTrue is the
    # supported equivalent with the same semantics.
    self.assertTrue(client.sentDatabaseName,
                    "Database-Name header not sent by remoteAddFile")
def test_run_reprocessing_script_max_submission_not_integer(self):
    # A non-numeric --max-submissions value makes
    # cronscripts/reprocess-hwdb-submissions.py report an error.
    rc, out, err = run_script(
        'cronscripts/reprocess-hwdb-submissions.py',
        ['--max-submissions', 'nonsense'])
    self.assertThat(
        err,
        Contains(
            "Invalid value for --max_submissions specified: 'nonsense'"))
    # The subprocess committed; force a database rebuild afterwards.
    DatabaseLayer.force_dirty_database()
def test_remoteAddFileDoesntSendDatabaseName(self):
    # remoteAddFile should send the Database-Name header as well.
    client = InstrumentedLibrarianClient()
    # remoteAddFile commits from a separate process, so the test
    # database must be fully torn down and recreated afterwards.
    DatabaseLayer.force_dirty_database()
    client.remoteAddFile(
        'sample.txt', 6, StringIO('sample'), 'text/plain')
    self.assertTrue(
        client.sentDatabaseName,
        "Database-Name header not sent by remoteAddFile")
def test_run_reprocessing_script_startfile_does_not_exist(self):
    # Pointing --start-file at a missing path makes
    # cronscripts/reprocess-hwdb-submissions.py complain on stderr.
    missing_path = mktemp()
    rc, out, err = run_script(
        'cronscripts/reprocess-hwdb-submissions.py',
        ['--start-file', missing_path])
    self.assertThat(err, Contains('Cannot access file %s' % missing_path))
    # The script ran in a subprocess and committed to the database.
    DatabaseLayer.force_dirty_database()
def test_BadSubmissions_respects_start(self):
    """getUnprocessedSubmissions() honours the requested start id.

    Submissions with ids below the start id are ignored.
    """
    submission1 = self.factory.makeHWSubmission(
        status=HWSubmissionProcessingStatus.INVALID)
    submission2 = self.factory.makeHWSubmission(
        status=HWSubmissionProcessingStatus.INVALID)
    # assertLess gives a far more informative failure message than
    # assertTrue(a < b), which only reports "False is not true".
    self.assertLess(submission1.id, submission2.id)
    loop = self._makeBadSubmissionsLoop(submission2.id)
    # The sample data already contains one submission; a chunk size
    # of 2 would pick up earlier submissions if start were ignored.
    submissions = loop.getUnprocessedSubmissions(2)
    self.assertEqual([submission2], submissions)
    # Mark the database dirty so it is rebuilt for later tests.
    DatabaseLayer.force_dirty_database()
def test_run_reprocessing_script_startfile_with_negative_integer(self):
    """A negative integer in the start file is rejected.

    cronscripts/reprocess-hwdb-submissions.py requires the start
    file to contain a positive integer and reports an error for -1.
    """
    start_file_name = mktemp()
    # Use a context manager so the file is closed even if the write
    # raises, instead of relying on explicit close().
    with open(start_file_name, 'w') as start_file:
        start_file.write('-1')
    retcode, stdout, stderr = run_script(
        'cronscripts/reprocess-hwdb-submissions.py',
        ['--start-file', start_file_name])
    self.assertThat(
        stderr,
        Contains('%s must contain a positive integer' % start_file_name))
    # The script committed from a subprocess; rebuild the test DB.
    DatabaseLayer.force_dirty_database()
def testHideLibrarian(self):
    # A visible librarian accepts uploads.
    client = LibrarianClient()
    payload = 'foo'
    client.remoteAddFile(
        'foo', len(payload), StringIO(payload), 'text/plain')
    # The database was committed to, but not by this process, so we need
    # to ensure that it is fully torn down and recreated.
    DatabaseLayer.force_dirty_database()
    # With the librarian hidden, the same upload must fail:
    LibrarianLayer.hide()
    self.assertRaises(
        UploadFailed, client.remoteAddFile,
        'foo', len(payload), StringIO(payload), 'text/plain')
    # Once revealed again, uploads succeed:
    LibrarianLayer.reveal()
    client.remoteAddFile(
        'foo', len(payload), StringIO(payload), 'text/plain')
def testHideLibrarian(self):
    # Begin with a successful upload to prove the librarian works.
    client = LibrarianClient()
    data = 'foo'

    def upload():
        # Fresh StringIO per call so each attempt uploads the
        # full payload.
        return client.remoteAddFile(
            'foo', len(data), StringIO(data), 'text/plain')

    upload()
    # The commit happened in another process, so the DatabaseLayer
    # must fully tear down and recreate the database.
    DatabaseLayer.force_dirty_database()
    # Hiding the librarian makes the same upload fail ...
    LibrarianLayer.hide()
    self.assertRaises(UploadFailed, upload)
    # ... and revealing it restores service.
    LibrarianLayer.reveal()
    upload()
def test_cronscript(self):
    # Smoke-test process-job-source.py against the
    # initialize-distroseries job source.
    job_source = 'IInitializeDistroSeriesJobSource'
    run_script('cronscripts/process-job-source.py', [job_source])
    # The script committed in a subprocess; mark the DB dirty.
    DatabaseLayer.force_dirty_database()
def updateRemoteProductTeardown(test):
    """Tear down an update-remote-product test run.

    Flags the database dirty before delegating to the shared
    teardown, because the test executed a script in a subprocess
    whose commits this layer cannot detect.
    """
    # Mark the DB as dirty, since we run a script in a sub process.
    DatabaseLayer.force_dirty_database()
    tearDown(test)
def test_cronscript(self):
    """process-job-source.py runs for IInitializeDistroSeriesJobSource."""
    run_script(
        "cronscripts/process-job-source.py",
        ["IInitializeDistroSeriesJobSource"])
    # Committed by a child process, so force a database rebuild.
    DatabaseLayer.force_dirty_database()
def testCopyArchiveCreation(self): """Start archive population, check data before and after. Use the hoary-RELEASE suite along with the main component. """ # XXX: JamesWestby 2010-06-21 bug=596984: it is not clear # what this test is testing that is not covered in more # specific tests. It should be removed if there is nothing # else as it is fragile due to use of sampledata. DatabaseLayer.force_dirty_database() # Make sure a copy archive with the desired name does # not exist yet. distro_name = 'ubuntu' distro = getUtility(IDistributionSet).getByName(distro_name) archive_name = "msa%s" % int(time.time()) copy_archive = getUtility(IArchiveSet).getByDistroPurpose( distro, ArchivePurpose.COPY, archive_name) # This is a sanity check: a copy archive with this name should not # exist yet. self.assertTrue(copy_archive is None) hoary = getUtility(IDistributionSet)[distro_name]['hoary'] # Verify that we have the right source packages in the sample data. self._verifyPackagesInSampleData(hoary) # Command line arguments required for the invocation of the # 'populate-archive.py' script. extra_args = [ '-a', '386', '--from-distribution', distro_name, '--from-suite', 'hoary', '--to-distribution', distro_name, '--to-suite', 'hoary', '--to-archive', archive_name, '--to-user', 'salgado', '--reason', '"copy archive from %s"' % datetime.ctime(datetime.utcnow()), ] # Start archive population now! (exitcode, out, err) = self.runWrapperScript(extra_args) # Check for zero exit code. self.assertEqual( exitcode, 0, "\n=> %s\n=> %s\n=> %s\n" % (exitcode, out, err)) # Make sure the copy archive with the desired name was # created copy_archive = getUtility(IArchiveSet).getByDistroPurpose( distro, ArchivePurpose.COPY, archive_name) self.assertTrue(copy_archive is not None) # Make sure the right source packages were cloned. self._verifyClonedSourcePackages(copy_archive, hoary) # Now check that we have build records for the sources cloned. 
builds = list(getUtility(IBinaryPackageBuildSet).getBuildsForArchive( copy_archive, status=BuildStatus.NEEDSBUILD)) # Please note: there will be no build for the pmount package # since it is architecture independent and the 'hoary' # DistroSeries in the sample data has no DistroArchSeries # with chroots set up. build_spns = [ get_spn(removeSecurityProxy(build)).name for build in builds] self.assertEqual(build_spns, self.expected_build_spns)
def peopleKarmaTearDown(test): """Restore the database after testing karma.""" # We can't detect db changes made by the subprocess (yet). DatabaseLayer.force_dirty_database() tearDown(test)