def test_upgrade_branches_packagebranch(self):
        """Package branches should be upgradable via BranchUpgradeJob."""
        self.useBzrBranches()
        db_branch = self.factory.makePackageBranch()
        target, target_tree = self.create_branch_and_tree(
            db_branch=db_branch, format='knit')
        target.branch_format = BranchFormat.BZR_BRANCH_5
        target.repository_format = RepositoryFormat.BZR_KNIT_1

        # Sanity check: the tree really starts in the old knit format.
        self.assertEqual(
            'Bazaar-NG Knit Repository Format 1',
            target_tree.branch.repository._format.get_format_string())

        BranchUpgradeJob.create(target, self.factory.makePerson())
        transaction.commit()

        returncode, out, err = run_script(
            'cronscripts/process-job-source.py', ['IBranchUpgradeJobSource'],
            expect_returncode=0)
        self.assertEqual('', out)
        self.assertIn('INFO    Ran 1 BranchUpgradeJob jobs.\n', err)

        # After the job runs, the branch on disk is in 2a format.
        upgraded = BzrBranch.open(target_tree.branch.base)
        self.assertEqual(
            'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
            upgraded.repository._format.get_format_string())
 def test_exclude(self):
     # A --exclude switch drops the given job source from the run.
     script_args = ["MAIN"]
     for job_source in self.getJobSources("MAIN"):
         script_args.append("--exclude")
         script_args.append(job_source)
     returncode, output, error = run_script(self.script, script_args)
     self.assertEqual("", error)
 def test_hourly_script(self):
     """Ensure garbo-hourly.py actually runs and produces no output."""
     rv, out, err = run_script(
         "cronscripts/garbo-hourly.py", ["-q"], expect_returncode=0)
     # failIf is a deprecated unittest alias; use assertFalse instead.
     self.assertFalse(out.strip(), "Output to stdout: %s" % out)
     self.assertFalse(err.strip(), "Output to stderr: %s" % err)
     # The script touches the database, so mark the layer dirty.
     DatabaseLayer.force_dirty_database()
# Beispiel #4
# 0
 def test_missing_argument(self):
     # Without a JOB_SOURCE argument the script prints usage
     # information and exits with an error code.
     returncode, output, error = run_script(
         self.script, [], expect_returncode=1)
     self.assertIn('Usage:', output)
     self.assertIn('process-job-source.py [options] JOB_SOURCE', output)
 def run_script_and_assert_success(self):
     """Run the scan_branches script and assert it ran successfully."""
     returncode, out, err = run_script(
         'cronscripts/process-job-source.py',
         ['IBranchScanJobSource'], expect_returncode=0)
     self.assertEqual('', out)
     self.assertIn('INFO    Ran 1 BranchScanJob jobs.\n', err)
    def test_reupload_translations(self):
        """Test a dry run of reupload-translations.py on two packages."""
        retcode, stdout, stderr = run_script(
            'scripts/rosetta/reupload-translations.py', [
                '-d', self.distroseries.distribution.name,
                '-s', self.distroseries.name,
                '-p', self.sourcepackagename1.name,
                '-p', self.sourcepackagename2.name,
                '-v',
                '--dry-run',
            ])

        self.assertEqual(0, retcode)
        self.assertEqual('', stdout)

        # Raw strings: \s is a regex escape, which is an invalid escape
        # sequence in a plain string literal (SyntaxWarning on modern
        # Python).
        expected_output = (
            r"INFO\s*Dry run.  Not really uploading anything.\n"
            r"INFO\s*Processing [^\s]+ in .*\n"
            r"WARNING\s*Found no translations upload for .*\n"
            r"INFO\s*Processing [^\s]+ in .*\n"
            r"WARNING\s*Found no translations upload for .*\n"
            r"INFO\s*Done.\n")
        self.assertTrue(
            re.match(expected_output, stderr),
            'expected %s, got %s' % (expected_output, stderr))
# Beispiel #7
# 0
 def test_exclude(self):
     # Job sources can be excluded with a --exclude switch.
     sources = self.getJobSources("MAIN")
     args = ["MAIN"]
     for source in sources:
         args += ["--exclude", source]
     returncode, output, error = run_script(self.script, args)
     self.assertEqual("", error)
# Beispiel #8
# 0
    def test_upgrade_branches_packagebranch(self):
        """Test that upgrade_branches can upgrade package branches."""
        self.useBzrBranches()
        branch = self.factory.makePackageBranch()
        target, tree = self.create_branch_and_tree(
            db_branch=branch, format='knit')
        target.branch_format = BranchFormat.BZR_BRANCH_5
        target.repository_format = RepositoryFormat.BZR_KNIT_1

        # The tree starts out in the old knit repository format.
        knit_format = tree.branch.repository._format.get_format_string()
        self.assertEqual(knit_format, 'Bazaar-NG Knit Repository Format 1')

        BranchUpgradeJob.create(target, self.factory.makePerson())
        transaction.commit()

        returncode, out, err = run_script(
            'cronscripts/process-job-source.py',
            ['IBranchUpgradeJobSource'], expect_returncode=0)
        self.assertEqual('', out)
        self.assertIn('INFO    Ran 1 BranchUpgradeJob jobs.\n', err)

        # Reopen the branch from disk: it should now be in 2a format.
        reopened = BzrBranch.open(tree.branch.base)
        self.assertEqual(
            reopened.repository._format.get_format_string(),
            'Bazaar repository format 2a (needs bzr 1.16 or later)\n')
 def test_revision_added_job(self):
     """RevisionsAddedJobs are run by sendbranchmail."""
     self.useBzrBranches()
     branch, tree = self.createBranch()
     tree.bzrdir.root_transport.put_bytes('foo', 'baz')
     # XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
     # required to generate the revision-id.
     with override_environ(BZR_EMAIL='*****@*****.**'):
         tree.commit('Added foo.', rev_id='rev2')
     job = RevisionsAddedJob.create(
         branch, 'rev1', 'rev2', '*****@*****.**')
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py', ['IRevisionsAddedJobSource'])
     # Raw strings: \( \) and \d are regex escapes, which are invalid
     # escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    '
         'Creating lockfile: /var/lock/launchpad-process-job-source-'
         'IRevisionsAddedJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <REVISIONS_ADDED_MAIL branch job \(\d+\) '
         r'for .*?> \(ID %d\) in status Waiting\n'
         'INFO    Ran 1 RevisionsAddedJob jobs.\n' % job.job.id,
         stderr)
     self.assertEqual('', stdout)
     self.assertEqual(0, retcode)
 def test_script_runs(self):
     """Ensure merge-proposal-jobs script runs."""
     job = make_runnable_incremental_diff_job(self)
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['--log-twisted', 'IBranchMergeProposalJobSource'])
     self.assertEqual(0, retcode)
     self.assertEqual('', stdout)
     # The GENERATE_INCREMENTAL_DIFF line is a raw string: \( is a regex
     # escape and an invalid escape sequence in a plain string literal.
     matches_expected = MatchesRegex(
         'INFO    Creating lockfile: /var/lock/launchpad-process-job-'
         'source-IBranchMergeProposalJobSource.lock\n'
         'INFO    Running through Twisted.\n'
         'Log opened.\n'
         'INFO    Log opened.\n'
         'ProcessPool stats:\n'
         'INFO    ProcessPool stats:\n'
         '\tworkers: 0\n'
         'INFO    \tworkers: 0\n'
         '(.|\n)*'
         'INFO    Running '
         r'<GENERATE_INCREMENTAL_DIFF job for merge .*?> \(ID %d\).\n'
         '(.|\n)*'
         'INFO    STOPPING: \'\'\n'
         'Main loop terminated.\n'
         'INFO    Main loop terminated.\n'
         'INFO    Ran 1 GenerateIncrementalDiffJob jobs.\n' % job.job.id)
     self.assertThat(stderr, matches_expected)
     self.assertEqual(JobStatus.COMPLETED, job.status)
# Beispiel #11
# 0
    def test_reupload_translations(self):
        """Test a dry run of the reupload-translations script."""
        retcode, stdout, stderr = run_script(
            'scripts/rosetta/reupload-translations.py', [
                '-d',
                self.distroseries.distribution.name,
                '-s',
                self.distroseries.name,
                '-p',
                self.sourcepackagename1.name,
                '-p',
                self.sourcepackagename2.name,
                '-v',
                '--dry-run',
            ])

        self.assertEqual(0, retcode)
        self.assertEqual('', stdout)

        # Raw strings: \s is a regex escape, which is an invalid escape
        # sequence in a plain string literal.
        expected_output = (r"INFO\s*Dry run.  Not really uploading anything.\n"
                           r"INFO\s*Processing [^\s]+ in .*\n"
                           r"WARNING\s*Found no translations upload for .*\n"
                           r"INFO\s*Processing [^\s]+ in .*\n"
                           r"WARNING\s*Found no translations upload for .*\n"
                           r"INFO\s*Done.\n")
        self.assertTrue(re.match(expected_output, stderr),
                        'expected %s, got %s' % (expected_output, stderr))
 def test_empty_queue(self):
     # With no jobs queued, the script only creates its lockfile and
     # exits without running anything.
     returncode, output, error = run_script(
         self.script, ["IMembershipNotificationJobSource"])
     lockfile_pattern = (
         "INFO    Creating lockfile: .*launchpad-process-job-"
         "source-IMembershipNotificationJobSource.lock.*")
     self.assertTextMatchesExpressionIgnoreWhitespace(
         lockfile_pattern, error)
 def test_missing_argument(self):
     # The script should display usage info when called without any
     # arguments.
     returncode, output, error = run_script(
         self.script, [], expect_returncode=1)
     usage = ("Usage: process-job-source-groups.py "
              "[ -e JOB_SOURCE ] GROUP [GROUP]...")
     self.assertIn(usage, output)
     self.assertIn("-e JOB_SOURCE, --exclude=JOB_SOURCE", output)
     self.assertIn("At least one group must be specified.", output)
     self.assertIn("Group: MAIN\n    I", output)
 def test_empty_queue(self):
     # With nothing queued, the script just launches a child per job
     # source class and exits; it must not create its own lockfile.
     returncode, output, error = run_script(self.script, ["MAIN"])
     child_lock_pattern = (
         ".*Creating lockfile:.*launchpad-process-job-"
         "source-IMembershipNotificationJobSource.lock.*")
     self.assertTextMatchesExpressionIgnoreWhitespace(
         child_lock_pattern, error)
     self.assertNotIn("launchpad-processjobsourcegroups.lock", error)
 def run_script_and_assert_success(self):
     """Run the scan_branches script and assert it ran successfully."""
     returncode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py', ['IBranchScanJobSource'],
         expect_returncode=0)
     self.assertEqual('', stdout)
     expected_line = 'INFO    Ran 1 BranchScanJob jobs.\n'
     self.assertIn(expected_line, stderr)
 def test_reclaimbranchspace_script(self):
     # When the reclaimbranchspace script is run, it removes from the file
     # system any branches that were deleted from the database more than a
     # week ago.
     db_branch = self.factory.makeAnyBranch()
     mirrored_path = self.getBranchPath(
         db_branch, config.codehosting.mirrored_branches_root)
     if os.path.exists(mirrored_path):
         shutil.rmtree(mirrored_path)
     os.makedirs(mirrored_path)
     db_branch.destroySelf()
     transaction.commit()
     # The first run doesn't remove anything yet.
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['IReclaimBranchSpaceJobSource'])
     self.assertEqual('', stdout)
     self.assertEqual(
         'INFO    Creating lockfile: /var/lock/'
         'launchpad-process-job-source-IReclaimBranchSpaceJobSource.lock\n'
         'INFO    Running synchronously.\n', stderr)
     self.assertEqual(0, retcode)
     self.assertTrue(
         os.path.exists(mirrored_path))
     # Now pretend that the branch was deleted 8 days ago.
     reclaim_job = IStore(BranchJob).find(
         BranchJob,
         BranchJob.job_type == BranchJobType.RECLAIM_BRANCH_SPACE).one()
     reclaim_job.job.scheduled_start -= datetime.timedelta(days=8)
     transaction.commit()
     # The script will now remove the branch from disk.
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['IReclaimBranchSpaceJobSource'])
     self.assertEqual('', stdout)
     # Raw strings: \( \) and \d are regex escapes, which are invalid
     # escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    Creating lockfile: /var/lock/'
         'launchpad-process-job-source-IReclaimBranchSpaceJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <RECLAIM_BRANCH_SPACE branch job \(\d+\) for '
         r'\d+> \(ID %s\) in status Waiting\n'
         'INFO    Ran 1 ReclaimBranchSpaceJob jobs.\n' % reclaim_job.job.id,
         stderr)
     self.assertEqual(0, retcode)
     self.assertFalse(
         os.path.exists(mirrored_path))
# Beispiel #17
# 0
 def test_empty_queue(self):
     # An empty queue means the script just creates its lockfile and
     # exits without running any jobs.
     returncode, output, error = run_script(
         self.script, ['IMembershipNotificationJobSource'])
     pattern = ('INFO    Creating lockfile: .*launchpad-process-job-'
                'source-IMembershipNotificationJobSource.lock.*')
     self.assertTextMatchesExpressionIgnoreWhitespace(pattern, error)
 def test_run_script(self):
     # The script will run stand-alone.
     distribution = self.makeDistro()
     self.factory.makeDistroSeries(distribution)
     transaction.commit()
     returncode, out, err = run_script(
         "cronscripts/generate-extra-overrides.py",
         ["-d", distribution.name, "-q"])
     self.assertEqual(0, returncode)
# Beispiel #19
# 0
 def test_empty_queue(self):
     # The script should just launch a child for each job source class,
     # and then exit if no jobs are in the queue.  It should not create
     # its own lockfile.
     returncode, output, error = run_script(self.script, ['MAIN'])
     self.assertTextMatchesExpressionIgnoreWhitespace(
         '.*Creating lockfile:.*launchpad-process-job-'
         'source-IMembershipNotificationJobSource.lock.*',
         error)
     self.assertNotIn("launchpad-processjobsourcegroups.lock", error)
 def test_reclaimbranchspace_script(self):
     # When the reclaimbranchspace script is run, it removes from the file
     # system any branches that were deleted from the database more than a
     # week ago.
     db_branch = self.factory.makeAnyBranch()
     mirrored_path = self.getBranchPath(
         db_branch, config.codehosting.mirrored_branches_root)
     if os.path.exists(mirrored_path):
         shutil.rmtree(mirrored_path)
     os.makedirs(mirrored_path)
     db_branch.destroySelf()
     transaction.commit()
     # The first run doesn't remove anything yet.
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['IReclaimBranchSpaceJobSource'])
     self.assertEqual('', stdout)
     self.assertEqual(
         'INFO    Creating lockfile: /var/lock/'
         'launchpad-process-job-source-IReclaimBranchSpaceJobSource.lock\n'
         'INFO    Running synchronously.\n', stderr)
     self.assertEqual(0, retcode)
     self.assertTrue(os.path.exists(mirrored_path))
     # Now pretend that the branch was deleted 8 days ago.
     reclaim_job = IStore(BranchJob).find(
         BranchJob,
         BranchJob.job_type == BranchJobType.RECLAIM_BRANCH_SPACE).one()
     reclaim_job.job.scheduled_start -= datetime.timedelta(days=8)
     transaction.commit()
     # The script will now remove the branch from disk.
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['IReclaimBranchSpaceJobSource'])
     self.assertEqual('', stdout)
     # Raw strings: \( \) and \d are regex escapes, which are invalid
     # escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    Creating lockfile: /var/lock/'
         'launchpad-process-job-source-IReclaimBranchSpaceJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <RECLAIM_BRANCH_SPACE branch job \(\d+\) for '
         r'\d+> \(ID %s\) in status Waiting\n'
         'INFO    Ran 1 ReclaimBranchSpaceJob jobs.\n' % reclaim_job.job.id,
         stderr)
     self.assertEqual(0, retcode)
     self.assertFalse(os.path.exists(mirrored_path))
 def test_exclude_non_existing_group(self):
     # Excluding an unknown job source is not fatal; the script just
     # logs a short info message and carries on.
     script_args = ["MAIN"]
     for job_source in self.getJobSources("MAIN"):
         script_args += ["--exclude", job_source]
     script_args += ["--exclude", "BobbyDazzler"]
     returncode, output, error = run_script(self.script, script_args)
     self.assertThat(
         error, DocTestMatches("INFO    'BobbyDazzler' is not in MAIN\n"))
# Beispiel #22
# 0
 def test_exclude_non_existing_group(self):
     # If a job source specified by --exclude does not exist the script
     # continues, logging a short info message about it.
     excluded = list(self.getJobSources("MAIN")) + ["BobbyDazzler"]
     args = ["MAIN"]
     for source in excluded:
         args.extend(("--exclude", source))
     returncode, output, error = run_script(self.script, args)
     expected = "INFO    'BobbyDazzler' is not in MAIN\n"
     self.assertThat(error, DocTestMatches(expected))
# Beispiel #23
# 0
 def _setUp(self):
     # Start the loggerhead (codebrowse) app server as a daemon and wait
     # for it to come up; registers a cleanup that kills it via its
     # pidfile.
     pidfile = pidfile_path(
         "codebrowse", use_config=LayerProcessController.appserver_config)
     pid = get_pid_from_file(pidfile)
     if pid is not None:
         # A leftover instance from a previous run would collide with
         # the one we are about to start, so warn and kill it first.
         warnings.warn(
             "Attempt to start LoggerheadFixture with an existing "
             "instance (%d) running in %s." % (pid, pidfile))
         kill_by_pidfile(pidfile)
     # Start from a fresh debug log so later reads see only this run.
     self.logfile = os.path.join(config.codebrowse.log_folder, "debug.log")
     remove_if_exists(self.logfile)
     self.addCleanup(kill_by_pidfile, pidfile)
     run_script(
         os.path.join("scripts", "start-loggerhead.py"),
         ["--daemon"],
         # The testrunner-appserver config provides the correct
         # openid_provider_root URL.
         extra_env={"LPCONFIG": BaseLayer.appserver_config_name})
     self._waitForStartup()
# Beispiel #24
# 0
 def test_missing_argument(self):
     # Calling the script with no arguments should print usage
     # information and exit with status 1.
     returncode, output, error = run_script(
         self.script, [], expect_returncode=1)
     self.assertIn(
         'Usage: process-job-source-groups.py '
         '[ -e JOB_SOURCE ] GROUP [GROUP]...', output)
     self.assertIn('-e JOB_SOURCE, --exclude=JOB_SOURCE', output)
     self.assertIn('At least one group must be specified.', output)
     self.assertIn('Group: MAIN\n    I', output)
    def test_pottery_generate_intltool_script(self):
        # Running the script against a fully intltool-ready package
        # lists the POT files it would generate.
        self.prepare_package("intltool_full_ok")

        return_code, stdout, stderr = run_script(
            'scripts/rosetta/pottery-generate-intltool.py', [])

        expected = dedent("""\
            ./po-module1/packagename-module1.pot
            ./po-module2/packagename-module2.pot
            """)
        self.assertEqual(expected, stdout)
# Beispiel #26
# 0
    def test_pottery_generate_intltool_script(self):
        # Let the script run to see it works fine.
        self.prepare_package("intltool_full_ok")

        return_code, stdout, stderr = run_script(
            'scripts/rosetta/pottery-generate-intltool.py', [])

        # Same text the dedent()ed triple-quoted literal produced.
        self.assertEqual(
            "./po-module1/packagename-module1.pot\n"
            "./po-module2/packagename-module2.pot\n",
            stdout)
# Beispiel #27
# 0
    def test_cronscript_succeeds(self):
        # The process-apport-blobs cronscript runs every pending
        # ProcessApportBlobJob.
        getUtility(IProcessApportBlobJobSource).create(self.blob)
        transaction.commit()

        returncode, out, err = run_script(
            'cronscripts/process-job-source.py',
            ['IProcessApportBlobJobSource'], expect_returncode=0)
        self.assertEqual('', out)
        self.assertIn('INFO    Ran 1 ProcessApportBlobJob jobs.\n', err)
    def test_cronscript_succeeds(self):
        # The process-apport-blobs cronscript will run all pending
        # ProcessApportBlobJobs.
        job_source = getUtility(IProcessApportBlobJobSource)
        job_source.create(self.blob)
        transaction.commit()

        retcode, stdout, stderr = run_script(
            'cronscripts/process-job-source.py',
            ['IProcessApportBlobJobSource'],
            expect_returncode=0)
        self.assertEqual('', stdout)
        self.assertIn('INFO    Ran 1 ProcessApportBlobJob jobs.\n', stderr)
# Beispiel #29
# 0
    def test_merge_translations(self):
        """Merging, then splitting, packaging runs the matching jobs."""
        job = make_translation_merge_job(self.factory)
        transaction.commit()
        retcode, stdout, stderr = run_script(
            'cronscripts/process-job-source.py',
            ['ITranslationPackagingJobSource'],
            expect_returncode=0)
        # \\( and \\) yield the regex escapes \( \); a bare \( in a plain
        # string literal is an invalid escape sequence.
        matcher = MatchesRegex(
            dedent("""\
            INFO    Creating lockfile: /var/lock/launchpad-process-job-source-ITranslationPackagingJobSource.lock
            INFO    Running synchronously.
            INFO    Running <.*?TranslationMergeJob.*?> \\(ID .*\\) in status Waiting
            INFO    Merging .* and .* in Ubuntu Distroseries.*
            INFO    Deleted POTMsgSets: 1.  TranslationMessages: 1.
            INFO    Merging template 1/2.
            INFO    Merging template 2/2.
            INFO    Ran 1 TranslationMergeJob jobs.
            """))
        self.assertThat(stderr, matcher)
        self.assertEqual('', stdout)

        # Deleting the packaging link schedules the inverse split job.
        with admin_logged_in():
            job.distroseries.getSourcePackage(
                job.sourcepackagename).deletePackaging()
        transaction.commit()
        retcode, stdout, stderr = run_script(
            'cronscripts/process-job-source.py',
            ['ITranslationPackagingJobSource'],
            expect_returncode=0)
        matcher = MatchesRegex(
            dedent("""\
            INFO    Creating lockfile: /var/lock/launchpad-process-job-source-ITranslationPackagingJobSource.lock
            INFO    Running synchronously.
            INFO    Running <.*?TranslationSplitJob.*?> \\(ID .*\\) in status Waiting
            INFO    Splitting .* and .* in Ubuntu Distroseries.*
            INFO    1 entries split.
            INFO    Ran 1 TranslationSplitJob jobs.
            """))
        self.assertThat(stderr, matcher)
        self.assertEqual('', stdout)
 def test_request_daily_builds_oops(self):
     """Ensure errors are handled cleanly."""
     copy_archive = self.factory.makeArchive(purpose=ArchivePurpose.COPY)
     bad_recipe = self.factory.makeSourcePackageRecipe(
         daily_build_archive=copy_archive, build_daily=True)
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/request_daily_builds.py', [])
     # The build request fails (non-PPA archive): nothing is built and
     # exactly one OOPS is recorded.
     self.assertEqual(0, bad_recipe.pending_builds.count())
     self.assertIn('Requested 0 daily builds.', stderr)
     self.oops_capture.sync()
     self.assertEqual('NonPPABuildRequest', self.oopses[0]['type'])
     self.assertEqual(
         1, len(self.oopses), "Too many OOPSes: %r" % (self.oopses,))
 def test_request_daily_builds_oops(self):
     """Ensure errors are handled cleanly."""
     archive = self.factory.makeArchive(purpose=ArchivePurpose.COPY)
     recipe = self.factory.makeSourcePackageRecipe(
         daily_build_archive=archive, build_daily=True)
     transaction.commit()
     returncode, out, err = run_script(
         'cronscripts/request_daily_builds.py', [])
     self.assertEqual(0, recipe.pending_builds.count())
     self.assertIn('Requested 0 daily builds.', err)
     self.oops_capture.sync()
     self.assertEqual('NonPPABuildRequest', self.oopses[0]['type'])
     self.assertEqual(
         1, len(self.oopses), "Too many OOPSes: %r" % (self.oopses,))
# Beispiel #32
# 0
 def test_processed(self):
     # The script should output the number of jobs it processed.
     member = self.factory.makePerson(name='murdock')
     team = self.factory.makeTeam(name='a-team')
     login_person(team.teamowner)
     team.addMember(member, team.teamowner)
     membership = getUtility(ITeamMembershipSet).getByPersonAndTeam(
         member, team)
     membership.setStatus(TeamMembershipStatus.ADMIN, team.teamowner)
     transaction.commit()
     returncode, output, error = run_script(
         self.script, ['-v', 'IMembershipNotificationJobSource'])
     self.assertIn(
         'INFO    Running <MembershipNotificationJob '
         'about ~murdock in ~a-team; status=Waiting>', error)
     self.assertIn('DEBUG   MembershipNotificationJob sent email', error)
     self.assertIn('Ran 1 MembershipNotificationJob jobs.', error)
 def test_processed(self):
     # The script should output the number of jobs it processed.
     person = self.factory.makePerson(name="murdock")
     team = self.factory.makeTeam(name="a-team")
     login_person(team.teamowner)
     team.addMember(person, team.teamowner)
     tm = getUtility(ITeamMembershipSet).getByPersonAndTeam(person, team)
     tm.setStatus(TeamMembershipStatus.ADMIN, team.teamowner)
     transaction.commit()
     returncode, output, error = run_script(
         self.script, ["-v", "IMembershipNotificationJobSource"])
     self.assertIn(
         "INFO    Running <MembershipNotificationJob "
         "about ~murdock in ~a-team; status=Waiting>",
         error)
     self.assertIn("DEBUG   MembershipNotificationJob sent email", error)
     self.assertIn("Ran 1 MembershipNotificationJob jobs.", error)
 def test_processed(self):
     # The script should output the number of jobs that have been
     # processed by its child processes.
     member = self.factory.makePerson(name="murdock")
     team = self.factory.makeTeam(name="a-team")
     login_person(team.teamowner)
     team.addMember(member, team.teamowner)
     membership = getUtility(ITeamMembershipSet).getByPersonAndTeam(
         member, team)
     membership.setStatus(TeamMembershipStatus.ADMIN, team.teamowner)
     transaction.commit()
     returncode, output, error = run_script(
         self.script, ["-v", "--wait", "MAIN"])
     self.assertTextMatchesExpressionIgnoreWhitespace(
         "INFO Running <MembershipNotificationJob "
         "about ~murdock in ~a-team; status=Waiting>",
         error)
     self.assertIn("DEBUG   MembershipNotificationJob sent email", error)
     self.assertIn("Ran 1 MembershipNotificationJob jobs.", error)
# Beispiel #35
# 0
 def test_processed(self):
     # The script should output the number of jobs that have been
     # processed by its child processes.
     person = self.factory.makePerson(name='murdock')
     team = self.factory.makeTeam(name='a-team')
     login_person(team.teamowner)
     team.addMember(person, team.teamowner)
     tm = getUtility(ITeamMembershipSet).getByPersonAndTeam(person, team)
     tm.setStatus(TeamMembershipStatus.ADMIN, team.teamowner)
     transaction.commit()
     returncode, output, error = run_script(
         self.script, ['-v', '--wait', 'MAIN'])
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO Running <MembershipNotificationJob '
         'about ~murdock in ~a-team; status=Waiting>', error)
     self.assertIn('DEBUG   MembershipNotificationJob sent email', error)
     self.assertIn('Ran 1 MembershipNotificationJob jobs.', error)
 def test_request_daily_builds(self):
     """Ensure the request_daily_builds script requests daily builds."""
     product_branch = self.factory.makeProductBranch()
     product_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[product_branch])
     package_branch = self.factory.makePackageBranch()
     package_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[package_branch])
     self.assertEqual(0, product_recipe.pending_builds.count())
     self.assertEqual(0, package_recipe.pending_builds.count())
     transaction.commit()
     returncode, out, err = run_script(
         'cronscripts/request_daily_builds.py', [])
     self.assertIn('Requested 2 daily builds.', err)
     self.assertEqual(1, product_recipe.pending_builds.count())
     self.assertEqual(1, package_recipe.pending_builds.count())
     self.assertFalse(product_recipe.is_stale)
     self.assertFalse(package_recipe.is_stale)
# Beispiel #37
# 0
 def test_cronscript(self):
     # Running the cronscript processes the queued
     # DistroSeriesDifferenceJob and records the difference.
     dsp = self.factory.makeDistroSeriesParent()
     package = self.factory.makeSourcePackageName()
     self.getJobSource().createForPackagePublication(
         dsp.derived_series, package, PackagePublishingPocket.RELEASE)
     # Make changes visible to the process we'll be spawning.
     transaction.commit()
     returncode, out, err = run_script(
         'cronscripts/process-job-source.py',
         ['-v', 'IDistroSeriesDifferenceJobSource'])
     # The cronscript ran how we expected it to.
     self.assertEqual(returncode, 0)
     self.assertIn('INFO    Ran 1 DistroSeriesDifferenceJob jobs.', err)
     # And it did what we expected.
     remaining = find_waiting_jobs(
         dsp.derived_series, package, dsp.parent_series)
     self.assertContentEqual([], remaining)
     self.assertEqual(1, find_dsd_for(dsp, package).count())
    def test_rosetta_branches_script(self):
        # If a job exists it will be executed and the template file will
        # be put into the import queue with status "Approved".
        self._clear_import_queue()
        pot_path = self.factory.getUniqueString() + ".pot"
        branch = self._setup_series_branch(pot_path)
        RosettaUploadJob.create(branch, NULL_REVISION)
        transaction.commit()

        returncode, out, err = run_script(
            'cronscripts/process-job-source.py', ['IRosettaUploadJobSource'])
        self.assertEqual(0, returncode)

        queue = getUtility(ITranslationImportQueue)
        self.assertEqual(1, queue.countEntries())
        entries = list(queue)
        self.assertEqual(RosettaImportStatus.APPROVED, entries[0].status)
        self.assertEqual(pot_path, entries[0].path)
    def test_rosetta_branches_script(self):
        # Running the job source executes a pending RosettaUploadJob and
        # approves the uploaded template in the import queue.
        self._clear_import_queue()
        pot_path = self.factory.getUniqueString() + ".pot"
        RosettaUploadJob.create(
            self._setup_series_branch(pot_path), NULL_REVISION)
        transaction.commit()

        return_code, stdout, stderr = run_script(
            'cronscripts/process-job-source.py', ['IRosettaUploadJobSource'])
        self.assertEqual(0, return_code)

        queue = getUtility(ITranslationImportQueue)
        self.assertEqual(1, queue.countEntries())
        entry = list(queue)[0]
        self.assertEqual(RosettaImportStatus.APPROVED, entry.status)
        self.assertEqual(pot_path, entry.path)
 def test_request_daily_builds(self):
     """Ensure the request_daily_builds script requests daily builds."""
     prod_branch = self.factory.makeProductBranch()
     prod_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[prod_branch])
     pack_branch = self.factory.makePackageBranch()
     pack_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[pack_branch])
     for recipe in (prod_recipe, pack_recipe):
         self.assertEqual(0, recipe.pending_builds.count())
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/request_daily_builds.py', [])
     self.assertIn('Requested 2 daily builds.', stderr)
     # Each stale recipe got exactly one build and is no longer stale.
     for recipe in (prod_recipe, pack_recipe):
         self.assertEqual(1, recipe.pending_builds.count())
         self.assertFalse(recipe.is_stale)
 def test_sendbranchmail(self):
     """Ensure sendbranchmail runs and sends email."""
     self.useBzrBranches()
     branch, tree = self.createBranch()
     mail_job = RevisionMailJob.create(
         branch, 1, '*****@*****.**', 'body', 'foo')
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py', ['IRevisionMailJobSource'])
     # Raw strings: \( \) and \d are regex escapes, which are invalid
     # escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    '
         'Creating lockfile: /var/lock/launchpad-process-job-source-'
         'IRevisionMailJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <REVISION_MAIL branch job \(\d+\) for .*?> '
         r'\(ID %d\) in status Waiting\n'
         'INFO    Ran 1 RevisionMailJob jobs.\n' % mail_job.job.id, stderr)
     self.assertEqual('', stdout)
     self.assertEqual(0, retcode)
 def test_cronscript(self):
     """The cronscript runs the waiting DistroSeriesDifferenceJob."""
     dsp = self.factory.makeDistroSeriesParent()
     package = self.factory.makeSourcePackageName()
     self.getJobSource().createForPackagePublication(
         dsp.derived_series, package, PackagePublishingPocket.RELEASE)
     # Make changes visible to the process we'll be spawning.
     transaction.commit()
     return_code, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['-v', 'IDistroSeriesDifferenceJobSource'])
     # The cronscript ran how we expected it to.  Argument order is
     # (expected, actual), matching the rest of this file.
     self.assertEqual(0, return_code)
     self.assertIn(
         'INFO    Ran 1 DistroSeriesDifferenceJob jobs.', stderr)
     # And it did what we expected: the job is gone and a DSD exists.
     jobs = find_waiting_jobs(
         dsp.derived_series, package, dsp.parent_series)
     self.assertContentEqual([], jobs)
     self.assertEqual(1, find_dsd_for(dsp, package).count())
 def test_sendbranchmail(self):
     """Ensure sendbranchmail runs and sends email."""
     self.useBzrBranches()
     branch, tree = self.createBranch()
     mail_job = RevisionMailJob.create(branch, 1, '*****@*****.**',
                                       'body', 'foo')
     # Make the job visible to the spawned script process.
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py', ['IRevisionMailJobSource'])
     # Raw strings for the lines containing regex escapes: \( and \d
     # are invalid escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    '
         'Creating lockfile: /var/lock/launchpad-process-job-source-'
         'IRevisionMailJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <REVISION_MAIL branch job \(\d+\) for .*?> '
         r'\(ID %d\) in status Waiting\n'
         'INFO    Ran 1 RevisionMailJob jobs.\n' % mail_job.job.id, stderr)
     self.assertEqual('', stdout)
     self.assertEqual(0, retcode)
Beispiel #44
0
    def test_run_from_cronscript(self):
        """A webhook delivery job runs via process-job-source.

        Delivery cannot reach a webhook proxy in the test environment,
        so the job records a connection error, schedules a retry, and
        remains in WAITING status.
        """
        hook = self.factory.makeWebhook(delivery_url=u'http://example.com/ep')
        job = WebhookDeliveryJob.create(hook, 'test', payload={'foo': 'bar'})
        self.assertEqual(JobStatus.WAITING, job.status)
        # Make the job visible to the spawned script process.
        transaction.commit()

        retcode, stdout, stderr = run_script(
            'cronscripts/process-job-source.py', ['IWebhookDeliveryJobSource'],
            expect_returncode=0)
        self.assertEqual('', stdout)
        self.assertIn(
            'WARNING Scheduling retry due to WebhookDeliveryRetry', stderr)
        self.assertIn(
            'INFO    1 WebhookDeliveryJob jobs did not complete.\n', stderr)

        # Still WAITING: the failed delivery was rescheduled, not
        # marked completed or failed.
        self.assertEqual(JobStatus.WAITING, job.status)
        self.assertIn(
            'Cannot connect to proxy',
            job.json_data['result']['connection_error'])
 def test_merge_translations(self):
     """Running the script performs a translation merge."""
     # Destroy every pre-existing mergeable packaging so that only the
     # job created below is merged.
     for packaging in set(TranslationMerger.findMergeablePackagings()):
         with person_logged_in(packaging.owner):
             packaging.destroySelf()
     merge_job = make_translation_merge_job(self.factory)
     self.assertEqual(2, count_translations(merge_job))
     # Make the fixture visible to the spawned script process.
     transaction.commit()
     returncode, out, err = run_script(
         'scripts/rosetta/merge-existing-packagings.py', [],
         expect_returncode=0)
     expected = (
         'INFO    Merging %s/%s and %s/%s.\n' % (
             merge_job.productseries.product.name,
             merge_job.productseries.name,
             merge_job.sourcepackagename.name,
             merge_job.distroseries.name) +
         'INFO    Deleted POTMsgSets: 1.  TranslationMessages: 1.\n'
         'INFO    Merging template 1/2.\n'
         'INFO    Merging template 2/2.\n')
     self.assertEqual(expected, err)
     self.assertEqual('', out)
     self.assertEqual(1, count_translations(merge_job))
 def test_merge_translations(self):
     """process-job-source runs both merge and split packaging jobs."""
     job = make_translation_merge_job(self.factory)
     TranslationSplitJob.create(
         job.productseries, job.distroseries, job.sourcepackagename)
     # Make the jobs visible to the spawned script process.
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py',
         ['ITranslationPackagingJobSource'],
         expect_returncode=0)
     # Backslashes are doubled for the regex parens: \( inside a plain
     # (non-raw) string literal is an invalid escape sequence.  (A raw
     # string can't be used here because of the trailing-\ line
     # continuation after the opening quotes.)
     matcher = MatchesRegex(dedent("""\
         INFO    Creating lockfile: /var/lock/launchpad-process-job-source-ITranslationPackagingJobSource.lock
         INFO    Running synchronously.
         INFO    Running <.*?TranslationMergeJob.*?> \\(ID .*\\) in status Waiting
         INFO    Merging .* and .* in Ubuntu Distroseries.*
         INFO    Deleted POTMsgSets: 1.  TranslationMessages: 1.
         INFO    Running <.*?TranslationSplitJob.*?> \\(ID .*\\) in status Waiting
         INFO    Splitting .* and .* in Ubuntu Distroseries.*
         INFO    1 entries split.
         INFO    Ran 1 TranslationMergeJob jobs.
         INFO    Ran 1 TranslationSplitJob jobs.
         """))
     self.assertThat(stderr, matcher)
     self.assertEqual('', stdout)
 def test_merge_translations(self):
     """Running the script merges the packaging's translations."""
     # Destroy every pre-existing mergeable packaging so that only the
     # one created below is merged.
     for packaging in set(TranslationMerger.findMergeablePackagings()):
         with person_logged_in(packaging.owner):
             packaging.destroySelf()
     job = make_translation_merge_job(self.factory)
     packaging = self.factory.makePackagingLink(
         job.productseries, job.sourcepackagename, job.distroseries)
     self.assertEqual(2, count_translations(job))
     # Make the fixture visible to the spawned script process.
     transaction.commit()
     returncode, out, err = run_script(
         'scripts/rosetta/merge-existing-packagings.py', [],
         expect_returncode=0)
     merge_message = 'INFO    Merging %s/%s and %s/%s.\n' % (
         packaging.productseries.product.name,
         packaging.productseries.name,
         packaging.sourcepackagename.name,
         packaging.distroseries.name)
     expected = (
         merge_message +
         'INFO    Deleted POTMsgSets: 1.  TranslationMessages: 1.\n')
     self.assertEqual(expected, err)
     self.assertEqual('', out)
     self.assertEqual(1, count_translations(job))
 def test_revision_added_job(self):
     """RevisionsAddedJobs are run by sendbranchmail."""
     self.useBzrBranches()
     branch, tree = self.createBranch()
     tree.bzrdir.root_transport.put_bytes('foo', 'baz')
     # XXX: AaronBentley 2010-08-06 bug=614404: a bzr username is
     # required to generate the revision-id.
     with override_environ(BZR_EMAIL='*****@*****.**'):
         tree.commit('Added foo.', rev_id='rev2')
     job = RevisionsAddedJob.create(branch, 'rev1', 'rev2',
                                    '*****@*****.**')
     # Make the job visible to the spawned script process.
     transaction.commit()
     retcode, stdout, stderr = run_script(
         'cronscripts/process-job-source.py', ['IRevisionsAddedJobSource'])
     # Raw strings for the lines containing regex escapes: \( and \d
     # are invalid escape sequences in plain string literals.
     self.assertTextMatchesExpressionIgnoreWhitespace(
         'INFO    '
         'Creating lockfile: /var/lock/launchpad-process-job-source-'
         'IRevisionsAddedJobSource.lock\n'
         'INFO    Running synchronously.\n'
         r'INFO    Running <REVISIONS_ADDED_MAIL branch job \(\d+\) '
         r'for .*?> \(ID %d\) in status Waiting\n'
         'INFO    Ran 1 RevisionsAddedJob jobs.\n' % job.job.id, stderr)
     self.assertEqual('', stdout)
     self.assertEqual(0, retcode)
 def test_cronscript(self):
     """Smoke-test the job source via the process-job-source script."""
     script = 'cronscripts/process-job-source.py'
     run_script(script, ['IInitializeDistroSeriesJobSource'])
     # The spawned process touched the database behind the test's back.
     DatabaseLayer.force_dirty_database()
    def test_translations_export_to_branch(self):
        """End-to-end test of the script doing its work."""

        # Set up a server for hosted branches.
        self.useBzrBranches(direct_database=False)

        # Set up a product and translatable series.
        product = self.factory.makeProduct(name='committobranch')
        product = removeSecurityProxy(product)
        series = product.getSeries('trunk')

        # Set up a translations_branch for the series.
        db_branch, tree = self.create_branch_and_tree(product=product)
        removeSecurityProxy(db_branch).last_scanned_id = 'null:'
        product.translations_usage = ServiceUsage.LAUNCHPAD
        series.translations_branch = db_branch

        # Set up a template & Dutch translation for the series.
        template = self.factory.makePOTemplate(
            productseries=series, owner=product.owner, name='foo',
            path='po/messages.pot')
        template = removeSecurityProxy(template)
        potmsgset = self.factory.makePOTMsgSet(
            template, singular='Hello World', sequence=1)
        pofile = self.factory.makePOFile(
            'nl', potemplate=template, owner=product.owner)
        self.factory.makeCurrentTranslationMessage(
            pofile=pofile, potmsgset=potmsgset,
            translator=product.owner, reviewer=product.owner,
            translations=['Hallo Wereld'])

        # Make all this visible to the script we're about to run.
        transaction.commit()

        # Run The Script.
        retcode, stdout, stderr = run_script(
            'cronscripts/translations-export-to-branch.py', ['-vvv'])

        self.assertEqual('', stdout)
        self.assertEqual(
            'INFO    '
            'Creating lockfile: '
            '/var/lock/launchpad-translations-export-to-branch.lock\n'
            'INFO    Exporting to translations branches.\n'
            'INFO    Exporting Committobranch trunk series.\n'
            'INFO    '
            'Processed 1 item(s); 0 failure(s), 0 unpushed branch(es).',
            self._filterOutput(stderr))
        self.assertIn('No previous translations commit found.', stderr)
        self.assertEqual(0, retcode)

        # The branch now contains a snapshot of the translation.  (Only
        # one file: the Dutch translation we set up earlier).
        branch_contents = map_branch_contents(db_branch.getBzrBranch())
        expected_contents = {
            'po/nl.po': """
                # Dutch translation for .*
                # Copyright .*
                (?:#.*$
                )*msgid ""
                msgstr ""
                (?:"[^"]*"
                )*
                msgid "Hello World"
                msgstr "Hallo Wereld"\n""",
        }

        branch_filenames = set(branch_contents.iterkeys())
        expected_filenames = set(expected_contents.iterkeys())

        unexpected_filenames = branch_filenames - expected_filenames
        self.assertEqual(set(), unexpected_filenames)

        missing_filenames = expected_filenames - branch_filenames
        self.assertEqual(set(), missing_filenames)

        for filename, expected in expected_contents.iteritems():
            contents = branch_contents[filename].lstrip('\n')
            pattern = dedent(expected.lstrip('\n'))
            if not re.match(pattern, contents, re.MULTILINE):
                self.assertEqual(pattern, contents)

        # If we run the script again at this point, it won't export
        # anything because it sees that the POFile has not been changed
        # since the last export.
        retcode, stdout, stderr = run_script(
            'cronscripts/translations-export-to-branch.py',
            ['-vvv', '--no-fudge'])
        self.assertEqual(0, retcode)
        self.assertIn('Last commit was at', stderr)
        self.assertIn(
            "Processed 1 item(s); 0 failure(s), 0 unpushed branch(es).",
            stderr)
        # Raw string: \s is a regex escape, which is an invalid escape
        # sequence in a plain string literal.
        self.assertEqual(
            None, re.search(r"INFO\s+Committed [0-9]+ file", stderr))
 def test_request_daily_builds(self):
     """Ensure the request_daily_builds script requests daily builds."""
     # A virtualized processor with a populated chroot is a
     # prerequisite for the snap builds requested below.
     processor = self.factory.makeProcessor(supports_virtualized=True)
     distroarchseries = self.factory.makeDistroArchSeries(
         processor=processor)
     fake_chroot = self.factory.makeLibraryFileAlias(
         filename="fake_chroot.tar.gz", db_only=True)
     distroarchseries.addOrUpdateChroot(fake_chroot)
     # Stale daily-build recipes and stale auto-build snaps for a
     # product, via both a Bazaar branch and a Git ref.
     product = self.factory.makeProduct()
     prod_branch = self.factory.makeBranch(product=product)
     [prod_ref] = self.factory.makeGitRefs(target=product)
     bzr_prod_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[prod_branch])
     git_prod_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[prod_ref])
     bzr_prod_snap = self.factory.makeSnap(
         distroseries=distroarchseries.distroseries,
         processors=[distroarchseries.processor],
         auto_build=True,
         is_stale=True,
         branch=prod_branch)
     git_prod_snap = self.factory.makeSnap(
         distroseries=distroarchseries.distroseries,
         processors=[distroarchseries.processor],
         auto_build=True,
         is_stale=True,
         git_ref=prod_ref)
     # The same four combinations again, for a source package.
     package = self.factory.makeSourcePackage()
     pack_branch = self.factory.makeBranch(sourcepackage=package)
     [
         pack_ref
     ] = self.factory.makeGitRefs(target=package.distribution_sourcepackage)
     bzr_pack_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[pack_branch])
     git_pack_recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True, branches=[pack_ref])
     bzr_pack_snap = self.factory.makeSnap(
         distroseries=distroarchseries.distroseries,
         processors=[distroarchseries.processor],
         auto_build=True,
         is_stale=True,
         branch=pack_branch)
     git_pack_snap = self.factory.makeSnap(
         distroseries=distroarchseries.distroseries,
         processors=[distroarchseries.processor],
         auto_build=True,
         is_stale=True,
         git_ref=pack_ref)
     items = [
         bzr_prod_recipe,
         git_prod_recipe,
         bzr_prod_snap,
         git_prod_snap,
         bzr_pack_recipe,
         git_pack_recipe,
         bzr_pack_snap,
         git_pack_snap,
     ]
     # Nothing is building yet.
     for item in items:
         self.assertEqual(0, item.pending_builds.count())
     # Make everything visible to the spawned script process.
     transaction.commit()
     # Serve snapcraft.yaml for the Bazaar branches via a fake
     # loggerhead server, and for the Git refs via a fake turnip
     # server — presumably so the script can resolve snap names;
     # TODO confirm against the script's implementation.
     loggerhead_server = self.makeLoggerheadServer()
     loggerhead_server.addInventory(prod_branch, 'snap', 'prod_snap')
     loggerhead_server.addInventory(prod_branch, 'snap/snapcraft.yaml',
                                    'prod_snapcraft_yaml')
     loggerhead_server.addBlob(prod_branch, 'prod_snapcraft_yaml',
                               b'name: prod-snap')
     loggerhead_server.addInventory(pack_branch, 'snap', 'pack_snap')
     loggerhead_server.addInventory(pack_branch, 'snap/snapcraft.yaml',
                                    'pack_snapcraft_yaml')
     loggerhead_server.addBlob(pack_branch, 'pack_snapcraft_yaml',
                               b'name: pack-snap')
     turnip_server = self.makeTurnipServer()
     turnip_server.addBlob(prod_ref.repository, 'snap/snapcraft.yaml',
                           b'name: prod-snap')
     turnip_server.addBlob(pack_ref.repository, 'snap/snapcraft.yaml',
                           b'name: pack-snap')
     retcode, stdout, stderr = run_script(
         'cronscripts/request_daily_builds.py', [])
     self.assertIn('Requested 4 daily recipe builds.', stderr)
     self.assertIn('Requested 4 automatic snap package builds.', stderr)
     # Every stale recipe and snap got exactly one pending build and
     # was marked fresh again.
     for item in items:
         self.assertEqual(1, item.pending_builds.count())
         self.assertFalse(item.is_stale)
 def test_cronscript(self):
     """Smoke-test running the job source from the cronscript."""
     run_script(
         "cronscripts/process-job-source.py",
         ["IInitializeDistroSeriesJobSource"])
     # The spawned process touched the database behind the test's back.
     DatabaseLayer.force_dirty_database()
 def test_run_script(self):
     """The script will run stand-alone."""
     self.layer.force_dirty_database()
     returncode, stdout, stderr = run_script(
         'cronscripts/generate-contents-files.py',
         ['-d', 'ubuntu', '-q'])
     self.assertEqual(0, returncode)
 def test_missing_argument(self):
     """Calling the script without arguments displays usage info."""
     returncode, output, error = run_script(
         self.script, [], expect_returncode=1)
     self.assertIn("Usage:", output)
     self.assertIn("process-job-source.py [options] JOB_SOURCE", output)
    def test_translations_export_to_branch(self):
        """End-to-end test of the script doing its work."""

        # Set up a server for hosted branches.
        self.useBzrBranches(direct_database=False)

        # Set up a product and translatable series.
        product = self.factory.makeProduct(name='committobranch')
        product = removeSecurityProxy(product)
        series = product.getSeries('trunk')

        # Set up a translations_branch for the series.
        db_branch, tree = self.create_branch_and_tree(product=product)
        removeSecurityProxy(db_branch).last_scanned_id = 'null:'
        product.translations_usage = ServiceUsage.LAUNCHPAD
        series.translations_branch = db_branch

        # Set up a template & Dutch translation for the series.
        template = self.factory.makePOTemplate(productseries=series,
                                               owner=product.owner,
                                               name='foo',
                                               path='po/messages.pot')
        template = removeSecurityProxy(template)
        potmsgset = self.factory.makePOTMsgSet(template,
                                               singular='Hello World',
                                               sequence=1)
        pofile = self.factory.makePOFile('nl',
                                         potemplate=template,
                                         owner=product.owner)
        self.factory.makeCurrentTranslationMessage(
            pofile=pofile,
            potmsgset=potmsgset,
            translator=product.owner,
            reviewer=product.owner,
            translations=['Hallo Wereld'])

        # Make all this visible to the script we're about to run.
        transaction.commit()

        # Run The Script.
        retcode, stdout, stderr = run_script(
            'cronscripts/translations-export-to-branch.py', ['-vvv'])

        self.assertEqual('', stdout)
        self.assertEqual(
            'INFO    '
            'Creating lockfile: '
            '/var/lock/launchpad-translations-export-to-branch.lock\n'
            'INFO    Exporting to translations branches.\n'
            'INFO    Exporting Committobranch trunk series.\n'
            'INFO    '
            'Processed 1 item(s); 0 failure(s), 0 unpushed branch(es).',
            self._filterOutput(stderr))
        self.assertIn('No previous translations commit found.', stderr)
        self.assertEqual(0, retcode)

        # The branch now contains a snapshot of the translation.  (Only
        # one file: the Dutch translation we set up earlier).
        branch_contents = map_branch_contents(db_branch.getBzrBranch())
        expected_contents = {
            'po/nl.po':
            """
                # Dutch translation for .*
                # Copyright .*
                (?:#.*$
                )*msgid ""
                msgstr ""
                (?:"[^"]*"
                )*
                msgid "Hello World"
                msgstr "Hallo Wereld"\n""",
        }

        branch_filenames = set(branch_contents.iterkeys())
        expected_filenames = set(expected_contents.iterkeys())

        unexpected_filenames = branch_filenames - expected_filenames
        self.assertEqual(set(), unexpected_filenames)

        missing_filenames = expected_filenames - branch_filenames
        self.assertEqual(set(), missing_filenames)

        for filename, expected in expected_contents.iteritems():
            contents = branch_contents[filename].lstrip('\n')
            pattern = dedent(expected.lstrip('\n'))
            if not re.match(pattern, contents, re.MULTILINE):
                self.assertEqual(pattern, contents)

        # If we run the script again at this point, it won't export
        # anything because it sees that the POFile has not been changed
        # since the last export.
        retcode, stdout, stderr = run_script(
            'cronscripts/translations-export-to-branch.py',
            ['-vvv', '--no-fudge'])
        self.assertEqual(0, retcode)
        self.assertIn('Last commit was at', stderr)
        self.assertIn(
            "Processed 1 item(s); 0 failure(s), 0 unpushed branch(es).",
            stderr)
        # Raw string: \s is a regex escape, which is an invalid escape
        # sequence in a plain string literal.
        self.assertEqual(None, re.search(r"INFO\s+Committed [0-9]+ file",
                                         stderr))
 def test_run_script(self):
     """The script will run stand-alone."""
     self.layer.force_dirty_database()
     exit_code, out, err = run_script(
         'cronscripts/generate-contents-files.py', ['-d', 'ubuntu', '-q'])
     self.assertEqual(0, exit_code)