def yield_revisions(conduit, args):
    revision_list = phlcon_differential.query(conduit, args.ids)

    use_cache = not bool(args.ids)

    history = {}

    if use_cache:
        cache_filename = '.linteratecache'
        if os.path.isfile(cache_filename):
            with open(cache_filename) as cache_file:
                history = json.load(cache_file)

        # filter out revisions with nothing new; history is keyed by the
        # revision phid, which is already a string, so it round-trips
        # through the JSON cache without conversion
        revision_list = filter(
            lambda x: set(history.get(x.phid, [])) != set(x.diffs),
            revision_list)

    for revision in revision_list:
        diff = phlcon_differential.get_revision_diff(conduit, revision.id)
        with phlsys_fs.chtmpdir_context() as temp_dir:
            try:
                phlcon_differential.write_diff_files(diff, temp_dir)
            except phlcon_differential.WriteDiffError as e:
                if not args.silent:
                    print('skipping revision', revision.id, ':', e)
            else:
                yield revision

        history[revision.phid] = revision.diffs

        if use_cache:
            with open(cache_filename, 'w') as cache_file:
                json.dump(history, cache_file)
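# A hedged illustration of the cache file that yield_revisions reads and
# writes above: '.linteratecache' holds a JSON object mapping revision phid
# to the list of diff ids already seen. The phids and diff ids below are
# made up for illustration only.
#
#     {"PHID-DREV-aaaa1111": [101, 102],
#      "PHID-DREV-bbbb2222": [103]}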
    def test_E_MergeConsumeNew(self):

        with phlsys_fs.chtmpdir_context():

            requester = _MockRequesterObject()
            url = 'http://a.test'
            cache_path = 'phlurl_watcher_cache.json'

            # initialise without existing cache
            watcher_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher = watcher_cache_wrapper.watcher

            # set state 'a is new'
            self.assertTrue(watcher.peek_has_url_recently_changed(url))

            watcher2_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher2 = watcher2_cache_wrapper.watcher

            data_before_merge = watcher2.get_data_for_merging()
            watcher2.merge_data_consume_only(watcher.get_data_for_merging())
            data_after_merge = watcher2.get_data_for_merging()
            self.assertEqual(data_before_merge, {})
            # [ E] b.merge_data_consume_only(a.get_data_for_merging()) copies
            #      elements which are present in a but not in b.
            self.assertEqual(data_after_merge, watcher.get_data_for_merging())
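# The watcher tests in this listing rely on a _MockRequesterObject fixture
# that is not shown. A minimal sketch, assuming the watcher only needs a
# get(url) method and that returning fresh content on every call is what
# makes each refresh look like a change:
class _MockRequesterObject(object):

    def __init__(self):
        self._request_count = 0

    def get(self, url):
        # return different content on every request so the url always
        # appears to have changed after a refresh
        self._request_count += 1
        return '{}-{}'.format(url, self._request_count)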
    def test_D_MergeNotConsumeUnmatching(self):

        with phlsys_fs.chtmpdir_context():

            requester = _MockRequesterObject()
            url = 'http://host.test'
            cache_path = 'phlurl_watcher_cache.json'

            # initialise without existing cache
            watcher_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher = watcher_cache_wrapper.watcher

            # set state 'a is new'
            self.assertTrue(watcher.peek_has_url_recently_changed(url))

            # clone the watcher by saving the cache and loading it into a
            # new wrapper
            watcher_cache_wrapper.save()
            watcher2_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher2 = watcher2_cache_wrapper.watcher

            # [ D] can't consume newness in merge_data_consume_only() with
            #      unmatched hashes
            watcher.has_url_recently_changed(url)
            data_after_consume = watcher.get_data_for_merging()
            watcher2.refresh()
            watcher2.merge_data_consume_only(data_after_consume)
            self.assertTrue(watcher2.peek_has_url_recently_changed(url))
    def test_A_Breathing(self):

        def request_func(url):
            return str(random.randint(0, 8 * 8 * 8 * 8)) + url

        watcher = phlurl_watcher.Watcher(request_func)

        with phlsys_fs.chtmpdir_context():

            with open('data', 'w') as f:
                watcher.dump(f)
            with open('data') as f:
                watcher.load(f)

            url = 'http://z.com'

            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))

            with open('data', 'w') as f:
                watcher.dump(f)
            with open('data') as f:
                watcher.load(f)

            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))
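# A minimal sketch of the peek/consume behaviour asserted above; this is an
# illustration, not the real phlurl_watcher.Watcher. peek_* reports whether
# the url changed without consuming the flag, has_* reports and consumes it,
# and dump/load round-trip the state as JSON.
import hashlib
import json


class _TinyWatcher(object):

    def __init__(self, request_func):
        self._request = request_func
        self._state = {}  # url -> [content_hash, has_changed]

    def _check_url(self, url):
        digest = hashlib.sha1(self._request(url)).hexdigest()
        if url not in self._state or self._state[url][0] != digest:
            self._state[url] = [digest, True]

    def peek_has_url_recently_changed(self, url):
        if url not in self._state:
            self._check_url(url)
        return self._state[url][1]

    def has_url_recently_changed(self, url):
        result = self.peek_has_url_recently_changed(url)
        self._state[url][1] = False  # consume the 'changed' flag
        return result

    def dump(self, f):
        json.dump(self._state, f)

    def load(self, f):
        self._state = json.load(f)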
    def test_A_Breathing(self):
        with phlsys_fs.chtmpdir_context():
            fetch_config = str(
                'remote.origin.fetch=+refs/arcyd/landinglog'
                ':refs/arcyd/origin/landinglog')

            run = phlsys_subprocess.run_commands

            run('git init --bare origin')
            run('git clone origin dev --config ' + fetch_config)

            with phlsys_fs.chdir_context('dev'):

                # make an initial commit on the master branch
                run('touch README')
                run('git add README')
                run('git commit README -m initial_commit')
                run('git push origin master')
                run('git checkout -b myfeature')

                # create a new branch with unique content
                with open('README', 'w') as f:
                    f.write('myfeature content')
                run('git add README')
                run('git commit README -m myfeature_content')
                run('git push -u origin myfeature')

            dev = phlsys_git.Repo('dev')

            # make sure we can prepend a branch to the landinglog when empty
            abdt_landinglog.prepend(dev, '1234', 'myfeature', '4567')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(1, len(log))
            self.assertEqual(log[0].review_sha1, "1234")
            self.assertEqual(log[0].name, "myfeature")
            self.assertEqual(log[0].landed_sha1, "4567")

            # make sure we can prepend another branch
            abdt_landinglog.prepend(dev, '5678', 'newfeature', '8901')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(2, len(log))
            self.assertEqual(log[0].review_sha1, "5678")
            self.assertEqual(log[0].name, "newfeature")
            self.assertEqual(log[0].landed_sha1, "8901")
            self.assertEqual(log[1].review_sha1, "1234")
            self.assertEqual(log[1].name, "myfeature")
            self.assertEqual(log[1].landed_sha1, "4567")

            # make a new, independent clone and make sure we get the same log
            abdt_landinglog.push_log(dev, 'origin')
            run('git clone origin dev2 --config ' + fetch_config)
            with phlsys_fs.chdir_context('dev2'):
                run('git fetch')
            dev2 = phlsys_git.Repo('dev2')
            self.assertListEqual(
                abdt_landinglog.get_log(dev),
                abdt_landinglog.get_log(dev2))
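# A hypothetical report over the landinglog entries; the field names
# review_sha1, name and landed_sha1 come straight from the assertions above.
def print_landinglog_report(repo):
    for entry in abdt_landinglog.get_log(repo):
        print('{} ({}) landed as {}'.format(
            entry.review_sha1, entry.name, entry.landed_sha1))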
    def test_A_Breathing(self):

        with phlsys_fs.chtmpdir_context():

            requester = _MockRequesterObject()
            url = 'http://host.test'
            cache_path = 'phlurl_watcher_cache.json'

            # initialise without existing cache
            watcher_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher = watcher_cache_wrapper.watcher

            # check that we can test and consume the content change
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))

            # save and reload from the cache
            watcher_cache_wrapper.save()
            watcher_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher = watcher_cache_wrapper.watcher

            # check that the content is still considered unchanged
            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))

            # check that refreshing resets the changed flags
            watcher.refresh()

            # check that we can test and consume the content change
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))

            # update the content
            watcher.refresh()

            # save and reload from the cache
            watcher_cache_wrapper.save()
            watcher_cache_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
                cache_path, requester)
            watcher = watcher_cache_wrapper.watcher

            # check that we can consume the change
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.peek_has_url_recently_changed(url))
            self.assertTrue(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.has_url_recently_changed(url))
            self.assertFalse(watcher.peek_has_url_recently_changed(url))
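# A hypothetical polling loop built on the save/reload pattern exercised
# above; handle_changed_url is a made-up callback.
def poll_once(cache_path, requester, urls, handle_changed_url):
    wrapper = phlurl_watcher.FileCacheWatcherWrapper(cache_path, requester)
    wrapper.watcher.refresh()
    for url in urls:
        if wrapper.watcher.has_url_recently_changed(url):
            handle_changed_url(url)
    wrapper.save()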
    def test_B_rotation(self):
        with phlsys_fs.chtmpdir_context():
            debug_handler = _MAKE_HANDLER('testfile')
            content = 'Hello World, this is a test for the rotator.'

            phlsys_fs.write_text_file('testfile', content)
            debug_handler.rotator('testfile', 'dest.gz')

            with gzip.open('dest.gz', 'rb') as dest:
                # [ B] current log can be rotated to a compressed one
                self.assertEqual(content, dest.read())
            # [ B] current log is deleted after rotation
            self.assertFalse(os.path.exists('testfile'))
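# A minimal sketch of what the rotator above is asserted to do: compress the
# current log into the destination and delete the original. This illustrates
# the contract checked by the test, not the handler's actual implementation.
import gzip
import os


def _gzip_rotate(source, dest):
    with open(source, 'rb') as f_in:
        with gzip.open(dest, 'wb') as f_out:
            f_out.write(f_in.read())
    os.remove(source)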
    def test_A_command_with_working_directory(self):
        working_dir = tempfile.mkdtemp()
        with phlsys_fs.chtmpdir_context():
            tmp_dir = os.getcwd()
            pycat_script_path = os.path.join(tmp_dir, 'pycat.sh')
            phlsys_fs.write_text_file(pycat_script_path, _PYCAT_COMMAND)
            mode = os.stat(pycat_script_path).st_mode
            os.chmod(pycat_script_path, mode | stat.S_IEXEC)

            self.assertEqual(os.getcwd(), tmp_dir)
            command = phlsys_workingdircommand.CommandWithWorkingDirectory(
                pycat_script_path, working_dir)
            result = command('Alice')
            # [ A] command is executed correctly
            self.assertEqual('Hello Alice!\n', result)
            # [ A] working directory is restored after command execution
            self.assertEqual(os.getcwd(), tmp_dir)
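# The test above depends on a _PYCAT_COMMAND fixture that is not shown. A
# plausible stand-in, assuming the command simply greets its first argument
# so that command('Alice') yields 'Hello Alice!\n':
_PYCAT_COMMAND = (
    '#!/bin/sh\n'
    'echo "Hello $1!"\n')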
    def test_A_filehandler_breathing(self):
        with phlsys_fs.chtmpdir_context():
            # [ A] CompressedRotatingFileHandler can be initialized
            debug_handler = _MAKE_HANDLER('testfile',
                                          maxBytes=10 * 1024,
                                          backupCount=2)
            debug_handler.setLevel(logging.DEBUG)

            # [ A] log file is created after initialization
            self.assertTrue(os.path.exists('testfile'))
            # [ A] the debug handler can be added to logger
            logging.getLogger().addHandler(debug_handler)

            for _ in xrange(1000):
                _LOGGER.debug('Hello World, this is content for debug log.')

            # [ A] no extra files are created by the debug handler
            expected_files = ['testfile', 'testfile.1.gz', 'testfile.2.gz']
            self.assertItemsEqual(expected_files, os.listdir('.'))
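# The rotation tests rely on a _MAKE_HANDLER factory that is not shown in
# this listing. A minimal sketch, assuming it just forwards its arguments to
# the CompressedRotatingFileHandler named in the comments above; the module
# name phlsys_compressedlogging is a guess.
def _MAKE_HANDLER(filename, **kwargs):
    return phlsys_compressedlogging.CompressedRotatingFileHandler(
        filename, **kwargs)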
    def test_C_existing_files_rotation(self):
        with phlsys_fs.chtmpdir_context():
            debug_handler = _MAKE_HANDLER('testfile', backupCount=2)
            content1 = 'Hello World, this is content for testfile1.'
            content2 = 'Hello World, this is content for testfile2.'

            with gzip.open('testfile.1.gz', 'wb') as f:
                f.write(content1)
            with gzip.open('testfile.2.gz', 'wb') as f:
                f.write(content2)
            debug_handler.rotate_existing_files()

            # [ C] number of compressed files does not exceed backupCount
            self.assertFalse(os.path.exists('testfile.3.gz'))

            # [ C] existing files are rotated correctly
            self.assertFalse(os.path.exists('testfile.1.gz'))
            with gzip.open('testfile.2.gz', 'rb') as f:
                self.assertEqual(content1, f.read())
    def test_B_verbose_filter(self):
        # pychecker makes us do this, it won't recognise that logging.handlers
        # is a thing.
        lg = logging
        with phlsys_fs.chtmpdir_context():
            error_handler = lg.handlers.RotatingFileHandler(
                'errorlog',
                maxBytes=10 * 1024,
                backupCount=10)
            error_handler.setLevel(logging.ERROR)
            logging.getLogger().addHandler(error_handler)

            info_handler = lg.handlers.RotatingFileHandler(
                'infolog',
                maxBytes=10 * 1024,
                backupCount=10)
            info_handler.setLevel(logging.INFO)
            # [ B] VerboseErrorFilter can be added to log handler
            info_handler.addFilter(phlsys_verboseerrorfilter.make_filter())
            logging.getLogger().addHandler(info_handler)

            concise_message = "This is a concise error message."
            verbose_message = "VERBOSE MESSAGE: This is a verbose error "
            "message. This should not appear in error log but not in info log."
            _LOGGER.error(concise_message)
            _LOGGER.error(verbose_message)

            with open('errorlog') as f:
                # [ B] error log contains both concise and verbose messages
                error_log = f.read()
                self.assertTrue(concise_message in error_log)
                self.assertTrue(verbose_message in error_log)

            with open('infolog') as f:
                info_log = f.read()
                # [ B] info log contains concise message
                self.assertTrue(concise_message in info_log)
                # [ B] info log does not contain verbose message
                self.assertFalse(verbose_message in info_log)
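# A minimal sketch of the filter behaviour the assertions above rely on,
# assuming phlsys_verboseerrorfilter.make_filter() drops any record whose
# message starts with the 'VERBOSE MESSAGE:' prefix; this is an illustration,
# not the module's actual code.
import logging


class _VerboseErrorFilter(logging.Filter):

    def filter(self, record):
        # returning False excludes the record from the handler's output
        return not record.getMessage().startswith('VERBOSE MESSAGE:')


def make_filter():
    return _VerboseErrorFilter()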
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__,
        epilog=_USAGE_EXAMPLES)

    parser.add_argument(
        '--phab-uri',
        type=str,
        default=phldef_conduit.TEST_URI,
        help='URI of Phabricator instance to connect to, defaults to expect a '
             'phabricator-tools provisioned local install.')

    def make_creds_from_account(account):
        return (
            account.user,
            account.email,
            account.certificate,
        )

    parser.add_argument(
        '--arcyd-user-email-cert',
        type=str,
        nargs=3,
        default=make_creds_from_account(phldef_conduit.PHAB),
        help='The username, email address and conduit certificate of the '
             'arcyd user, default to the "phab" user in a phabricator-tools '
             'provisioned install. The user should be an administrator of the '
             'instance.')

    parser.add_argument(
        '--alice-user-email-cert',
        type=str,
        nargs=3,
        default=make_creds_from_account(phldef_conduit.ALICE),
        help='The username, email address and conduit certificate of the '
             '"alice" user, default to the "alice" user in a '
             'phabricator-tools provisioned install. The user should be an '
             'administrator of the instance.')

    parser.add_argument(
        '--bob-user-email-cert',
        type=str,
        nargs=3,
        default=make_creds_from_account(phldef_conduit.BOB),
        help='The username, email address and conduit certificate of the '
             '"bob" user, default to the "bob" user in a phabricator-tools '
             'provisioned install. The user should be an administrator of the '
             'instance.')

    parser.add_argument(
        '--repo-count',
        type=int,
        default=1,
        help='The number of repositories to simulate working on, a simple way '
             'to exercise concurrency and gather more accurate performance '
             'information.')

    parser.add_argument(
        '--enable-debug-shell',
        action='store_true',
        default=False,
        help='If this argument is provided, a debug shell is launched '
             'automatically if any of the tests fail. By default, this '
             'option is set to false.')

    args = parser.parse_args()

    with phlsys_fs.chtmpdir_context():
        _do_tests(args)
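# A hypothetical command line, assuming this module is saved as
# exercise_tests.py and run against a local Phabricator install:
#
#     python exercise_tests.py --repo-count 4 --enable-debug-shell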