def test_log_module_launchpad_name(self, lp_bug_generator, lp_module_exists):
    """A bug fetched via launchpad_name is attributed to the primary module."""
    repo = {"module": "savanna", "launchpad_name": "sahara"}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG])
    lp_module_exists.side_effect = iter([True])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "savanna/1458945",
        "importance": "Medium",
        # the record must carry the primary module name, not the LP name
        "module": "savanna",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_module_alias(self, lp_bug_generator, lp_module_exists):
    """A bug found under a module alias is reported under the primary name."""
    repo = {'module': 'savanna', 'aliases': ['sahara']}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG])
    # the primary name is absent on LP, the alias exists
    lp_module_exists.side_effect = iter([False, True])

    record = {
        'assignee': 'slukjanov',
        'date_created': 1433252154,
        'date_fix_committed': 1433266265,
        'id': 'savanna/1458945',
        'importance': 'Medium',
        'module': 'savanna',  # same as the primary module name
        'owner': 'samueldmq',
        'status': 'Fix Released',
        'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945',
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_additional_module(self, lp_bug_generator):
    """A bug linked to a foreign project must be filtered out of the log."""
    repo = {"module": "sahara", "launchpad_name": "sahara"}
    since = 1234567890
    # LINKED_BUG belongs to another project and must not be emitted
    lp_bug_generator.return_value = iter([BUG, LINKED_BUG])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_released_not_committed(self, lp_bug_generator):
    """A released-but-never-committed bug yields no date_fix_committed."""
    repo = {"module": "sahara", "launchpad_name": "sahara"}
    since = 1234567890
    lp_bug_generator.return_value = iter([RELEASED_NOT_COMMITTED_BUG])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_released": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_additional_module(self, lp_bug_generator):
    """A bug linked to a foreign project must be filtered out of the log."""
    repo = {"module": "sahara"}
    since = 1234567890
    # LINKED_BUG belongs to another project and must not be emitted
    lp_bug_generator.return_value = iter([BUG, LINKED_BUG])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_module_alias(self, lp_bug_generator, lp_module_exists):
    """A bug found under a module alias is reported under the primary name."""
    repo = {"module": "savanna", "aliases": ["sahara"]}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG])
    # the primary name is absent on LP, the alias exists
    lp_module_exists.side_effect = iter([False, True])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "id": "savanna/1458945",
        "importance": "Medium",
        "module": "savanna",  # same as the primary module name
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def _process_repo(repo, runtime_storage_inst, record_processor_inst, rcs_inst):
    """Fetch and persist all records (blueprints, bugs, commits, reviews)
    for a single repo.

    Each stage follows the same pipeline: fetch raw items, tag them with a
    record type, run them through the record processor, and merge them into
    runtime storage.
    """
    uri = repo["uri"]
    LOG.info("Processing repo uri: %s", uri)

    LOG.debug("Processing blueprints for repo uri: %s", uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, "bp")
    processed_bp_iterator = record_processor_inst.process(bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator, utils.merge_records)

    LOG.debug("Processing bugs for repo uri: %s", uri)
    # Capture "now" before fetching so that bugs modified while this run is
    # in progress are re-fetched on the next run rather than skipped.
    current_date = utils.date_to_timestamp("now")
    bug_modified_since = runtime_storage_inst.get_by_key("bug_modified_since-%s" % repo["module"])
    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, "bug")
    processed_bug_iterator = record_processor_inst.process(bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator, utils.merge_records)
    # Advance the per-module watermark only after the records were stored.
    runtime_storage_inst.set_by_key("bug_modified_since-%s" % repo["module"], current_date)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    # Process the default branch plus every release branch.
    # NOTE(review): assumes repo always has a 'releases' key — repo.get()
    # would return None here and the loop would raise; confirm with callers.
    branches = {repo.get("default_branch", "master")}
    for release in repo.get("releases"):
        if "branch" in release:
            branches.add(release["branch"])

    for branch in branches:
        LOG.debug("Processing commits in repo: %s, branch: %s", uri, branch)
        # Incremental read: resume from the last stored commit id per branch.
        vcs_key = "vcs:" + str(parse.quote_plus(uri) + ":" + branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)
        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, "commit")
        processed_commit_iterator = record_processor_inst.process(commit_iterator_typed)
        runtime_storage_inst.set_records(processed_commit_iterator, _merge_commits)
        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.debug("Processing reviews for repo: %s, branch: %s", uri, branch)
        rcs_key = "rcs:" + str(parse.quote_plus(uri) + ":" + branch)
        last_id = runtime_storage_inst.get_by_key(rcs_key)
        # Comments are only needed when external CI data has to be extracted.
        review_iterator = rcs_inst.log(repo, branch, last_id, grab_comments=("ci" in repo))
        review_iterator_typed = _record_typer(review_iterator, "review")
        if "ci" in repo:  # add external CI data
            review_iterator_typed = _process_reviews(review_iterator_typed, repo["ci"], repo["module"], branch)
        processed_review_iterator = record_processor_inst.process(review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator, utils.merge_records)
        last_id = rcs_inst.get_last_id(repo, branch)
        runtime_storage_inst.set_by_key(rcs_key, last_id)
def test_log_module_launchpad_name_with_alias(self, lp_bug_generator, lp_module_exists):
    """Both the launchpad_name and every alias are probed for LP existence."""
    repo = {
        'module': 'savanna',
        'launchpad_name': 'sahara',
        'aliases': ['someothername'],
    }
    modified_since = 1234567890
    # No bugs needed — this test only checks which LP projects are probed.
    lp_bug_generator.return_value = iter([])

    list(bps.log(repo, modified_since))

    # Bug fix: the original read `assert lp_module_exists.asser_has_calls(...)`.
    # The misspelled attribute was auto-created by Mock and returned a truthy
    # Mock object, so the assertion could never fail. assert_has_calls()
    # raises AssertionError itself on mismatch; no outer `assert` is needed.
    lp_module_exists.assert_has_calls(
        [mock.call('sahara'), mock.call('someothername')], any_order=True)
def _process_repo_bugs(repo, runtime_storage_inst, record_processor_inst):
    """Fetch, type, process and store bug records for one repo, then
    advance the per-module modification watermark."""
    LOG.info('Processing bugs for repo: %s', repo['uri'])

    # Capture "now" before fetching so bugs modified during this run are
    # re-fetched next time instead of being skipped.
    current_date = utils.date_to_timestamp('now')
    watermark_key = 'bug_modified_since-%s' % repo['module']
    modified_since = runtime_storage_inst.get_by_key(watermark_key)

    typed_bugs = _record_typer(bps.log(repo, modified_since), 'bug')
    runtime_storage_inst.set_records(
        record_processor_inst.process(typed_bugs), utils.merge_records)

    # Only move the watermark after the records were stored.
    runtime_storage_inst.set_by_key(watermark_key, current_date)
def test_log_module_launchpad_name_none(self, lp_bug_generator, lp_module_exists):
    """launchpad_name=None disables bug retrieval for the module entirely."""
    repo = {"module": "savanna", "launchpad_name": None}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG])
    lp_module_exists.side_effect = iter([True])

    actual = list(bps.log(repo, since))

    # LP must not even be queried when launchpad_name is explicitly None
    lp_module_exists.assert_not_called()
    self.assertEqual([], actual)
def test_log_additional_milestone(self, lp_bug_generator):
    """A bug linked to another milestone is mapped to the matching release."""
    repo = {"module": "sahara"}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG, ANOTHER_MILESTONE_BUG])

    base_record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }
    # the milestone-linked copy gets its own id, release and web link
    kilo_record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "sahara/kilo/1458945",
        "importance": "Medium",
        "module": "sahara",
        "release": "kilo",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara Kilo: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/kilo/+bug/1458945",
    }

    self.assertEqual([base_record, kilo_record], list(bps.log(repo, since)))
def test_log(self, lp_bug_generator):
    """A single Launchpad bug is converted into one bug record."""
    repo = {'module': 'sahara'}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG])

    record = {
        'assignee': 'slukjanov',
        'date_created': 1433252154,
        'date_fix_committed': 1433266265,
        'id': 'sahara/1458945',
        'importance': 'Medium',
        'module': 'sahara',
        'owner': 'samueldmq',
        'status': 'Fix Released',
        'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945',
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_released_not_committed(self, lp_bug_generator):
    """A released-but-never-committed bug yields no date_fix_committed."""
    repo = {"module": "sahara"}
    since = 1234567890
    lp_bug_generator.return_value = iter([RELEASED_NOT_COMMITTED_BUG])

    record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_released": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }

    self.assertEqual([record], list(bps.log(repo, since)))
def test_log_additional_milestone(self, lp_bug_generator):
    """A bug linked to another milestone is mapped to the matching release."""
    repo = {'module': 'sahara'}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG, ANOTHER_MILESTONE_BUG])

    base_record = {
        'assignee': 'slukjanov',
        'date_created': 1433252154,
        'date_fix_committed': 1433266265,
        'id': 'sahara/1458945',
        'importance': 'Medium',
        'module': 'sahara',
        'owner': 'samueldmq',
        'status': 'Fix Released',
        'title': 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        'web_link': 'https://bugs.launchpad.net/sahara/+bug/1458945',
    }
    # the milestone-linked copy gets its own id, release and web link
    kilo_record = {
        'assignee': 'slukjanov',
        'date_created': 1433252154,
        'date_fix_committed': 1433266265,
        'id': 'sahara/kilo/1458945',
        'importance': 'Medium',
        'module': 'sahara',
        'release': 'kilo',
        'owner': 'samueldmq',
        'status': 'Fix Released',
        'title': 'Bug #1458945 in Sahara Kilo: "Use graduated oslo.policy"',
        'web_link': 'https://bugs.launchpad.net/sahara/kilo/+bug/1458945',
    }

    self.assertEqual([base_record, kilo_record], list(bps.log(repo, since)))
def _process_repo(repo, runtime_storage_inst, record_processor_inst, rcs_inst):
    """Fetch and persist all records for one repo: blueprints, bugs,
    commits, reviews and (optionally) CI votes.

    Every stage follows the same pipeline: fetch raw items, tag them with
    a record type, run them through the record processor, and merge the
    result into runtime storage.
    """
    uri = repo['uri']
    quoted_uri = six.moves.urllib.parse.quote_plus(uri)
    LOG.info('Processing repo uri: %s', uri)

    LOG.info('Processing blueprints for repo uri: %s', uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, 'bp')
    processed_bp_iterator = record_processor_inst.process(bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator,
                                     utils.merge_records)

    LOG.info('Processing bugs for repo uri: %s', uri)
    # Capture "now" before fetching so bugs modified during this run are
    # re-fetched next time rather than skipped.
    current_date = utils.date_to_timestamp('now')
    bug_modified_since = runtime_storage_inst.get_by_key(
        'bug_modified_since-%s' % repo['module'])
    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, 'bug')
    processed_bug_iterator = record_processor_inst.process(bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator,
                                     utils.merge_records)
    # Advance the per-module watermark only after the records were stored.
    runtime_storage_inst.set_by_key('bug_modified_since-%s' % repo['module'],
                                    current_date)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    # Process the default branch plus every release branch.
    # NOTE(review): assumes repo always has a 'releases' key — repo.get()
    # would return None here and the loop would raise; confirm with callers.
    branches = {repo.get('default_branch', 'master')}
    for release in repo.get('releases'):
        if 'branch' in release:
            branches.add(release['branch'])

    for branch in branches:
        LOG.info('Processing commits in repo: %s, branch: %s', uri, branch)
        # Incremental read: resume from the last stored commit id per branch.
        vcs_key = 'vcs:%s:%s' % (quoted_uri, branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)
        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, 'commit')
        processed_commit_iterator = record_processor_inst.process(
            commit_iterator_typed)
        runtime_storage_inst.set_records(processed_commit_iterator,
                                         _merge_commits)
        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.info('Processing reviews for repo: %s, branch: %s', uri, branch)
        # Reviews are tracked by retrieval time rather than by id.
        rcs_key = 'rcs:%s:%s' % (quoted_uri, branch)
        last_retrieval_time = runtime_storage_inst.get_by_key(rcs_key)
        current_retrieval_time = int(time.time())
        # Comments are only grabbed for abandoned reviews (to learn why).
        review_iterator = itertools.chain(
            rcs_inst.log(repo, branch, last_retrieval_time, status='open'),
            rcs_inst.log(repo, branch, last_retrieval_time, status='merged'),
            rcs_inst.log(repo, branch, last_retrieval_time,
                         status='abandoned', grab_comments=True),
        )
        review_iterator_typed = _record_typer(review_iterator, 'review')
        processed_review_iterator = record_processor_inst.process(
            review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator,
                                         utils.merge_records)
        runtime_storage_inst.set_by_key(rcs_key, current_retrieval_time)

        if 'drivers' in repo:
            # Optional DriverLog pass: extract CI votes from merged reviews.
            LOG.info('Processing CI votes for repo: %s, branch: %s',
                     uri, branch)
            rcs_key = 'ci:%s:%s' % (quoted_uri, branch)
            last_retrieval_time = runtime_storage_inst.get_by_key(rcs_key)
            current_retrieval_time = int(time.time())
            review_iterator = rcs_inst.log(repo, branch, last_retrieval_time,
                                           status='merged',
                                           grab_comments=True)
            review_iterator = driverlog.log(review_iterator, repo['drivers'])
            review_iterator_typed = _record_typer(review_iterator, 'ci')
            processed_review_iterator = record_processor_inst.process(
                review_iterator_typed)
            runtime_storage_inst.set_records(processed_review_iterator,
                                             utils.merge_records)
            runtime_storage_inst.set_by_key(rcs_key, current_retrieval_time)
def _process_repo(repo, runtime_storage_inst, record_processor_inst,
                  bug_modified_since):
    """Fetch and persist all records (blueprints, bugs, commits, reviews)
    for one repo.

    :param bug_modified_since: timestamp passed through to the bug fetcher;
        only bugs modified after it are retrieved.
    """
    uri = repo['uri']
    LOG.info('Processing repo uri: %s', uri)

    LOG.debug('Processing blueprints for repo uri: %s', uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, 'bp')
    processed_bp_iterator = record_processor_inst.process(
        bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator,
                                     utils.merge_records)

    LOG.debug('Processing bugs for repo uri: %s', uri)
    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, 'bug')
    processed_bug_iterator = record_processor_inst.process(
        bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator,
                                     utils.merge_records)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    # The review system client is created per repo and needs SSH credentials.
    rcs_inst = rcs.get_rcs(repo, cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)

    # Process master plus every release branch.
    # NOTE(review): assumes repo always has a 'releases' key — repo.get()
    # would return None here and the loop would raise; confirm with callers.
    branches = set(['master'])
    for release in repo.get('releases'):
        if 'branch' in release:
            branches.add(release['branch'])

    for branch in branches:
        LOG.debug('Processing commits in repo: %s, branch: %s', uri, branch)
        # Incremental read: resume from the last stored commit id per branch.
        vcs_key = 'vcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)
        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, 'commit')
        processed_commit_iterator = record_processor_inst.process(
            commit_iterator_typed)
        runtime_storage_inst.set_records(
            processed_commit_iterator, _merge_commits)
        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.debug('Processing reviews for repo: %s, branch: %s', uri, branch)
        rcs_key = 'rcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(rcs_key)
        # Comments are only needed when external CI data must be extracted.
        review_iterator = rcs_inst.log(branch, last_id,
                                       grab_comments=('ci' in repo))
        review_iterator_typed = _record_typer(review_iterator, 'review')
        if 'ci' in repo:  # add external CI data
            review_iterator_typed = _process_reviews(
                review_iterator_typed, repo['ci'], repo['module'], branch)
        processed_review_iterator = record_processor_inst.process(
            review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator,
                                         utils.merge_records)
        last_id = rcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(rcs_key, last_id)
def test_log_additional_milestone(self, lp_bug_generator):
    """A bug linked to another milestone is mapped to the matching release."""
    repo = {"module": "sahara", "launchpad_name": "sahara"}
    since = 1234567890
    lp_bug_generator.return_value = iter([BUG, ANOTHER_MILESTONE_BUG])

    base_record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "sahara/1458945",
        "importance": "Medium",
        "module": "sahara",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/+bug/1458945",
    }
    # the milestone-linked copy gets its own id, release and web link
    kilo_record = {
        "assignee": "slukjanov",
        "date_created": 1433252154,
        "date_fix_committed": 1433266265,
        "date_fix_released": 1433266265,
        "id": "sahara/kilo/1458945",
        "importance": "Medium",
        "module": "sahara",
        "release": "kilo",
        "owner": "samueldmq",
        "status": "Fix Released",
        "title": 'Bug #1458945 in Sahara Kilo: "Use graduated oslo.policy"',
        "web_link": "https://bugs.launchpad.net/sahara/kilo/+bug/1458945",
    }

    self.assertEqual([base_record, kilo_record], list(bps.log(repo, since)))
def _process_repo(repo, runtime_storage_inst, record_processor_inst,
                  bug_modified_since):
    """Fetch and persist all records (blueprints, bugs, commits, reviews)
    for one repo.

    :param bug_modified_since: timestamp passed through to the bug fetcher;
        only bugs modified after it are retrieved.
    """
    uri = repo['uri']
    LOG.info('Processing repo uri: %s', uri)

    LOG.debug('Processing blueprints for repo uri: %s', uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, 'bp')
    processed_bp_iterator = record_processor_inst.process(bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator,
                                     utils.merge_records)

    LOG.debug('Processing bugs for repo uri: %s', uri)
    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, 'bug')
    processed_bug_iterator = record_processor_inst.process(bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator,
                                     utils.merge_records)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    # The review system client is created per repo and needs SSH credentials.
    rcs_inst = rcs.get_rcs(repo, cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)

    # Process master plus every release branch.
    # NOTE(review): assumes repo always has a 'releases' key — repo.get()
    # would return None here and the loop would raise; confirm with callers.
    branches = set(['master'])
    for release in repo.get('releases'):
        if 'branch' in release:
            branches.add(release['branch'])

    for branch in branches:
        LOG.debug('Processing commits in repo: %s, branch: %s', uri, branch)
        # Incremental read: resume from the last stored commit id per branch.
        vcs_key = 'vcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)
        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, 'commit')
        processed_commit_iterator = record_processor_inst.process(
            commit_iterator_typed)
        runtime_storage_inst.set_records(processed_commit_iterator,
                                         _merge_commits)
        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.debug('Processing reviews for repo: %s, branch: %s', uri, branch)
        rcs_key = 'rcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(rcs_key)
        # Comments are only needed when external CI data must be extracted.
        review_iterator = rcs_inst.log(branch, last_id,
                                       grab_comments=('ci' in repo))
        review_iterator_typed = _record_typer(review_iterator, 'review')
        if 'ci' in repo:  # add external CI data
            review_iterator_typed = _process_reviews(review_iterator_typed,
                                                     repo['ci'],
                                                     repo['module'], branch)
        processed_review_iterator = record_processor_inst.process(
            review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator,
                                         utils.merge_records)
        last_id = rcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(rcs_key, last_id)
def _process_repo(repo, runtime_storage_inst, record_processor_inst, rcs_inst):
    """Fetch and persist all records for one repo: blueprints, bugs,
    commits, and — when the repo is hosted on Gerrit — reviews and
    optional DriverLog CI votes.

    Every stage follows the same pipeline: fetch raw items, tag them with
    a record type, run them through the record processor, and merge the
    result into runtime storage.
    """
    uri = repo['uri']
    quoted_uri = six.moves.urllib.parse.quote_plus(uri)
    LOG.info('Processing repo uri: %s', uri)

    LOG.info('Processing blueprints for repo uri: %s', uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, 'bp')
    processed_bp_iterator = record_processor_inst.process(
        bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator,
                                     utils.merge_records)

    LOG.info('Processing bugs for repo uri: %s', uri)
    # Capture "now" before fetching so bugs modified during this run are
    # re-fetched next time rather than skipped.
    current_date = utils.date_to_timestamp('now')
    bug_modified_since = runtime_storage_inst.get_by_key(
        'bug_modified_since-%s' % repo['module'])
    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, 'bug')
    processed_bug_iterator = record_processor_inst.process(
        bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator,
                                     utils.merge_records)
    # Advance the per-module watermark only after the records were stored.
    runtime_storage_inst.set_by_key(
        'bug_modified_since-%s' % repo['module'], current_date)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    # Process the default branch plus every release branch.
    # NOTE(review): assumes repo always has a 'releases' key — repo.get()
    # would return None here and the loop would raise; confirm with callers.
    branches = {repo.get('default_branch', 'master')}
    for release in repo.get('releases'):
        if 'branch' in release:
            branches.add(release['branch'])

    for branch in branches:
        LOG.info('Processing commits in repo: %s, branch: %s', uri, branch)
        # Incremental read: resume from the last stored commit id per branch.
        vcs_key = 'vcs:%s:%s' % (quoted_uri, branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)
        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, 'commit')
        processed_commit_iterator = record_processor_inst.process(
            commit_iterator_typed)
        runtime_storage_inst.set_records(
            processed_commit_iterator, _merge_commits)
        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        if 'has_gerrit' not in repo:
            continue  # do not poll reviews for those that do not have them

        LOG.info('Processing reviews for repo: %s, branch: %s', uri, branch)
        # Reviews are tracked by retrieval time rather than by id.
        rcs_key = 'rcs:%s:%s' % (quoted_uri, branch)
        last_retrieval_time = runtime_storage_inst.get_by_key(rcs_key)
        current_retrieval_time = int(time.time())
        # Comments are only grabbed for abandoned reviews (to learn why).
        review_iterator = itertools.chain(
            rcs_inst.log(repo, branch, last_retrieval_time, status='open'),
            rcs_inst.log(repo, branch, last_retrieval_time, status='merged'),
            rcs_inst.log(repo, branch, last_retrieval_time,
                         status='abandoned', grab_comments=True),
        )
        review_iterator_typed = _record_typer(review_iterator, 'review')
        processed_review_iterator = record_processor_inst.process(
            review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator,
                                         utils.merge_records)
        runtime_storage_inst.set_by_key(rcs_key, current_retrieval_time)

        if 'drivers' in repo:
            # Optional DriverLog pass: extract CI votes from merged reviews.
            LOG.info('Processing CI votes for repo: %s, branch: %s',
                     uri, branch)
            rcs_key = 'ci:%s:%s' % (quoted_uri, branch)
            last_retrieval_time = runtime_storage_inst.get_by_key(rcs_key)
            current_retrieval_time = int(time.time())
            review_iterator = rcs_inst.log(repo, branch, last_retrieval_time,
                                           status='merged',
                                           grab_comments=True)
            review_iterator = driverlog.log(review_iterator, repo['drivers'])
            review_iterator_typed = _record_typer(review_iterator, 'ci')
            processed_review_iterator = record_processor_inst.process(
                review_iterator_typed)
            runtime_storage_inst.set_records(processed_review_iterator,
                                             utils.merge_records)
            runtime_storage_inst.set_by_key(rcs_key, current_retrieval_time)