def _get_modules(self):
    """Lazily build the set of module names and the alias -> module map.

    Iterates over all stored repos and keeps only the most general
    module names: if one name contains another as a substring, only the
    shorter one is retained.  Aliases are treated the same way and are
    additionally recorded in ``alias_module_map`` pointing back to their
    canonical module name.

    :returns: tuple ``(modules, alias_module_map)`` — a set of
        lower-cased module names and a dict mapping alias -> module.
    """
    if self.modules is None:
        self.modules = set()
        self.alias_module_map = {}
        for repo in utils.load_repos(self.runtime_storage_inst):
            module = repo['module'].lower()
            module_aliases = repo.get('aliases') or []
            add = True
            for module_name in ([module] + module_aliases):
                for m in self.modules:
                    # idiomatic membership test instead of find() >= 0
                    if m in module_name:
                        # a more general name is already stored
                        add = False
                        break
                    if module_name in m:
                        # stored name is more specific: replace it
                        # (the immediate break keeps mutating the set
                        # during iteration safe)
                        self.modules.remove(m)
                        break
                if add:
                    self.modules.add(module_name)
            for alias in module_aliases:
                self.alias_module_map[alias] = module
    return self.modules, self.alias_module_map
def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Normalize and store default data; rebuild records when it changed.

    :param runtime_storage_inst: runtime storage backend used for
        reading repos/records and writing the updated records
    :param default_data: raw default data dict; normalized in place
    :param sources_root: local root directory for repo checkouts
    :param force_update: when true, records are re-processed even if the
        default data is unchanged
    :raises Exception: when the project list cannot be retrieved
    """
    LOG.debug('Process default data')
    normalizer.normalize_default_data(default_data)
    # detect whether default data differs from the previously stored copy
    dd_changed = _check_default_data_change(runtime_storage_inst, default_data)
    if 'project_sources' in default_data:
        if not _retrieve_project_list(default_data):
            raise Exception('Unable to retrieve project list')
    _update_default_data(runtime_storage_inst, default_data)
    if (dd_changed or force_update):
        LOG.debug('Gather release index for all repos')
        release_index = {}
        for repo in utils.load_repos(runtime_storage_inst):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())
        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)
        # need to iterate over full view of records and generate valid
        # users profiles
        LOG.debug('Iterate all records to create valid users profiles')
        for record in runtime_storage_inst.get_all_records():
            record_processor_inst.update_user(record)
        # update records according to generated users profiles
        LOG.debug('Update all records according to users profiles')
        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)
def _get_modules(self):
    """Lazily build the module name set and the alias -> module map.

    Only the most general module names are kept: if one name contains
    another as a substring, the longer (more specific) one is dropped.

    :returns: tuple ``(modules, alias_module_map)`` — a set of
        lower-cased module names and a dict mapping alias -> module.
    """
    if self.modules is None:
        self.modules = set()
        self.alias_module_map = dict()
        for repo in utils.load_repos(self.runtime_storage_inst):
            module = repo['module'].lower()
            module_aliases = repo.get('aliases') or []
            add = True
            for module_name in ([module] + module_aliases):
                for m in self.modules:
                    # a stored name is a substring of this one: skip it
                    if module_name.find(m) >= 0:
                        add = False
                        break
                    # this name is a substring of a stored one: drop the
                    # stored, more specific, entry (immediate break keeps
                    # the set mutation during iteration safe)
                    if m.find(module_name) >= 0:
                        self.modules.remove(m)
                        break
                if add:
                    self.modules.add(module_name)
            for alias in module_aliases:
                self.alias_module_map[alias] = module
    return self.modules, self.alias_module_map
def process(runtime_storage_inst, record_processor_inst):
    """Process all repos (bugs incrementally) and mail lists into records.

    Bug data is limited to changes since the stored
    ``bug_modified_since`` watermark, which is advanced to 'now' after a
    successful repo pass.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    current_date = utils.date_to_timestamp('now')
    bug_modified_since = runtime_storage_inst.get_by_key('bug_modified_since')
    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst, bug_modified_since)
    finally:
        # release the review-system connection even if a repo fails
        rcs_inst.close()
    runtime_storage_inst.set_by_key('bug_modified_since', current_date)
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Normalize and store default data; rebuild records when it changed.

    :param runtime_storage_inst: runtime storage backend used for
        reading repos/records and writing the updated records
    :param default_data: raw default data dict; normalized in place
    :param sources_root: local root directory for repo checkouts
    :param force_update: when true, records are re-processed even if the
        default data is unchanged
    :raises Exception: when the project list cannot be retrieved
    """
    LOG.debug('Process default data')
    normalizer.normalize_default_data(default_data)
    # detect whether default data differs from the previously stored copy
    dd_changed = _check_default_data_change(runtime_storage_inst, default_data)
    if 'project_sources' in default_data:
        if not _retrieve_project_list(default_data):
            raise Exception('Unable to retrieve project list')
    _update_default_data(runtime_storage_inst, default_data)
    if (dd_changed or force_update):
        LOG.debug('Gather release index for all repos')
        release_index = {}
        for repo in utils.load_repos(runtime_storage_inst):
            vcs_inst = vcs.get_vcs(repo, sources_root)
            release_index.update(vcs_inst.get_release_index())
        record_processor_inst = record_processor.RecordProcessor(
            runtime_storage_inst)
        # need to iterate over full view of records and generate valid
        # users profiles
        LOG.debug('Iterate all records to create valid users profiles')
        for record in runtime_storage_inst.get_all_records():
            record_processor_inst.update_user(record)
        # update records according to generated users profiles
        LOG.debug('Update all records according to users profiles')
        updated_records = record_processor_inst.update(
            runtime_storage_inst.get_all_records(), release_index)
        runtime_storage_inst.set_records(updated_records)
def process(runtime_storage_inst, record_processor_inst):
    """Process all repos (bugs incrementally) and mail lists into records.

    The ``bug_modified_since`` watermark limits bug retrieval to changes
    since the previous run and is advanced after the repo pass completes.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    current_date = utils.date_to_timestamp('now')
    bug_modified_since = runtime_storage_inst.get_by_key('bug_modified_since')
    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst, bug_modified_since)
    finally:
        # close the review-system connection even if processing fails
        rcs_inst.close()
    runtime_storage_inst.set_by_key('bug_modified_since', current_date)
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def _update_records(runtime_storage_inst, sources_root):
    """Refresh stored records against a freshly gathered release index."""
    LOG.debug("Update existing records")
    release_index = {}
    for repo in utils.load_repos(runtime_storage_inst):
        # each VCS instance contributes its commit -> release mapping
        release_index.update(
            vcs.get_vcs(repo, sources_root).get_release_index())
    record_processor.RecordProcessor(runtime_storage_inst).update(
        release_index)
def _update_records(runtime_storage_inst, sources_root):
    """Refresh stored records against a freshly gathered release index."""
    LOG.debug('Update existing records')
    release_index = {}
    repos = utils.load_repos(runtime_storage_inst)
    for repo in repos:
        vcs_inst = vcs.get_vcs(repo, sources_root)
        # merge this repo's commit -> release mapping into the index
        release_index.update(vcs_inst.get_release_index())
    processor = record_processor.RecordProcessor(runtime_storage_inst)
    processor.update(release_index)
def update_records(runtime_storage_inst, record_processor_inst):
    """Re-process every repo and mail list, then finalize the records."""
    for repo in utils.load_repos(runtime_storage_inst):
        process_repo(repo, runtime_storage_inst, record_processor_inst)
    for mail_list in (runtime_storage_inst.get_by_key('mail_lists') or []):
        process_mail_list(mail_list, runtime_storage_inst,
                          record_processor_inst)
    record_processor_inst.update()
def update_records(runtime_storage_inst, record_processor_inst):
    """Re-run processing over all repos and mail lists, then update records."""
    repos = utils.load_repos(runtime_storage_inst)
    for repo in repos:
        process_repo(repo, runtime_storage_inst, record_processor_inst)
    mail_lists = runtime_storage_inst.get_by_key('mail_lists')
    if not mail_lists:
        # storage may hold no mail lists at all
        mail_lists = []
    for mail_list in mail_lists:
        process_mail_list(mail_list, runtime_storage_inst,
                          record_processor_inst)
    record_processor_inst.update()
def process(runtime_storage_inst, record_processor_inst):
    """Process all repos through the review system into records.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each repo
    """
    repos = utils.load_repos(runtime_storage_inst)
    rcs_inst = rcs.get_rcs(CONF.review_uri)
    # NOTE(review): the setup call below was commented out — looks like a
    # debug leftover, since rcs_inst is used without it; confirm whether
    # get_rcs now performs setup itself before restoring it.
    # rcs_inst.setup(key_filename=CONF.ssh_key_filename,
    #                username=CONF.ssh_username,
    #                gerrit_retry=CONF.gerrit_retry)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst)
    finally:
        # close the review-system connection even if a repo fails
        rcs_inst.close()
    _post_process_records(record_processor_inst, repos)
def process(runtime_storage_inst, record_processor_inst):
    """Process repos, mail lists and translation stats into records."""
    repos = utils.load_repos(runtime_storage_inst)
    for repo in repos:
        _process_repo(repo, runtime_storage_inst, record_processor_inst)
    LOG.info('Processing mail lists')
    for mail_list in (runtime_storage_inst.get_by_key('mail_lists') or []):
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    LOG.info('Processing translations stats')
    _process_translation_stats(runtime_storage_inst, record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def process(runtime_storage_inst, record_processor_inst):
    """Process repos (bugs incrementally) and mail lists into records."""
    repos = utils.load_repos(runtime_storage_inst)
    now = utils.date_to_timestamp('now')
    modified_since = runtime_storage_inst.get_by_key('bug_modified_since')
    for repo in repos:
        _process_repo(repo, runtime_storage_inst, record_processor_inst,
                      modified_since)
    # advance the watermark for the next incremental run
    runtime_storage_inst.set_by_key('bug_modified_since', now)
    LOG.info('Processing mail lists')
    for mail_list in (runtime_storage_inst.get_by_key('mail_lists') or []):
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def process(runtime_storage_inst, record_processor_inst):
    """Process all repos through the review system, then mail lists.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst)
    finally:
        # release the review-system connection even if a repo fails
        rcs_inst.close()
    LOG.info("Processing mail lists")
    mail_lists = runtime_storage_inst.get_by_key("mail_lists") or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def _get_modules(self):
    """Lazily compute the set of lower-cased module names.

    Only the most general names are kept: when one module name contains
    another as a substring, the shorter name wins and the longer one is
    dropped (or never added).

    :returns: set of module names
    """
    if self.modules is None:
        self.modules = set()
        for repo in utils.load_repos(self.runtime_storage_inst):
            module = repo['module'].lower()
            add = True
            for m in self.modules:
                # idiomatic membership test instead of find() >= 0
                if m in module:
                    # a more general name is already stored
                    add = False
                    break
                if module in m:
                    # the stored name is more specific: replace it
                    # (the immediate break keeps the set mutation safe)
                    self.modules.remove(m)
                    break
            if add:
                self.modules.add(module)
    return self.modules
def _get_modules(self):
    """Lazily compute and cache the set of lower-cased module names.

    Keeps only the most general names: a name that contains an already
    stored name is skipped, and a stored name that contains the new one
    is replaced by it.
    """
    if self.modules is not None:
        # already computed — return the cached set
        return self.modules
    self.modules = set()
    for repo in utils.load_repos(self.runtime_storage_inst):
        module = repo['module'].lower()
        should_add = True
        for known in self.modules:
            if module.find(known) >= 0:
                should_add = False
                break
            if known.find(module) >= 0:
                self.modules.remove(known)
                break
        if should_add:
            self.modules.add(module)
    return self.modules
def process(runtime_storage_inst, record_processor_inst):
    """Process repos (bug data since last run) and mail lists into records."""
    repos = utils.load_repos(runtime_storage_inst)
    current_date = utils.date_to_timestamp('now')
    bug_watermark = runtime_storage_inst.get_by_key('bug_modified_since')
    for repo in repos:
        _process_repo(repo, runtime_storage_inst, record_processor_inst,
                      bug_watermark)
    # remember when this pass happened for the next incremental run
    runtime_storage_inst.set_by_key('bug_modified_since', current_date)
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for ml in mail_lists:
        _process_mail_list(ml, runtime_storage_inst, record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def process(runtime_storage_inst, record_processor_inst):
    """Process all repos through the review system, then mail lists.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst)
    finally:
        # release the review-system connection even if a repo fails
        rcs_inst.close()
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def _retrieve_project_list(runtime_storage_inst, project_sources):
    """Merge repos of the configured GitHub organizations into storage.

    For every organization listed in ``project_sources``, fetches its
    repositories from GitHub and appends any repo whose git URI is not
    already stored.  On any GitHub failure the stored list is left
    untouched and the function returns early.

    :param runtime_storage_inst: runtime storage backend
    :param project_sources: iterable of dicts with 'organization',
        'project_type' and 'project_group' keys
    """
    LOG.info('Retrieving project list from GitHub')
    repo_index = {}
    stored_repos = utils.load_repos(runtime_storage_inst)
    for repo in stored_repos:
        repo_index[repo['uri']] = repo
    github = MainClass.Github(timeout=60)
    for project_source in project_sources:
        organization = project_source['organization']
        LOG.debug('Get list of projects for organization %s', organization)
        try:
            repos = github.get_organization(organization).get_repos()
        except Exception as e:
            LOG.exception(e)
            # LOG.warn is deprecated; keep stored data untouched on failure
            LOG.warning('Fail to retrieve list of projects. Keep it unmodified')
            return
        for repo in repos:
            repo_uri = repo.git_url
            repo_name = repo.name
            if repo_uri not in repo_index:
                r = {
                    'branches': ['master'],
                    'module': repo_name,
                    'organization': organization,
                    'project_type': project_source['project_type'],
                    'project_group': project_source['project_group'],
                    'uri': repo_uri,
                    'releases': []
                }
                stored_repos.append(r)
                LOG.debug('Project is added to default data: %s', r)
    runtime_storage_inst.set_by_key('repos', stored_repos)
def process(runtime_storage_inst, record_processor_inst):
    """Process repos via the review system, mail lists and translations.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst)
    finally:
        # release the review-system connection even if a repo fails
        rcs_inst.close()
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    # TODO(adiantum): replace stub with translation acquisition logic
    _process_translation(runtime_storage_inst, record_processor_inst)
    _post_process_records(record_processor_inst, repos)
def init_project_types(vault):
    """Populate the vault with project-type options and group indexes.

    Builds two structures from the stored repos:
    ``project_type_options`` maps each project type to the set of its
    project groups, and ``project_type_group_index`` maps each project
    type and group (plus the catch-all 'all') to the set of its modules.
    """
    runtime_storage_inst = vault['runtime_storage']
    project_type_options = {}
    project_type_group_index = {'all': set(['unknown'])}
    for repo in utils.load_repos(runtime_storage_inst):
        project_type = repo['project_type'].lower()
        project_group = None
        if ('project_group' in repo) and (repo['project_group']):
            project_group = repo['project_group'].lower()
        # record the group (if any) under its project type
        groups = project_type_options.setdefault(project_type, set())
        if project_group:
            groups.add(project_group)
        module = repo['module']
        # index the module under its type, its group and the 'all' bucket
        project_type_group_index.setdefault(project_type, set()).add(module)
        if project_group:
            project_type_group_index.setdefault(
                project_group, set()).add(module)
        project_type_group_index['all'].add(module)
    vault['project_type_options'] = project_type_options
    vault['project_type_group_index'] = project_type_group_index
def init_project_types(vault):
    """Populate the vault with project-type options and group indexes.

    ``project_type_options`` maps each project type to the set of its
    project groups; ``project_type_group_index`` maps each project type
    and group (plus the catch-all 'all') to the set of its modules.
    """
    runtime_storage_inst = vault['runtime_storage']
    project_type_options = {}
    project_type_group_index = {'all': set(['unknown'])}
    for repo in utils.load_repos(runtime_storage_inst):
        project_type = repo['project_type'].lower()
        project_group = None
        if ('project_group' in repo) and (repo['project_group']):
            project_group = repo['project_group'].lower()
        # record the group (if any) under its project type
        if project_type in project_type_options:
            if project_group:
                project_type_options[project_type].add(project_group)
        else:
            if project_group:
                project_type_options[project_type] = set([project_group])
            else:
                project_type_options[project_type] = set()
        module = repo['module']
        # index the module under its project type
        if project_type in project_type_group_index:
            project_type_group_index[project_type].add(module)
        else:
            project_type_group_index[project_type] = set([module])
        # and under its project group, when one is set
        if project_group:
            if project_group in project_type_group_index:
                project_type_group_index[project_group].add(module)
            else:
                project_type_group_index[project_group] = set([module])
        project_type_group_index['all'].add(module)
    vault['project_type_options'] = project_type_options
    vault['project_type_group_index'] = project_type_group_index
def process(runtime_storage_inst, record_processor_inst):
    """Process repos via the review system, mail lists and translations.

    :param runtime_storage_inst: runtime storage backend
    :param record_processor_inst: record processor applied to each source
    """
    repos = utils.load_repos(runtime_storage_inst)
    rcs_inst = rcs.get_rcs(CONF.review_uri)
    rcs_inst.setup(key_filename=CONF.ssh_key_filename,
                   username=CONF.ssh_username,
                   gerrit_retry=CONF.gerrit_retry)
    try:
        for repo in repos:
            _process_repo(repo, runtime_storage_inst, record_processor_inst,
                          rcs_inst)
    finally:
        # release the review-system connection even if a repo fails
        rcs_inst.close()
    LOG.info('Processing mail lists')
    mail_lists = runtime_storage_inst.get_by_key('mail_lists') or []
    for mail_list in mail_lists:
        _process_mail_list(mail_list, runtime_storage_inst,
                           record_processor_inst)
    LOG.info('Processing translations stats')
    _process_translation_stats(runtime_storage_inst, record_processor_inst)
    _post_process_records(record_processor_inst, repos)