def lp_bug_generator(module, modified_since):
    """Yield Launchpad bug tasks (and their related tasks) for a module.

    Pages through the searchTasks collection, following
    'next_collection_link' until it is exhausted.

    :param module: Launchpad project name to search bugs for
    :param modified_since: optional timestamp; only bugs modified after
        it are requested
    """
    uri = LP_URI_DEVEL % (module + '?ws.op=searchTasks')
    for status in BUG_STATUSES:
        uri += '&status=' + six.moves.urllib.parse.quote_plus(status)
    if modified_since:
        uri += '&modified_since=' + utils.timestamp_to_utc_date(
            modified_since)

    while uri:
        LOG.debug('Reading chunk from uri %s', uri)
        chunk = utils.read_json_from_uri(uri, session=launchpad_session)

        if not chunk:
            LOG.warning('No data was read from uri %s', uri)
            break

        for record in chunk['entries']:
            yield record

            related_tasks_uri = record['related_tasks_collection_link']
            LOG.debug('Reading related task from uri %s', related_tasks_uri)
            related_tasks = utils.read_json_from_uri(
                related_tasks_uri, session=launchpad_session)
            if not related_tasks:
                # fixed: log the uri that actually failed (the original
                # logged the chunk uri here)
                LOG.warning('No data was read from uri %s',
                            related_tasks_uri)
            elif related_tasks['entries']:
                for related_task in related_tasks['entries']:
                    yield related_task

        uri = chunk.get('next_collection_link')
def main():
    """Entry point: load default data and run the processing pipeline.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.sources_root,
                                   cfg.CONF.force_update)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    update_records(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)
def _zanata_get_project_versions(project_id):
    """Yield ids of a Zanata project's iterations matching the version pattern.

    Returns an empty iterator when the project data cannot be read.

    :param project_id: Zanata project identifier
    """
    LOG.debug("Reading iterations for project %s", project_id)
    uri = ZANATA_URI % ('projects/p/%s' % project_id)
    project_data = utils.read_json_from_uri(uri, session=zanata_session)
    if not project_data:
        # read_json_from_uri may return None on failure; the original
        # crashed on .get() here
        LOG.warning('No data was read from uri %s', uri)
        return iter(())
    return (it['id'] for it in project_data.get('iterations', [])
            if ZANATA_VERSION_PATTERN.match(it['id']))
def _zanata_get_projects():
    """Yield ids of Zanata projects matching the project pattern.

    Returns an empty iterator when the project list cannot be read.
    """
    uri = ZANATA_URI % 'projects'
    LOG.debug("Reading projects from %s", uri)
    projects_data = utils.read_json_from_uri(uri, session=zanata_session)
    if not projects_data:
        # read_json_from_uri may return None on failure; iterating None
        # raised TypeError in the original
        LOG.warning('No data was read from uri %s', uri)
        return iter(())
    return (p['id'] for p in projects_data
            if ZANATA_PROJECT_PATTERN.match(p['id']))
def main():
    """Entry point: validate default data and run the processing pipeline.

    Returns a truthy value (non-zero exit status) when the default data
    cannot be loaded or fails schema validation.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    try:
        jsonschema.validate(default_data, schema.default_data)
    except jsonschema.ValidationError as e:
        LOG.critical('The default data is invalid: %s' % e)
        return not 0

    default_data_processor.process(runtime_storage_inst, default_data)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
    LOG.info('stackalytics-processor succeeded.')
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Merge DriverLog CI info into default data (users and repos).

    :param default_data: mutable default-data dict ('users' and 'repos'
        lists are updated in place)
    :param driverlog_data_uri: uri of the DriverLog JSON feed
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        # best-effort: a missing feed must not abort the whole run
        # (the original crashed subscripting None)
        LOG.warning('DriverLog data is not available')
        return

    module_ci_ids = {}  # module -> {ci_id: driver}
    ci_ids = set()  # ci ids already registered as synthetic users

    for driver in driverlog_data['drivers']:
        if 'ci' not in driver:
            continue

        module = driver['project_id'].split('/')[1]
        if module not in module_ci_ids:
            module_ci_ids[module] = {}
        ci_id = driver['ci']['id']
        module_ci_ids[module][ci_id] = driver

        # register every CI user only once, even if it covers several
        # modules
        if ci_id not in ci_ids:
            ci_ids.add(ci_id)
            default_data['users'].append({
                'user_id': user_processor.make_user_id(gerrit_id=ci_id),
                'gerrit_id': ci_id,
                'user_name': ci_id,
                'static': True,
                'companies': [
                    {'company_name': driver['vendor'], 'end_date': None}],
            })

    for repo in default_data['repos']:
        if repo['module'] in module_ci_ids:
            repo['ci'] = module_ci_ids[repo['module']]
def main():
    """Entry point: load default data and run the processing pipeline.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.sources_root,
                                   cfg.CONF.force_update)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    update_records(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)
def main():
    """Entry point: load default data, update repos, apply corrections.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.sources_root)

    update_pids(runtime_storage_inst)

    update_repos(runtime_storage_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Merge DriverLog CI info into default data (users and repos).

    :param default_data: mutable default-data dict ('users' and 'repos'
        lists are updated in place)
    :param driverlog_data_uri: uri of the DriverLog JSON feed
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        # best-effort: a missing feed must not abort the whole run
        # (the original crashed subscripting None)
        LOG.warning('DriverLog data is not available')
        return

    cis = {}  # module -> {ci_id: driver}
    seen_ci_ids = set()

    for driver in driverlog_data['drivers']:
        if 'ci' not in driver:
            continue

        module = driver['project_id'].split('/')[1]
        if module not in cis:
            cis[module] = {}
        ci_id = driver['ci']['id']
        cis[module][ci_id] = driver

        # fixed: append the synthetic user once per CI id; the original
        # created a duplicate user entry for every module a CI covered
        if ci_id not in seen_ci_ids:
            seen_ci_ids.add(ci_id)
            default_data['users'].append({
                'gerrit_id': ci_id,
                'user_name': ci_id,
                'static': True,
                'companies': [{
                    'company_name': driver['vendor'],
                    'end_date': None
                }],
            })

    for repo in default_data['repos']:
        if repo['module'] in cis:
            repo['ci'] = cis[repo['module']]
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Merge DriverLog CI info into default data (users and repos).

    :param default_data: mutable default-data dict ('users' and 'repos'
        lists are updated in place)
    :param driverlog_data_uri: uri of the DriverLog JSON feed
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        # best-effort: a missing feed must not abort the whole run
        # (the original crashed subscripting None)
        LOG.warning('DriverLog data is not available')
        return

    module_ci_ids = {}  # module -> {ci_id: driver}
    ci_ids = set()  # ci ids already registered as synthetic users

    for driver in driverlog_data['drivers']:
        if 'ci' not in driver:
            continue

        module = driver['project_id'].split('/')[1]
        if module not in module_ci_ids:
            module_ci_ids[module] = {}
        ci_id = driver['ci']['id']
        module_ci_ids[module][ci_id] = driver

        # register every CI user only once, even if it covers several
        # modules
        if ci_id not in ci_ids:
            ci_ids.add(ci_id)
            default_data['users'].append({
                'user_id': user_processor.make_user_id(gerrit_id=ci_id),
                'gerrit_id': ci_id,
                'user_name': ci_id,
                'static': True,
                'companies': [
                    {'company_name': driver['vendor'], 'end_date': None}],
            })

    for repo in default_data['repos']:
        if repo['module'] in module_ci_ids:
            repo['ci'] = module_ci_ids[repo['module']]
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Attach DriverLog driver records to repos and register CI users.

    Updates default_data in place: every driver with a 'ci' section gets
    a synthetic static user, and each repo gains a 'drivers' list.
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        LOG.warning('DriverLog data is not available')
        return

    drivers_by_module = collections.defaultdict(list)

    for record in driverlog_data['drivers']:
        if 'ci' not in record:
            continue

        # prefer the explicit repo reference, fall back to project_id
        source = record.get('repo') or record['project_id']
        drivers_by_module[source.split('/')[1]].append(record)

        ci_user = {
            'user_id': user_processor.make_user_id(ci_id=record['name']),
            'user_name': record['name'],
            'static': True,
            'companies': [{
                'company_name': record['vendor'],
                'end_date': None
            }],
        }
        default_data['users'].append(ci_user)

    for repo in default_data['repos']:
        module = repo['module']
        if module in drivers_by_module:
            repo['drivers'] = drivers_by_module[module]
def main():
    """Entry point: run the full stackalytics processing pipeline.

    Returns a truthy value (non-zero exit status) when the default data
    cannot be loaded.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: load default data, update repos, apply corrections.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.sources_root)

    update_pids(runtime_storage_inst)

    update_repos(runtime_storage_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)
def main():
    """Entry point: run the full stackalytics processing pipeline.

    Returns a truthy value (non-zero exit status) when the default data
    cannot be loaded.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical("Unable to load default data")
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst, cfg.CONF.project_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key("runtime_storage_update_time",
                                    utils.date_to_timestamp("now"))
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Attach DriverLog driver records to repos and register CI users.

    Updates default_data in place: every driver with a 'ci' section gets
    a synthetic static user, and each repo gains a 'drivers' list.
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        LOG.warning('DriverLog data is not available')
        return

    drivers_by_module = collections.defaultdict(list)

    for record in driverlog_data['drivers']:
        if 'ci' not in record:
            continue

        # prefer the explicit repo reference, fall back to project_id
        source = record.get('repo') or record['project_id']
        drivers_by_module[source.split('/')[1]].append(record)

        ci_user = {
            'user_id': user_processor.make_user_id(ci_id=record['name']),
            'user_name': record['name'],
            'static': True,
            'companies': [
                {'company_name': record['vendor'], 'end_date': None}],
        }
        default_data['users'].append(ci_user)

    for repo in default_data['repos']:
        module = repo['module']
        if module in drivers_by_module:
            repo['drivers'] = drivers_by_module[module]
def _openstack_profile_by_email(email):
    """Return the most recent OpenStackID profile for email, or None."""
    LOG.debug('Lookup user email %s at OpenStackID', email)
    response = utils.read_json_from_uri(OSID_URI % email,
                                        session=_openstackid_session)
    if not response or not response.get('data'):
        # request failed or no matching profile exists
        return None
    # records are ordered oldest-first; take the most recent one
    return response['data'][-1]
def _openstack_profile_by_email(email):
    """Return the most recent OpenStackID profile for email, or None."""
    LOG.debug('Lookup user email %s at OpenStackID', email)
    response = utils.read_json_from_uri(OSID_URI % email,
                                        session=_openstackid_session)
    if not response or not response.get('data'):
        # request failed or no matching profile exists
        return None
    # records are ordered oldest-first; take the most recent one
    return response['data'][-1]
def lp_blueprint_generator(module):
    """Yield all blueprint specifications of a module from Launchpad.

    Pages through the collection, following 'next_collection_link'
    until it is exhausted.

    :param module: Launchpad project name
    """
    uri = LP_URI_DEVEL % (module + '/all_specifications')
    while uri:
        LOG.debug('Reading chunk from uri %s', uri)
        chunk = utils.read_json_from_uri(uri)

        if not chunk:
            # fixed: LOG.warn is a deprecated alias of LOG.warning
            LOG.warning('No data was read from uri %s', uri)
            break

        for record in chunk['entries']:
            yield record

        uri = chunk.get('next_collection_link')
def apply_corrections(uri, runtime_storage_inst):
    """Read corrections from uri and apply those that have a primary key.

    Corrections lacking 'primary_key' are logged and skipped.
    """
    LOG.info('Applying corrections from uri %s', uri)
    corrections = utils.read_json_from_uri(uri)
    if not corrections:
        LOG.error('Unable to read corrections from uri: %s', uri)
        return

    applicable = []
    for correction in corrections['corrections']:
        if 'primary_key' not in correction:
            LOG.warning('Correction misses primary key: %s', correction)
            continue
        applicable.append(correction)

    runtime_storage_inst.apply_corrections(applicable)
def lp_blueprint_generator(module):
    """Yield all blueprint specifications of a module from Launchpad.

    Pages through the collection, following 'next_collection_link'
    until it is exhausted.
    """
    next_uri = LP_URI_DEVEL % (module + '/all_specifications')
    while next_uri:
        LOG.debug('Reading chunk from uri %s', next_uri)
        chunk = utils.read_json_from_uri(next_uri,
                                         session=launchpad_session)
        if not chunk:
            LOG.warning('No data was read from uri %s', next_uri)
            break

        for entry in chunk['entries']:
            yield entry

        next_uri = chunk.get('next_collection_link')
def apply_corrections(uri, runtime_storage_inst):
    """Read corrections from uri and apply those that have a primary key.

    Corrections lacking 'primary_key' are logged and skipped.
    """
    LOG.info('Applying corrections from uri %s', uri)
    corrections = utils.read_json_from_uri(uri)
    if not corrections:
        LOG.error('Unable to read corrections from uri: %s', uri)
        return

    valid_corrections = []
    for c in corrections['corrections']:
        if 'primary_key' in c:
            valid_corrections.append(c)
        else:
            # fixed: LOG.warn is a deprecated alias of LOG.warning
            LOG.warning('Correction misses primary key: %s', c)
    runtime_storage_inst.apply_corrections(valid_corrections)
def _get_lp_info(self, email):
    """Resolve an email to a Launchpad (id, display name) pair.

    :param email: email address to look up
    :returns: tuple (launchpad name, display name), or (None, None)
        when the email is invalid or no profile is found
    """
    lp_profile = None
    if not re.match(r'[\w\d_\.-]+@([\w\d_\.-]+\.)+[\w]+', email):
        # fixed: pass lazy %-args to the logger instead of pre-formatting
        LOG.debug('User email is not valid %s', email)
    else:
        LOG.debug('Lookup user email %s at Launchpad', email)
        uri = ('https://api.launchpad.net/1.0/people/?'
               'ws.op=getByEmail&email=%s' % email)
        lp_profile = utils.read_json_from_uri(uri)

    if not lp_profile:
        LOG.debug('User with email %s not found', email)
        return None, None

    return lp_profile['name'], lp_profile['display_name']
def _get_lp_info(self, email):
    """Resolve an email to a Launchpad (id, display name) pair.

    :param email: email address to look up
    :returns: tuple (launchpad name, display name), or (None, None)
        when the email is invalid or no profile is found
    """
    lp_profile = None
    if not re.match(r'[\w\d_\.-]+@([\w\d_\.-]+\.)+[\w]+', email):
        # fixed: pass lazy %-args to the logger instead of pre-formatting
        LOG.debug('User email is not valid %s', email)
    else:
        LOG.debug('Lookup user email %s at Launchpad', email)
        uri = ('https://api.launchpad.net/1.0/people/?'
               'ws.op=getByEmail&email=%s' % email)
        lp_profile = utils.read_json_from_uri(uri)

    if not lp_profile:
        LOG.debug('User with email %s not found', email)
        return None, None

    return lp_profile['name'], lp_profile['display_name']
def main():
    """Entry point: set up config/logging/gerrit and run the pipeline.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf(project='stackalytics')

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    gerrit = rcs.get_rcs(None, cfg.CONF.review_uri)
    gerrit.setup(key_filename=cfg.CONF.ssh_key_filename,
                 username=cfg.CONF.ssh_username)

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.git_base_uri,
                                   gerrit,
                                   cfg.CONF.driverlog_data_uri)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: set up config/logging/gerrit and run the pipeline.

    NOTE(review): initialization order matters here -- options must be
    registered before conf() parses the command line, and logging is
    set up before anything logs.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf(project='stackalytics')

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    gerrit = rcs.get_rcs(None, cfg.CONF.review_uri)
    gerrit.setup(key_filename=cfg.CONF.ssh_key_filename,
                 username=cfg.CONF.ssh_username)

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.git_base_uri,
                                   gerrit,
                                   cfg.CONF.driverlog_data_uri)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: set up oslo config/logging and run the pipeline.

    NOTE(review): initialization order matters -- logging options must
    be registered before conf() parses the command line.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    logging.register_options(conf)
    logging.set_defaults()
    conf(project='stackalytics')

    logging.setup(conf, 'stackalytics')
    LOG.info('Logging enabled')
    conf.log_opt_values(LOG, std_logging.DEBUG)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst, cfg.CONF.project_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: set up oslo config/logging and run the pipeline.

    NOTE(review): initialization order matters -- logging options must
    be registered before conf() parses the command line.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    logging.register_options(conf)
    logging.set_defaults()
    conf(project='stackalytics')

    logging.setup(conf, 'stackalytics')
    LOG.info('Logging enabled')
    conf.log_opt_values(LOG, std_logging.DEBUG)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst, cfg.CONF.project_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: validate default data and run the processing pipeline.

    Returns a truthy value (non-zero exit status) when the default data
    cannot be loaded or fails schema validation.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return not 0  # truthy -> non-zero process exit status

    try:
        jsonschema.validate(default_data, schema.default_data)
    except jsonschema.ValidationError as e:
        LOG.critical('The default data is invalid: %s' % e)
        return not 0

    default_data_processor.process(runtime_storage_inst, default_data)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
    LOG.info('stackalytics-processor succeeded.')
def _update_with_driverlog_data(default_data, driverlog_data_uri):
    """Merge DriverLog CI info into default data (users and repos).

    :param default_data: mutable default-data dict ('users' and 'repos'
        lists are updated in place)
    :param driverlog_data_uri: uri of the DriverLog JSON feed
    """
    LOG.info('Reading DriverLog data from uri: %s', driverlog_data_uri)
    driverlog_data = utils.read_json_from_uri(driverlog_data_uri)
    if not driverlog_data:
        # best-effort: a missing feed must not abort the whole run
        # (the original crashed subscripting None)
        LOG.warning('DriverLog data is not available')
        return

    cis = {}  # module -> {ci_id: driver}
    seen_ci_ids = set()

    for driver in driverlog_data['drivers']:
        if 'ci' not in driver:
            continue

        module = driver['project_id'].split('/')[1]
        if module not in cis:
            cis[module] = {}
        ci_id = driver['ci']['id']
        cis[module][ci_id] = driver

        # fixed: append the synthetic user once per CI id; the original
        # created a duplicate user entry for every module a CI covered
        if ci_id not in seen_ci_ids:
            seen_ci_ids.add(ci_id)
            default_data['users'].append({
                'launchpad_id': ci_id,
                'user_name': ci_id,
                'companies': [
                    {'company_name': driver['vendor'], 'end_date': None}],
            })

    for repo in default_data['repos']:
        if repo['module'] in cis:
            repo['ci'] = cis[repo['module']]
def lp_profile_by_launchpad_id(launchpad_id):
    """Fetch the Launchpad profile JSON for the given launchpad id."""
    LOG.debug('Lookup user id %s at Launchpad', launchpad_id)
    return utils.read_json_from_uri(LP_URI_V1 % ('~' + launchpad_id))
def lp_profile_by_email(email):
    """Fetch the Launchpad profile JSON for the given email address."""
    LOG.debug('Lookup user email %s at Launchpad', email)
    return utils.read_json_from_uri(
        LP_URI_V1 % ('people/?ws.op=getByEmail&email=' + email))
def lp_profile_by_email(email):
    """Fetch the Launchpad profile JSON for the given email address."""
    LOG.debug('Lookup user email %s at Launchpad', email)
    return utils.read_json_from_uri(
        LP_URI_V1 % ('people/?ws.op=getByEmail&email=' + email))
def _zanata_get_user_stats(zanata_user_id, start_date, end_date):
    """Fetch Zanata translation statistics for a user over a date range."""
    stats_path = 'stats/user/%s/%s..%s' % (zanata_user_id,
                                           start_date, end_date)
    return utils.read_json_from_uri(ZANATA_URI % stats_path,
                                    session=zanata_session)
def _zanata_get_user_stats(zanata_user_id, start_date, end_date):
    """Fetch Zanata translation statistics for a user over a date range."""
    stats_path = 'stats/user/%s/%s..%s' % (zanata_user_id,
                                           start_date, end_date)
    return utils.read_json_from_uri(ZANATA_URI % stats_path,
                                    session=zanata_session)
def lp_profile_by_launchpad_id(launchpad_id):
    """Fetch the Launchpad profile JSON for the given launchpad id."""
    LOG.debug('Lookup user id %s at Launchpad', launchpad_id)
    return utils.read_json_from_uri(LP_URI_V1 % ('~' + launchpad_id))
def _lp_profile_by_launchpad_id(launchpad_id):
    """Fetch a Launchpad profile by id and validate its display name."""
    LOG.debug('Lookup user id %s at Launchpad', launchpad_id)
    profile = utils.read_json_from_uri(LP_URI_V1 % ('~' + launchpad_id),
                                       session=launchpad_session)
    utils.validate_lp_display_name(profile)
    return profile
def _lp_profile_by_email(email):
    """Fetch a Launchpad profile by email and validate its display name."""
    LOG.debug('Lookup user email %s at Launchpad', email)
    profile = utils.read_json_from_uri(
        LP_URI_V1 % ('people/?ws.op=getByEmail&email=' + email),
        session=launchpad_session)
    utils.validate_lp_display_name(profile)
    return profile
def _zanata_get_user_stats(project_id, iteration_id, zanata_user_id,
                           start_date, end_date):
    """Fetch a contributor's Zanata stats for a project version and range."""
    stats_path = ('stats/project/%s/version/%s/contributor/%s/%s..%s'
                  % (project_id, iteration_id, zanata_user_id,
                     start_date, end_date))
    return utils.read_json_from_uri(ZANATA_URI % stats_path,
                                    session=zanata_session)