def main():
    """Entry point: load default data and process all records.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    default_data_processor.process(runtime_storage_inst, default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: configure, load default data, and update all records.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    default_data_processor.process(runtime_storage_inst, default_data,
                                   cfg.CONF.sources_root,
                                   cfg.CONF.force_update)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    update_records(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)
def main():
    """Entry point: configure, load default data, and refresh repositories.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf()

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    default_data_processor.process(runtime_storage_inst, default_data,
                                   cfg.CONF.sources_root)

    update_pids(runtime_storage_inst)

    update_repos(runtime_storage_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)
def main():
    """Entry point: load default data and process all records.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical("Unable to load default data")
        return 1  # was `return not 0` (True); an int exit status is clearer

    default_data_processor.process(runtime_storage_inst, default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst, cfg.CONF.project_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key("runtime_storage_update_time",
                                    utils.date_to_timestamp("now"))
def main():
    """Entry point: validate default data against the schema, then process.

    Returns 1 when default data cannot be loaded or fails schema
    validation; returns None (exit status 0) on success.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    try:
        jsonschema.validate(default_data, schema.default_data)
    except jsonschema.ValidationError as e:
        # lazy logger args instead of eager `%` formatting (same output)
        LOG.critical('The default data is invalid: %s', e)
        return 1

    default_data_processor.process(runtime_storage_inst, default_data)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))

    LOG.info('stackalytics-processor succeeded.')
def main():
    """Entry point: configure, connect to gerrit, and process all records.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    conf(project='stackalytics')

    logging.setup('stackalytics')
    LOG.info('Logging enabled')

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    # Review-system handle used while seeding default data.
    gerrit = rcs.get_rcs(None, cfg.CONF.review_uri)
    gerrit.setup(key_filename=cfg.CONF.ssh_key_filename,
                 username=cfg.CONF.ssh_username)

    default_data_processor.process(runtime_storage_inst,
                                   default_data,
                                   cfg.CONF.git_base_uri,
                                   gerrit,
                                   cfg.CONF.driverlog_data_uri)

    process_program_list(runtime_storage_inst, cfg.CONF.program_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: configure (oslo.log style), load data, process records.

    Returns 1 on failure to load default data; returns None (exit status 0)
    on success.
    """
    # init conf and logging
    conf = cfg.CONF
    conf.register_cli_opts(config.OPTS)
    conf.register_opts(config.OPTS)
    logging.register_options(conf)
    logging.set_defaults()
    conf(project='stackalytics')

    logging.setup(conf, 'stackalytics')
    LOG.info('Logging enabled')
    conf.log_opt_values(LOG, std_logging.DEBUG)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        cfg.CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(cfg.CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    default_data_processor.process(runtime_storage_inst, default_data,
                                   cfg.CONF.driverlog_data_uri)

    process_project_list(runtime_storage_inst, cfg.CONF.project_list_uri)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(cfg.CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))
def main():
    """Entry point: validate default data, process records, apply fixups.

    Returns 1 when default data cannot be loaded or fails schema
    validation; returns None (exit status 0) on success.
    """
    utils.init_config_and_logging(config.CONNECTION_OPTS +
                                  config.PROCESSOR_OPTS)

    runtime_storage_inst = runtime_storage.get_runtime_storage(
        CONF.runtime_storage_uri)

    default_data = utils.read_json_from_uri(CONF.default_data_uri)
    if not default_data:
        LOG.critical('Unable to load default data')
        return 1  # was `return not 0` (True); an int exit status is clearer

    try:
        jsonschema.validate(default_data, schema.default_data)
    except jsonschema.ValidationError as e:
        # lazy logger args instead of eager `%` formatting (same output)
        LOG.critical('The default data is invalid: %s', e)
        return 1

    default_data_processor.process(runtime_storage_inst, default_data)

    process_project_list(runtime_storage_inst)

    update_pids(runtime_storage_inst)

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)

    process(runtime_storage_inst, record_processor_inst)

    apply_corrections(CONF.corrections_uri, runtime_storage_inst)

    # long operation should be the last
    update_members(runtime_storage_inst, record_processor_inst)

    # Record when this update pass finished so readers can detect staleness.
    runtime_storage_inst.set_by_key('runtime_storage_update_time',
                                    utils.date_to_timestamp('now'))

    LOG.info('stackalytics-processor succeeded.')