def reset_db_daily():
    """Wipe the temporary database, rebuild the cached reports and
    return the date the fresh data corresponds to.

    Flushing first guarantees no data from the old rankings survives
    the rebuild.

    :return: yesterday's date as an ISO-8601 string -- the data in the
             database at this point covers up to the previous full day.
    """
    generator = ReportsGenerator()
    generator.flush_temp_db()
    generator.build_reports_lasts_days(days_history_cache)
    yesterday = datetime.date.today() - datetime.timedelta(1)
    return yesterday.isoformat()
def reset_db_daily():
    """Wipe the temporary database, rebuild the reports for the
    configured number of days, regenerate the world map, and return
    the date the ranking data corresponds to.

    Flushing first means no data from the old rankings is kept.

    :return: yesterday's date as an ISO-8601 string -- the date used to
             generate a ranking with the data in the database at this
             point.
    """
    # NOTE(review): another definition of reset_db_daily exists in this
    # file -- confirm which one is actually intended to win.
    generator = ReportsGenerator()
    generator.flush_temp_db()
    generator.build_reports_lasts_days(int(config.get('ranking', 'days')))
    ranking_date = datetime.date.today() - datetime.timedelta(1)
    # Refresh the per-country map from the newly rebuilt reports.
    world_map = Map()
    world_map.get_country_codes()
    world_map.generate()
    return ranking_date.isoformat()
def __init__(self, config_file="/path/to/bgp-ranking.conf"):
    """Load the configuration, register the project library path, and
    rebuild the reports so there is always something to display.

    :param config_file: path to the bgp-ranking configuration file.
                        The previous hard-coded path is kept as the
                        default for backward compatibility.
    """
    self.config = ConfigParser.RawConfigParser()
    self.config.read(config_file)
    # The ranking libraries live outside the standard path; the path
    # must be registered before the ranking.* imports below can work.
    root_dir = self.config.get('directories', 'root')
    sys.path.append(os.path.join(root_dir,
                                 self.config.get('directories', 'libraries')))
    from ranking.reports import Reports
    from ranking.reports_generator import ReportsGenerator
    # Ensure there is something to display: flush the temporary
    # database and regenerate the reports for the configured window.
    report_generator = ReportsGenerator()
    report_generator.flush_temp_db()
    report_generator.build_reports_lasts_days(
        int(self.config.get('ranking', 'days')))
    report_generator.build_last_reports()
    self.report = Reports()
    # Number of days displayed on the graphs.
    self.days_graph = 30
# Split the plain text file so multiple processes can push routes in
# parallel.
fs = FilesSplitter(output.name, int(config.get('routing', 'number_of_splits')))
splitted_files = fs.fplit()
syslog.syslog(syslog.LOG_INFO, 'Splitting finished.')
# Flush the old routing database and launch the population of the new
# database.
routing_db.flushdb()
syslog.syslog(syslog.LOG_INFO, 'Start pushing all routes...')
run_splitted_processing(int(config.get('processes', 'routing_push')),
                        pushing_process_service, splitted_files)
syslog.syslog(syslog.LOG_INFO, 'All routes pushed.')
# Remove the binary and the plain text files.
os.unlink(output.name)
os.unlink(filename)
if compute_yesterday_ranking():
    # Clean the whole database and regenerate it (no data of the old
    # rankings is kept); returns yesterday's date, matching the data
    # currently in the database. This replaces an inline copy of the
    # reset_db_daily() body defined elsewhere in this file.
    date = reset_db_daily()
else:
    date = datetime.date.today().isoformat()
separator = config.get('input_keys', 'separator')
sources = global_db.smembers('{date}{sep}{key}'.format(
    date=date, sep=separator,
    key=config.get('input_keys', 'index_sources')))
pipeline = history_db.pipeline()
pipeline_static = history_db_static.pipeline()
# One ranking pass. NOTE(review): the bare `continue` below means this
# chunk is the body of a loop whose header lies outside the visible
# span -- presumably the service's main polling loop; confirm.
# Wait until the routing database has been fully consumed.
while routing_db.dbsize() > 0:
    time.sleep(sleep_timer)
if not os.path.exists(filename) or history_db.exists(key_to_rank):
    # Nothing new to process, or a ranking is already in flight:
    # wait for a new file.
    time.sleep(bview_check_interval)
    continue
prepare_bview_file()
# Snapshot the ASN count now, before the ranking drains the database.
amount_asns = routing_db.scard('asns')
if compute_yesterday_ranking():
    date = reset_db_daily()
else:
    date = datetime.date.today().isoformat()
prepare_keys_for_ranking()
# Fan out the ranking work, then block until every key has been ranked.
service_start_multiple(ranking_process_service, rank_procs)
while history_db.scard(key_to_rank) > 0:
    # wait for a new file
    time.sleep(sleep_timer)
rmpid(ranking_process_service)
# Save the number of asns known by the RIPE
history_db_static.set('{date}|amount_asns'.format(date=date), amount_asns)
# The routing data for this pass is no longer needed.
routing_db.flushdb()
publisher.info('Updating the reports...')
ReportsGenerator().build_reports(date)
publisher.info('...done.')
# NOTE(review): this chunk starts in the middle of a call -- the
# opening `fs = FilesSplitter(output.name, ...` lies before the
# visible span, so the first line below is a continuation argument.
                int(config.get('routing', 'number_of_splits')))
splitted_files = fs.fplit()
syslog.syslog(syslog.LOG_INFO, 'Splitting finished.')
# Flush the old routing database and launch the population of the new database
routing_db.flushdb()
syslog.syslog(syslog.LOG_INFO, 'Start pushing all routes...')
run_splitted_processing(int(config.get('processes', 'routing_push')),
                        pushing_process_service, splitted_files)
syslog.syslog(syslog.LOG_INFO, 'All routes pushed.')
# Remove the binary and the plain text files
os.unlink(output.name)
os.unlink(filename)
if compute_yesterday_ranking():
    # Clean the whole database and regenerate it (like this we do not
    # keep data of the old rankings)
    report = ReportsGenerator()
    report.flush_temp_db()
    report.build_reports_lasts_days(int(config.get('ranking', 'days')))
    # date used to generate a ranking with the data in the database at
    # this point -- i.e. yesterday, since today is still incoming
    date = (datetime.date.today() - datetime.timedelta(1)).isoformat()
    # Regenerate the world map from the rebuilt reports.
    m = Map()
    m.get_country_codes()
    m.generate()
else:
    date = datetime.date.today().isoformat()
separator = config.get('input_keys', 'separator')
# Sources recorded for this date under the configured index key.
sources = global_db.smembers('{date}{sep}{key}'.format(
    date=date, sep=separator,
    key=config.get('input_keys', 'index_sources')))