def raw_data_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into S3 the raw data generated by the cameras and the areas.

    For every video source, uploads today's ``objects_log`` CSV (if present);
    for every area, uploads today's ``occupancy_log`` CSV (if present).

    Args:
        config: Loaded configuration engine; used to enumerate sources/areas
            and to locate their log directories.
        bucket_name: Destination S3 bucket name.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    # Hoist the invariant filename so every path/upload uses the same day,
    # even if this runs across a midnight boundary.
    today_csv = f"{date.today()}.csv"
    # Backup all the source files
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        objects_log_directory = os.path.join(source_directory, "objects_log")
        today_objects_csv = os.path.join(objects_log_directory, today_csv)
        bucket_prefix = f"sources/{src['id']}/object_logs"
        if os.path.isfile(today_objects_csv):
            # Upload the today object files to S3
            s3_uploader.upload_file(bucket_name, today_objects_csv, today_csv, bucket_prefix)
    # Backup all the area files
    # NOTE(review): this loop accesses ``area.id`` (attribute) while
    # reports_backup uses ``area["id"]`` (mapping) — confirm which shape
    # config.get_areas() actually returns; left unchanged here.
    for area in areas:
        area_directory = os.path.join(area_log_directory, area.id)
        occupancy_log_directory = os.path.join(area_directory, "occupancy_log")
        today_occupancy_csv = os.path.join(occupancy_log_directory, today_csv)
        bucket_prefix = f"areas/{area.id}/occupancy_log"
        # BUG FIX: previously this tested ``today_objects_csv`` (a leftover
        # variable from the sources loop above), so the upload decision for an
        # area depended on the last *source* file instead of the area's own
        # occupancy file.
        if os.path.isfile(today_occupancy_csv):
            # Upload the today occupancy files to S3
            s3_uploader.upload_file(bucket_name, today_occupancy_csv, today_csv, bucket_prefix)
def main(config):
    """
    Entry point: schedule daily reporting jobs and loop running them forever.

    Accepts either a path to a config file or an already-built ConfigEngine.

    NOTE(review): this module defines ``main`` twice; this earlier definition
    is shadowed by the later one at import time — confirm which is intended.
    """
    logging.basicConfig(level=logging.INFO)
    # Allow callers to pass a config file path instead of an engine instance.
    if isinstance(config, str):
        config = ConfigEngine(config)
    if config.get_boolean('Logger', 'EnableReports'):
        logger.info("Reporting enabled!")
        schedule.every().day.at("00:01").do(create_daily_report, config=config)
    else:
        logger.info("Reporting disabled!")
    # Per-entity daily e-mail notifications, at each entity's configured time.
    for source in config.get_video_sources():
        if source['daily_report']:
            schedule.every().day.at(source['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=source)
    for area in config.get_areas():
        if area['daily_report']:
            schedule.every().day.at(area['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=area)
    # Run the scheduler loop indefinitely.
    while True:
        schedule.run_pending()
        time.sleep(10)
def reports_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into s3 the reports generated yesterday by the cameras and the areas.

    For each source, uploads yesterday's hourly report and the cumulative
    daily report for the face-mask and social-distancing metrics; for each
    area, does the same for the occupancy metric. Missing files are skipped.

    Args:
        config: Loaded configuration engine; used to enumerate sources/areas
            and to locate their log directories.
        bucket_name: Destination S3 bucket name.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    yesterday = str(date.today() - timedelta(days=1))
    # Backup the sources yesterday reports
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        reports_directory = os.path.join(source_directory, "reports")
        source_metrics = [FaceMaskUsageMetric, SocialDistancingMetric]
        for metric in source_metrics:
            metric_folder = os.path.join(reports_directory, metric.reports_folder)
            metric_hourly_report = os.path.join(metric_folder, f"report_{yesterday}.csv")
            metric_daily_report = os.path.join(metric_folder, "report.csv")
            bucket_prefix = f"sources/{src['id']}/reports/{metric.reports_folder}"
            if os.path.isfile(metric_hourly_report):
                s3_uploader.upload_file(bucket_name, metric_hourly_report, f"report_{yesterday}.csv", bucket_prefix)
            if os.path.isfile(metric_daily_report):
                s3_uploader.upload_file(bucket_name, metric_daily_report, "report.csv", bucket_prefix)
    # Backup the areas yesterday reports
    for area in areas:
        area_directory = os.path.join(area_log_directory, area["id"])
        occupancy_reports_directory = os.path.join(area_directory, "reports", OccupancyMetric.reports_folder)
        occupancy_hourly_report = os.path.join(occupancy_reports_directory, f"report_{yesterday}.csv")
        occupancy_daily_report = os.path.join(occupancy_reports_directory, "report.csv")
        bucket_prefix = f"areas/{area['id']}/reports/{OccupancyMetric.reports_folder}"
        if os.path.isfile(occupancy_hourly_report):
            s3_uploader.upload_file(bucket_name, occupancy_hourly_report, f"report_{yesterday}.csv", bucket_prefix)
        if os.path.isfile(occupancy_daily_report):
            # BUG FIX: previously this uploaded ``occupancy_hourly_report``
            # under the "report.csv" key, clobbering the daily report object
            # with hourly data; upload the daily report file instead.
            s3_uploader.upload_file(bucket_name, occupancy_daily_report, "report.csv", bucket_prefix)
def main(config):
    """
    Entry point: wire up every scheduled job, then loop running them forever.

    Accepts either a path to a config file or an already-built ConfigEngine.
    Schedules, in order: metric computations (daily/hourly/live) for every
    enabled "PeriodicTask_*" config section, per-source and per-area daily
    report notifications, and optional daily/weekly global reports.

    Raises:
        ValueError: if an enabled periodic-task section names an unknown task.
    """
    logging.basicConfig(level=logging.INFO)
    # Allow callers to pass a config file path instead of an engine instance.
    if isinstance(config, str):
        config = ConfigEngine(config)

    # Schedule the enabled periodic tasks declared in the config.
    for section in config.get_sections():
        if not section.startswith("PeriodicTask_"):
            continue
        if not config.get_boolean(section, "Enabled"):
            continue
        task_name = config.get_section_dict(section).get("Name")
        if task_name != "metrics":
            # "metrics" is the only supported periodic task.
            raise ValueError(f"Not supported periodic task named: {task_name}")
        logger.info("Metrics enabled!")
        schedule.every().day.at("00:01").do(compute_daily_metrics, config=config)
        schedule.every().hour.at(":01").do(compute_hourly_metrics, config=config)
        live_interval = int(config.get_section_dict(section).get("LiveInterval", 10))
        schedule.every(live_interval).minutes.do(
            compute_live_metrics, config=config, live_interval=live_interval)

    # Schedule daily/weekly reports for sources and areas
    sources = config.get_video_sources()
    areas = config.get_areas()
    for source in sources:
        if source['daily_report']:
            schedule.every().day.at(source['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=source)
    for area in areas:
        if area['daily_report']:
            schedule.every().day.at(area['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=area)

    # Optional global roll-up reports, both sent at the configured time.
    if config.get_boolean("App", "DailyGlobalReport"):
        schedule.every().day.at(
            config.get_section_dict("App")["GlobalReportTime"]).do(
                send_daily_global_report, config=config, sources=sources, areas=areas)
    if config.get_boolean("App", "WeeklyGlobalReport"):
        schedule.every(7).days.at(
            config.get_section_dict("App")["GlobalReportTime"]).do(
                send_weekly_global_report, config=config, sources=sources, areas=areas)

    # Run the scheduler loop indefinitely.
    while True:
        schedule.run_pending()
        time.sleep(10)