def main(config):
    """Entry point of the reporting scheduler: registers every daily report
    job (global plus per-source and per-area) and then dispatches them forever.

    :param config: a ConfigEngine instance, or a path to a config file.
    """
    logging.basicConfig(level=logging.INFO)
    if isinstance(config, str):
        config = ConfigEngine(config)

    if config.get_boolean('Logger', 'EnableReports'):
        logger.info("Reporting enabled!")
        # Global report shortly after midnight, before per-entity notifications.
        schedule.every().day.at("00:01").do(create_daily_report, config=config)
    else:
        logger.info("Reporting disabled!")

    # Per-entity daily report notifications: cameras first, then areas.
    for camera in config.get_video_sources():
        if camera['daily_report']:
            schedule.every().day.at(camera['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=camera)
    for monitored_area in config.get_areas():
        if monitored_area['daily_report']:
            schedule.every().day.at(monitored_area['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=monitored_area)

    # Block forever, polling the scheduler every 10 seconds.
    while True:
        schedule.run_pending()
        time.sleep(10)
def raw_data_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into S3 the raw data generated by the cameras and the areas.

    For every source the current day's ``objects_log`` CSV is uploaded, and for
    every area the current day's ``occupancy_log`` CSV is uploaded, each under a
    bucket prefix that mirrors the local directory layout.

    :param config: the processor configuration (provides sources, areas and log dirs).
    :param bucket_name: destination S3 bucket.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    today_csv = str(date.today()) + ".csv"
    # Backup all the source files
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        objects_log_directory = os.path.join(source_directory, "objects_log")
        today_objects_csv = os.path.join(objects_log_directory, today_csv)
        bucket_prefix = f"sources/{src['id']}/object_logs"
        if os.path.isfile(today_objects_csv):
            # Upload the today object files to S3
            s3_uploader.upload_file(bucket_name, today_objects_csv, today_csv, bucket_prefix)
    # Backup all the area files
    for area in areas:
        # NOTE(review): areas are accessed as objects (``area.id``) here but as
        # dicts (``area["id"]``) in reports_backup — confirm which shape
        # config.get_areas() actually returns.
        area_directory = os.path.join(area_log_directory, area.id)
        occupancy_log_directory = os.path.join(area_directory, "occupancy_log")
        today_occupancy_csv = os.path.join(occupancy_log_directory, today_csv)
        bucket_prefix = f"areas/{area.id}/occupancy_log"
        # BUGFIX: previously tested ``today_objects_csv`` (leftover from the
        # source loop), so area uploads depended on the wrong file's existence.
        if os.path.isfile(today_occupancy_csv):
            # Upload the today occupancy files to S3
            s3_uploader.upload_file(bucket_name, today_occupancy_csv, today_csv, bucket_prefix)
def main(config):
    """Launch the CV engine and the web GUI as separate processes and wait
    until interrupted, then tear both processes down in order.

    :param config: a ConfigEngine instance, or a path to a config file.
    """
    logging.basicConfig(level=logging.INFO)
    if isinstance(config, str):
        config = ConfigEngine(config)
    libs.pubsub.init_shared_resources()
    video_path = config.get_section_dict("App")["VideoPath"]

    engine_proc = Process(target=start_engine, args=(config, video_path,))
    api_proc = Process(target=start_web_gui, args=(config,))
    api_proc.start()
    engine_proc.start()
    logger.info("Services Started.")

    # Park the main thread on an event that is never set; only a
    # KeyboardInterrupt wakes us up.
    try:
        threading.Event().wait()
    except KeyboardInterrupt:
        logger.info("Received interrupt. Terminating...")
        engine_proc.terminate()
        engine_proc.join()
        logger.info("CV Engine terminated.")
        api_proc.terminate()
        api_proc.join()
        logger.info("Web GUI terminated.")
def reports_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into s3 the reports generated yesterday by the cameras and the areas.

    For each source, yesterday's hourly report and the cumulative daily report
    of every source metric are uploaded; for each area the same is done for the
    occupancy metric.

    :param config: the processor configuration (provides sources, areas and log dirs).
    :param bucket_name: destination S3 bucket.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    yesterday = str(date.today() - timedelta(days=1))
    # Backup the sources yesterday reports
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        reports_directory = os.path.join(source_directory, "reports")
        source_metrics = [FaceMaskUsageMetric, SocialDistancingMetric]
        for metric in source_metrics:
            metric_folder = os.path.join(reports_directory, metric.reports_folder)
            metric_hourly_report = os.path.join(metric_folder, f"report_{yesterday}.csv")
            metric_daily_report = os.path.join(metric_folder, "report.csv")
            bucket_prefix = f"sources/{src['id']}/reports/{metric.reports_folder}"
            if os.path.isfile(metric_hourly_report):
                s3_uploader.upload_file(bucket_name, metric_hourly_report,
                                        f"report_{yesterday}.csv", bucket_prefix)
            if os.path.isfile(metric_daily_report):
                s3_uploader.upload_file(bucket_name, metric_daily_report,
                                        "report.csv", bucket_prefix)
    # Backup the areas yesterday reports
    for area in areas:
        area_directory = os.path.join(area_log_directory, area["id"])
        occupancy_reports_directory = os.path.join(
            area_directory, "reports", OccupancyMetric.reports_folder)
        occupancy_hourly_report = os.path.join(occupancy_reports_directory, f"report_{yesterday}.csv")
        occupancy_daily_report = os.path.join(occupancy_reports_directory, "report.csv")
        bucket_prefix = f"areas/{area['id']}/reports/{OccupancyMetric.reports_folder}"
        if os.path.isfile(occupancy_hourly_report):
            s3_uploader.upload_file(bucket_name, occupancy_hourly_report,
                                    f"report_{yesterday}.csv", bucket_prefix)
        if os.path.isfile(occupancy_daily_report):
            # BUGFIX: previously uploaded ``occupancy_hourly_report`` here, so
            # the hourly file was stored under the daily name "report.csv" and
            # the real daily report was never backed up.
            s3_uploader.upload_file(bucket_name, occupancy_daily_report,
                                    "report.csv", bucket_prefix)
def get_config_file_json_strings(config_sample_path, decamelize=False):
    """Load a config file and return it as ``{section: section_dict}``.

    :param config_sample_path: path of the .ini config file to read.
    :param decamelize: when True, convert all keys to snake_case via humps.
    :return: dict mapping each section name to its key/value dict.
    """
    engine = ConfigEngine(config_sample_path)
    sections_json = {
        section: engine.get_section_dict(section)
        for section in engine.get_sections()
    }
    if decamelize:
        # Do not forget that "Source_1" becomes "source__1".
        sections_json = humps.decamelize(sections_json)
    return sections_json
def test_get_config():
    """GET /config must succeed and return the sample configuration JSON."""
    cfg = ConfigEngine(config_sample_path)
    processor_api = ProcessorAPI(cfg)
    test_client = TestClient(processor_api.app)
    response = test_client.get("/config")
    assert response.status_code == 200
    assert response.json() == config_sample_json
def config_rollback():
    """Fixture: yield a TestClient backed by a throwaway copy of the sample
    config, and delete the copy afterwards so tests cannot pollute the original.
    """
    pristine_path = "/repo/api/tests/data/config-x86-openvino.ini"
    temp_path = "/repo/api/tests/data/config-x86-openvino_TEMPORARY.ini"
    shutil.copyfile(pristine_path, temp_path)
    Settings(config=ConfigEngine(temp_path))
    # Import ProcessorAPI after Settings has been initialized with a config.
    from api.processor_api import ProcessorAPI
    client = TestClient(ProcessorAPI().app)
    yield client, temp_path
    # Teardown: drop the temporary config file.
    os.remove(temp_path)
def config_rollback_base(option="JUST_CAMERAS"):
    """Build a TestClient on top of a disposable copy of one of the config
    templates and return it together with the path of the temporary copy.

    :param option: which template to copy — "EMPTY", "JUST_CAMERAS" or "METRICS".
    :return: (TestClient, path of the temporary config file)
    """
    template_paths = {
        # Empty template with no camera or area.
        "EMPTY": "/repo/api/tests/data/config-x86-openvino_EMPTY.ini",
        # Only 2 cameras: camera_example (ID: 49) and camera_example_2 (ID: 50).
        "JUST_CAMERAS": "/repo/api/tests/data/config-x86-openvino_JUST_CAMERAS.ini",
        # 4 cameras and two areas:
        #   camera_example   (ID: 49), Area 5
        #   camera_example_2 (ID: 50), Area 5
        #   camera_example_3 (ID: 51), Area 6
        #   camera_example_4 (ID: 52), Area 6
        "METRICS": "/repo/api/tests/data/config-x86-openvino_METRICS.ini",
    }
    # Unknown options fall back to "" exactly like the original if/elif chain.
    original_path = template_paths.get(option, "")
    config_sample_path_to_modify = "/repo/api/tests/data/config-x86-openvino_TEMPORARY.ini"
    shutil.copyfile(original_path, config_sample_path_to_modify)
    Settings(config=ConfigEngine(config_sample_path_to_modify))
    # Import ProcessorAPI after Settings has been initialized with a config.
    from api.processor_api import ProcessorAPI
    client = TestClient(ProcessorAPI().app)
    return client, config_sample_path_to_modify
def __init__(self, args):
    """Construct the config, CV engine and UI, wire them together, then start the UI.

    :param args: parsed CLI arguments; ``args.config`` is the config file path.
    """
    config = ConfigEngine(args.config)
    engine = CvEngine(config)
    ui = UI(config, engine)
    # The engine needs a back-reference to the UI before it starts.
    engine.set_ui(ui)
    self.config = config
    self.engine = engine
    self.ui = ui
    self.ui.start()
def main(config):
    """Register every enabled periodic task and every daily/weekly report job,
    then dispatch scheduled jobs forever.

    :param config: a ConfigEngine instance, or a path to a config file.
    :raises ValueError: when an enabled periodic task has an unknown Name.
    """
    logging.basicConfig(level=logging.INFO)
    if isinstance(config, str):
        config = ConfigEngine(config)

    # Schedule configured periodic tasks
    for section in config.get_sections():
        if not section.startswith("PeriodicTask_"):
            continue
        if not config.get_boolean(section, "Enabled"):
            continue
        task_name = config.get_section_dict(section).get("Name")
        if task_name != "metrics":
            raise ValueError(f"Not supported periodic task named: {task_name}")
        logger.info("Metrics enabled!")
        schedule.every().day.at("00:01").do(compute_daily_metrics, config=config)
        schedule.every().hour.at(":01").do(compute_hourly_metrics, config=config)
        live_interval = int(config.get_section_dict(section).get("LiveInterval", 10))
        schedule.every(live_interval).minutes.do(
            compute_live_metrics, config=config, live_interval=live_interval)

    # Schedule daily/weekly reports for sources and areas
    sources = config.get_video_sources()
    areas = config.get_areas()
    for entity in sources:
        if entity['daily_report']:
            schedule.every().day.at(entity['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=entity)
    for entity in areas:
        if entity['daily_report']:
            schedule.every().day.at(entity['daily_report_time']).do(
                send_daily_report_notification, config=config, entity_info=entity)

    if config.get_boolean("App", "DailyGlobalReport"):
        daily_time = config.get_section_dict("App")["GlobalReportTime"]
        schedule.every().day.at(daily_time).do(
            send_daily_global_report, config=config, sources=sources, areas=areas)
    if config.get_boolean("App", "WeeklyGlobalReport"):
        weekly_time = config.get_section_dict("App")["GlobalReportTime"]
        schedule.every(7).days.at(weekly_time).do(
            send_weekly_global_report, config=config, sources=sources, areas=areas)

    # Block forever, polling the scheduler every 10 seconds.
    while True:
        schedule.run_pending()
        time.sleep(10)
def main(config):
    """Initialise the global Settings from the config and launch the API server.

    :param config: a ConfigEngine instance, or a path to a config file.
    """
    logging.basicConfig(level=logging.INFO)
    cfg = ConfigEngine(config) if isinstance(config, str) else config
    Settings(config=cfg)
    start_api(cfg)
from fastapi.testclient import TestClient
from libs.config_engine import ConfigEngine
from api.models.base import Config
from api.processor_api import ProcessorAPI
import pytest
import humps
import copy

# Module-level fixtures: an API client plus the sample config rendered as the
# snake_cased JSON the /config endpoint exchanges.
config_sample_path = '/repo/api/models/data/config-sample.ini'
config = ConfigEngine(config_sample_path)
app_instance = ProcessorAPI(config)
api = app_instance.app
client = TestClient(api)

# read sample config file
config_sample = ConfigEngine(config_sample_path)
sections = config_sample.get_sections()
config_sample_json = {}
for section in sections:
    config_sample_json[section] = config_sample.get_section_dict(section)
config_sample_json = humps.decamelize(config_sample_json)


#@pytest.mark.order1
def test_set_config():
    """PUT /config with the sample payload must be accepted."""
    response = client.put(
        "/config",
        json=config_sample_json,
    )
    # BUGFIX: the test previously made the request but asserted nothing, so it
    # could never fail regardless of the API's response.
    assert response.status_code == 200
def main(config):
    """Set up logging, materialise the config if given as a path, and run the core.

    :param config: a ConfigEngine instance, or a path to a config file.
    """
    logging.basicConfig(level=logging.INFO)
    engine = ConfigEngine(config) if isinstance(config, str) else config
    start_core(engine)