def reports_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into s3 the reports generated yesterday by the cameras and the areas.

    For every video source, uploads yesterday's hourly report and the rolling
    daily report for each source metric; for every area, does the same for the
    occupancy metric. Files that do not exist locally are silently skipped.

    Args:
        config: the processor configuration engine (provides sources/areas and log dirs).
        bucket_name: name of the destination S3 bucket.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    yesterday = str(date.today() - timedelta(days=1))
    # Backup the sources yesterday reports
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        reports_directory = os.path.join(source_directory, "reports")
        source_metrics = [FaceMaskUsageMetric, SocialDistancingMetric]
        for metric in source_metrics:
            metric_folder = os.path.join(reports_directory, metric.reports_folder)
            metric_hourly_report = os.path.join(metric_folder, f"report_{yesterday}.csv")
            metric_daily_report = os.path.join(metric_folder, "report.csv")
            bucket_prefix = f"sources/{src['id']}/reports/{metric.reports_folder}"
            if os.path.isfile(metric_hourly_report):
                s3_uploader.upload_file(bucket_name, metric_hourly_report,
                                        f"report_{yesterday}.csv", bucket_prefix)
            if os.path.isfile(metric_daily_report):
                s3_uploader.upload_file(bucket_name, metric_daily_report,
                                        "report.csv", bucket_prefix)
    # Backup the areas yesterday reports
    for area in areas:
        area_directory = os.path.join(area_log_directory, area["id"])
        occupancy_reports_directory = os.path.join(area_directory, "reports",
                                                   OccupancyMetric.reports_folder)
        occupancy_hourly_report = os.path.join(occupancy_reports_directory, f"report_{yesterday}.csv")
        occupancy_daily_report = os.path.join(occupancy_reports_directory, "report.csv")
        bucket_prefix = f"areas/{area['id']}/reports/{OccupancyMetric.reports_folder}"
        if os.path.isfile(occupancy_hourly_report):
            s3_uploader.upload_file(bucket_name, occupancy_hourly_report,
                                    f"report_{yesterday}.csv", bucket_prefix)
        if os.path.isfile(occupancy_daily_report):
            # Bug fix: previously the HOURLY report was uploaded under the
            # "report.csv" (daily) key, clobbering the daily backup.
            s3_uploader.upload_file(bucket_name, occupancy_daily_report,
                                    "report.csv", bucket_prefix)
def raw_data_backup(config: ConfigEngine, bucket_name: str):
    """
    Uploads into S3 the raw data generated by the cameras and the areas.

    Uploads today's objects log CSV for every video source and today's
    occupancy log CSV for every area. Missing local files are skipped.

    Args:
        config: the processor configuration engine (provides sources/areas and log dirs).
        bucket_name: name of the destination S3 bucket.
    """
    s3_uploader = S3Uploader()
    sources = config.get_video_sources()
    areas = config.get_areas()
    source_log_directory = get_source_log_directory(config)
    area_log_directory = get_area_log_directory(config)
    # Backup all the source files
    for src in sources:
        source_directory = os.path.join(source_log_directory, src["id"])
        objects_log_directory = os.path.join(source_directory, "objects_log")
        today_objects_csv = os.path.join(objects_log_directory, str(date.today()) + ".csv")
        bucket_prefix = f"sources/{src['id']}/object_logs"
        if os.path.isfile(today_objects_csv):
            # Upload the today object files to S3
            s3_uploader.upload_file(bucket_name, today_objects_csv,
                                    f"{str(date.today())}.csv", bucket_prefix)
    # Backup all the area files
    for area in areas:
        # Bug fix: areas returned by config.get_areas() are indexed with
        # area["id"] elsewhere (see reports_backup); attribute access was used here.
        area_directory = os.path.join(area_log_directory, area["id"])
        occupancy_log_directory = os.path.join(area_directory, "occupancy_log")
        today_occupancy_csv = os.path.join(occupancy_log_directory, str(date.today()) + ".csv")
        bucket_prefix = f"areas/{area['id']}/occupancy_log"
        # Bug fix: previously tested `today_objects_csv` (leftover from the
        # source loop), so area uploads depended on the LAST source's file.
        if os.path.isfile(today_occupancy_csv):
            # Upload the today occupancy files to S3
            s3_uploader.upload_file(bucket_name, today_occupancy_csv,
                                    f"{str(date.today())}.csv", bucket_prefix)
def get_entity_base_directory(cls, config=None):
    """
    Return the base log directory for this entity type.

    Reads it from `config` when one is supplied; otherwise falls back to the
    SourceLogDirectory / AreaLogDirectory environment variables.
    """
    is_source = cls.entity == "source"
    if config:
        if is_source:
            return get_source_log_directory(config)
        return get_area_log_directory(config)
    env_var = "SourceLogDirectory" if is_source else "AreaLogDirectory"
    return os.getenv(env_var)
def get_video_sources(self):
    """
    Build a VideoSource object for every "Source_*" section of the config.

    Returns:
        list: one VideoSource per camera section (empty if none are configured).

    Raises:
        RuntimeError: if any source section is malformed; the original
            exception is chained so the root cause stays debuggable.
    """
    try:
        sources = []
        for title, section in self.config.items():
            if not title.startswith("Source_"):
                continue
            # Slack notifications require both a configured channel and valid credentials.
            is_slack_enabled = self.config["App"]["SlackChannel"] and is_slack_configured()
            is_email_enabled = is_mailing_configured()
            config_dir = config_utils.get_source_config_directory(self)
            video_source_logs_dir = get_source_log_directory(self)
            sources.append(VideoSource(section, title, is_email_enabled, is_slack_enabled,
                                       config_dir, video_source_logs_dir))
        return sources
    except Exception as err:
        # Sources are invalid in config file; surface one clear error but
        # keep the original traceback via exception chaining.
        raise RuntimeError("Invalid sources in config file") from err
def compute_daily_metrics(cls, config):
    """
    Run the parent daily-metrics computation, then build yesterday's
    detection and violation heatmap reports for every video source that
    has an objects log CSV for that day.
    """
    super().compute_daily_metrics(config)
    logs_root = get_source_log_directory(config)
    for source in config.get_video_sources():
        source_dir = os.path.join(logs_root, source["id"])
        objects_dir = os.path.join(source_dir, "objects_log")
        heatmaps_dir = os.path.join(source_dir, "heatmaps")
        # Create missing directories
        for directory in (objects_dir, heatmaps_dir):
            os.makedirs(directory, exist_ok=True)
        yesterday = str(date.today() - timedelta(days=1))
        source_csv = os.path.join(objects_dir, yesterday + ".csv")
        if not os.path.isfile(source_csv):
            continue
        for report_type, file_prefix in (("Detections", "detections_heatmap_"),
                                         ("Violations", "violations_heatmap_")):
            output_path = os.path.join(heatmaps_dir, file_prefix + yesterday)
            cls.create_heatmap_report(config, source_csv, output_path, report_type)
def get_daily_report(config, entity_info, report_date):
    """
    Return the number of social-distancing violations per hour for an entity.

    For a camera, reads its own daily report CSV; for an area, sums the
    reports of all cameras in the area. A missing CSV contributes 24 zeros.

    Args:
        config: the processor configuration engine.
        entity_info: dict with 'type' ('Camera' or area), 'id', and for areas 'cameras'.
        report_date: date string used in the report file name.

    Returns:
        list[int]: violations per hour (DetectedObjects - NoInfringement summed
        across the entity's cameras).
    """
    entity_type = entity_info['type']
    all_violations_per_hour = []
    log_directory = get_source_log_directory(config)
    if entity_type == 'Camera':
        reports_directory = os.path.join(log_directory, entity_info['id'], "reports")
        daily_csv_file_paths = [
            os.path.join(reports_directory, SocialDistancingMetric.reports_folder,
                         'report_' + report_date + '.csv')
        ]
    else:  # entity == 'Area'
        camera_ids = entity_info['cameras']
        daily_csv_file_paths = [
            os.path.join(
                log_directory, camera_id,
                f"reports/{SocialDistancingMetric.reports_folder}/report_" + report_date + ".csv")
            for camera_id in camera_ids
        ]
    for file_path in daily_csv_file_paths:
        if not os.path.isfile(file_path):
            # Fix: previously list(np.zeros(24).astype(int)), which built a
            # numpy array for a constant and leaked np.int64 into the result.
            violations_per_hour = [0] * 24
        else:
            violations_per_hour = []
            with open(file_path, newline='') as csvfile:
                reader = csv.DictReader(csvfile)
                for row in reader:
                    violations_per_hour.append(
                        int(row["DetectedObjects"]) - int(row["NoInfringement"]))
        if not all_violations_per_hour:
            all_violations_per_hour = violations_per_hour
        else:
            # Element-wise sum across cameras.
            all_violations_per_hour = list(
                map(operator.add, all_violations_per_hour, violations_per_hour))
    return all_violations_per_hour
def create_fastapi_app(self):
    """
    Build and configure the processor's FastAPI application.

    Publishes the configured directories and resolutions through environment
    variables, mounts all API routers (optionally guarded by token auth),
    installs a validation-error handler, the start/stop video-processing
    endpoints, and a custom OpenAPI schema, then returns the app instance.
    """
    # Expose config-derived paths/settings via the environment so the routers,
    # which have no reference to `self`, can read them.
    os.environ["SourceLogDirectory"] = get_source_log_directory(self.settings.config)
    os.environ["SourceConfigDirectory"] = get_config_source_directory(self.settings.config)
    os.environ["AreaLogDirectory"] = get_area_log_directory(self.settings.config)
    os.environ["AreaConfigDirectory"] = get_config_areas_directory(self.settings.config)
    os.environ["ScreenshotsDirectory"] = get_screenshots_directory(self.settings.config)
    os.environ["HeatmapResolution"] = self.settings.config.get_section_dict("App")["HeatmapResolution"]
    os.environ["Resolution"] = self.settings.config.get_section_dict("App")["Resolution"]
    # Create and return a fastapi instance
    app = FastAPI()
    # When token auth is enabled, every router below shares the same
    # token-validation dependency (except /auth, see note further down).
    dependencies = []
    if self.settings.config.get_boolean("API", "UseAuthToken"):
        dependencies = [Depends(validate_token)]
    app.include_router(config_router, prefix="/config", tags=["Config"], dependencies=dependencies)
    app.include_router(cameras_router, prefix="/cameras", tags=["Cameras"], dependencies=dependencies)
    app.include_router(areas_router, prefix="/areas", tags=["Areas"], dependencies=dependencies)
    app.include_router(app_router, prefix="/app", tags=["App"], dependencies=dependencies)
    app.include_router(api_router, prefix="/api", tags=["Api"], dependencies=dependencies)
    app.include_router(core_router, prefix="/core", tags=["Core"], dependencies=dependencies)
    app.include_router(detector_router, prefix="/detector", tags=["Detector"], dependencies=dependencies)
    app.include_router(classifier_router, prefix="/classifier", tags=["Classifier"], dependencies=dependencies)
    app.include_router(tracker_router, prefix="/tracker", tags=["Tracker"], dependencies=dependencies)
    app.include_router(source_post_processors_router, prefix="/source_post_processors",
                       tags=["Source Post Processors"], dependencies=dependencies)
    app.include_router(source_loggers_router, prefix="/source_loggers",
                       tags=["Source Loggers"], dependencies=dependencies)
    app.include_router(area_loggers_router, prefix="/area_loggers",
                       tags=["Area Loggers"], dependencies=dependencies)
    app.include_router(periodic_tasks_router, prefix="/periodic_tasks",
                       tags=["Periodic Tasks"], dependencies=dependencies)
    app.include_router(area_metrics_router, prefix="/metrics/areas", tags=["Metrics"], dependencies=dependencies)
    app.include_router(camera_metrics_router, prefix="/metrics/cameras", tags=["Metrics"], dependencies=dependencies)
    app.include_router(export_router, prefix="/export", tags=["Export"], dependencies=dependencies)
    app.include_router(slack_router, prefix="/slack", tags=["Slack"], dependencies=dependencies)
    # NOTE: /auth is mounted WITHOUT the auth dependency, presumably so
    # clients can obtain a token in the first place — confirm if intentional.
    app.include_router(auth_router, prefix="/auth", tags=["Auth"])
    app.include_router(static_router, prefix="/static", dependencies=dependencies)
    app.include_router(ml_model_router, prefix="/ml_model", tags=["ML Models"], dependencies=dependencies)

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(request: Request, exc: RequestValidationError):
        # Report request-validation failures as 400 (instead of FastAPI's
        # default 422), echoing the errors and the offending body.
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
        )

    if os.environ.get("DEV_ALLOW_ALL_ORIGINS", False):
        # This option allows React development server (which is served on another port, like 3000) to proxy requests
        # to this server.
        # WARNING: read this before enabling it in your environment:
        # https://medium.com/@stestagg/stealing-secrets-from-developers-using-websockets-254f98d577a0
        from fastapi.middleware.cors import CORSMiddleware
        app.add_middleware(CORSMiddleware, allow_origins="*", allow_credentials=True,
                           allow_methods=["*"], allow_headers=["*"])

    @app.put("/start-process-video", response_model=bool)
    async def process_video_cfg():
        """ Starts the video processing """
        logger.info("process-video-cfg requests on api")
        # Forward the command to the core process and block until it answers.
        self.queue_manager.cmd_queue.put(Commands.PROCESS_VIDEO_CFG)
        logger.info("waiting for core's response...")
        result = self.queue_manager.result_queue.get()
        return result

    @app.put("/stop-process-video", response_model=bool)
    async def stop_process_video():
        """ Stops the video processing """
        logger.info("stop-process-video requests on api")
        # Forward the command to the core process and block until it answers.
        self.queue_manager.cmd_queue.put(Commands.STOP_PROCESS_VIDEO)
        logger.info("waiting for core's response...")
        result = self.queue_manager.result_queue.get()
        return result

    def custom_openapi():
        # Serve a customized OpenAPI schema in place of the auto-generated one.
        openapi_schema = get_openapi(
            title="Smart Social Distancing",
            version="1.0.0",
            description="Processor API schema",
            routes=app.routes
        )
        for value_path in openapi_schema["paths"].values():
            for value in value_path.values():
                # Remove current 422 error message.
                # TODO: Display the correct validation error schema
                value["responses"].pop("422", None)
        app.openapi_schema = openapi_schema
        return app.openapi_schema

    app.openapi = custom_openapi
    return app