Example #1
def update_export(redis_pool: redis.ConnectionPool,
                  sv_instance: SharedValues) -> None:
    # input validation
    if "app" not in request.form:
        abort(400, "Missing parameter")
    app_name = request.form['app']
    if app_name not in DJANGO_APP_NAMES:
        abort(400, "Wrong parameter value")

    redis_con = redis.Redis(connection_pool=redis_pool)
    sse_send_export_data(sv_instance, app_name, redis_con)
    redis_con.close()
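
update_export reads request.form and calls abort() without a route decorator, so it is presumably invoked from a Flask view that binds the connection pool and the SharedValues instance. A minimal wiring sketch under that assumption (the route path and the setup names are guesses, not from the source):

import redis
from flask import Flask

app = Flask(__name__)
# string comparisons against redis values elsewhere in this listing
# suggest the pool decodes responses to str
redis_pool = redis.ConnectionPool(decode_responses=True)
sv_instance = SharedValues()  # assumed constructor from the surrounding project

@app.route('/export/update', methods=['POST'])
def update_export_view():
    update_export(redis_pool, sv_instance)  # signals errors via abort()
    return '', 204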
Example #2
def start_export(redis_pool: redis.ConnectionPool,
                 sv_instance: SharedValues) -> None:
    # input validation
    if any(x not in request.form for x in ["threshold", "app"]):
        abort(400, "Missing parameter")
    try:
        threshold = int(request.form['threshold'])
        if threshold < 0 or threshold > 100:
            raise ValueError()
    except ValueError:
        abort(400, "Wrong parameter value")
        return  # unreachable (abort() raises); silences static-analysis warnings
    app_name = request.form['app']
    if app_name not in DJANGO_APP_NAMES:
        abort(400, "Wrong parameter value")

    # check run conditions
    redis_con = redis.Redis(connection_pool=redis_pool)
    if app_name == os.environ['DJANGO_APP_NAME_CONCEPT'] and \
            (redis_con.get(os.environ['REDIS_KEY_INFERENCE_RUN']) or "0") == "1":
        redis_con.close()
        abort(400, "Inference is currently running")

    with export_start_lock:
        redis_reset_startup(
            redis_con, "Export",
            os.environ['REDIS_KEY_EXPORT_RUN'].format(app_name),
            os.environ['REDIS_KEY_EXPORT_TIME'].format(app_name),
            os.environ['REDIS_KEY_EXPORT_TIME_ETE'].format(app_name),
            os.environ['REDIS_KEY_EXPORT_EXCEPTION'].format(app_name),
            os.environ['REDIS_KEY_EXPORT_CURRENT'].format(app_name),
            os.environ['REDIS_KEY_EXPORT_TOTAL'].format(app_name))
        redis_con.set(
            os.environ['REDIS_KEY_EXPORT_THRESHOLD'].format(app_name),
            threshold)

        # set event in shared memory for export start
        sv_instance.export[app_name].start.set()

    sse_send_export_data(sv_instance, app_name, redis_con)
    if app_name == os.environ['DJANGO_APP_NAME_CONCEPT']:
        sse_send_inference_data(sv_instance, redis_con)
    redis_con.close()
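
redis_reset_startup is not part of this listing; judging from its call sites, it marks a job as running and clears the per-job status keys before a new run. A minimal sketch of what such a helper could look like (the exact key semantics are an assumption):

import time

import redis


def redis_reset_startup(redis_con: redis.Redis, job_label: str,
                        key_run: str, key_time: str, key_time_ete: str,
                        key_exception: str, key_current: str,
                        key_total: str) -> None:
    # assumed behaviour: flag the job as running, stamp its start time,
    # and reset progress/exception keys left over from a previous run
    print("{:s} run starting".format(job_label))
    redis_con.set(key_run, "1")
    redis_con.set(key_time, int(time.time()))
    redis_con.delete(key_time_ete, key_exception)
    redis_con.set(key_current, 0)
    redis_con.set(key_total, 0)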
Example #3
def start_inference(redis_pool: redis.ConnectionPool,
                    sv_instance: SharedValues) -> None:
    redis_con = redis.Redis(connection_pool=redis_pool)
    # input validation
    gpu_selection, batch_size = input_validation_train_infer(
        redis_con, sv_instance.compatible_gpus)

    # check run conditions
    if (redis_con.get(os.environ['REDIS_KEY_EXPORT_RUN'].format(
            os.environ['DJANGO_APP_NAME_CONCEPT'])) or "0") == "1":
        redis_con.close()
        abort(400, "Export is currently running")
    check_if_inference_already_stored()

    with inference_start_lock:
        redis_reset_startup(redis_con, "Inference",
                            os.environ['REDIS_KEY_INFERENCE_RUN'],
                            os.environ['REDIS_KEY_INFERENCE_TIME'],
                            os.environ['REDIS_KEY_INFERENCE_TIME_ETE'],
                            os.environ['REDIS_KEY_INFERENCE_EXCEPTION'],
                            os.environ['REDIS_KEY_INFERENCE_CURRENT'],
                            os.environ['REDIS_KEY_INFERENCE_TOTAL'])
        redis_con.delete(os.environ['REDIS_KEY_INFERENCE_GPUS'])
        # lpush() prepends, so push in reverse to keep the selection order
        for gpu_idx in reversed(gpu_selection):
            redis_con.lpush(os.environ['REDIS_KEY_INFERENCE_GPUS'], gpu_idx)
        redis_con.set(os.environ['REDIS_KEY_INFERENCE_BATCH_SIZE'], batch_size)

        # set event in shared memory for inference start
        sv_instance.inference.start.set()

    sse_send_inference_data(sv_instance, redis_con)
    sse_send_export_data(sv_instance, os.environ['DJANGO_APP_NAME_CONCEPT'],
                         redis_con)
    redis_con.close()
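
The start event set above is consumed in another process; a sketch of how a worker loop might wait on it and read the run parameters back from redis (the loop structure and the run_inference entry point are assumptions):

import os

import redis


def inference_worker(sv_instance, redis_con: redis.Redis) -> None:
    while True:
        sv_instance.inference.start.wait()   # block until a run is requested
        sv_instance.inference.start.clear()

        # lpush() prepends and start_inference() pushes in reverse,
        # so lrange() yields the GPUs in their original selection order
        gpu_indices = redis_con.lrange(
            os.environ['REDIS_KEY_INFERENCE_GPUS'], 0, -1)
        batch_size = int(
            redis_con.get(os.environ['REDIS_KEY_INFERENCE_BATCH_SIZE']))
        run_inference(gpu_indices, batch_size)  # assumed entry point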
Example #4
def export_concept_detections_to_csv_files(
        options: Dict[str, Union[str, int, float]],
        sv_instance: SharedValues,
        postgres_cur: psycopg2.extensions.cursor,
        output_folder: str = 'results',
        log: bool = False) -> None:
    # fetch videos
    videos = get_videos(postgres_cur)
    video_count_tot = len(videos)
    video_count_cur = 0

    if log:
        print("  Video-Query returned", video_count_tot, "results")
        print()

    # create output dir
    output = os.path.join(output_folder, options['model_date_str'],
                          str(options['class_type_id']))
    os.makedirs(output, exist_ok=True)

    # create redis connection for client updates
    app_name = options['app_name']
    redis_con = create_redis_connection()
    sse_update_video_count = int(os.environ['EXPORT_SSE_UPDATE_PER_VIDEOS'])
    redis_con.set(os.environ['REDIS_KEY_EXPORT_TOTAL'].format(app_name),
                  video_count_tot)
    redis_con.set(os.environ['REDIS_KEY_EXPORT_TIME_ETE'].format(app_name),
                  int(datetime.timestamp(datetime.now())))
    sse_send_export_data(sv_instance, app_name, redis_con)

    # fetch keyframes and predictions for each video
    for video_id, video_path in videos:
        video_name = os.path.splitext(os.path.basename(video_path))[0]

        # fetch keyframes
        video_keyframes = get_keyframes(postgres_cur, video_id)
        video_keyframes = [v[0] for v in video_keyframes]
        if log:
            print("  Keyframe-Query for video", video_id, "returned",
                  len(video_keyframes), "results")

        # fetch predictions
        results = get_video_predictions(postgres_cur, video_id,
                                        options['class_type_id'],
                                        options['model_id'],
                                        options['threshold'])
        if log:
            print("  Prediction-Query for video", video_id, "returned",
                  len(results), "results")

        # write grouped results to CSV (keeping single-frame predictions)
        results_grouped = group_results(results,
                                        video_name,
                                        video_keyframes,
                                        allow_single_frame=True)
        write_csv(
            results_grouped,
            os.path.join(output, os.path.dirname(video_path),
                         '{:s}.csv'.format(video_name)))

        video_count_cur += 1
        if video_count_cur % sse_update_video_count == 0:
            update_status_information(app_name, redis_con, sv_instance,
                                      video_count_cur)

    update_status_information(app_name, redis_con, sv_instance,
                              video_count_cur)
    redis_con.close()
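
A hedged usage sketch for the exporter: options must carry at least the keys the function reads (model_date_str, class_type_id, app_name, model_id, threshold). The connection setup and option values below are placeholders, not from the source:

import os

import psycopg2

postgres_con = psycopg2.connect(os.environ['POSTGRES_DSN'])  # DSN name assumed
with postgres_con.cursor() as postgres_cur:
    export_concept_detections_to_csv_files(
        options={
            'model_date_str': '2021-01-01',
            'class_type_id': 1,
            'app_name': os.environ['DJANGO_APP_NAME_CONCEPT'],
            'model_id': 1,
            'threshold': 0.5,
        },
        sv_instance=sv_instance,  # assumed SharedValues instance
        postgres_cur=postgres_cur,
        log=True)
postgres_con.close()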
Example #5
def update_status_information(app_name: str, redis_con: redis.Redis,
                              sv_instance: SharedValues,
                              videos_cur: int) -> None:
    redis_con.set(os.environ['REDIS_KEY_EXPORT_CURRENT'].format(app_name),
                  videos_cur)
    sse_send_export_data(sv_instance, app_name, redis_con)
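
sse_send_export_data itself is also not shown. From its call sites it takes the shared values, an app name, and a redis connection, and pushes the current export status to connected clients; one plausible sketch, assuming a redis pub/sub channel as the SSE transport (channel name and payload shape are guesses):

import json
import os

import redis


def sse_send_export_data(sv_instance, app_name: str,
                         redis_con: redis.Redis) -> None:
    # assumed behaviour: bundle the export status keys into one message;
    # sv_instance would carry the real SSE channel and is unused in this sketch
    payload = {
        field: redis_con.get(os.environ[env_key].format(app_name))
        for field, env_key in (('run', 'REDIS_KEY_EXPORT_RUN'),
                               ('current', 'REDIS_KEY_EXPORT_CURRENT'),
                               ('total', 'REDIS_KEY_EXPORT_TOTAL'))
    }
    redis_con.publish('sse:export:' + app_name, json.dumps(payload))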