def main():
    init_logging()

    config = load_config()
    build_config = config.build
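    # open the HDF5 track database and load every track into the dataset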
    db = TrackDatabase(os.path.join(config.tracks_folder, "dataset.hdf5"))
    dataset = Dataset(db, "dataset", config)
    tracks_loaded, total_tracks = dataset.load_tracks()
    print(
        "Loaded {}/{} tracks, found {:.1f}k segments".format(
            tracks_loaded, total_tracks, len(dataset.segments) / 1000
        )
    )
    for key, value in dataset.filtered_stats.items():
        if value != 0:
            print("  {} filtered {}".format(key, value))
    print()

    show_tracks_breakdown(dataset)
    print()
    show_segments_breakdown(dataset)
    print()
    show_cameras_breakdown(dataset)
    print()

    print("Splitting data set into train / validation")
    datasets = split_dataset_by_cameras(db, dataset, build_config)
    # if build_config.use_previous_split:
    #     split = get_previous_validation_bins(build_config.previous_split)
    #     datasets = split_dataset(db, dataset, build_config, split)
    # else:
    #     datasets = split_dataset(db, dataset, build_config)

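    # persist the train/validation split for later training runs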
    pickle.dump(datasets, open(dataset_db_path(config), "wb"))
Example #2
def main():
    args = parse_args()
    init_logging()

    config = Config.load_from_file(args.config_file)
    thermal_config = ThermalConfig.load_from_file(args.thermal_config_file)
    print("detecting on " + args.cptv)
    with open(args.cptv, "rb") as f:
        reader = CPTVReader(f)

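        # build the header info from the CPTV file's resolution (16-bit pixels, two bytes each)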
        headers = HeaderInfo(
            res_x=reader.x_resolution,
            res_y=reader.y_resolution,
            fps=9,
            brand="",
            model="",
            frame_size=reader.x_resolution * reader.y_resolution * 2,
            pixel_bits=16,
        )

        motion_detector = MotionDetector(
            thermal_config, config.tracking.motion_config.dynamic_thresh, None,
            headers)
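        # feed every frame of the clip through the motion detector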
        for i, frame in enumerate(reader):
            motion_detector.process_frame(frame)
Example #3
def main():
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    init_logging()
    args = parse_args()

    config = Config.load_from_file(args.config_file)
    thermal_config = ThermalConfig.load_from_file(args.thermal_config_file)

    if args.cptv:
        return parse_cptv(args.cptv, config, thermal_config)

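    # remove any stale socket file left over from a previous run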
    try:
        os.unlink(SOCKET_NAME)
    except OSError:
        if os.path.exists(SOCKET_NAME):
            raise

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(SOCKET_NAME)
    sock.listen(1)
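    # serve one connection at a time, handing each off to handle_connection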
    while True:
        logging.info("waiting for a connection")
        connection, client_address = sock.accept()
        logging.info("connection from %s", client_address)
        try:
            handle_connection(connection, config, thermal_config)
        finally:
            # Clean up the connection
            connection.close()
def main():
    init_logging()
    args = parse_args()

    config = Config.load_from_file(args.config_file)
    if args.cptv:
        return parse_cptv(args.cptv, config, args.thermal_config_file,
                          args.preview_type)

    try:
        os.unlink(SOCKET_NAME)
    except OSError:
        if os.path.exists(SOCKET_NAME):
            raise

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(SOCKET_NAME)
    sock.listen(1)
    while True:
        logging.info("waiting for a connection")
        connection, client_address = sock.accept()
        logging.info("connection from %s", client_address)
        try:
            handle_connection(connection, config, args.thermal_config_file)
        except Exception:
            logging.error("Error with connection", exc_info=True)
        finally:
            # Clean up the connection
            try:
                connection.close()
            except Exception:
                pass
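# A minimal client sketch for the Unix-socket services above. This is an
# illustration added here, not part of the original examples: it only shows how
# a client would connect to the path the server binds (SOCKET_NAME) and send
# raw bytes, since the message format handle_connection expects is not visible
# in these listings.
import socket


def send_to_service(socket_name, payload):
    # connect to the listening service and hand it the request bytes
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(socket_name)
        sock.sendall(payload)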
Example #5
def parse_params():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-target",
        default=None,
        help=
        'Target to process: "all" processes all folders, "test" runs test cases, "clean" removes banned clips from the db, or a "cptv" file runs a single source.',
    )

    parser.add_argument(
        "-p",
        "--create-previews",
        action="count",
        help="Create MP4 previews for tracks (can be slow)",
    )
    parser.add_argument(
        "-t",
        "--test-file",
        default="tests.txt",
        help="File containing test cases to run",
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="count",
                        help="Display additional information.")
    parser.add_argument(
        "-r",
        "--reprocess",
        action="count",
        help="Reprocess clips that already exist in the database",
    )
    parser.add_argument(
        "-i",
        "--show-build-information",
        action="count",
        help="Show OpenCV build information and exit.",
    )
    parser.add_argument("-c",
                        "--config-file",
                        help="Path to config file to use")
    args = parser.parse_args()

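    # showing the OpenCV build information short-circuits the normal config loading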
    if args.show_build_information:
        print(cv2.getBuildInformation())
        return None, None

    init_logging()

    config = Config.load_from_file(args.config_file)
    if args.create_previews:
        config.loader.preview = "tracking"
    if args.verbose:
        config.tracking.verbose = True

    return config, args
def main():
    init_logging()
    args = parse_args()
    config = load_config(args.config_file)
    # return
    # import yaml
    #
    # with open("defualtstest.yml", "w") as f:
    #     yaml.dump(config.as_dict(), f)
    test_clips = config.build.test_clips()
    if test_clips is None:
        test_clips = []
    logging.info("# of test clips: %s", len(test_clips))
    db_file = os.path.join(config.tracks_folder, "dataset.hdf5")
    dataset = Dataset(db_file,
                      "dataset",
                      config,
                      consecutive_segments=args.consecutive_segments)

    tracks_loaded, total_tracks = dataset.load_tracks()
    dataset.labels.sort()
    print("Loaded {}/{} tracks, found {:.1f}k segments".format(
        tracks_loaded, total_tracks,
        len(dataset.segments) / 1000))
    for key, value in dataset.filtered_stats.items():
        if value != 0:
            print("  {} filtered {}".format(key, value))

    print()
    show_tracks_breakdown(dataset)
    print()
    show_segments_breakdown(dataset)
    print()
    show_sample_frames_breakdown(dataset)
    print()
    show_cameras_breakdown(dataset)
    print()
    print("Splitting data set into train / validation")
    datasets = split_randomly(db_file, dataset, config, args, test_clips)
    validate_datasets(datasets, test_clips, args.date)

    print_counts(dataset, *datasets)

    base_dir = config.tracks_folder
    for dataset in datasets:
        dataset.saveto_numpy(base_dir)

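    # drop in-memory samples and the open db handle so each split pickles cleanly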
    for dataset in datasets:
        dataset.clear_samples()
        dataset.db = None
        logging.info("saving to %s",
                     f"{os.path.join(base_dir, dataset.name)}.dat")
        pickle.dump(dataset,
                    open(f"{os.path.join(base_dir, dataset.name)}.dat", "wb"))
def main():
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "source",
        help='a CPTV file to process, or a folder name, or "all" for all files within subdirectories of source folder.',
    )
    parser.add_argument(
        "-p",
        "--preview-type",
        help="Create MP4 previews of this type (can be slow); this overrides the config",
    )
    parser.add_argument(
        "-v", "--verbose", action="count", help="Display additional information."
    )
    parser.add_argument(
        "-o",
        "--meta-to-stdout",
        action="count",
        help="Print metadata to stdout instead of saving to file.",
    )
    parser.add_argument("-c", "--config-file", help="Path to config file to use")

    parser.add_argument(
        "-T", "--timestamps", action="store_true", help="Emit log timestamps"
    )

    parser.add_argument(
        "--cache",
        type=str2bool,
        nargs="?",
        const=True,
        default=None,
        help="Don't keep video frames in memory for later classification; cache them to disk instead (best for large videos, but slower)",
    )
    args = parser.parse_args()
    config = Config.load_from_file(args.config_file)
    config.validate()
    init_logging(args.timestamps)

    # parse command line arguments
    if args.preview_type:
        config.classify.preview = args.preview_type

    if args.verbose:
        config.classify_tracking.verbose = True

    if args.meta_to_stdout:
        config.classify.meta_to_stdout = True
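    # cache=True spills video frames to disk instead of keeping them in memory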
    extractor = TrackExtractor(config, cache_to_disk=args.cache)

    extractor.extract(args.source)
Example #8
def main():
    conf, job_name = load_config()

    init_logging()
    # tf.logging.set_verbosity(3)

    os.makedirs(conf.train.train_dir, exist_ok=True)

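    # "search" runs a hyper-parameter axis search; any other job name trains a single model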
    if job_name == "search":
        axis_search(conf)
    else:
        train_model(job_name, conf, conf.train.hyper_params)
def main():
    conf, args = load_config()

    init_logging()
    # tf.logging.set_verbosity(3)

    os.makedirs(conf.train.train_dir, exist_ok=True)
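    # train a single named model, optionally as a grid search and optionally from existing weights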
    train_model(
        args.name,
        conf,
        conf.train.hyper_params,
        grid_search=args.grid,
        weights=args.weights,
    )
def main():
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    init_logging()
    args = parse_args()

    config = Config.load_from_file()
    thermal_config = ThermalConfig.load_from_file()
    processor = None
    if thermal_config.motion.run_classifier:
        classifier = get_classifier(config)
        processor = PiClassifier(config, thermal_config, classifier)
    else:
        processor = MotionDetector(
            config.res_x,
            config.res_y,
            thermal_config,
            config.tracking.dynamic_thresh,
            CPTVRecorder(thermal_config),
        )
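    # with a CPTV file, run the processor offline over the recording instead of starting the socket service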
    if args.cptv:
        with open(args.cptv, "rb") as f:
            reader = CPTVReader(f)
            for frame in reader:
                processor.process_frame(frame)

        processor.disconnected()
        return

    service = SnapshotService(processor)
    try:
        os.unlink(SOCKET_NAME)
    except OSError:
        if os.path.exists(SOCKET_NAME):
            raise

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
    sock.bind(SOCKET_NAME)
    sock.listen(1)
    while True:
        logging.info("waiting for a connection")
        connection, client_address = sock.accept()
        logging.info("connection from %s", client_address)
        try:
            handle_connection(connection, processor)
        finally:
            # Clean up the connection
            connection.close()
Example #11
def main():
    init_logging()
    args = parse_args()
    conf = load_config(args)

    global null_tags, classes
    null_tags = conf.evaluate.null_tags
    classes = conf.labels

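    # group classified clips into visits and print either the extended evaluation or a summary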
    visits = get_visits(conf.classify.classify_folder,
                        conf.evaluate.new_visit_threshold)

    if conf.evaluate.show_extended_evaluation:
        print_evaluation(visits)
    else:
        print_summary(visits)
def main():
    init_logging()
    args = parse_args()

    config = Config.load_from_file(args.config_file)
    service = ClassifyService(config)
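    # keep the service running; restart on any error except keyboard interrupts and permission problems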
    while True:
        try:
            service.run()
        except KeyboardInterrupt:
            logging.info("Keyboard interrupt, closing down")
            break
        except PermissionError:
            logging.error("Error with permissions", exc_info=True)
            break
        except Exception:
            logging.error("Error with service, restarting", exc_info=True)
def main():
    args = parse_args()
    init_logging()

    config = Config.load_from_file()
    thermal_config = ThermalConfig.load_from_file()
    location_config = LocationConfig.load_from_file()
    res_x = config.res_x
    res_y = config.res_y
    print("detecting on " + args.cptv)
    motion_detector = MotionDetector(
        res_x,
        res_y,
        thermal_config.motion,
        location_config,
        thermal_config.recorder,
        config.tracking.dynamic_thresh,
        None,
    )
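    # replay the CPTV recording through the motion detector frame by frame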
    with open(args.cptv, "rb") as f:
        reader = CPTVReader(f)
        for i, frame in enumerate(reader):
            motion_detector.process_frame(frame)
Example #14
def main():
    init_logging()
    args = parse_args()
    config = load_config(args.config_file)
    db = TrackDatabase(os.path.join(config.tracks_folder, "dataset.hdf5"))
    dataset = Dataset(
        db, "dataset", config, consecutive_segments=args.consecutive_segments
    )
    tracks_loaded, total_tracks = dataset.load_tracks(before_date=args.date)
    print(
        "Loaded {}/{} tracks, found {:.1f}k segments".format(
            tracks_loaded, total_tracks, len(dataset.segments) / 1000
        )
    )
    for key, value in dataset.filtered_stats.items():
        if value != 0:
            print("  {} filtered {}".format(key, value))
    print()
    show_tracks_breakdown(dataset)
    print()
    show_segments_breakdown(dataset)
    print()
    show_important_frames_breakdown(dataset)
    print()
    show_cameras_breakdown(dataset)
    print()

    print("Splitting data set into train / validation")
    datasets = split_dataset_by_cameras(db, dataset, config, args)
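    # default the test cut-off to one week ago when no date was supplied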
    if args.date is None:
        args.date = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=7)
    test = test_dataset(db, config, args.date)
    datasets = (*datasets, test)
    print_counts(dataset, *datasets)
    print_cameras(*datasets)
    pickle.dump(datasets, open(dataset_db_path(config), "wb"))
def preloader(
    q,
    epoch_queue,
    labels,
    name,
    db,
    segments_by_id,
    params,
    label_mapping,
    numpy_meta,
):
    global train_queue, jobs
    train_queue = q
    init_logging()
    # add segments into the preload buffer
    logging.info(
        " -started async fetcher for %s augment=%s numpyfile %s preload amount %s mem %s",
        name,
        params.augment,
        numpy_meta.filename,
        params.maximum_preload,
        psutil.Process(os.getpid()).memory_info().rss / 1024**2,
    )
    epoch = 0

    # this does the data pre-processing
    p_list = []
    processes = 1
    if name == "train":
        processes = 4

    preload_amount = max(1, params.maximum_preload)
    max_jobs = max(1, 2 * preload_amount // 3)
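    # each pass through this loop preprocesses one epoch's worth of batches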
    while True:
        with multiprocessing.Pool(
                processes,
                init_process,
                (labels, params, label_mapping),
                maxtasksperchild=30,
        ) as pool:
            item = get_with_timeout(epoch_queue, 1,
                                    f"epoch_queue preloader {name}")
            if item == "STOP":
                logging.info("%s preloader received stop", name)
                pool.terminate()
                return
            try:
                epoch, batches = item
                count = 0

                logging.debug(
                    "%s preloader got %s batches for epoch %s mem %s",
                    name,
                    len(batches),
                    epoch,
                    psutil.Process(os.getpid()).memory_info().rss / 1024**2,
                )
                total = 0

                # Once process_batch starts to back up
                loaded_up_to = 0
                while len(batches) > 0:
                    logging.debug(
                        "%s preloader memory %s",
                        name,
                        psutil.Process(os.getpid()).memory_info().rss /
                        1024**2,
                    )

                    while jobs > max_jobs:
                        logging.debug("%s waiting for jobs to complete %s",
                                      name, jobs)
                        time.sleep(5)
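                    # take the next slice of batches and pull their frames from the numpy cache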
                    next_load = batches[:preload_amount]

                    logging.debug(
                        "%s preloader loading %s - %s ",
                        name,
                        loaded_up_to,
                        loaded_up_to + len(next_load),
                    )
                    loaded_up_to = loaded_up_to + len(next_load)

                    batch_data, track_frames = load_batch_frames(
                        numpy_meta,
                        next_load,
                        segments_by_id,
                        name,
                    )
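                    # split the loaded batches into chunks and queue each chunk to the worker pool asynchronously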
                    chunk_size = len(batch_data) // processes
                    data = []
                    for batch_i, segments in enumerate(batch_data):
                        start = time.time()
                        segment_data = [None] * len(segments)
                        for i, seg in enumerate(segments):
                            frame_data = get_cached_frames(track_frames, seg)
                            segment_data[i] = frame_data
                        data.append((segments, segment_data))
                        batch_data[batch_i] = None
                        if len(data) > chunk_size or batch_i == (
                                len(batch_data) - 1):
                            pool.map_async(process_batch,
                                           data,
                                           callback=processed_data)
                            jobs += len(data)
                            data = []
                    del batch_data
                    del track_frames
                    del batches[:preload_amount]
                    gc.collect()
                    logging.debug(
                        "%s preloader loaded  up to %s",
                        name,
                        loaded_up_to,
                    )
                    total += 1
                del batches
                gc.collect()
                logging.info("%s preloader loaded epoch %s batches", name,
                             epoch)
                pool.close()
                pool.join()
            except Exception as inst:
                logging.error("%s preloader epoch %s error %s",
                              name,
                              epoch,
                              inst,
                              exc_info=True)
def main():
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "source",
        help=
        'a CPTV file to process, or a folder name, or "all" for all files within subdirectories of source folder.',
    )
    parser.add_argument(
        "-p",
        "--preview-type",
        help=
        "Create MP4 previews of this type (can be slow); this overrides the config",
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="count",
                        help="Display additional information.")
    parser.add_argument(
        "--start-date",
        help=
        "Only clips on or after this day will be processed (format YYYY-MM-DD)",
    )
    parser.add_argument(
        "--end-date",
        help=
        "Only clips on or before this day will be processed (format YYYY-MM-DD)",
    )
    parser.add_argument("-c",
                        "--config-file",
                        help="Path to config file to use")
    parser.add_argument(
        "--processor-folder",
        help=
        "When running from thermal-processing use this to specify the folder for both the source cptv and output mp4. With this option the metadata will be sent to stdout.",
    )
    parser.add_argument("-T",
                        "--timestamps",
                        action="store_true",
                        help="Emit log timestamps")
    parser.add_argument(
        "-m",
        "--model-file",
        help="Path to model file to use, will override config model",
    )

    args = parser.parse_args()

    config = Config.load_from_file(args.config_file)
    config.validate()
    init_logging(args.timestamps)

    # parse command line arguments
    if args.preview_type:
        config.classify.preview = args.preview_type

    if args.verbose:
        config.classify_tracking.verbose = True

    if args.processor_folder:
        config.classify.meta_to_stdout = True
        config.base_data_folder = args.processor_folder
        config.classify.classify_folder = args.processor_folder
        config.source_folder = args.processor_folder

    model_file = config.classify.model
    if args.model_file:
        model_file = args.model_file
    clip_classifier = ClipClassifier(config, config.classify_tracking,
                                     model_file)

    # parse start and end dates
    if args.start_date:
        clip_classifier.start_date = datetime.strptime(args.start_date,
                                                       "%Y-%m-%d")
    if args.end_date:
        clip_classifier.end_date = datetime.strptime(args.end_date, "%Y-%m-%d")

    if config.classify.preview != Previewer.PREVIEW_NONE:
        logging.info("Creating previews")

    if not config.use_gpu:
        logging.info("GPU mode disabled.")

    if not os.path.exists(clip_classifier.model_file + ".meta"):
        logging.error(
            "No model found named '{}'.".format(clip_classifier.model_file +
                                                ".meta"))
        exit(13)

    # just fetch the classifier now so it doesn't impact the benchmarking on the first clip analysed.
    _ = clip_classifier.classifier

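    # a single .cptv argument is classified directly; anything else is treated as a folder to scan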
    if os.path.splitext(args.source)[-1].lower() == ".cptv":
        source_file = tools.find_file_from_cmd_line(config.source_folder,
                                                    args.source)
        if source_file is None:
            return
        clip_classifier.process_file(source_file)
    else:
        folder = config.source_folder
        if args.source != "all":
            folder = os.path.join(config.source_folder, args.source)
        clip_classifier.process_all(folder)
def main():
    config, args = parse_params()
    init_logging()

    if config and args:
        load_clips(config, args)
def main():
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "source",
        help=
        'a CPTV file to process, or a folder name, or "all" for all files within subdirectories of source folder.',
    )
    parser.add_argument(
        "-p",
        "--preview-type",
        help=
        "Create MP4 previews of this type (can be slow); this overrides the config",
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="count",
                        help="Display additional information.")
    parser.add_argument("-c",
                        "--config-file",
                        help="Path to config file to use")
    parser.add_argument(
        "-o",
        "--meta-to-stdout",
        action="count",
        help="Print metadata to stdout instead of saving to file.",
    )
    parser.add_argument("-T",
                        "--timestamps",
                        action="store_true",
                        help="Emit log timestamps")
    parser.add_argument(
        "-m",
        "--model-file",
        help="Path to model file to use, will override config model",
    )
    parser.add_argument(
        "--reuse-prediction-frames",
        action="count",
        help="Use supplied prediction frames from metadata.txt",
    )
    parser.add_argument(
        "--cache",
        type=str2bool,
        nargs="?",
        const=True,
        default=None,
        help=
        "Don't keep video frames in memory for later classification; cache them to disk instead (best for large videos, but slower)",
    )
    args = parser.parse_args()
    config = Config.load_from_file(args.config_file)
    config.validate()
    init_logging(args.timestamps)

    # parse command line arguments
    if args.preview_type:
        config.classify.preview = args.preview_type

    if args.verbose:
        config.classify_tracking.verbose = True

    if args.meta_to_stdout:
        config.classify.meta_to_stdout = True
    model = None
    keras_model = None
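    # a model file given on the command line is wrapped in a minimal ModelConfig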
    if args.model_file:
        model = ModelConfig.load({
            "id": 1,
            "model_file": args.model_file,
            "name": args.model_file
        })
        model.validate()
    clip_classifier = ClipClassifier(
        config,
        model,
    )
    clip_classifier.process(
        args.source,
        cache=args.cache,
        reuse_frames=args.reuse_prediction_frames,
    )