def test_bbb_repo_iter_fnames_filtered(tmpdir):
    """iter_fnames with fname_filter yields exactly the accepted filenames."""
    repo = Repository(str(tmpdir.join('complex_from_to_and_cam')))
    span = 200
    # Same containers for two cameras: all cam 0 spans first, then cam 1.
    begin_end_cam_id = [(ts, ts + span, cam)
                        for cam in (0, 1)
                        for ts in range(0, 10000, span)]

    fill_repository(repo, begin_end_cam_id)
    begin, end, cam = 2500, 5000, 0
    all_fnames = list(repo.iter_fnames(begin, end, cam))

    picked = np.random.choice(all_fnames, size=5, replace=False)
    picked = {os.path.basename(name) for name in picked}

    class FnameFilter():
        def __init__(self, fnames):
            self.fnames = fnames

        def __call__(self, fname):
            return fname in self.fnames

    surviving = set(repo.iter_fnames(begin, end, cam,
                                     fname_filter=FnameFilter(picked)))
    surviving = {os.path.basename(name) for name in surviving}

    assert surviving == picked
Esempio n. 2
0
def test_bbb_repo_iter_fnames_filtered(tmpdir):
    """Filtering iter_fnames through a callable keeps only the chosen files."""
    repo = Repository(str(tmpdir.join('complex_from_to_and_cam')))
    span = 200
    cam0_spans = [(start, start + span, 0) for start in range(0, 10000, span)]
    cam1_spans = [(start, start + span, 1) for start in range(0, 10000, span)]
    fill_repository(repo, cam0_spans + cam1_spans)

    begin = 2500
    end = 5000
    cam = 0
    fnames = list(repo.iter_fnames(begin, end, cam))

    chosen = np.random.choice(fnames, size=5, replace=False)
    chosen = set(os.path.basename(name) for name in chosen)

    class FnameFilter():
        def __init__(self, fnames):
            self.fnames = fnames

        def __call__(self, fname):
            return fname in self.fnames

    kept = set(repo.iter_fnames(begin, end, cam,
                                fname_filter=FnameFilter(chosen)))
    kept = set(os.path.basename(name) for name in kept)

    assert kept == chosen
Esempio n. 3
0
def test_generator_processor_video(tmpdir, bees_video, filelists_path,
                                   pipeline_config):
    """Processing one video writes a single repo file holding three frames."""
    repo = Repository(str(tmpdir))
    pipeline = Pipeline([Image, Timestamp], [PipelineResult],
                        **pipeline_config)
    processor = GeneratorProcessor(pipeline,
                                   lambda: BBBinaryRepoSink(repo, camId=0))

    frames = video_generator(bees_video,
                             ts_format="2015",
                             path_filelists=filelists_path)
    processor(frames)

    assert len(list(repo.iter_fnames())) == 1

    prev_ts = 0
    total_frames = 0
    for fname in repo.iter_fnames():
        print("{}: {}".format(fname, os.path.getsize(fname)))
        with open(fname, "rb") as fobj:
            container = FrameContainer.read(fobj)
            total_frames += len(list(container.frames))
        assert container.dataSources[0].filename == os.path.basename(bees_video)
        # Containers must come back in strictly increasing time order.
        assert prev_ts < container.fromTimestamp
        prev_ts = container.fromTimestamp

    assert total_frames == 3
Esempio n. 4
0
def test_generator_processor(tmpdir, bees_image, pipeline_config):
    """Running the processor twice yields two files with increasing timestamps."""
    def image_generator():
        base_ts = time.time()
        source = DataSource.new_message(filename='bees.jpeg')
        for offset in range(2):
            img = imread(bees_image)
            yield source, img, base_ts + offset

    repo = Repository(str(tmpdir))
    pipeline = Pipeline([Image, Timestamp], [PipelineResult], **pipeline_config)
    processor = GeneratorProcessor(
        pipeline, lambda: BBBinaryRepoSink(repo, camId=2))

    processor(image_generator())
    processor(image_generator())
    assert len(list(repo.iter_fnames())) == 2

    prev_ts = 0
    for fname in repo.iter_fnames():
        print("{}: {}".format(fname, os.path.getsize(fname)))
        with open(fname, 'rb') as fobj:
            container = FrameContainer.read(fobj)
        assert container.dataSources[0].filename == 'bees.jpeg'
        assert prev_ts < container.fromTimestamp
        prev_ts = container.fromTimestamp
Esempio n. 5
0
def test_generator_processor_threads(tmpdir, bees_video, filelists_path,
                                     pipeline_config):
    """Several pipelines feeding one sink still produce a single repo file."""
    repo = Repository(str(tmpdir))
    pipelines = [Pipeline([Image, Timestamp], [PipelineResult],
                          **pipeline_config) for _ in range(3)]
    processor = GeneratorProcessor(pipelines,
                                   lambda: BBBinaryRepoSink(repo, camId=0))

    processor(video_generator(bees_video,
                              ts_format="2015",
                              path_filelists=filelists_path))

    assert len(list(repo.iter_fnames())) == 1

    total_frames = 0
    for fname in repo.iter_fnames():
        with open(fname, "rb") as fobj:
            total_frames += len(list(FrameContainer.read(fobj).frames))

    assert total_frames == 3
def test_bbb_iter_small_frame_window(tmpdir):
    """Tests that iter_frame returns frames if time window is small."""
    repo = Repository(str(tmpdir.join('frames_from_to_small_window')))
    repo_start = 0
    n_containers = 10
    span = 1000
    repo_end = repo_start + n_containers * span
    # One container per span, one frame per timestamp inside it.
    for begin in range(repo_start, repo_end, span):
        end = begin + span
        fc = build_frame_container(begin, end, 0)
        fc.init('frames', span)
        for idx, tstamp in enumerate(range(begin, end)):
            frame = fc.frames[idx]
            frame.id = tstamp
            frame.timestamp = tstamp
        repo.add(fc)

    # A window far smaller than one container must still yield frames.
    frames = list(repo.iter_frames(1, 10))
    assert len(frames) > 0

    # Same for iter_fnames with a small window in the second container.
    fnames = list(repo.iter_fnames(1001, 1011))
    assert len(fnames) > 0
Esempio n. 7
0
def test_bbb_repo_iter_fnames_from_to_and_cam(tmpdir):
    """Time window plus cam id selects exactly the matching files, in order."""
    repo = Repository(str(tmpdir.join('complex_from_to_and_cam')))
    span = 200
    begin_end_cam_id = ([(ts, ts + span, 0) for ts in range(0, 10000, span)] +
                        [(ts, ts + span, 1) for ts in range(0, 10000, span)])

    fill_repository(repo, begin_end_cam_id)
    begin, end, cam = 2500, 5000, 0
    fnames = list(repo.iter_fnames(begin, end, cam))
    for fname in fnames:
        assert os.path.isabs(fname)
    fbasenames = [os.path.basename(f) for f in fnames]
    print(begin_end_cam_id)
    # Keep the spans overlapping [begin, end) that belong to this camera.
    matching = [p for p in begin_end_cam_id
                if begin <= p[1] and p[0] < end and p[2] == cam]
    expected_fnames = [
        os.path.basename(repo._get_filename(*p, extension='bbb'))
        for p in matching
    ]
    assert fbasenames == expected_fnames
Esempio n. 8
0
def test_bbb_iter_small_frame_window(tmpdir):
    """Tests that iter_frame returns frames if time window is small."""
    repo = Repository(str(tmpdir.join('frames_from_to_small_window')))
    span = 1000
    container_spans = [(start, start + span, 0)
                       for start in range(0, 10 * span, span)]
    for start, stop, cam_id in container_spans:
        container = build_frame_container(start, stop, cam_id)
        container.init('frames', span)
        # One frame per timestamp in the container's span.
        for offset, stamp in enumerate(range(start, stop)):
            frame = container.frames[offset]
            frame.id = stamp
            frame.timestamp = stamp
        repo.add(container)

    # Window of 9 timestamps inside the first container.
    begin = 1
    end = 10
    result = list(repo.iter_frames(begin, end))
    assert len(result) > 0

    # Window of 10 timestamps inside the second container.
    begin = 1001
    end = 1011
    result = list(repo.iter_fnames(begin, end))
    assert len(result) > 0
Esempio n. 9
0
def get_files(path):
    """Return the repository's filenames as an (n/4, 4) numpy array.

    Assumes the number of files is divisible by four -- presumably one
    file per camera for each time slot; TODO confirm against the
    repository layout.

    :param path: path of the bb_binary repository to read.
    :return: numpy array of filename strings with four columns.
    """
    repo = Repository(path)
    fnames = list(repo.iter_fnames())
    # Integer division replaces the old float-divide-then-int() round trip,
    # and we no longer shadow the `file` builtin or build a throwaway list
    # of name fragments just to take its length.
    return np.array(fnames).reshape(len(fnames) // 4, 4)
Esempio n. 10
0
def test_bbb_repo_iter_fnames_2_files_and_1_symlink_per_directory(tmpdir):
    """Files with spans overlapping the next directory are listed once, in order."""
    repo = Repository(str(tmpdir.join('2_files_and_1_symlink_per_directory')))
    span = 500
    # Each span overhangs the next slot by 100 time units.
    begin_end_cam_id = [(start, start + span + 100, 0)
                        for start in range(0, 10000, span)]

    fill_repository(repo, begin_end_cam_id)

    actual = [os.path.basename(name) for name in repo.iter_fnames()]
    expected = [os.path.basename(repo._get_filename(*params, extension='bbb'))
                for params in begin_end_cam_id]
    assert actual == expected
Esempio n. 11
0
def test_generator_processor_threads(tmpdir, bees_video, filelists_path, pipeline_config):
    """Three parallel pipelines over one sink write a single three-frame file."""
    repo = Repository(str(tmpdir))
    pipelines = []
    for _ in range(3):
        pipelines.append(Pipeline([Image, Timestamp], [PipelineResult],
                                  **pipeline_config))
    gen_processor = GeneratorProcessor(
        pipelines, lambda: BBBinaryRepoSink(repo, camId=0))

    gen_processor(video_generator(bees_video, ts_format='2015',
                                  path_filelists=filelists_path))

    fnames = list(repo.iter_fnames())
    assert len(fnames) == 1

    frame_count = 0
    for fname in fnames:
        with open(fname, 'rb') as handle:
            frame_count += len(list(FrameContainer.read(handle).frames))

    assert frame_count == 3
Esempio n. 12
0
def test_bbb_repo_end_after_last_file(tmpdir):
    """A query window ending past the last file still yields absolute paths."""
    repo = Repository(str(tmpdir.join('complex_from_to')))
    span = 1500
    fill_repository(repo, [(start, start + span, 0)
                           for start in range(0, 10000, span)])
    # end=11000 lies beyond the last stored container.
    for fname in list(repo.iter_fnames(2500, 11000)):
        assert os.path.isabs(fname)
Esempio n. 13
0
def test_bbb_repo_end_after_last_file(tmpdir):
    """Requesting an end time after the last stored file must not fail."""
    repo = Repository(str(tmpdir.join('complex_from_to')))
    span = 1500
    spans = [(ts, ts + span, 0) for ts in range(0, 10000, span)]

    fill_repository(repo, spans)
    begin = 2500
    end = 11000
    results = list(repo.iter_fnames(begin, end))
    assert all(os.path.isabs(name) for name in results)
Esempio n. 14
0
def test_no_detection(tmpdir, pipeline_config):
    """A blank image produces one frame container with zero detections."""
    repo = Repository(str(tmpdir))
    sink = BBBinaryRepoSink(repo, camId=0)

    pipeline = Pipeline([Image, Timestamp], [PipelineResult], **pipeline_config)

    # All-zero image: the pipeline should find nothing in it.
    blank = np.zeros((3000, 4000), dtype=np.uint8)

    results = pipeline([blank, 0])
    sink.add_frame(DataSource.new_message(filename='source'), results, 0)
    sink.finish()

    fnames = list(repo.iter_fnames())
    assert len(fnames) == 1

    for fname in fnames:
        with open(fname, 'rb') as handle:
            container = FrameContainer.read(handle)
            assert len(container.frames) == 1
            assert container.dataSources[0].filename == 'source'

            frame = container.frames[0]
            assert len(frame.detectionsUnion.detectionsDP) == 0
Esempio n. 15
0
def test_bbb_repo_iter_fnames_missing_directories(tmpdir):
    """iter_fnames walks over gaps in the directory layout without errors."""
    repo = Repository(str(tmpdir.join('missing_directories')))
    span = 1500
    params = [(start, start + span, 0) for start in range(0, 10000, span)]

    fill_repository(repo, params)
    fnames = list(repo.iter_fnames())
    assert all(os.path.isabs(name) for name in fnames)
    actual = [os.path.basename(name) for name in fnames]
    expected = [os.path.basename(repo._get_filename(*p, extension='bbb'))
                for p in params]
    assert actual == expected
Esempio n. 16
0
def test_bbb_repo_iter_fnames_2_files_and_1_symlink_per_directory(tmpdir):
    """Spans overhanging the next directory slot still yield each file once."""
    repo = Repository(str(tmpdir.join('2_files_and_1_symlink_per_directory')))
    span = 500
    begin_end_cam_id = [(ts, ts + span + 100, 0)
                        for ts in range(0, 10000, span)]

    fill_repository(repo, begin_end_cam_id)

    expected_fnames = []
    for params in begin_end_cam_id:
        full_path = repo._get_filename(*params, extension='bbb')
        expected_fnames.append(os.path.basename(full_path))
    fnames = [os.path.basename(f) for f in repo.iter_fnames()]
    assert fnames == expected_fnames
Esempio n. 17
0
def check_repo(path, bees_video):
    """Assert the repo at *path* holds exactly three frames from *bees_video*."""
    repo = Repository(path)

    prev_ts = 0
    total_frames = 0
    for fname in repo.iter_fnames():
        print("{}: {}".format(fname, os.path.getsize(fname)))
        with open(fname, 'rb') as handle:
            container = FrameContainer.read(handle)
            total_frames += len(list(container.frames))
        assert container.dataSources[0].filename == os.path.basename(bees_video)
        # Container start times must be strictly increasing.
        assert prev_ts < container.fromTimestamp
        prev_ts = container.fromTimestamp

    assert total_frames == 3
Esempio n. 18
0
def check_repo(path, bees_video):
    """Verify the repository: three frames total, all sourced from *bees_video*,
    with strictly increasing container timestamps."""
    repo = Repository(path)

    last_seen = 0
    frames_seen = 0
    for name in repo.iter_fnames():
        print("{}: {}".format(name, os.path.getsize(name)))
        with open(name, 'rb') as stream:
            fc = FrameContainer.read(stream)
            frames_seen += len(list(fc.frames))
        assert fc.dataSources[0].filename == os.path.basename(bees_video)
        assert last_seen < fc.fromTimestamp
        last_seen = fc.fromTimestamp

    assert frames_seen == 3
Esempio n. 19
0
def test_generator_processor_video(tmpdir, bees_video, filelists_path, pipeline_config):
    """End-to-end: one video in, one repo file with three ordered frames out."""
    repo = Repository(str(tmpdir))
    pipeline = Pipeline([Image, Timestamp], [PipelineResult], **pipeline_config)
    processor = GeneratorProcessor(
        pipeline, lambda: BBBinaryRepoSink(repo, camId=0))

    processor(video_generator(bees_video, ts_format='2015',
                              path_filelists=filelists_path))

    fnames = list(repo.iter_fnames())
    assert len(fnames) == 1

    newest_ts = 0
    counted = 0
    for fname in fnames:
        print("{}: {}".format(fname, os.path.getsize(fname)))
        with open(fname, 'rb') as handle:
            fc = FrameContainer.read(handle)
            counted += len(list(fc.frames))
        assert fc.dataSources[0].filename == os.path.basename(bees_video)
        assert newest_ts < fc.fromTimestamp
        newest_ts = fc.fromTimestamp

    assert counted == 3
Esempio n. 20
0
def test_bbb_repo_iter_fnames_missing_directories(tmpdir):
    """Gaps between populated directories do not break full iteration."""
    repo = Repository(str(tmpdir.join('missing_directories')))
    span = 1500
    begin_end_cam_id = [(ts, ts + span, 0) for ts in range(0, 10000, span)]

    fill_repository(repo, begin_end_cam_id)
    fnames = list(repo.iter_fnames())
    for fname in fnames:
        assert os.path.isabs(fname)
    basenames = list(map(os.path.basename, fnames))
    expected = [
        os.path.basename(repo._get_filename(*params, extension='bbb'))
        for params in begin_end_cam_id
    ]
    assert basenames == expected
    def handle(self, *args, **options):
        """Import every frame container under ``repo_path`` into the database."""
        repo = Repository(options['repo_path'])
        fnames = list(repo.iter_fnames())
        for fname in try_tqdm(fnames):
            fc = load_frame_container(fname)
            container_row = FrameContainer(fc_id=fc.id,
                                           fc_path=fname,
                                           video_name=fc.dataSources[0].filename)
            container_row.save()

            # One transaction per container keeps its frame inserts atomic.
            with transaction.atomic():
                for frame in fc.frames:
                    Frame(fc=container_row,
                          frame_id=frame.id,
                          index=frame.frameIdx,
                          timestamp=frame.timestamp).save()


# start with python manage.py make_db_repo [repo_path]
Esempio n. 22
0
def test_bbb_repo_iter_fnames_from_to(tmpdir):
    """A begin/end window selects exactly the overlapping containers, in order."""
    repo = Repository(str(tmpdir.join('complex_from_to')))
    span = 1500
    begin_end_cam_id = [(ts, ts + span, 0) for ts in range(0, 10000, span)]

    fill_repository(repo, begin_end_cam_id)
    begin, end = 2500, 5000
    fnames = list(repo.iter_fnames(begin, end))
    for fname in fnames:
        assert os.path.isabs(fname)
    fbasenames = [os.path.basename(f) for f in fnames]
    # Containers overlapping [begin, end) should be returned.
    overlapping = [p for p in begin_end_cam_id
                   if begin <= p[1] and p[0] < end]
    print(overlapping)
    expected_fnames = [os.path.basename(repo._get_filename(*p, extension='bbb'))
                       for p in overlapping]
    print(expected_fnames)
    print(fbasenames)
    assert fbasenames == expected_fnames
Esempio n. 23
0
def test_bbb_repo_iter_fnames_from_to(tmpdir):
    """Containers overlapping [begin, end) come back as absolute paths, in order."""
    repo = Repository(str(tmpdir.join('complex_from_to')))
    span = 1500
    params = [(start, start + span, 0) for start in range(0, 10000, span)]

    fill_repository(repo, params)
    begin = 2500
    end = 5000
    results = list(repo.iter_fnames(begin, end))
    assert all(os.path.isabs(name) for name in results)
    got = [os.path.basename(name) for name in results]
    in_window = list(filter(lambda p: begin <= p[1] and p[0] < end, params))
    print(in_window)
    want = [os.path.basename(repo._get_filename(*p, extension='bbb'))
            for p in in_window]
    print(want)
    print(got)
    assert got == want
Esempio n. 24
0
def test_bbb_repo_iter_fnames_from_to_and_cam(tmpdir):
    """Combining a time window with a cam id filters on both criteria."""
    repo = Repository(str(tmpdir.join('complex_from_to_and_cam')))
    span = 200
    cam0_spans = [(ts, ts + span, 0) for ts in range(0, 10000, span)]
    cam1_spans = [(ts, ts + span, 1) for ts in range(0, 10000, span)]

    all_params = cam0_spans + cam1_spans

    fill_repository(repo, all_params)
    begin = 2500
    end = 5000
    cam = 0
    fnames = list(repo.iter_fnames(begin, end, cam))
    for fname in fnames:
        assert os.path.isabs(fname)
    print(all_params)
    matching = [p for p in all_params
                if begin <= p[1] and p[0] < end and p[2] == cam]
    expected = [os.path.basename(repo._get_filename(*p, extension='bbb'))
                for p in matching]
    assert [os.path.basename(f) for f in fnames] == expected
Esempio n. 25
0
	def load_data( self ):
		"""Load detections from the configured data folder into self.dset_store.

		Reads only the first frame container found at or after the configured
		start time, frames FRAME_START..FRAME_END inclusive, and fills the
		detection-set store while driving the Qt progress widgets.  Any
		failure is silently swallowed (see the bare except below).
		"""

		if not os.path.exists( config.DATA_FOLDER ):
			print 'Error: folder not found'
			return

		# Disable the UI while loading to avoid re-entrant event handling.
		self.block_inputs( True )

		self.dset_store = ds.DetectionSetStore()
		self.path_manager = None
		self.paths_load_progress.setValue( 0 )
		self.paths_load_label.setText( '' )


		try:
			repo = Repository( config.DATA_FOLDER )
			start_time = datetime(
				config.DATE[ 0 ], config.DATE[ 1 ], config.DATE[ 2 ],
				config.TIME[ 0 ], config.TIME[ 1 ],
				tzinfo=pytz.utc
			)

			fnames = repo.iter_fnames( begin=start_time )
			for fname in fnames:

				frame_container = load_frame_container( fname )

				cam = frame_container.camId
				#frame_container.fromTimestamp              # already available
				#frame_container.toTimestamp                # already available

				self.dset_store.source = frame_container.dataSources[ 0 ].filename

				previous_timestamp = None

				self.data_load_progress.setMaximum( config.FRAME_END + 1 - config.FRAME_START )
				self.app.processEvents()

				frame_index = config.FRAME_START

				# NOTE(review): frame.timestamp is not used; timestamps are
				# synthesized from the frame index and camera id instead.
				for frame in list( frame_container.frames )[ config.FRAME_START : config.FRAME_END + 1 ]:

					#timestamp = frame.timestamp  # not included yet
					#frame.id                     # not included yet

					timestamp = ds.TimeStamp( frame_index, cam )
					timestamp.connect_with_previous( previous_timestamp )
					previous_timestamp = timestamp

					dset = ds.DetectionSet()
					self.dset_store.store[ timestamp ] = dset

					data = convert_frame_to_numpy( frame )

					for detection_data in data:

						dset.add_detection( ds.Detection(
							detection_data[ 'idx' ],
							timestamp,
							np.array( [ detection_data[ 'ypos' ], detection_data[ 'xpos' ] ] ),  # rotated, otherwise will be portrait orientation
							detection_data[ 'localizerSaliency' ],
							detection_data[ 'decodedId' ][::-1]  # reversed, we want least significant bit last
						) )

					frame_index += 1

					self.data_load_progress.setValue( frame_index - config.FRAME_START )
					self.app.processEvents()

				self.data_load_label.setText( str( len( self.dset_store.store ) ) + ' frames loaded' )
				self.app.processEvents()

				# break because we only load the first fname
				break

		except:

			# NOTE(review): bare except hides all errors, including bugs;
			# consider at least logging before swallowing.
			pass

		self.block_inputs( False )
Esempio n. 26
0
def get_files(path, camid):
    """Return all repository filenames recorded for camera *camid*."""
    repo = Repository(path)
    return list(repo.iter_fnames(cam=camid))
Esempio n. 27
0
def test_bbb_repo_iter_fnames_empty(tmpdir):
    """An empty repository yields no filenames at all."""
    repo = Repository(str(tmpdir.join('empty')))
    fnames = list(repo.iter_fnames())
    assert fnames == []
Esempio n. 28
0
def test_bbb_repo_iter_fnames_empty(tmpdir):
    """iter_fnames on a freshly created repository returns nothing."""
    empty_repo = Repository(str(tmpdir.join('empty')))
    assert list(empty_repo.iter_fnames()) == []
Esempio n. 29
0
    def load_data(self):
        """Load detections from the configured data folder into self.dset_store.

        Reads only the first frame container found at or after the configured
        start time, frames FRAME_START..FRAME_END inclusive, and fills the
        detection-set store while driving the Qt progress widgets.  Any
        failure is silently swallowed (see the bare except below).
        """

        if not os.path.exists(config.DATA_FOLDER):
            print 'Error: folder not found'
            return

        # Disable the UI while loading to avoid re-entrant event handling.
        self.block_inputs(True)

        self.dset_store = ds.DetectionSetStore()
        self.path_manager = None
        self.paths_load_progress.setValue(0)
        self.paths_load_label.setText('')

        try:
            repo = Repository(config.DATA_FOLDER)
            start_time = datetime(config.DATE[0],
                                  config.DATE[1],
                                  config.DATE[2],
                                  config.TIME[0],
                                  config.TIME[1],
                                  tzinfo=pytz.utc)

            fnames = repo.iter_fnames(begin=start_time)
            for fname in fnames:

                frame_container = load_frame_container(fname)

                cam = frame_container.camId
                #frame_container.fromTimestamp              # already available
                #frame_container.toTimestamp                # already available

                self.dset_store.source = frame_container.dataSources[
                    0].filename

                previous_timestamp = None

                self.data_load_progress.setMaximum(config.FRAME_END + 1 -
                                                   config.FRAME_START)
                self.app.processEvents()

                frame_index = config.FRAME_START

                # NOTE(review): frame.timestamp is not used; timestamps are
                # synthesized from the frame index and camera id instead.
                for frame in list(frame_container.frames
                                  )[config.FRAME_START:config.FRAME_END + 1]:

                    #timestamp = frame.timestamp  # not included yet
                    #frame.id                     # not included yet

                    timestamp = ds.TimeStamp(frame_index, cam)
                    timestamp.connect_with_previous(previous_timestamp)
                    previous_timestamp = timestamp

                    dset = ds.DetectionSet()
                    self.dset_store.store[timestamp] = dset

                    data = convert_frame_to_numpy(frame)

                    for detection_data in data:

                        dset.add_detection(
                            ds.Detection(
                                detection_data['idx'],
                                timestamp,
                                np.array(
                                    [
                                        detection_data['ypos'],
                                        detection_data['xpos']
                                    ]
                                ),  # rotated, otherwise will be portrait orientation
                                detection_data['localizerSaliency'],
                                detection_data['decodedId']
                                [::
                                 -1]  # reversed, we want least significant bit last
                            ))

                    frame_index += 1

                    self.data_load_progress.setValue(frame_index -
                                                     config.FRAME_START)
                    self.app.processEvents()

                self.data_load_label.setText(
                    str(len(self.dset_store.store)) + ' frames loaded')
                self.app.processEvents()

                # break because we only load the first fname
                break

        except:

            # NOTE(review): bare except hides all errors, including bugs;
            # consider at least logging before swallowing.
            pass

        self.block_inputs(False)
Esempio n. 30
0
def get_fc(path, camId):
    """Load and return the first frame container stored for camera *camId*."""
    repo = Repository(path)
    first_fname = list(repo.iter_fnames(cam=camId))[0]
    return load_frame_container(first_fname)
Esempio n. 31
0
def run(path_to_db, path_to_repo, conf, start_string, time_delta, year):
    """Import all detections within a time window into an SQLite database.

    Args:
        path_to_db: path of the SQLite database file to open.
        path_to_repo: path of the bb_binary repository to read.
        conf: confidence threshold passed to ``prep.calcIds``.
        start_string: window start in ``%Y-%m-%dT%H:%M:%SZ`` format (UTC).
        time_delta: window length in hours.
        year: year value forwarded to ``prep.calcIds``.
    """

    db_path = path_to_db
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    createAllTables(c)

    repo = Repository(path_to_repo)
    confidence = conf

    start = start_string
    start_dt = datetime.datetime.strptime(
        start, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.UTC)
    start_ts = start_dt.timestamp()

    end_dt = start_dt + datetime.timedelta(hours=time_delta)
    end_ts = end_dt.timestamp()

    files = list(repo.iter_fnames(begin=start_ts, end=end_ts))
    print("Number of files: {}".format(len(files)))

    # ADD ALL THE STUFF TO THE DB
    #############################
    # Synthetic sequential ids for frame containers and frames.
    my_fc_id = 0
    my_frame_id = 0

    # Iterate over all files (frame containers).
    for file in files:
        print("Progess: {}/{}".format(my_fc_id + 1, len(files)))
        fc = load_frame_container(file)

        # One row per frame container in the frame_container table.
        c.execute(
            "insert into frame_container (fc_id, id, cam_id, from_ts, to_ts) values (?, ?, ?, ?, ?)",
            (my_fc_id, str(fc.id), fc.camId, fc.fromTimestamp, fc.toTimestamp))

        # Iterate over all frames of this container.
        tpls = []

        for f in fc.frames:
            # One row per frame in the frame table.
            c.execute(
                "insert into frame (frame_id, fc_id, timestamp) values (?, ?, ?)",
                (my_frame_id, my_fc_id, f.timestamp))

            # Collect all detections of this frame.
            for d in f.detectionsUnion.detectionsDP:
                d = Detection(my_frame_id, d.xpos, d.ypos, d.zRotation,
                              list(d.decodedId))
                tpls.append(d)

            # Advance the global frame id.
            my_frame_id += 1

        df = pd.DataFrame(tpls)
        df = prep.calcIds(df, confidence, year)
        df.drop('confidence', axis=1, inplace=True)

        # Append this container's detections to the db.
        df.to_sql('DETECTIONS', conn, if_exists='append', index=False)

        # Advance the global frame-container id.
        my_fc_id += 1

    conn.commit()
    conn.close()
0
def run(path, conf, cpus, year, nameDB):
    """Count decoded IDs per hour across a repository, using a process pool.

    Groups the repository's files by (month, day, hour), dispatches one
    ``getIDs`` task per group to *cpus* workers, and writes the resulting
    per-ID counts into the ``IDS`` table of the SQLite database *nameDB*.
    """

    pd.options.mode.chained_assignment = None
    pool = multiprocessing.Pool(cpus)

    repo = Repository(path)

    # Split each path at *year* to recover month/day/hour directory parts.
    # NOTE(review): assumes *year* occurs at least twice in each path and
    # that the trailing part is <.../m/d/h/...> -- confirm against the
    # actual repository layout.
    file_list = []
    for f in repo.iter_fnames():
        string = f.split(year)
        datum = string[2].split("/")
        file_list.append([datum[1], datum[2], datum[3], f])

    # DataFrame with all files
    df = DataFrame(file_list, columns=['m', 'd', 'h', 'file'])

    # Group by hours
    gr = df.groupby(by=['m', 'd', 'h'])

    # Create one task per group.
    tasks = []

    for group in gr:
        files = list(group[1].file)
        tasks.append(
            (group[0][0], group[0][1], group[0][2], files, conf, int(year)))

    results = [pool.apply_async(getIDs, t) for t in tasks]

    # Write results straight to DB
    conn = sqlite3.connect(nameDB)
    c = conn.cursor()
    c.execute('''DROP TABLE IF EXISTS IDS''')
    c.execute('''CREATE TABLE IDS
		   (MONTH   CHARACTER(20)   NOT NULL,
		   DAY   CHARACTER(20)   NOT NULL,
		   HOUR  CHARACTER(20)   NOT NULL,
		   ID   INT   NOT NULL,
		   COUNT   INT   NOT NULL);''')

    for result in results:

        res = result.get()
        m = res[0]
        d = res[1]
        h = res[2]

        # res[3] is indexed by decoded id; store one row per id count.
        for e, r in enumerate(res[3]):
            c.execute(
                "insert into ids (month, day, hour, id, count) values (?, ?, ?, ?, ?)",
                (m, d, h, e, int(r)))

        print(m, d, h, "inserted")

    pool.close()
    pool.join()

    counting = c.execute('SELECT count(*) FROM IDS')

    conn.commit()
    conn.close()