Example #1
    def start(self):

        self.dset_store.clear()
        for m in self.modules:
            m.reset()

        start_timestamp = ds.TimeStamp(config.FRAME_START, config.CAM)
        duration = config.FRAME_END - config.FRAME_START + 1

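        # chain the timestamps of the whole frame range together via connect_with_previous()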
        previous_timestamp = start_timestamp
        for x in range(config.FRAME_START + 1, config.FRAME_END + 1):
            timestamp = ds.TimeStamp(x, config.CAM)
            timestamp.connect_with_previous(previous_timestamp)
            previous_timestamp = timestamp

        print 'start validation'
        print('  date = ' + str(config.DATE[0]) + '/' + str(config.DATE[1]) +
              '/' + str(config.DATE[2]) + ', ' + 'time = ' +
              str(config.TIME[0]) + ':' + str(config.TIME[1]))
        print '  cam = ' + str(config.CAM) + ', frames = ' + str(
            config.FRAME_START) + ' til ' + str(config.FRAME_END)

        database_connection = db.Connection()
        timestamp = start_timestamp

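        # main loop: fetch each frame's detection set and feed every detection to all validation modules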
        for x in range(0, duration):

            print 'processing timestamp ' + timestamp.time_name

            dset = self.dset_store.get(timestamp, database_connection)

            for d in dset.detections:

                updated_id = database_connection.get_updated_id(d)
                truth_id = database_connection.get_truth_id(d)
                path_number = database_connection.get_path_number(d)

                for m in self.modules:
                    m.update(d, updated_id, truth_id, path_number)

            timestamp = timestamp.get_next()
            if timestamp is None:
                break

        print 'validation finished'
        print '--------------------------------'

        result_text = ''
        for m in self.modules:
            result_text += m.get_result()

        print result_text
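
A minimal sketch of a module this validation loop could drive, assuming only the interface visible above (reset(), update(detection, updated_id, truth_id, path_number) and get_result()); the class name and the counting logic are hypothetical:

class IdMatchModule(object):

    def __init__(self):
        self.total = 0
        self.matches = 0

    def reset(self):
        # called once before a validation run starts
        self.total = 0
        self.matches = 0

    def update(self, detection, updated_id, truth_id, path_number):
        # count how often the updated id agrees with the ground truth
        if truth_id is None:
            return
        self.total += 1
        if updated_id == truth_id:
            self.matches += 1

    def get_result(self):
        if self.total == 0:
            return 'IdMatchModule: no detections with a truth id seen\n'
        return ('IdMatchModule: %d of %d detections match the truth id (%.3f)\n'
                % (self.matches, self.total, self.matches * 1.0 / self.total))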
Example #2
    def start(self):

        self.path_manager.clear()
        self.dset_store.clear()

        start_timestamp = ds.TimeStamp(config.FRAME_START, config.CAM)
        duration = config.FRAME_END - config.FRAME_START + 1

        previous_timestamp = start_timestamp
        for x in range(config.FRAME_START + 1, config.FRAME_END + 1):
            timestamp = ds.TimeStamp(x, config.CAM)
            timestamp.connect_with_previous(previous_timestamp)
            previous_timestamp = timestamp

        print 'start filtering'
        print('  date = ' + str(config.DATE[0]) + '/' + str(config.DATE[1]) +
              '/' + str(config.DATE[2]) + ', ' + 'time = ' +
              str(config.TIME[0]) + ':' + str(config.TIME[1]))
        print '  cam = ' + str(config.CAM) + ', frames = ' + str(
            config.FRAME_START) + ' til ' + str(config.FRAME_END)

        database_connection = db.Connection()
        timestamp = start_timestamp

        # computing
        for x in range(0, duration):

            print 'processing timestamp ' + timestamp.time_name

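            # the per-frame filtering work is delegated to process_timestamp(), which updates the path manager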
            self.process_timestamp(timestamp, database_connection)

            print('  paths: ' + str(len(self.path_manager.open_paths)) +
                  ' open, ' + str(len(self.path_manager.closed_paths)) +
                  ' closed')

            timestamp = timestamp.get_next()
            if timestamp is None:
                break

        print 'filtering finished'

        # saving
        self.path_manager.close_all_paths()
        self.path_manager.save_closed_paths(database_connection)
        print str(len(self.path_manager.closed_paths)
                  ) + ' paths saved to file ' + config.PATHS_FILE
        print '--------------------------------'
Example #3
	def start( self ):

		self.dset_store.clear()
		self.path_manager.clear()

		start_timestamp = ds.TimeStamp( config.FRAME_START, config.CAM )
		duration = config.FRAME_END - config.FRAME_START + 1

		previous_timestamp = start_timestamp
		for x in range( config.FRAME_START+1, config.FRAME_END+1 ):
			timestamp = ds.TimeStamp( x, config.CAM )
			timestamp.connect_with_previous( previous_timestamp )
			previous_timestamp = timestamp

		print 'start generation'
		print (
			'  date = ' + str( config.DATE[ 0 ] ) + '/' + str( config.DATE[ 1 ] ) + '/' + str( config.DATE[ 2 ] ) + ', '
			+ 'time = ' + str( config.TIME[ 0 ] ) + ':' + str( config.TIME[ 1 ] )
		)
		print '  cam = ' + str( config.CAM ) + ', frames = ' + str( config.FRAME_START ) + ' til ' + str( config.FRAME_END )

		database_connection = db.Connection()
		timestamp = start_timestamp


		hammings = np.zeros( 13, dtype = np.int )  # binned by Hamming distance 0-12
		gaps = {}

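		# count Hamming distances between decoded ids and truth ids, and the gap lengths inside each truth path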
		for x in range( 0, duration ):

			print 'processing timestamp ' + timestamp.time_name

			dset = self.dset_store.get( timestamp, database_connection )

			for d in dset.detections:

				truth_id = database_connection.get_truth_id( d )
				if truth_id is not None:

					hamming_dis = aux.hamming_distance( truth_id, d.decoded_mean )
					hammings[ hamming_dis ] += 1

					path = self.path_manager.get_path( truth_id )
					gap = path.add_detection_and_return_gap( d )
					if gap is not None:
						gap = min( gap, 16 )  # gaps of 16 or more are not counted separately
						if gap not in gaps:
							gaps[ gap ] = 0
						gaps[ gap ] += 1

			timestamp = timestamp.get_next()
			if timestamp is None:
				break

		print 'generation finished'
		print '--------------------------------'

		detection_count = sum( hammings )

		hammings_percentage = hammings*1.0 / detection_count


		print 'validation finished'
		print '\nhammings result:'
		print np.round( hammings_percentage*100, 1 )
		print '\ndetection count:'
		print detection_count

		print '\ngaps:'
		print [ g for g in sorted( gaps.items() ) ]
		print 'mean gap length: ' + str(sum([ l*n for l,n in gaps.items() ])*1.0/sum([ n for l,n in gaps.items() ]))

		print 'gaps count: ' + str( sum([ n for l,n in gaps.items() ] ) )
Example #4
	def load_data( self ):

		self.load_button.setDisabled( True )
		self.goto_editor_button.setDisabled( True )
		self.app.processEvents()

		start_timestamp = ds.TimeStamp( self.date_input.date(), self.start_time_input.time(), self.cam_input.value() )
		end_timestamp = ds.TimeStamp( self.date_input.date(), self.end_time_input.time(), self.cam_input.value() )

		if start_timestamp < end_timestamp and start_timestamp.exists():

			dset_store = self.parent.dset_store
			dset_store.clear()

			path_manager = self.parent.path_manager
			path_manager.clear()

			if self.load_source_truth.isChecked():
				path_manager.data_source = 0  # truth_id
			else:
				path_manager.data_source = 1  # updated_id

			diff = start_timestamp.frames_difference( end_timestamp )
			self.load_progress.setMaximum( diff+1 )
			self.app.processEvents()

			database_connection = db.Connection()

			loop_timestamp = start_timestamp
			loop_index = 0

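			# walk frame by frame from start to end, routing every detection into the path manager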
			while ( loop_timestamp is not None ) and ( not end_timestamp < loop_timestamp ):

				dset = dset_store.get( loop_timestamp, database_connection )
				for d in dset.detections:

					if path_manager.data_source == 0:
						truth_id = database_connection.get_truth_id( d )
						if truth_id is not None:
							path_manager.add_detection( d, truth_id )

					else:
						updated_id = database_connection.get_updated_id( d )
						if updated_id is not None:
							path_number = database_connection.get_path_number( d )
							if path_number is not None:
								path_manager.add_detection_to_path( d, updated_id, path_number )
							else:
								path_manager.add_detection( d, updated_id )

				loop_timestamp = loop_timestamp.get_next( database_connection )
				loop_index += 1
				self.load_progress.setValue( loop_index )
				self.app.processEvents()

			database_connection.close()

			self.load_progress.setValue( diff+1 )

		self.load_button.setDisabled( False )
		self.goto_editor_button.setDisabled( False )
		self.app.processEvents()
Example #5
    def load_data(self):

        if not os.path.exists(config.DATA_FOLDER):
            print 'Error: folder not found'
            return

        self.block_inputs(True)

        self.dset_store = ds.DetectionSetStore()
        self.path_manager = None
        self.paths_load_progress.setValue(0)
        self.paths_load_label.setText('')

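        # read the first frame container found in the data folder and convert its frames into detection sets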
        try:
            repo = Repository(config.DATA_FOLDER)
            start_time = datetime(config.DATE[0],
                                  config.DATE[1],
                                  config.DATE[2],
                                  config.TIME[0],
                                  config.TIME[1],
                                  tzinfo=pytz.utc)

            fnames = repo.iter_fnames(begin=start_time)
            for fname in fnames:

                frame_container = load_frame_container(fname)

                cam = frame_container.camId
                #frame_container.fromTimestamp              # already available
                #frame_container.toTimestamp                # already available

                self.dset_store.source = frame_container.dataSources[0].filename

                previous_timestamp = None

                self.data_load_progress.setMaximum(config.FRAME_END + 1 -
                                                   config.FRAME_START)
                self.app.processEvents()

                frame_index = config.FRAME_START

                for frame in list(frame_container.frames)[config.FRAME_START:config.FRAME_END + 1]:

                    #timestamp = frame.timestamp  # not included yet
                    #frame.id                     # not included yet

                    timestamp = ds.TimeStamp(frame_index, cam)
                    timestamp.connect_with_previous(previous_timestamp)
                    previous_timestamp = timestamp

                    dset = ds.DetectionSet()
                    self.dset_store.store[timestamp] = dset

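                    # convert the frame's detections into numpy records (fields like 'xpos', 'ypos', 'decodedId')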
                    data = convert_frame_to_numpy(frame)

                    for detection_data in data:

                        dset.add_detection(ds.Detection(
                            detection_data['idx'],
                            timestamp,
                            # rotated, otherwise the image would be in portrait orientation
                            np.array([detection_data['ypos'], detection_data['xpos']]),
                            detection_data['localizerSaliency'],
                            # reversed, we want the least significant bit last
                            detection_data['decodedId'][::-1]
                        ))

                    frame_index += 1

                    self.data_load_progress.setValue(frame_index -
                                                     config.FRAME_START)
                    self.app.processEvents()

                self.data_load_label.setText(
                    str(len(self.dset_store.store)) + ' frames loaded')
                self.app.processEvents()

                # break because we only load the first fname
                break

        except:
            # any error while reading the repository is silently ignored here
            pass

        self.block_inputs(False)
Example #6
	def start( self ):

		self.dset_store.clear()
		self.path_manager.clear()

		start_timestamp = ds.TimeStamp( config.FRAME_START, config.CAM )
		duration = config.FRAME_END - config.FRAME_START + 1

		previous_timestamp = start_timestamp
		for x in range( config.FRAME_START+1, config.FRAME_END+1 ):
			timestamp = ds.TimeStamp( x, config.CAM )
			timestamp.connect_with_previous( previous_timestamp )
			previous_timestamp = timestamp

		print 'start generation'
		print (
			'  date = ' + str( config.DATE[ 0 ] ) + '/' + str( config.DATE[ 1 ] ) + '/' + str( config.DATE[ 2 ] ) + ', '
			+ 'time = ' + str( config.TIME[ 0 ] ) + ':' + str( config.TIME[ 1 ] )
		)
		print '  cam = ' + str( config.CAM ) + ', frames = ' + str( config.FRAME_START ) + ' til ' + str( config.FRAME_END )

		database_connection = db.Connection()
		timestamp = start_timestamp

		for x in range( 0, duration ):

			print 'processing timestamp ' + timestamp.time_name

			dset = self.dset_store.get( timestamp, database_connection )

			for d in dset.detections:

				truth_id = database_connection.get_truth_id( d )

				if truth_id is not None:
					path = self.path_manager.get_path( truth_id )
					path.add_detection( d )

			timestamp = timestamp.get_next()
			if timestamp is None:
				break

		print 'calculate average ids...'

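		# counters: how often each averaging strategy reproduces the ground-truth id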
		total = 0
		mean_rights       = 0
		mean_mean_rights  = 0
		weigneig_rights   = 0
		saliency_rights   = 0
		confidence_rights = 0

		false_text = ''

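		# for every sufficiently long path, evaluate each strategy on 100 random subsets of 20 detections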
		for truth_id, path in self.path_manager.paths.iteritems():

			if len( path.detections ) >= 10:

				for iterations in range( 100 ):

					#path_sample = path
					subset_size = 20
					path_sample = path.random_subset( subset_size )

					total += 1
					mean_id       = path_sample.determine_average_id_by_mean()
					mean_mean_id  = path_sample.determine_average_id_by_mean_mean()
					weigneig_id   = path_sample.determine_average_id_by_weighted_neighbourhood()
					saliency_id   = path_sample.determine_average_id_with_saliency()
					confidence_id = path_sample.determine_average_id_with_confidence()

					if mean_id == truth_id:
						mean_rights += 1
					else:
						false_text += '\n----\n' + str( aux.int_id_to_binary( truth_id ) ) + ' (truth)'
						false_text += ' id=' + str(truth_id) + ' len=' + str( len( path_sample.detections ) )
						false_text += '\n' + str( path_sample.ids_sum / path_sample.ids_count ) + ' (determined)'
						false_text += '\n' + str( aux.int_id_to_binary( mean_id ) ) + ' (determined rounded)'
						#for d in path_sample.detections.values():
						#	false_text += '\n' + str( aux.int_id_to_binary( d.decoded_mean ) )

					if mean_mean_id == truth_id:
						mean_mean_rights += 1

					if weigneig_id == truth_id:
						weigneig_rights += 1

					if saliency_id == truth_id:
						saliency_rights += 1
					#else:
					#	false_text += '\n----\n' + str( aux.int_id_to_binary( truth_id ) ) + ' (truth)'
					#	false_text += ' id=' + str(truth_id) + ' len=' + str( len( path_sample.detections ) )
					#	false_text += '\n' + str( path_sample.ids_sum_saliency / path_sample.saliency_count ) + ' (determined)'
					#	false_text += '\n' + str( aux.int_id_to_binary( saliency_id ) ) + ' (determined rounded)'

					if confidence_id == truth_id:
						confidence_rights += 1

		print 'validation finished'
		print '--------------------------------'

		result_text = 'Average ID Results:\n'
		result_text += 'of ' + str(total) + ' paths:\n'
		result_text += ( "%.3f" % (mean_rights*1.0/total       )) + ' correct through determine_average_id_by_mean()\n'
		result_text += ( "%.3f" % (mean_mean_rights*1.0/total  )) + ' correct through determine_average_id_by_mean_mean()\n'
		result_text += ( "%.3f" % (weigneig_rights*1.0/total   )) + ' correct through determine_average_id_by_weighted_neighbourhood()\n'
		result_text += ( "%.3f" % (saliency_rights*1.0/total   )) + ' correct through determine_average_id_with_saliency()\n\n'
		result_text += ( "%.3f" % (confidence_rights*1.0/total )) + ' correct through determine_average_id_with_confidence()\n\n'
		#result_text += false_text

		print result_text