def __init__(self, query_id, topic=None, user=None, condition=None, autocomplete=None, query_text=None, session=None, precision=None):
    """Build a query record keyed by the stringified query id.

    Initialises both base classes, stores the supplied metadata on the
    instance, normalises the autocomplete flag, and attaches an empty
    result list owned by this query.
    """
    DataRecord.__init__(self, str(query_id))
    HasDocuments.__init__(self)
    # Plain metadata fields, stored as given.
    self.topic = topic
    self.user = user
    self.condition = condition
    self.query_text = query_text
    self.session = session
    self.precision = precision
    # Coerced through the project's bool_ helper (presumably a boolean
    # normaliser — confirm against its definition).
    self.autocomplete = bool_(autocomplete)
    # Results are accumulated here after construction.
    self.result_list = QueryResultList(self)
def create_dataset(tfrecord_path, forgd_video_dir, backd_video_dir, split_ratio=TRAIN_TEST_SPLIT_RATIO, frame_size=CNN_FRAME_SIZE, width=CNN_VIDEO_WIDTH, height=CNN_VIDEO_HEIGHT, phase="train", fps=FRAMES_BY_SECOND, max_samples_by_video=MAX_SAMPLES_BY_VIDEO):
    """Create a tfrecord dataset file for one phase.

    Args:
        tfrecord_path: base output path; ``.tfrecord`` is rewritten to
            ``_<phase>.tfrecord`` (e.g. ``data_train.tfrecord``).
        forgd_video_dir / backd_video_dir: foreground/background video dirs
            handed to ``sample_generator``.
        split_ratio, frame_size, width, height, fps, max_samples_by_video:
            forwarded to ``sample_generator`` unchanged.
        phase: dataset split name, e.g. ``"train"`` or ``"test"``.
    """
    # Phase-specific output file, e.g. foo.tfrecord -> foo_train.tfrecord.
    tfrecord_path = tfrecord_path.replace(".tfrecord", "_%s.tfrecord" % phase)
    data_record = DataRecord(frame_size, height, width)
    data_record.open(tfrecord_path, mode="w")
    # Sample source for the requested split.
    generator = sample_generator(forgd_video_dir, backd_video_dir, dataset=phase, split_ratio=split_ratio, frame_size=frame_size, width=width, height=height, fps=fps, max_samples_by_video=max_samples_by_video)
    try:
        # Iterating the generator directly replaces the original manual
        # next()/StopIteration loop.
        for data in generator:
            data_record.write(data)
    finally:
        # Original only closed on clean exhaustion; close on error too so a
        # failed write doesn't leak a half-written, unclosed record file.
        data_record.close()
def test_df(self):
    """Smoke-test building a DataFrame from cached (channel, band) samples."""
    data_record = DataRecord()
    data_record.cache.append({
        ("TP9", "DELTA"): 0,
        ("TP9", "ALPHA"): 2,
    })
    data_record.cache.append({
        ("TP9", "DELTA"): 1,
        ("TP9", "ALPHA"): 3,
    })
    data_record.cache.append({
        ("TP9", "DELTA"): 9,
        ("TP9", "ALPHA"): 9,
    })
    cols = [("TP9", "DELTA"), ("TP9", "ALPHA")]
    # DataFrame.from_items was deprecated in pandas 0.23 and removed in 1.0;
    # from_records accepts this list-of-dicts cache directly, one row per dict.
    df = pd.DataFrame.from_records(data_record.cache, columns=cols)
    print(df)
def write_individual_record(self, individual):
    """Write a data record for an individual at this node.

    Captures arrival date, wait, service start/time/end, blocked, exit
    date, node id, destination, previous class and queue sizes at
    arrival/departure, appends the record to the individual's per-node
    history, then resets the per-visit attributes so the next visit
    starts clean.
    """
    record = DataRecord(
        individual.arrival_date,
        individual.service_time,
        individual.service_start_date,
        individual.exit_date,
        self.id_number,
        individual.destination,
        individual.previous_class,
        individual.queue_size_at_arrival,
        individual.queue_size_at_departure,
    )
    # setdefault avoids the original's double lookup (membership test then
    # index) and the explicit if/else branch.
    individual.data_records.setdefault(self.id_number, []).append(record)
    # Reset per-visit state; False is this codebase's "unset" sentinel.
    for attr in (
        "arrival_date",
        "service_time",
        "service_start_date",
        "service_end_date",
        "exit_date",
        "queue_size_at_arrival",
        "queue_size_at_departure",
        "destination",
    ):
        setattr(individual, attr, False)
def __init__(self):
    """Wire together the radar processing subsystems, then start processing."""
    # Shared data/parameter objects used by the managers below.
    self.data_obj = DataRecord()
    self.radar_para_obj = RadarParameter()
    # Manager objects for UDP I/O, signal processing and data transfer.
    self.udp_manage_obj = UdpManage()
    self.signal_processing_manage_obj = SignalManage()
    self.data_trans_manage_obj = DataTransManage()
    # Each manager receives a back-reference to this controller.
    self.udp_manage_obj.get_handle(self)
    self.data_trans_manage_obj.get_handle(self)
    self.signal_processing_manage_obj.get_handle(self)
    # NOTE(review): kicks off processing from the constructor — presumably
    # starts the main loop; confirm whether this call blocks.
    self.start_process()
class BatchGenerator:
    """Batch generator reading samples from a phase-specific tfrecord file."""

    def __init__(self, dataset, session=None, tfrecord_path=TFRECORD_PATH, frame_size=CNN_FRAME_SIZE, height=CNN_VIDEO_HEIGHT, width=CNN_VIDEO_WIDTH, batch_size=BATCH_SIZE):
        """Open the tfrecord for *dataset* and prime the batch iterator.

        Args:
            dataset: split name ("train"/"test"); injected into the tfrecord
                filename unless already present.
            session: optional tf.Session to reuse; a new one is created
                (and owned by this object) when None.
        """
        if session is None:
            self.session = tf.Session()
        else:
            self.session = session
        if dataset not in tfrecord_path:
            tfrecord_path = tfrecord_path.replace(".tfrecord", "_%s.tfrecord" % dataset)
        self.data_record = DataRecord(frame_size, height, width, dataset, batch_size)
        self.data_record.open(tfrecord_path)
        # Fetch op evaluated by get_next().
        self.next = self.data_record.get_next()

    def get_next(self):
        """Return the next batch, rewinding the dataset at end of epoch."""
        while True:
            try:
                return self.session.run(self.next)
            except tf.errors.OutOfRangeError:
                # Dataset exhausted: rewind and retry. The original caught
                # bare Exception, which also swallowed real errors and could
                # loop forever on a persistent failure.
                self.data_record.reset(self.session)

    def close(self):
        """Close the owned TensorFlow session."""
        self.session.close()
def __init__(self, dataset, session=None, tfrecord_path=TFRECORD_PATH, frame_size=CNN_FRAME_SIZE, height=CNN_VIDEO_HEIGHT, width=CNN_VIDEO_WIDTH, batch_size=BATCH_SIZE):
    """Open the tfrecord for *dataset* and prime the batch iterator.

    Reuses the caller's session when one is supplied, otherwise owns a
    fresh tf.Session.
    """
    self.session = tf.Session() if session is None else session
    # Inject the split name into the filename unless it is already there,
    # e.g. foo.tfrecord -> foo_train.tfrecord.
    if dataset not in tfrecord_path:
        tfrecord_path = tfrecord_path.replace(".tfrecord", "_%s.tfrecord" % dataset)
    self.data_record = DataRecord(frame_size, height, width, dataset, batch_size)
    self.data_record.open(tfrecord_path)
    # Fetch op evaluated later by the batch-producing method.
    self.next = self.data_record.get_next()
def __init__(self, timestamp, condition, session, action_type, query, action_parameters, serp_page_num=None):
    """Create a log-action record keyed as '<session record id>-<timestamp>'."""
    DataRecord.__init__(self, session.record_id + '-' + str(timestamp))
    self.timestamp = timestamp
    self.session = session
    # Topic is taken from the owning session rather than passed separately.
    self.topic = self.session.topic
    self.condition = condition
    self.action_type = action_type
    self.query = query
    # Raw, unparsed parameter payload; decoded by __parse_action_parameters.
    self.bare_action_parameters = action_parameters
    self.serp_page_num = serp_page_num
    # NOTE(review): presumably order-sensitive — parse the parameters before
    # propagating to the session/global stats and indexing; confirm before
    # reordering.
    self.__parse_action_parameters()
    self.__update_session()
    self.__update_global_stats()
    self.__index()
def write_individual_record(self, individual):
    """
    Write a data record for an individual:
        - Arrival date
        - Wait
        - Service start date
        - Service time
        - Service end date
        - Blocked
        - Exit date

    An example showing the data records written; can only write records once an exit date has been determined.
        >>> from simulation import Simulation
        >>> from individual import Individual
        >>> seed(7)
        >>> Q = Simulation('datafortesting/logs_test_for_simulation/')
        >>> N = Q.transitive_nodes[0]
        >>> ind = Individual(6)
        >>> N.accept(ind, 3)
        >>> ind.service_start_date = 3.5
        >>> ind.exit_date = 9
        >>> N.write_individual_record(ind)
        >>> ind.data_records[1][0].arrival_date
        3
        >>> ind.data_records[1][0].wait
        0.5
        >>> ind.data_records[1][0].service_start_date
        3.5
        >>> round(ind.data_records[1][0].service_time, 5)
        0.0559
        >>> round(ind.data_records[1][0].service_end_date, 5)
        3.5559
        >>> round(ind.data_records[1][0].blocked, 5)
        5.4441
        >>> ind.data_records[1][0].exit_date
        9
    """
    record = DataRecord(individual.arrival_date,
                        individual.service_time,
                        individual.service_start_date,
                        individual.exit_date,
                        self.id_number)
    # setdefault replaces the original membership-test/append/else-create
    # branch and avoids the double dict lookup.
    individual.data_records.setdefault(self.id_number, []).append(record)
    # Reset per-visit attributes; False is the codebase's "unset" sentinel.
    for attr in ("arrival_date", "service_time", "service_start_date",
                 "service_end_date", "exit_date"):
        setattr(individual, attr, False)
def __init__(self, topic, document, relevance_level):
    """Link *document* to *topic* at the supplied relevance level."""
    combined_id = topic.record_id + '-' + document.record_id
    DataRecord.__init__(self, combined_id)
    self.topic = topic
    self.document = document
    # Stored as an unsigned 8-bit value.
    self.relevance_level = uint8(relevance_level)
    # Register this judgement on the document's side as well.
    self.document.add_relevance(self)
def __init__(self, condition_id):
    """Record an experimental condition, keyed by its id narrowed to uint8."""
    record_key = uint8(condition_id)
    DataRecord.__init__(self, record_key)
def __init__(self, user_id):
    """Create a user record whose record id is the raw *user_id*."""
    DataRecord.__init__(self, user_id)
def __init__(self, condition_id):
    """Record an experimental condition; the id is narrowed to uint8."""
    DataRecord.__init__(self, uint8(condition_id))
def __init__(self, docid):
    """Create a document record keyed by *docid*."""
    DataRecord.__init__(self, docid)
    # Relevance judgements, empty at construction — presumably populated
    # later via add_relevance.
    self.relevances = {}
def __init__(self, user_id):
    """Build a user record; *user_id* is used as the record id unchanged."""
    DataRecord.__init__(self, user_id)
def __init__(self, topic_identifier):
    """Record a topic, keyed by its identifier narrowed to uint16."""
    DataRecord.__init__(self, uint16(topic_identifier))
    # Relevance judgements for this topic, appended elsewhere.
    self.relevances = []
def __init__(self, topic_identifier):
    """Create a topic record with a 16-bit identifier as its key."""
    topic_key = uint16(topic_identifier)
    DataRecord.__init__(self, topic_key)
    # Collects the topic's relevance judgements as they are registered.
    self.relevances = []
def __init__(self, topic, document, relevance_level):
    """Tie *document* to *topic* with the given relevance level.

    The record id is '<topic record id>-<document record id>'.
    """
    DataRecord.__init__(self, topic.record_id + '-' + document.record_id)
    self.topic, self.document = topic, document
    # Level kept as an unsigned 8-bit value.
    self.relevance_level = uint8(relevance_level)
    # Mirror the link on the document side.
    self.document.add_relevance(self)
def __init__(self, session_id, user, topic, condition):
    """Create a session record and attach its session behaviour."""
    # Base record keyed by the raw session id.
    DataRecord.__init__(self, session_id)
    # Mixin wiring: user, topic and condition belong to the session role.
    ActsAsSession.__init__(self, user, topic, condition)