def get_messages(self, cond):
    """Run a search against Elasticsearch and return the raw response.

    :param cond: optional query body (dict); serialized to JSON when
        truthy, otherwise an empty payload is posted.
    :returns: the ``requests`` response object from the search endpoint.
    """
    log.debug('Prepare to get messages.')
    payload = json.dumps(cond) if cond else {}
    return requests.post(self.search_path, data=payload)
def __init__(self, index_prefix, doc_type):
    """Build the Elasticsearch endpoint URLs for one index/doc-type pair."""
    self.uri = 'http://localhost:9200/'
    # Guarantee a trailing slash so the path concatenation below is valid.
    if not self.uri.strip().endswith('/'):
        self.uri += '/'
    self.doc_type = doc_type
    self.index_prefix = index_prefix
    self.drop_data = False
    self.index = '_v1'
    self.id_field = 'id'
    # All three request paths share the same '<uri><prefix><index>/<type>/' base.
    base = '%s%s%s/%s/' % (self.uri, self.index_prefix,
                           self.index, self.doc_type)
    self.search_path = base + '_search'
    self.scan_path = base + '_search?search_type=scan&scroll=1m'
    self.doc_path = base
    log.debug('ElasticSearch Connection initialized successfully!')
def get_mkt_data_by_id(self, id):
    """Fetch one market-data document from Elasticsearch by its id.

    :param id: document id to look up (caller-supplied, used as-is).
    :returns: the document's ``_source`` dict, or None when the request
        fails or the document is not found.
    """
    # BUG FIX: removed leftover debug line ``id =test_id`` which clobbered
    # the caller-supplied id with an undefined name (NameError at runtime).
    resp = self.es_reader.get_message_by_id(id)
    if resp.status_code != 200:
        log.debug("Error, can't get mkt data from es!")
        # Bail out instead of parsing an error body (would raise later).
        return None
    source = json.loads(resp.text)
    if not source['found']:
        log.debug("Error, can't get mkt data from es!")
        return None
    return source['_source']
def get_mkt_data_by_id(self, id):
    """Fetch one market-data document from Elasticsearch by its id.

    :param id: document id to look up (caller-supplied, used as-is).
    :returns: the document's ``_source`` dict, or None when the request
        fails or the document is not found.
    """
    # BUG FIX: removed leftover debug line ``id = test_id`` which clobbered
    # the caller-supplied id with an undefined name (NameError at runtime).
    resp = self.es_reader.get_message_by_id(id)
    if resp.status_code != 200:
        log.debug("Error, can't get mkt data from es!")
        # Bail out instead of parsing an error body (would raise later).
        return None
    source = json.loads(resp.text)
    if not source['found']:
        log.debug("Error, can't get mkt data from es!")
        return None
    return source['_source']
def __init__(self, index_prefix, doc_type):
    """Set up connection state and precompute Elasticsearch request paths."""
    self.uri = 'http://localhost:9200/'
    # Normalize the base URI so it always ends with '/'.
    if self.uri.strip()[-1] != '/':
        self.uri += '/'
    self.doc_type = doc_type
    self.index_prefix = index_prefix
    self.drop_data = False  # when True, write operations are skipped
    self.index = '_v1'
    self.id_field = 'id'
    prefix = f'{self.uri}{self.index_prefix}{self.index}/{self.doc_type}/'
    self.search_path = f'{prefix}_search'
    self.scan_path = f'{prefix}_search?search_type=scan&scroll=1m'
    self.doc_path = prefix
    log.debug('ElasticSearch Connection initialized successfully!')
def get_message_by_id(self, id):
    """GET a single document by id and return the raw ``requests`` response."""
    log.debug('Prepare to get messages by id.')
    doc_url = self.doc_path + id
    log.debug('Search path:' + doc_url)
    response = requests.get(doc_url)
    log.debug('Msg get with response code: %s' % response.status_code)
    return response
def get_daily_mkt_data_by_time_order(self, symbol, date, bar='10m'):
    """Query one day's market data, sorted ascending by time.

    :param symbol: currently unused in the query — presumably the
        index/doc-type is already symbol-specific; TODO confirm with callers.
    :param date: exact-match term on the 'date' field.
    :param bar: exact-match term on the 'bar' field (default '10m').
    :returns: list of ``_source`` dicts, or [] when the request fails.
    """
    must_terms = [{'term': {'date': date}}, {'term': {'bar': bar}}]
    # Renamed from ``filter`` to avoid shadowing the builtin.
    filtered = {'filter': {'bool': {'must': must_terms}}}
    query = {
        'query': {'filtered': filtered},
        'size': 1000,
        'sort': {'time': {'order': 'asc'}},
    }
    resp = self.es_reader.get_messages(query)
    if resp.status_code != 200:
        log.debug("Error, can't get mkt data from es!")
        # Bail out instead of parsing an error body (would raise later).
        return []
    body = json.loads(resp.text)
    if body['timed_out']:
        # Partial results are still returned below, matching prior behavior.
        log.debug("Error, time out in get mkt data from es!")
    return [h['_source'] for h in body['hits']['hits']]
def get_daily_mkt_data_by_time_order(self, symbol, date, bar='10m'):
    """Query one day's market data, sorted ascending by time.

    :param symbol: currently unused in the query — presumably the
        index/doc-type is already symbol-specific; TODO confirm with callers.
    :param date: exact-match term on the 'date' field.
    :param bar: exact-match term on the 'bar' field (default '10m').
    :returns: list of ``_source`` dicts, or [] when the request fails.
    """
    must_terms = [{'term': {'date': date}}, {'term': {'bar': bar}}]
    # Renamed from ``filter`` to avoid shadowing the builtin.
    filtered = {'filter': {'bool': {'must': must_terms}}}
    query = {
        'query': {'filtered': filtered},
        'size': 1000,
        'sort': {'time': {'order': 'asc'}},
    }
    resp = self.es_reader.get_messages(query)
    if resp.status_code != 200:
        log.debug("Error, can't get mkt data from es!")
        # Bail out instead of parsing an error body (would raise later).
        return []
    body = json.loads(resp.text)
    if body['timed_out']:
        # Partial results are still returned below, matching prior behavior.
        log.debug("Error, time out in get mkt data from es!")
    return [h['_source'] for h in body['hits']['hits']]
def get_message_by_id(self, id):
    """GET a single document by id and return the raw ``requests`` response."""
    log.debug('Prepare to get messages by id.')
    path = self.doc_path + id
    log.debug('Search path:' + path)
    res = requests.get(path)
    log.debug('Msg get with response code: %s' % res.status_code)
    return res

# NOTE(review): a large block of commented-out, dead helper methods
# (scan_messages / post_messages / put_messages / del_messages, referencing
# an undefined ``self.index_strategy`` and ``LOG``) was removed here.
# Recover from version control if ever needed.
def update(self, obj):
    """Write ``obj`` (a dict) to Elasticsearch via HTTP PUT, keyed by id.

    :returns: the HTTP status code of the PUT, 400 when the configured id
        field is missing from ``obj``, or None when ``drop_data`` is set.
    """
    log.debug('Prepare to update obj.')
    if self.drop_data:
        return
    # figure out id situation
    _id = ''
    if self.id_field:
        _id = obj.get(self.id_field)
        if not _id:
            log.debug('Msg does not have required id field %s'
                      % self.id_field)
            return 400
    # index may change over the time, it has to be called for each
    # request
    target = '%s%s%s/%s/%s' % (self.uri, self.index_prefix,
                               self.index, self.doc_type, _id)
    body = json.dumps(obj)
    res = requests.put(target, data=body)
    log.debug('Msg post target=%s' % target)
    log.debug('Msg posted with response code: %s' % res.status_code)
    return res.status_code
def run_detection(conf, record):
    """Main marker-detection loop over all configured cameras.

    Reads frames from each camera, detects and classifies markers, maps
    them to absolute map coordinates, streams the result over UDP, and
    optionally records annotated frames to per-camera video files.

    NOTE(review): this body was reconstructed from whitespace-mangled
    source; the nesting of the ``waitKey`` check (inside vs. after the
    per-camera loop) is an assumption — confirm against the original.

    :param conf: configuration dict (keys: 'map_infos', 'car_ids',
        'obs_ids', 'img_size', 'cam_infos', 'view_size', 'resize_scale',
        'server_address').
    :param record: when truthy, frames are appended to each camera's
        record_list and written out as <cam.name>.mkv on exit.
    """
    # Map-marker infos keyed by int marker id (conf keys arrive as strings).
    map_infos = {}
    for key in conf['map_infos']:
        map_infos[int(key)] = conf['map_infos'][key]
    car_ids = conf['car_ids']
    obs_ids = conf['obs_ids']
    img_size = tuple(conf['img_size'])
    # One Camera per configured RTSP stream.
    cams = []
    for cam_name, cam_info in conf['cam_infos'].items():
        cam = Camera(
            name=cam_name,
            rtsp_link=cam_info['rtsp_link'],
            map_positions=cam_info['map_positions'],
            img_size=img_size,
            calibration=cam_info['calibration'],
            flip=cam_info['flip'],
        )
        cams.append(cam)
    view_size = tuple(conf['view_size'])
    r_scale = conf['resize_scale']  # NOTE(review): read but never used below
    # UDP socket for pushing detected coordinates to the server.
    udp_sock = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
    server_address = tuple(conf['server_address'])
    frame_idx = 0  # NOTE(review): never incremented or used below
    while True:
        # Per-frame accumulator: object id -> list of per-camera coords.
        final_obj_abs_coords = defaultdict(lambda: [])
        s = time.time()
        for cam in cams:
            log.debug('')
            img = cam.read_last_frame()
            if img is None:
                # No frame available from this camera this cycle.
                continue
            detected_markers, contours = detect_markers(img, scale=1)
            # img = draw_contours(img, contours)
            img = draw_marker_boxes(img, detected_markers)
            # Split detections into map anchors, cars, and obstacles.
            map_markers, car_markers, obs_markers = \
                classify_markers(detected_markers, cam.map_positions,
                                 car_ids, obs_ids)
            map_coords = get_map_coords(map_infos, map_markers)
            if map_coords is None:
                if cam.last_map_pix is None:
                    # Cannot localize without map markers; show frame and skip.
                    log.info('Map markers must be detected')
                    show_img(cam.name, img, view_size)
                    continue
                else:
                    # Fall back to the last known map-marker pixel positions.
                    map_coords = cam.last_map_pix
            else:
                cam.last_map_pix = map_coords
            # Convert car/obstacle marker positions to absolute map coords.
            obj_abs_coords = get_obj_absolute_coords(
                [*car_markers, *obs_markers], *map_coords)
            set_final_obj_abs_coords_per_cam(final_obj_abs_coords,
                                             obj_abs_coords, img)
            show_img(cam.name, img, view_size)
            if record:
                cam.record_list.append(img)
        # 'q' quits the main loop (placement assumed — see docstring note).
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
        set_final_obj_abs_coords(final_obj_abs_coords)
        send_data(final_obj_abs_coords, car_ids, obs_ids,
                  udp_sock, server_address)
        log.debug(f'Final time {time.time()-s:.3f} sec')
    cv2.destroyAllWindows()
    if record:
        # Dump each camera's recorded frames to an H264 .mkv file.
        for cam in cams:
            out = cv2.VideoWriter(f'{cam.name}.mkv',
                                  cv2.VideoWriter_fourcc(*'H264'),
                                  8, (1920, 1080))
            log.info(f'{cam.name} video save')
            for img in cam.record_list:
                out.write(img)
            out.release()