def process(self):
    """Pop one record from the input queue, download its image, and stage
    the base64-enriched record in self.preData for enqueueData() to forward.

    Skips the download when the record already carries non-empty 'imageData'.
    """
    try:
        if self.preData:
            # A previously downloaded record is still waiting for room in
            # the output queue; retry the hand-off before fetching new work.
            self.enqueueData()
            return
        input_data = self.input.get_nowait()
    except QueueEmpty:
        time.sleep(QUEUE_EMPTY_SLEEP_TIME)
        return
    # BUG FIX: dict.has_key() is deprecated in Python 2 and removed in
    # Python 3; the `in` operator is the portable, equivalent spelling.
    if 'imageData' not in input_data or not input_data['imageData']:
        down_start = time.time()
        image = download(input_data['ImageURL'])
        down_cost = round((time.time() - down_start) * 1000, 2)
        if image is None:
            increase(self.stats, 'down_get_false_total', self.threadId)
            logging.error(
                'image download error. id: %s, url[%s], use time: %sms.' %
                (input_data['id'], input_data['ImageURL'], down_cost))
            return
        # BUG FIX: sys.getsizeof() counts the str object's header (not just
        # the payload) and `/ 1024` truncated under Python 2's integer
        # division, defeating round(..., 2).  len() is the actual binary
        # size; 1024.0 forces float division.
        image_size = round(len(image) / 1024.0, 2)
        if image_size <= 10:
            increase(self.stats, 'down_img_false_total', self.threadId)
            logging.error('image binary size[%sK] less than 10K, url[%s].' %
                          (image_size, input_data['ImageURL']))
            return
        input_data['imageData'] = base64.b64encode(image)
        self.preData = input_data
        logging.info(
            'image downloaded! id: %s, url: %s, down time: %sms, image size: %sK.'
            % (input_data['id'], input_data['ImageURL'], down_cost,
               image_size))
def process(self):
    """Read one [current_time, next_time) window of rows from the database,
    stage them in self.preData, then advance and persist the checkpoint.

    On config-save failure the checkpoint is rolled back to the persisted
    value so the next cycle re-reads instead of skipping data.
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    self.now = now()
    # Window end: current_time + READ_ONCE_WALK_TIME seconds.
    next_time = time.strftime(
        '%Y-%m-%d %H:%M:%S',
        time.localtime(
            time.mktime(
                time.strptime(self.current_time, '%Y-%m-%d %H:%M:%S')) +
            READ_ONCE_WALK_TIME))
    # Keep a reserve margin so rows that may still be written are not read.
    if seconds_between(next_time, self.now) <= READ_RESERVE_TIME:
        time.sleep(READ_RESERVE_SLEEP_TIME)
        return
    exec_sql = self.proc_setting['setting']['query_temp'] % (
        self.current_time, next_time)
    query_start = time.time()
    try:
        connection = self.new_connection()
        if connection is None:
            time.sleep(READ_CONNECT_RETRY_TIME)
            return
        try:
            cursor = connection.cursor()
            cursor.execute(exec_sql)
            rows = cursor.fetchall()
        finally:
            # BUG FIX: close the connection even when execute()/fetchall()
            # raises; the original only closed it on the success path and
            # leaked the connection on query errors.
            connection.close()
    except Exception as e:
        query_cost = round((time.time() - query_start) * 1000, 2)
        logging.error('err occurs when exec sql: %s, use time: %sms, sql: %s'
                      % (e, query_cost, exec_sql))
        return
    query_cost = round((time.time() - query_start) * 1000, 2)
    logging.info('read time: %sms, result.size: %s <== sql: %s' %
                 (query_cost, rows and len(rows), exec_sql))
    if rows is not None and len(rows) != 0:
        for row in rows:
            data = data_mapping(row)
            if data['ImageURL'] is None:
                increase(self.stats, 'read_false_total', self.threadId)
                continue
            # TOLL_FILTER_TYPE 1 keeps only tollgate devices, 2 excludes them.
            if TOLL_FILTER_TYPE == 1:
                if data['deviceId'] not in TOLLGATE_IDS:
                    continue
            elif TOLL_FILTER_TYPE == 2:
                if data['deviceId'] in TOLLGATE_IDS:
                    continue
            self.preData.append(data)
    # Advance the window even when the query returned nothing.
    self.current_time = next_time
    try:
        self.config.set('current_time', self.current_time)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        # Roll back to the persisted checkpoint on save failure.
        self.current_time = self.config.get('current_time', now())
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
    return
def process(self):
    """Read rows with id greater than current_index from the database,
    stage them in self.preData, and persist the id/time checkpoints.

    On config-save failure both checkpoints are rolled back to the
    persisted values so the next cycle re-reads instead of skipping data.
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    self.now = now()
    exec_sql = self.proc_setting['setting'][
        'query_temp'] % self.current_index
    query_start = time.time()
    try:
        connection = self.new_connection()
        if connection is None:
            time.sleep(READ_CONNECT_RETRY_TIME)
            return
        try:
            cursor = connection.cursor()
            cursor.execute(exec_sql)
            rows = cursor.fetchall()
        finally:
            # BUG FIX: close the connection even when execute()/fetchall()
            # raises; the original leaked it on query errors.
            connection.close()
    except Exception as e:
        query_cost = round((time.time() - query_start) * 1000, 2)
        logging.error('err occurs when exec sql: %s, use time: %sms, sql: %s'
                      % (e, query_cost, exec_sql))
        return
    query_cost = round((time.time() - query_start) * 1000, 2)
    logging.info('read time: %sms, result.size: %s <== sql: %s' %
                 (query_cost, rows and len(rows), exec_sql))
    if rows is not None and len(rows) != 0:
        for row in rows:
            data = data_mapping(row)
            # Advance the id/time checkpoints while scanning the batch.
            if data['id'] > self.current_index:
                self.current_index = data['id']
            if data['snapshotTime'] > to_mstimestamp(self.current_time):
                self.current_time = to_string(data['snapshotTime'])
            if data['ImageURL'] is None:
                increase(self.stats, 'read_false_total', self.threadId)
                continue
            # TOLL_FILTER_TYPE 1 keeps only tollgate devices, 2 excludes them.
            if TOLL_FILTER_TYPE == 1:
                if data['deviceId'] not in TOLLGATE_IDS:
                    continue
            elif TOLL_FILTER_TYPE == 2:
                if data['deviceId'] in TOLLGATE_IDS:
                    continue
            self.preData.append(data)
    else:
        time.sleep(READ_EMPTY_SLEEP_TIME)
        return
    try:
        self.config.set('current_index', self.current_index)
        self.config.set('current_time', self.current_time)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        # BUG FIX: the rollback read the non-existent key 'current_id';
        # the value is stored under 'current_index' (see the set() above),
        # so the old code reset current_index to None on save failure.
        self.current_index = self.config.get('current_index', 0)
        self.current_time = self.config.get('current_time', now())
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
def enqueueData(self):
    """Move the oldest staged record onto the output queue.

    If the queue is full, put the record back at the front of self.preData
    so it is retried on the next cycle.
    """
    item = self.preData.popleft()
    try:
        self.output.put(item, block=False)
    except QueueFull:
        # No room downstream: restore ordering and try again later.
        self.preData.appendleft(item)
        return
    logging.info('enqueue new data: %s, %s' %
                 (item['deviceId'], item['ImageURL']))
    increase(self.stats, 'read_success_total', self.threadId)
def enqueueData(self):
    """Hand the staged record to the output queue.

    self.preData is cleared only after a successful put; when the queue is
    full the record is kept so the caller retries on the next cycle.
    """
    staged = self.preData
    try:
        self.output.put(staged, block=False)
    except QueueFull:
        # Queue full: keep self.preData as-is and retry later.
        return
    increase(self.stats, 'down_success_total', self.threadId)
    logging.info('enqueue new data: %s, %s.' %
                 (staged['id'], staged['ImageURL']))
    self.preData = None
def fetchmany(self):
    """Drain up to MQ_ONCE_COSUME_NUM messages from the RabbitMQ queue into
    self.preData, acking each message after it has been mapped.

    Stops early when the queue is empty, when self.exit is set, or on any
    channel error; the connection is closed on every exit path.
    """
    fetch_start = time.time()
    connection = self.new_connection()
    if connection != None:
        try:
            # Channel number is derived from the worker-name suffix so each
            # worker uses a distinct channel -- assumes self.name looks like
            # 'prefix-<n>'; TODO confirm naming convention with the caller.
            channel = connection.channel(
                channel_number=int(self.name.split('-')[1]))
            # passive=True only asserts the queue exists; nothing is created.
            channel.queue_declare(
                queue=self.proc_setting['setting']['que_name'], passive=True)
            while True:
                if self.exit.is_set():
                    connection.close()
                    break
                method, properties, body = channel.basic_get(
                    self.proc_setting['setting']['que_name'])
                if method is None:
                    # Queue drained: close, log what we got, and back off.
                    connection.close()
                    if self.preData:
                        fetch_cost = round(
                            (time.time() - fetch_start) * 1000, 2)
                        logging.debug('fetch %s msg use %sms' %
                                      (len(self.preData), fetch_cost))
                    time.sleep(READ_EMPTY_SLEEP_TIME)
                    break
                data = data_mapping(json.loads(body))
                # Ack immediately: the message is considered consumed even
                # if it is later filtered out below.
                channel.basic_ack(method.delivery_tag)
                if data['ImageURL'] is None:
                    increase(self.stats, 'read_false_total', self.threadId)
                else:
                    # TOLL_FILTER_TYPE 1 keeps only tollgate devices,
                    # 2 excludes them; any other value keeps everything.
                    if TOLL_FILTER_TYPE == 1:
                        if data['deviceId'] in TOLLGATE_IDS:
                            self.preData.append(data)
                    elif TOLL_FILTER_TYPE == 2:
                        if data['deviceId'] not in TOLLGATE_IDS:
                            self.preData.append(data)
                    else:
                        self.preData.append(data)
                # delivery_tag increases per message on this channel, so
                # this caps one fetch cycle at MQ_ONCE_COSUME_NUM messages.
                if method.delivery_tag == MQ_ONCE_COSUME_NUM:
                    connection.close()
                    fetch_cost = round((time.time() - fetch_start) * 1000, 2)
                    logging.debug('fetch %s msg use %sms' %
                                  (len(self.preData), fetch_cost))
                    break
        except Exception:
            connection.close()
            logging.error(traceback.format_exc())
def process(self):
    """Fetch one id-range page from the REST API, stage mapped rows in
    self.preData, and persist the current_index/current_time checkpoints.

    On config-save failure the checkpoints are reloaded from the config so
    the next cycle re-reads instead of skipping data.
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    self.now = now()
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json;charset=UTF-8',
        'authorization': self.proc_setting['setting']['authcode']
    }
    # Query by id range [startId, startId + pagesize].
    query_data = {
        'startId': self.current_index,
        'endId':
        (self.current_index + self.proc_setting['setting']['pagesize']),
        'page': {
            'pageNo': 1,
            'pageSize': self.proc_setting['setting']['pagesize']
        }
    }
    query_url = self.proc_setting['setting']['resturl'] + '?q=%s' % (
        json.dumps(query_data))
    session = requests.Session()
    try:
        response = session.get(url=query_url, headers=headers)
        if response and response.status_code == 200:
            res_data = json.loads(response.content)
            # code 100 is this API's success status.
            if res_data['code'] == 100:
                rows = res_data['data']['rows']
                logging.info('result.size: %s, startId: %s.' %
                             (rows and len(rows), self.current_index))
                # rows[0] is skipped -- presumably it is the record with
                # id == startId already delivered by the previous page
                # (startId inclusive); TODO confirm against the API docs.
                if rows is not None and len(rows) > 1:
                    for row in rows[1:]:
                        data = data_mapping(row)
                        # Advance id/time checkpoints while scanning.
                        if data['id'] > self.current_index:
                            self.current_index = data['id']
                        if data['snapshotTime'] > to_mstimestamp(
                                self.current_time):
                            self.current_time = to_string(
                                data['snapshotTime'])
                        if data['ImageURL'] is None:
                            increase(self.stats, 'read_false_total',
                                     self.threadId)
                            # logging.error('Data image url error: %s' % str(row))
                            continue
                        # TOLL_FILTER_TYPE 1 keeps only tollgate devices,
                        # 2 excludes them.
                        if TOLL_FILTER_TYPE == 1:
                            if data['deviceId'] not in TOLLGATE_IDS:
                                continue
                        elif TOLL_FILTER_TYPE == 2:
                            if data['deviceId'] in TOLLGATE_IDS:
                                continue
                        self.preData.append(data)
                        # self.output.put(data, block=True)
                        # logging.info('enqueue new data: %s, %s' % (data['id'], data['ImageURL']))
                else:
                    # Nothing new past the cursor: back off without
                    # touching the checkpoint.
                    time.sleep(READ_EMPTY_SLEEP_TIME)
                    return
            else:
                logging.error(
                    'exception occurs when access rest api, url: %s, response err: %s'
                    % (query_url, res_data['msg']))
        else:
            logging.error(
                'exception occurs when access rest api, url: %s, rest api may stopped!'
                % query_url)
    except Exception as e:
        logging.error(
            'exception occurs when access rest api, url: %s, err: \n%s' %
            (query_url, traceback.format_exc()))
        return
    # Persist checkpoints; on failure reload the persisted values so the
    # next cycle re-reads rather than silently skipping data.
    try:
        self.config.set('current_index', self.current_index)
        self.config.set('current_time', self.current_time)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
        self.current_index = int(self.config.get('current_index', 0))
        self.current_time = self.config.get('current_time', now())
        self.now = self.config.get('now')
def process(self):
    """Fetch one [current_time, next_time) page from the REST API, stage
    mapped rows in self.preData, and record boundary ids in self.repeatData
    so the next window can drop duplicates.
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    self.now = now()
    # Window end: current_time + READ_ONCE_WALK_TIME seconds.
    next_time = time.strftime(
        '%Y-%m-%d %H:%M:%S',
        time.localtime(
            time.mktime(
                time.strptime(self.current_time, '%Y-%m-%d %H:%M:%S')) +
            READ_ONCE_WALK_TIME))
    # Keep a reserve margin so rows still being written are not queried.
    if seconds_between(next_time, self.now) <= READ_RESERVE_TIME:
        time.sleep(READ_RESERVE_SLEEP_TIME)
        return
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json;charset=UTF-8',
        'authorization': self.proc_setting['setting']['authcode']
    }
    query_data = {
        'startDate': self.current_time,
        'endDate': next_time,
        'page': {
            'pageNo': 1,
            'pageSize': self.proc_setting['setting']['pagesize']
        }
    }
    query_url = self.proc_setting['setting']['resturl'] + '?q=%s' % (
        json.dumps(query_data))
    session = requests.Session()
    try:
        response = session.get(url=query_url, headers=headers)
        if response and response.status_code == 200:
            res_data = json.loads(response.content)
            # code 100 is this API's success status.
            if res_data['code'] == 100:
                rows = res_data['data']['rows']
                logging.info('result.size: %s, startId: %s.' %
                             (rows and len(rows), self.current_time))
                if rows is not None and len(rows) > 0:
                    repeat_list = []
                    for row in rows:
                        data = data_mapping(row)
                        # Rows exactly at the window end will reappear at
                        # the next window's start; remember their ids.
                        # NOTE(review): this compares snapshotTime to the
                        # '%Y-%m-%d %H:%M:%S' string next_time -- confirm
                        # data_mapping yields the same representation here.
                        if data['snapshotTime'] == next_time:
                            repeat_list.append(data['id'])
                        if data['ImageURL'] is None:
                            increase(self.stats, 'read_false_total',
                                     self.threadId)
                            # logging.error('Data image url error: %s' % str(row))
                            continue
                        # TOLL_FILTER_TYPE 1 keeps only tollgate devices,
                        # 2 excludes them.
                        if TOLL_FILTER_TYPE == 1:
                            if data['deviceId'] not in TOLLGATE_IDS:
                                continue
                        elif TOLL_FILTER_TYPE == 2:
                            if data['deviceId'] in TOLLGATE_IDS:
                                continue
                        self.preData.append(data)
                        # self.output.put(data, block=True)
                        # logging.info('enqueue new data: %s, %s' % (data['id'], data['ImageURL']))
                    self.repeatData = repeat_list
                    logging.debug('new repeat list: %s.' %
                                  str(self.repeatData))
                # Advance the window even when the page was empty.
                self.current_time = next_time
            else:
                logging.error(
                    'exception occurs when access rest api, url: %s, response err: %s'
                    % (query_url, res_data['msg']))
        else:
            logging.error(
                'exception occurs when access rest api, url: %s, rest api may stopped!'
                % query_url)
    except Exception:
        logging.error(
            'exception occurs when access rest api, url: \n%s, err: %s' %
            (query_url, traceback.format_exc()))
        return
    # Persist the checkpoint; on failure reload the persisted value so the
    # next cycle re-reads this window.
    try:
        self.config.set('current_time', self.current_time)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
        self.current_time = self.config.get('current_time', now())
        self.now = self.config.get('now')
def process(self):
    """Pop one record from the input queue and POST it to the vehicle
    service; when RUNNING_ENVIRON is falsy, simulate the call instead
    (~90% simulated success) for statistics testing.
    """
    try:
        input_data = self.input.get_nowait()
        # The downstream service expects the URL under 'imageUrl'.
        # BUG FIX: the source key was misspelled 'IMageURL', which raised
        # KeyError for every record -- all mappers in this file populate
        # 'ImageURL'.
        input_data['imageUrl'] = input_data['ImageURL']
    except QueueEmpty:
        time.sleep(QUEUE_EMPTY_SLEEP_TIME)
        return
    if RUNNING_ENVIRON:
        write_start = time.time()
        post_result = post(self.service_url, param=input_data)
        write_cost = round((time.time() - write_start) * 1000, 2)
        if post_result is None:
            increase(self.stats, 'write_post_false_total', self.threadId)
            return
        result = json.loads(post_result.text)
        if result['code'] != '0':
            if 'tollgate' in result['message']:
                increase(self.stats, 'write_toll_false_total', self.threadId)
            else:
                increase(self.stats, 'write_result_false_total',
                         self.threadId)
            # Drop the base64 payload so the error log stays readable.
            del input_data['imageData']
            logging.error(
                'call vehicle service error. data[%s], result[%s], use time: %sms.'
                % (json.dumps(input_data), result['message'], write_cost))
            return
        increase(self.stats, 'write_success_total', self.threadId)
        logging.info(
            'new data send to vehicle service done! result: %s, id: %s, write time: %sms.'
            % ('True', input_data['id'], write_cost))
    else:
        # Dry-run: randint(1, 10) <= 9 simulates a 90% success rate.
        seed = 1 if randint(1, 10) <= 9 else 0
        if seed:
            increase(self.stats, 'write_success_total', self.threadId)
            logging.info(
                'new data send to vehicle service done! result: %s, id: %s.'
                % ('True', input_data['id']))
        else:
            increase(self.stats, 'write_post_false_total', self.threadId)
            logging.info(
                'new data send to vehicle service done! result: %s, id: %s.'
                % ('False', input_data['id']))
def process(self):
    """Read rows newer than current_time from the database, de-duplicating
    records whose snapshot time straddles consecutive query windows via
    self.repeatData, then persist the time checkpoint.
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    self.now = now()
    # Keep a reserve margin so rows that may still be written are not read.
    if seconds_between(self.current_time, self.now) <= READ_RESERVE_TIME:
        time.sleep(READ_RESERVE_SLEEP_TIME)
        return
    exec_sql = self.proc_setting['setting'][
        'query_temp'] % self.current_time
    query_start = time.time()
    try:
        connection = self.new_connection()
        if connection is None:
            time.sleep(READ_CONNECT_RETRY_TIME)
            return
        try:
            cursor = connection.cursor()
            cursor.execute(exec_sql)
            rows = cursor.fetchall()
        finally:
            # BUG FIX: close the connection even when execute()/fetchall()
            # raises; the original leaked it on query errors.
            connection.close()
    except Exception as e:
        query_cost = round((time.time() - query_start) * 1000, 2)
        logging.error('err occurs when exec sql: %s, use time: %sms, sql: %s'
                      % (e, query_cost, exec_sql))
        return
    query_cost = round((time.time() - query_start) * 1000, 2)
    logging.info('read time: %sms, result.size: %s <== sql: %s' %
                 (query_cost, rows and len(rows), exec_sql))
    if rows is not None and len(rows) != 0:
        last_data = data_mapping(rows[-1])
        # Every snapshot time in this batch equals the max snapshot time of
        # the previous batch: bump current_time by one second so the query
        # window makes progress instead of re-reading the same rows forever.
        if last_data['snapshotTime'] == to_mstimestamp(self.current_time):
            self.current_time = time.strftime(
                '%Y-%m-%d %H:%M:%S',
                time.localtime(
                    time.mktime(
                        time.strptime(self.current_time,
                                      '%Y-%m-%d %H:%M:%S')) + 1))
            repeat_time = self.current_time
        else:
            repeat_time = last_data['snapshotTime']
        # NOTE(review): in the bumped branch repeat_time is a
        # '%Y-%m-%d %H:%M:%S' string while last_data['snapshotTime'] appears
        # to be a ms timestamp -- confirm the comparison below and
        # to_string() handle both representations.
        repeat_list = []
        for row in rows:
            data = data_mapping(row)
            if data['snapshotTime'] > to_mstimestamp(self.current_time):
                self.current_time = to_string(data['snapshotTime'])
            if data['id'] in self.repeatData:
                # Already delivered in the previous overlapping window.
                logging.error('repeat data: %s' % data['id'])
                continue
            if data['snapshotTime'] == repeat_time:
                # Boundary rows may reappear in the next window; remember
                # their ids so they can be skipped then.
                repeat_list.append(data['id'])
            if data['ImageURL'] is None:
                increase(self.stats, 'read_false_total', self.threadId)
                continue
            # TOLL_FILTER_TYPE 1 keeps only tollgate devices, 2 excludes them.
            if TOLL_FILTER_TYPE == 1:
                if data['deviceId'] not in TOLLGATE_IDS:
                    continue
            elif TOLL_FILTER_TYPE == 2:
                if data['deviceId'] in TOLLGATE_IDS:
                    continue
            self.preData.append(data)
        self.repeatData = repeat_list
        logging.debug('repeat time: %s, new repeat list: %s.' %
                      (to_string(repeat_time), str(self.repeatData)))
    else:
        time.sleep(READ_EMPTY_SLEEP_TIME)
        return
    try:
        self.config.set('current_time', self.current_time)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        self.current_time = self.config.get('current_time', now())
        # BUG FIX: log at ERROR like every sibling reader -- losing the
        # checkpoint is a real failure, not debug noise.
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
    return
def process(self):
    """Page through the huazun VIID MotorVehicles REST API for one time
    window, staging mapped rows in self.preData; re-registers when the
    server reports the session invalid (StatusCode 4).
    """
    if self.preData:
        # Previous batch is still being drained into the output queue.
        self.enqueueData()
        return
    if not self.regist_stat:
        # Session with the VIID server expired or was never established.
        self.register()
        return
    self.now = now()
    # Window end: current_time + READ_ONCE_WALK_TIME seconds.
    next_time = time.strftime(
        '%Y-%m-%d %H:%M:%S',
        time.localtime(
            time.mktime(
                time.strptime(self.current_time, '%Y-%m-%d %H:%M:%S')) +
            READ_ONCE_WALK_TIME))
    # Keep a reserve margin so rows still being written are not queried.
    if seconds_between(next_time, self.now) <= READ_RESERVE_TIME:
        time.sleep(READ_RESERVE_SLEEP_TIME)
        return
    headers = {
        'Content-Type': 'application/json;charset=UTF-8',
        'User-Identify': self.proc_setting['setting']['identify']
    }
    query_param = '''/VIID/MotorVehicles?RecordStartNo=%s&PageRecordNum=%s&(MotorVehicle.CreateTime BETWEEN '%s' and '%s')&(Sort = MotorVehicle.CreateTime)''' % (
        self.current_startno, self.proc_setting['setting']['pagesize'],
        self.current_time, next_time)
    # Spaces must be %-encoded manually since the VIID filter expression is
    # appended to the path rather than passed as query parameters.
    query_url = self.proc_setting['setting'][
        'resturl'] + query_param.replace(' ', '%20')
    session = requests.Session()
    try:
        response = session.get(url=query_url, headers=headers)
        if response and response.status_code == 200:
            res_data = json.loads(response.content)
            if isinstance(res_data, dict):
                # A dict response is an error envelope; StatusCode 4 means
                # the registration is no longer valid -- drop regist_stat
                # so the next cycle re-registers.
                if res_data.has_key('ResponseStatusObject') and res_data[
                        'ResponseStatusObject']['StatusCode'] == 4:
                    self.regist_stat = False
                    return
                else:
                    logging.error(
                        'huazun rest api err, please call data_aceess_addmin'
                    )
                    logging.error(response.content)
                    return
            else:
                # Success responses are a list; page while pages remain.
                if self.current_startno <= res_data[0][
                        'MotorVehiclesListObject']['Pages']:
                    logging.info(
                        'result.size: %s, StartDateTime: %s, RecordStartNo: %s'
                        % (res_data[0]['MotorVehiclesListObject']
                           ['PageRecordNum'], self.current_time,
                           self.current_startno))
                    logging.debug(
                        'RecordStartNo:%s,PageRecordNum:%s,MaxNumRecordReturn:%s,Offset:%s,Pages:%s'
                        % (res_data[0]['MotorVehiclesListObject']
                           ['RecordStartNo'], res_data[0]
                           ['MotorVehiclesListObject']['PageRecordNum'],
                           res_data[0]['MotorVehiclesListObject']
                           ['MaxNumRecordReturn'],
                           res_data[0]['MotorVehiclesListObject']['Offset'],
                           res_data[0]['MotorVehiclesListObject']['Pages']))
                    for row in res_data[0]['MotorVehiclesListObject'][
                            'MotorVehiclesObject']:
                        data = data_mapping(row)
                        if data['ImageURL'] is None:
                            increase(self.stats, 'read_false_total',
                                     self.threadId)
                            # logging.error('Data image url error: %s' % str(row))
                            continue
                        if data['deviceId'] is None:
                            increase(self.stats, 'read_false_total',
                                     self.threadId)
                            # logging.error('Data image url error: %s' % str(row))
                            continue
                        # TOLL_FILTER_TYPE 1 keeps only tollgate devices,
                        # 2 excludes them.
                        if TOLL_FILTER_TYPE == 1:
                            if data['deviceId'] not in TOLLGATE_IDS:
                                continue
                        elif TOLL_FILTER_TYPE == 2:
                            if data['deviceId'] in TOLLGATE_IDS:
                                continue
                        self.preData.append(data)
                    # More pages remain in this window.
                    self.current_startno += 1
                else:
                    # Window exhausted: reset paging and advance the window.
                    self.current_startno = 1
                    self.current_time = next_time
        else:
            logging.error(
                'exception occurs when access rest api, url: %s, rest api may stopped!'
                % query_url)
    except Exception:
        logging.error(
            'exception occurs when access rest api, url: \n%s, err: %s' %
            (query_url, traceback.format_exc()))
        return
    # Persist checkpoints; on failure reload the persisted values so the
    # next cycle re-reads this window/page.
    try:
        self.config.set('current_time', self.current_time)
        self.config.set('current_startno', self.current_startno)
        self.config.set('now', self.now)
        self.config.save()
    except Exception as e:
        logging.error("Failed to update config file %s: %s." %
                      (self.config.filepath, e))
        self.current_time = self.config.get('current_time', now())
        self.current_startno = self.config.get('current_startno', 1)
        self.now = self.config.get('now')