def update_hrtsdata(site_id, datatime, hrinterval, data):
    # Readings are bucketed: zeroing the last six digits of the millisecond
    # timestamp gives one document per time bucket, holding many readings.
    _id = site_id + '-pressure-' + str(datatime)[0:7] + '000000'
    str_data = ''
    for i in data:
        str_data = str_data + tool.base62_encode(i)
    condition = {'_id': _id}
    hrtsda = collection.find_one(condition)
    if hrtsda:
        hrtsda['lastmod'] = datetime.datetime.now()
        hrtsda['values'][str(datatime)] = str_data
        # pymongo 3+ name for the deprecated collection.update()
        collection.replace_one(condition, hrtsda)
        log.info('hrtsdata_import ' + _id + ' ' + str(datatime) + ' success')
    else:
        hrtsda = dict()
        values = dict()
        hrtsda['_id'] = _id
        hrtsda['timestamp'] = datetime.datetime.fromtimestamp(
            int(str(datatime)[0:7] + '000'))
        hrtsda['lastmod'] = datetime.datetime.now()
        hrtsda['created'] = datetime.datetime.now()
        hrtsda['converge_rate'] = converge_rate  # module-level setting, as in the original
        hrtsda['hrinterval'] = hrinterval
        values[str(datatime)] = str_data
        hrtsda['values'] = values
        # pymongo 3+ name for the deprecated collection.insert()
        collection.insert_one(hrtsda)
        log.info('hrtsdata_import ' + _id + ' ' + str(datatime) + ' success')
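# A sketch of the document produced by update_hrtsdata above. The concrete
# values are illustrative assumptions, not taken from a real collection:
# with datatime = 1558000123456 (ms), the bucket key becomes
# '<site_id>-pressure-1558000000000' (last six digits zeroed), and the
# stored document looks roughly like:
# {
#     '_id': 'S001-pressure-1558000000000',
#     'timestamp': datetime.datetime.fromtimestamp(1558000000),
#     'lastmod': ..., 'created': ...,
#     'converge_rate': ..., 'hrinterval': 100,
#     'values': {'1558000123456': '<base62-encoded readings>'}
# }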
def get_mongo_client():
    global MONGO_CLIENT
    if MONGO_CLIENT is None:
        log.info("MONGO_CLIENT not defined, initializing MongoDB client.")
        mongo_server = config.get_config('Database', 'mongodb.client')
        MONGO_CLIENT = pymongo.MongoClient(mongo_server)
    return MONGO_CLIENT
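# Minimal usage sketch for the lazy singleton above. The database and
# collection names are assumptions for illustration only:
#
#     client = get_mongo_client()
#     collection = client['rtu']['hrtsdata']   # hypothetical names
#
# Repeated calls reuse one MongoClient, so the process shares a single
# connection pool instead of opening one per caller.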
def handle(self):
    while True:
        data = []
        try:
            data = self.request.recv(1024 * 2)
            if len(data) < 17:
                continue
            log.info("{} send_len: ".format(self.client_address[0]) + str(len(data)))
            # tmp_dir = os.path.join(data_dir, 'tmp')
            # os.makedirs(tmp_dir, 0o775, True)
            # tmp_file = os.path.join(tmp_dir, datetime.datetime.now().strftime('%Y%m%d%H%M%S%f') + '.bin')
            # with open(tmp_file, 'wb') as f:
            #     f.write(data)
            # rtu_data = parser.parse(tmp_file)
            rtu_data = parser.parse_by_bytes(data)
            log.info("rtu meta: ip: " + self.client_address[0]
                     + ", s_id: " + rtu_data['site_id']
                     + ", read_t: " + rtu_data['read_time']
                     + ", send_t: " + rtu_data['send_time']
                     # + ", JZXS: " + rtu_data['JZXS']
                     # + ", k: " + str(rtu_data['k'])
                     # + ", b: " + str(rtu_data['b'])
                     + ", len: " + str(len(rtu_data['data']))
                     + ", reading: " + str(rtu_data['data'][0]))
            if not rtu_data['site_id'].startswith("00"):
                raise Exception("Invalid site id: " + rtu_data['site_id'])
            out_path = os.path.join(
                data_dir,
                rtu_data['site_id'] + "_" + rtu_data['read_time'] + ".json")
            with open(out_path, "w+") as out_file:
                out_file.write(json.dumps(rtu_data))
            ack = parser.generate_ack(data, rtu_data)
            log.info("ack: " + ack.hex())
            self.request.sendall(ack)
        except ConnectionResetError as e:
            print("err ", e)
            log.info("ConnectionResetError: " + str(e))
        except Exception as exception:
            error_fname = datetime.datetime.now().strftime('%Y%m%d%H%M%S%f') + '.bin'
            log.info("Error data (" + error_fname + ") Exception: " + str(exception))
            write_error_data(data, error_fname)
            exstr = traceback.format_exc()
            print(exstr)
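# A minimal sketch of how a handler like the one above is typically wired
# into a socketserver-based TCP server. The class name, host, and port are
# assumptions; `parser`, `data_dir`, `write_error_data`, and `log` must be
# provided by the surrounding module as in the original code:
#
#     import socketserver
#
#     class RTUHandler(socketserver.BaseRequestHandler):
#         handle = handle  # the method defined above
#
#     with socketserver.ThreadingTCPServer(('0.0.0.0', 6000), RTUHandler) as srv:
#         srv.serve_forever()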
def load_file(file, analyzer):
    file_path = os.path.join(root_dir, sub_dir, file)
    try:
        fobj = open(file_path, 'r')
    except IOError as e:
        log.info('Fail to open file: ' + file_path + ' ' + str(e))
    else:
        for each_line in fobj:
            analyzer.process_line(each_line)
        fobj.close()
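# load_file only requires its `analyzer` argument to expose a
# process_line(line) method. A minimal compatible analyzer (the class is
# hypothetical, for illustration only) could look like this:
class LineCounter:
    def __init__(self):
        self.lines = 0

    def process_line(self, line):
        # count every line handed over by load_file
        self.lines += 1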
def move_file(srcfile, dstfile):
    if not os.path.isfile(srcfile):
        log.info("%s does not exist!" % srcfile)
    else:
        fpath = os.path.dirname(srcfile) + '/' + dstfile
        filenewpath = fpath + '/' + os.path.basename(srcfile)
        if os.path.exists(filenewpath):
            os.remove(filenewpath)
        if not os.path.exists(fpath):
            os.makedirs(fpath)  # create the destination directory
        shutil.move(srcfile, fpath)  # move the file
        log.info("move %s -> %s" % (srcfile, dstfile))
def data_calibrate(data, k, b):
    log.info("k and b values: " + str(k) + ' ' + str(b))
    new_data = list()
    for i in data:
        if i == 0:
            new_data.append(0)
        else:
            # raw reading -> sensor voltage
            v = int(i) * k + b
            # voltage -> water column height
            d = (v * 5 - 2.5) * 10.0025607225
            if 0 <= d <= 200:
                new_data.append(d)
    return new_data
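# Worked example of the calibration above (k, b and the reading are
# illustrative numbers, not real device constants):
#   with k = 0.001, b = 0.0 and a raw reading i = 1500:
#     v = 1500 * 0.001 + 0.0 = 1.5                  (sensor voltage)
#     d = (1.5 * 5 - 2.5) * 10.0025607225 ≈ 50.01   (water column)
#   d falls inside the accepted 0..200 range, so it is kept; readings that
#   calibrate outside that range are silently dropped.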
def importer():
    dirlist = os.listdir(path)
    time.sleep(1)
    log.info("importer start")
    for dir_entry in dirlist:
        dir_entry_path = os.path.join(path, dir_entry)
        print('import file', dir_entry_path)
        if os.path.isfile(dir_entry_path) and dir_entry_path.endswith('.json'):
            try:
                with open(dir_entry_path, 'r') as my_file:
                    mode = os.stat(dir_entry_path).st_mode
                    print(mode)
                    # skip files we cannot fully access
                    if not os.access(dir_entry_path, os.F_OK) \
                            or not os.access(dir_entry_path, os.W_OK) \
                            or not os.access(dir_entry_path, os.R_OK):
                        continue
                    load_dict = json.load(my_file)
                    datatime = int(time.mktime(
                        time.strptime(load_dict['read_time'], datafomat)) * 1000)
                    siteid = load_dict['site_id']
                    condition = {'device_ref': siteid[-8:]}
                    site = collection.find_one(condition)
                    if site:
                        site_id = site['_id']
                        log.info('site_id: ' + str(site_id))
                        # low byte of the frequency field is the sample rate in Hz,
                        # so hrinterval is the sample interval in milliseconds
                        hrinterval = 1000 / int(load_dict['frequency'][-2:], 16)
                        log.info('hrinterval: ' + str(hrinterval))
                        data = data_calibrate(load_dict['data'],
                                              load_dict['k'], load_dict['b'])
                        hrtsda.update_hrtsdata(site_id, datatime, hrinterval, data)
                        tsda.update_tsdata(site_id, datatime, data)
                    else:
                        log.info(siteid + ' site can not be found')
                        raise Exception(siteid + ' site can not be found')
                move_file(dir_entry_path, 'archive')
            except Exception as e:
                move_file(dir_entry_path, 'error')
                log.info("importer error: " + str(e))
                traceback.print_exc()
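# Shape of an input .json file consumed by importer(). The keys match the
# fields read above; the values shown are illustrative assumptions:
# {
#     "site_id": "0012345678",
#     "read_time": "...",      # must parse with the module-level `datafomat`
#     "frequency": "..0A",     # last two hex digits = sample rate in Hz (0x0A = 10 Hz -> 100 ms)
#     "k": 0.001,
#     "b": 0.0,
#     "data": [0, 1500, 1501]  # raw readings, converted by data_calibrate
# }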
if __name__ == '__main__':
    while True:
        try:
            importer()
            time.sleep(1)
        except Exception as e:
            log.info("importer main error: " + str(e))
            traceback.print_exc()