def dataflow_remote_run(sensor_id):
    try:
        # Load the mock sensor data and keep only the first 2048 points.
        with open("/opt/foxconn/mock_data/data.json", "r", encoding="utf-8") as f:
            full_data = json.load(f)
        data = full_data[:2048]
        created = int(time.time() * 1000)  # creation timestamp in milliseconds
        model = model_util()
        print(json.dumps(model.call_model_test(sensor_id, data, created)))
    except Exception as e:
        sys.stderr.write("dataflow_remote_run failed: %s" % e)
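# A minimal sketch of how dataflow_remote_run might be invoked as a script,
# assuming the sensor id is passed on the command line. The argument handling
# and the script name in the usage message are assumptions, not part of the
# original module.
if __name__ == "__main__":
    if len(sys.argv) > 1:
        dataflow_remote_run(sys.argv[1])
    else:
        sys.stderr.write("usage: python dataflow_remote_run.py <sensor_id>\n")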
def __init__(self):
    common_config = cf.get_common_config()
    self.breakdown_type_config = cf.get_breakdown_type()
    self.breakdown_type = {}
    self.file_load = FileLoad()
    self.model = model_util()
    self.offline_config = cf.get_offline_config()
    # Number of labels
    self.label_num = 3
    # Number of points in a single sample
    self.spot_num = 2048
    # Number of samples
    self.sample_num = 500
    self.count_times = common_config['db']['schedule']['count_iter_times']
    self.fetch_size = common_config['db']['schedule']['fetch_size']
    self.count_number = common_config['db']['schedule']['count_number']
    self.get_breakdown_type()
    # Run the count-based job every count_times seconds.
    self.by_count_job = schedule.every(self.count_times).seconds.do(
        self.schedule_by_count)
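# Note: schedule.every(...).seconds.do(...) only registers the job; the
# schedule library fires schedule_by_count only when schedule.run_pending()
# is called from a driver loop. A minimal sketch of such a loop follows; the
# run_forever name and the 1-second sleep interval are assumptions, not part
# of the original class.
def run_forever():
    while True:
        schedule.run_pending()
        time.sleep(1)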
def __init__(self):
    self.model = model_util()
    self.producer = KafkaProducerService()
    self.consumer = KafkaConsumerService()
    # In-memory dictionaries for model results and incoming data.
    self.result_dir = {}
    self.data_dir = {}