def handle_put(post_data: Optional[ApiRequest]) -> ApiResponse:
    """Handle an api PUT request.

    Validates post_data, runs the update, and reports the outcome in an
    ApiResponse.  Never raises: every failure path is folded into the
    response so the route handler can serialize it directly.

    :param post_data: parsed request payload; may be None.
    :return: ApiResponse carrying status/message, plus data on success.
    """
    response_object: ApiResponse = (
        ApiResponse(status=ReturnCode.OK.value))

    # Reject missing or malformed payloads early.
    if post_data is None \
            or not is_ApiRequest(cast(Optional[Dict], post_data)):
        response_object['status'] = ReturnCode.NO_DATA.value
        response_object['message'] = "post failed "
        return response_object

    try:
        updated = update(cast(Dict, post_data["request"]))
        if updated:
            commit()
            response_object['message'] = "update succeeded!"
            response_object["data"] = updated.to_json()
        else:
            # update() returned a falsy value: nothing was changed.
            response_object['status'] = ReturnCode.NO_DATA.value
            response_object['message'] = "update failed"
    except IntegrityError:
        response_object['status'] = ReturnCode.NO_DATA.value
        response_object["message"] = (
            "update failed!, integrity error. might be missing a field")
    except Exception as e:
        response_object['status'] = ReturnCode.NO_DATA.value
        response_object['message'] = f"update failed {e}"
    # fix: this return used to sit in a `finally:` block, which silently
    # swallows any in-flight exception -- including BaseExceptions such as
    # KeyboardInterrupt that the handlers above deliberately don't catch.
    return response_object
def test_sport_record(self):
    """use the device 20205754003878404097"""
    with self.app.app_context():
        from app.models import Device
        device = (Device
                  .query
                  .filter(Device.device_name == "20205754003878404097")
                  .first())
        did: int = device.device_id
        time_range = (datetime(2019, 9, 23, 0),
                      datetime(2019, 9, 24, 0))
        records = self.j.spot_record(did, time_range)

    # NEED a property test all the way to database commit.
    with self.app.app_context():
        from app.modelOperations import ModelOperations, commit
        for record in thunk_iter(records):
            print(record)
            ModelOperations.Add.add_spot_record(record)
        commit()
def _model_co(op: Callable, alwaycommit=True) -> Generator[None, Co_T, None]:
    """A family of coroutines that feed data into the database.

    Each value sent into the coroutine is passed to `op` (a method from
    `modelOperations`), optionally followed by a commit.

    :param op: single-argument model operation applied to each sent value.
    :param alwaycommit: when True, commit() after every successful op().
    """
    while True:
        data: Co_T = yield
        try:
            op(data)
            if alwaycommit:
                commit()
        except Exception:
            # The coroutine must never stop on a bad record.
            # fix: logger.warning discarded all exception detail; use
            # logger.exception so the traceback is preserved in the log.
            logger.exception('modelcoro: error when add spot record')
def handle_delete() -> ApiResponse:
    """Handle an api DELETE request for `some_id`.

    NOTE(review): `some_id` is read from the enclosing scope, not taken as
    a parameter -- confirm it is defined where this handler is declared.
    (An unbound name here raises NameError, which the broad except below
    converts into a failure response.)

    :return: ApiResponse carrying status and a human-readable message.
    """
    response_object: ApiResponse = (
        ApiResponse(status=ReturnCode.OK.value))
    try:
        if some_id is None:
            raise Exception("Error when deleting, id is None")
        delete(some_id)
        commit()
        # fix: message previously read "remvoe succeeded".
        response_object["message"] = "remove succeeded"
    except Exception as e:
        response_object["status"] = ReturnCode.BAD_REQUEST.value
        response_object["message"] = f"failed to remove: {e}"
    # fix: return moved out of `finally:` so unexpected BaseExceptions
    # (e.g. KeyboardInterrupt) are no longer silently swallowed.
    return response_object
def test_moco(self):
    from app.modelOperations import commit
    with self.app.app_context():
        from app.models import Device
        device = (Device
                  .query
                  .filter(Device.device_name == "20205754003878404097")
                  .first())
        did: int = device.device_id
        time_range = (datetime(2019, 9, 23, 0),
                      datetime(2019, 9, 24, 0))
        records = self.j.spot_record(did, time_range)

    with self.app.app_context():
        for record in thunk_iter(records):
            print(record)
            record_send(record)
        commit()
def run(self):
    """Actor main loop: periodically fetch new data.

    Blocks on self.recv() for the next FetchMsg, fetches the matching
    spot records, and writes them to the database chunk by chunk.
    """
    while True:
        msg: FetchMsg = self.recv()
        # fix: debug print previously read "Fetech Actro".
        print("--> Fetch Actor: msg", msg)
        # For an overall update, did and time_range are None.  When all
        # parameters of spot_record() are None it starts a full fetch,
        # whose fetching rule is embedded in the corresponding SpotData
        # implementation.
        did, chsz, max_threads, time_range = msg
        jobs: RecordThunkIter
        jobs = self.datagen.spot_record(did, time_range)
        with self.datagen.app.app_context():
            # The order packages arrive in is completely random -- it
            # depends on IO.  `buf` accumulates one chunk's results so the
            # speed gap between network IO and db IO doesn't pile too many
            # jobs up in the thread pool.
            for jobchunk in chunks(jobs,
                                   size=chsz if chsz is not None else 10):
                logger.warning("actor start new chunk")
                buf = iter([])  # temporary accumulator.
                logger.warning("actor fetching")
                for gen in thunk_iter_(
                        jobchunk,
                        max_threads=max_threads if max_threads else 30):
                    buf = chain(buf, gen)
                logger.warning("actor recording")
                for r in buf:
                    # record without per-row commit to speed up a bit.
                    record__no_commit_send(r)
                commit()
                print(threading.enumerate())
def update_device(self):
    """generate new list, store it into database."""
    xiaomi_gen = self.update_actor.xiaomi_actor.datagen
    jyy_gen = self.update_actor.jianyanyuan_actor.datagen
    xiaomi_gen.make_device_list()
    jyy_gen.make_device_list()
    devices = chain(xiaomi_gen.normed_device_list,
                    jyy_gen.normed_device_list)

    with self.app.app_context():
        from app.models import Device as MD
        from app.modelOperations import ModelOperations, commit

        for device in devices:  # dataType.Device
            dname = device.get("device_name")
            if dname is None:
                continue
            # only store devices not yet present in the database.
            known = (MD.query
                     .filter(MD.device_name == dname)
                     .count())
            if known == 0:
                ModelOperations.Add.add_device(cast(Dict, device))
        commit()
def test_sport_record(self):
    with self.app.app_context():
        from app.models import Device
        device = (Device
                  .query
                  .filter(Device.device_name == "lumi.158d0001fd5c50")
                  .first())
        did: int = device.device_id
        print(did)
        time_range = (datetime(2020, 6, 5, 16),
                      datetime(2020, 6, 6, 18))
        records = self.x.spot_record(did, time_range)

    # NEED a property test all the way to database commit.
    with self.app.app_context():
        from app.modelOperations import ModelOperations, commit
        for record in thunk_iter(records):
            ModelOperations.Add.add_spot_record(record)
        commit()
def gen_fake():
    """Seed the database with one fake record per model.

    Resets the database, loads real Jianyanyuan devices, then inserts a
    fake location, project, spot, device and spot record.  Order matters:
    each payload below may query rows added (uncommitted) by the previous
    step; a single commit() at the end persists everything.

    Imports are kept local to avoid circular import.
    """
    from app import modelOperations as mops
    from app.modelOperations import commit
    from app import models as m
    import db_init

    # Wipe/initialize the db, then load raw devices (no spot deduction).
    db_init.db_init()
    full = db_init.JianyanyuanLoadFull()
    full.load_devices(raw=True)

    location = {
        "province": "Province",
        "city": "City",
        "climate_area_name": "A1"
    }
    mops.ModelOperations.Add.add_location(location)

    project = {
        "location": {
            "province": "Province",
            "city": "City"
        },
        "floor": "4",
        "tech_support_company": {
            "company_name": "TechSupportCompany"
        },
        "construction_company": {
            "company_name": "ConstrutionCompany"
        },
        "description": "",
        "project_name": "Project",
        "latitude": "31.908271",
        "building_height": 23,

        # not necessary for all records to be string.
        "demo_area": "2311.94",
        "longitude": "121.172900",
        "building_type": "House",
        "started_time": "2017-12-18T00:00:00",
        "finished_time": "2018-02-18T00:00:00",
        "project_company": {
            "company_name": "ProjectCompany"
        },
        "outdoor_spot": "",
        "district": "Discrict",
        "record_started_from": datetime(2019, 4, 20),
        "area": "2311.94"
    }
    mops.ModelOperations.Add.add_project(project)

    # Spot references the project inserted just above (pre-commit query).
    spot = {
        "project": m.Project.query.first().project_id,
        "spot_name": "Spot",
        "spot_type": "Bedroom",
        "image": b"asjdlasd"
    }
    mops.ModelOperations.Add.add_spot(spot)

    # Device hangs off the spot inserted just above.
    device = {
        "device_name": "Device",
        "device_type": "Temperature",
        "spot": m.Spot.query.first().spot_id,
        "online": 1,
        "create_time": "2019-04-20T00:00:00",
        "modify_time": datetime(2019, 4, 24)
    }
    mops.ModelOperations.Add.add_device(device)

    # Spot record for the device inserted just above; values may be
    # strings -- the add operation normalizes types.
    spot_record = {
        "spot_record_time": datetime(2019, 9, 24, 12, 30),
        # "spot_record_time": "2019-09-24T12:30:00",
        "device": m.Device.query.first().device_id,
        "window_opened": "true",
        "temperature": "34",
        "humidity": "89",
        "ac_power": "2000",
        "pm25": "34",
        "co2": "22"
    }
    mops.ModelOperations.Add.add_spot_record(spot_record)

    commit()
def load_devices(self, raw=False):
    """Load Jianyanyuan devices into the database.

    Like Spot, a device has two sources to determine its Spot:
    method 1: deduce from the j_project_device_table.json file.
    method 2: deduce from the location_info of the Device TypedDict.

    :param raw: debug switch; when True devices are stored without any
                project/spot deduction.
    """

    def handle_location_info(location_info) -> Optional[Spot]:
        """Deduce a Spot from the given location_info.

        Return None when no sufficiently good match is found.

        Priority of elements of location_info:
            1. address
            2. extra
        city and province are the login location, so they are generally
        incorrect.  If neither address nor extra yields a Spot, either
        go with a json table search or skip it.

        Note: for Jianyanyuan, spots are basically the same as projects.
        """
        _, _, address, extra = itemgetter(
            'province', 'city', 'address', 'extra')(location_info)

        projects: Tuple = tuple(Project.query.all())
        # fix: guard the empty-database case -- max() on an empty
        # sequence raises ValueError.
        if not projects:
            return None

        # fuzzy match based on address and extra infos.
        # (partial_ratio returns int, not float)
        fuzz_address_results: List[int] = list(
            map(lambda p: fuzz.partial_ratio(p.project_name, address),
                projects))
        fuzz_extra_results: List[int] = list(
            map(lambda p: fuzz.partial_ratio(p.project_name, extra),
                projects))

        max_address_ratio = max(fuzz_address_results)
        max_extra_ratio = max(fuzz_extra_results)

        # ratio >= 80 indicates a good match.
        # (fix: comment previously said "> 40", disagreeing with the code)
        if max_address_ratio < 80 and max_extra_ratio < 80:
            return None

        # pick the project from whichever field matched better.
        project: Optional[Project] = None
        if max_address_ratio > max_extra_ratio:
            project = projects[fuzz_address_results.index(
                max_address_ratio)]
        else:
            project = projects[fuzz_extra_results.index(max_extra_ratio)]
        if not project:
            return None

        # use project to query spot.
        spot: Spot = Spot.query.filter_by(project=project).first()
        if not spot:
            return None
        return spot

    # Note: Two methods both loop through device list.

    def load_by_table_lookup(d: DGType.Device,
                             json_data: Dict,
                             json_spot_list: List[Spot]):
        """method 1: load project name from json table for the given
        device id, then register the device under that project's spot.
        """
        did = d.get('device_name')
        for project_name, did_lists in json_data.items():
            if did in did_lists:
                # NOTE(review): next() raises StopIteration when no spot
                # matches project_name -- confirm the json table stays
                # consistent with the Project table.
                spot = next(
                    filter(
                        lambda s: s.project.project_name == project_name,
                        json_spot_list))

                device_post_data = {
                    'device_name': did,
                    'device_type': d.get('device_type'),
                    'spot': spot,
                    'online': d.get('online'),
                    'create_time': d.get('create_time'),
                    'modify_time': d.get('modify_time')
                }
                ModelOperations.Add.add_device(device_post_data)

    def load_by_location_info(d: DGType.Device):
        """method 2: deduce the spot by the location_info typeddict that
        comes with the Device typeddict.
        """
        spot: Optional[Spot] = handle_location_info(d['location_info'])
        device_post_data = {
            'device_name': d.get('device_name'),
            'device_type': d.get('device_type'),
            'spot': spot,
            'online': d.get('online'),
            'create_time': d.get('create_time'),
            'modify_time': d.get('modify_time')
        }
        ModelOperations.Add.add_device(device_post_data)

    # Jianyanyuan devices.
    devices: Optional[Generator] = self.j.device()
    if not devices:
        logger.warning('empty device from JianyanyuanData')
        return

    # NOTE: THis is for debug only
    # when raw is true, load device without any extra project
    # information.
    if raw:
        for device in devices:  # dataType.Device
            ModelOperations.Add.add_device(cast(Dict, device))
        commit()
    else:
        # read json files
        with open('app/dataGetter/static/j_project_device_table.json',
                  'r') as f:
            json_data: Dict = json.loads(f.read())

        json_spot_list = [(Project.query.filter_by(
            project_name=pn)).first().spot.first()
            for pn in json_data.keys()]

        for d in devices:  # consumer
            # second operation will overwrite the first one.
            # table lookup has higher accuracy so has higher priority
            # than fuzzy match.
            load_by_table_lookup(d, json_data, json_spot_list)
            load_by_location_info(d)

    logger.info('finished loading device')