def to_model(self, info_data, data_info_type=None):
    """ change info type to DB model type

    :param info_data: corresponding class instance to DB model
    :param data_info_type: optional info class whose attribute maps
        (``_route_attrs_``, ``_dt_attrs_``, ``_rel_attrs_``, ``_enum_attrs_``,
        ``_json_attrs_``) override this accessor's own attribute maps
    :return: converted DB model data
    """
    # pick the attribute-category maps either from the given info class
    # or from this data-access object itself
    route_attrs = data_info_type._route_attrs_ if data_info_type else self.route_attrs
    dt_attrs = data_info_type._dt_attrs_ if data_info_type else self.dt_attrs
    rel_attrs = data_info_type._rel_attrs_ if data_info_type else self.rel_attrs
    enum_attrs = data_info_type._enum_attrs_ if data_info_type else self.enum_attrs
    json_attrs = data_info_type._json_attrs_ if data_info_type else self.json_attrs
    model_data = self.dbModel()
    # copy every public attribute of the info object onto a fresh model row,
    # converting values according to the attribute category
    for key, value in info_data.__dict__.items():
        valueToSet = value
        if key in dt_attrs:
            # datetime columns are carried as strings on the info object
            if value:
                valueToSet = datetime.datetime.strptime(
                    value, '%Y-%m-%d %H:%M:%S')
        elif key in route_attrs:
            # NOTE(review): `only_name` is not a stdlib json kwarg — this is
            # presumably a project-level json wrapper; confirm its semantics
            valueToSet = json.dumps(value, only_name=True)
        elif key in enum_attrs:
            if not value:
                # keep falsy enum values (e.g. None) as-is
                valueToSet = value
            elif isinstance(value.value, tuple):
                # tuple-valued enums store only their first element
                valueToSet = value.value[0]
            else:
                valueToSet = value.value
        elif key in json_attrs:
            valueToSet = json.dumps(value)
        setattr(model_data, key, valueToSet)
    # resolve relationship attributes: look up the related row by its id column
    for attr_name, rel_info in rel_attrs.items():
        value = getattr(info_data, attr_name, None)
        valueToSet = value
        if value:
            # relation already populated on the info object; presumably the
            # loop above has already copied it — TODO confirm
            continue
        rel_key = rel_info['key']
        rel_model_cls = rel_info['model_cls']
        try:
            rel_id = getattr(info_data, rel_key)
            if rel_id:
                # no_autoflush: avoid flushing the half-built model while querying
                with self.session.no_autoflush:
                    valueToSet = self.session.query(rel_model_cls).filter(
                        rel_model_cls.id == rel_id).first()
        except NoResultFound as ex:
            # missing related row is tolerated; the attribute stays as-is
            pass
        setattr(model_data, attr_name, valueToSet)
    return model_data
def update(self, id, field_data, autocommit=False):
    """ update nsr_data

    :param id: index
    :param field_data: dictionary nsr_data (key must be same to field name of the model)
    :param autocommit: commit the session immediately when True
    :return: boolean — True when a row was updated
    :raise ValueError: when `id` is missing
    """
    if not id:
        raise ValueError('`id` must be provided')
    converted = {}
    for key, value in field_data.items():
        if key in self.dt_attrs and not isinstance(value, datetime.datetime):
            # datetime columns may arrive as strings; parse them for the DB layer
            converted[key] = datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
        elif key in self.route_attrs and isinstance(value, Route):
            converted[key] = json.dumps(value)
        elif key in self.enum_attrs:
            if not value:
                # bug fix: a falsy enum value (None) used to raise
                # AttributeError on `value.value` — keep it as-is,
                # consistent with `to_model`
                converted[key] = value
            elif isinstance(value.value, tuple):
                # tuple-valued enums store only their first element
                converted[key] = value.value[0]
            else:
                converted[key] = value.value
        else:
            converted[key] = value
    ret = self.session.query(self.dbModel).filter_by(id=id).update(converted)
    if autocommit:
        self.session.commit()
    return bool(ret)
def initialize_system_config_info():
    """Load the stored system configuration — creating and persisting the
    defaults when none is stored yet — and apply every value to the
    module-level ``cfg`` settings."""
    da = ConfigDataAccess()
    config_data = da.get_by_name(cfg.OPT_NAME_SYSCONFIG)
    if config_data and config_data.content:
        syscfg = json.loads(config_data.content)
    else:
        # nothing stored yet: build defaults and persist them
        syscfg = get_system_config_info()
        da.insert_or_update(cfg.OPT_NAME_SYSCONFIG, json.dumps(syscfg))
        da.commit()
    # (cfg attribute, syscfg attribute) pairs — note a few names differ
    attr_pairs = [
        ('DATA_ARCHIVE_START_YEAR', 'data_archive_start_year'),
        ('DAILY_JOB_OFFSET_DAYS', 'daily_job_offset_days'),
        ('DAILY_JOB_START_TIME', 'daily_job_start_time'),
        ('WEEKLY_JOB_START_WEEKDAY', 'weekly_job_start_day'),
        ('WEEKLY_JOB_START_TIME', 'weekly_job_start_time'),
        ('MONTHLY_JOB_START_DAY', 'monthly_job_start_date'),
        ('MONTHLY_JOB_START_TIME', 'monthly_job_start_time'),
        ('INCIDENT_DOWNSTREAM_DISTANCE_LIMIT', 'incident_downstream_distance_limit'),
        ('INCIDENT_UPSTREAM_DISTANCE_LIMIT', 'incident_upstream_distance_limit'),
        ('WZ_DOWNSTREAM_DISTANCE_LIMIT', 'workzone_downstream_distance_limit'),
        ('WZ_UPSTREAM_DISTANCE_LIMIT', 'workzone_upstream_distance_limit'),
        ('SE_ARRIVAL_WINDOW', 'specialevent_arrival_window'),
        ('SE_DEPARTURE_WINDOW1', 'specialevent_departure_window1'),
        ('SE_DEPARTURE_WINDOW2', 'specialevent_departure_window2'),
        # faverolles 1/12/2020: Adding AdminClient MOE Config Parameters
        ('MOE_CRITICAL_DENSITY', 'moe_critical_density'),
        ('MOE_LANE_CAPACITY', 'moe_lane_capacity'),
        ('MOE_CONGESTION_THRESHOLD_SPEED', 'moe_congestion_threshold_speed'),
    ]
    for cfg_name, syscfg_name in attr_pairs:
        setattr(cfg, cfg_name, getattr(syscfg, syscfg_name))
    da.close_session()
def update(self, id, field_data, **kwargs):
    """ update data

    :param id: index
    :param field_data: dictionary data (key must be same to field name of the model)
    :keyword print_exception: print the DB exception when the update fails
    :return: boolean — True when the update statement succeeded
    :raise ValueError: when `id` is missing
    """
    print_exception = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    if not id:
        raise ValueError('`id` must be provided')
    converted = {}
    for key, value in field_data.items():
        if key in self.dt_attrs and not isinstance(value, datetime.datetime):
            # datetime columns may arrive as strings; parse them for the DB layer
            converted[key] = datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
        elif key in self.route_attrs and isinstance(value, Route):
            converted[key] = json.dumps(value)
        elif key in self.enum_attrs:
            if not value:
                # bug fix: a falsy enum value (None) used to raise
                # AttributeError on `value.value` — keep it as-is,
                # consistent with `to_model`
                converted[key] = value
            elif isinstance(value.value, tuple):
                # tuple-valued enums store only their first element
                converted[key] = value.value[0]
            else:
                converted[key] = value.value
        else:
            converted[key] = value
    try:
        self.session.query(self.dbModel).filter_by(id=id).update(converted)
        return True
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def get_dict(self):
    """Return the public attributes of this object as a dict, serializing
    route/json-category attributes to JSON strings."""
    result = {}
    for name, val in self.__dict__.items():
        # internal attributes (leading underscore) are not exported
        if name.startswith('_'):
            continue
        needs_serialization = name in self._route_attrs_ or name in self._json_attrs_
        result[name] = json.dumps(val, only_name=True) if needs_serialization else val
    return result
def cache(group_id, site_id, date, wd, src_type):
    """Serialize a scraped RWIS data object to JSON and store it in the cache.

    :type group_id: str
    :type site_id: str
    :type date: datetime.datetime
    :type wd: ScanWebData
    """
    target = _cache_path(_CACHE_RWIS, group_id, site_id, date, src_type)
    serialized = json.dumps(wd)
    util.save_file_contents(target, serialized)
def save_route(r, **kwargs):
    """Persist a route to disk as JSON.

    :type r: Route
    :return: True when the file exists after writing; False when an existing
             file was left untouched (``overwrite`` not set)
    """
    filepath = get_file_path(r.name, **kwargs)
    # refuse to clobber an existing file unless explicitly requested
    if os.path.exists(filepath) and not kwargs.get('overwrite', False):
        return False
    util.save_file_contents(filepath, json.dumps(r))
    return os.path.exists(filepath)
def _meta_data(r):
    """Serialize a route to pretty-printed JSON and split the string into
    fixed-size chunks.

    :type r: pyticas.ttypes.Route
    :rtype: list[str]
    """
    CHUNK_LENGTH = 10240
    serialized = json.dumps(r, sort_keys=True, indent=4)
    chunks = []
    for start in range(0, len(serialized), CHUNK_LENGTH):
        chunks.append(serialized[start:start + CHUNK_LENGTH])
    return chunks
def set_system_config_info(syscfg):
    """Persist the given system configuration and apply it to the
    module-level ``cfg`` settings.

    :return: the previously-applied configuration on success, False on failure
    :rtype: pyticas_tetres.ttypes.SystemConfigInfo
    """
    # (cfg attribute, SystemConfigInfo attribute) pairs — note a few names differ
    attr_pairs = [
        ('DATA_ARCHIVE_START_YEAR', 'data_archive_start_year'),
        ('DAILY_JOB_OFFSET_DAYS', 'daily_job_offset_days'),
        ('DAILY_JOB_START_TIME', 'daily_job_start_time'),
        ('WEEKLY_JOB_START_WEEKDAY', 'weekly_job_start_day'),
        ('WEEKLY_JOB_START_TIME', 'weekly_job_start_time'),
        ('MONTHLY_JOB_START_DAY', 'monthly_job_start_date'),
        ('MONTHLY_JOB_START_TIME', 'monthly_job_start_time'),
        ('INCIDENT_DOWNSTREAM_DISTANCE_LIMIT', 'incident_downstream_distance_limit'),
        ('INCIDENT_UPSTREAM_DISTANCE_LIMIT', 'incident_upstream_distance_limit'),
        ('WZ_DOWNSTREAM_DISTANCE_LIMIT', 'workzone_downstream_distance_limit'),
        ('WZ_UPSTREAM_DISTANCE_LIMIT', 'workzone_upstream_distance_limit'),
        ('SE_ARRIVAL_WINDOW', 'specialevent_arrival_window'),
        ('SE_DEPARTURE_WINDOW1', 'specialevent_departure_window1'),
        ('SE_DEPARTURE_WINDOW2', 'specialevent_departure_window2'),
        # faverolles 1/12/2020: Adding AdminClient MOE Config Parameters
        ('MOE_CRITICAL_DENSITY', 'moe_critical_density'),
        ('MOE_LANE_CAPACITY', 'moe_lane_capacity'),
        ('MOE_CONGESTION_THRESHOLD_SPEED', 'moe_congestion_threshold_speed'),
    ]
    da = ConfigDataAccess()
    inserted = da.insert_or_update(cfg.OPT_NAME_SYSCONFIG, json.dumps(syscfg))
    if inserted == False or not da.commit():
        da.close_session()
        return False
    # bug fix: the DB session used to be closed only on the failure path,
    # leaking it on every successful save (cf. initialize_system_config_info)
    da.close_session()
    # snapshot the currently-applied values before overwriting them,
    # so the caller can restore or compare against the previous config
    prev_syscfg = SystemConfigInfo()
    for cfg_name, info_name in attr_pairs:
        setattr(prev_syscfg, info_name, getattr(cfg, cfg_name))
    # apply the new values to the runtime configuration
    for cfg_name, info_name in attr_pairs:
        setattr(cfg, cfg_name, getattr(syscfg, info_name))
    return prev_syscfg
def _save_json(json_data, json_path):
    """Write `json_data` to `json_path` as pretty-printed (indent=4) JSON."""
    serialized = json.dumps(json_data, indent=4)
    with open(json_path, 'w') as out:
        out.write(serialized)
def save(self):
    """Trim the in-memory log to the newest `capacity` entries and write the
    registry plus the trimmed logs to `filepath` as JSON."""
    with open(self.filepath, 'w') as fh:
        # keep only the newest entries
        # NOTE(review): capacity == 0 keeps everything (slice [-0:]) — confirm intended
        self.logs = self.logs[-1 * self.capacity:]
        payload = {'registry': self.registry, 'logs': self.logs}
        fh.write(json.dumps(payload))
def response(code, message, obj=None, only_name=True, **kwargs):
    """Build a JSON response string carrying `code`, `message` and `obj`.

    :param only_name: forwarded to the JSON serializer
    :return: JSON-encoded response string
    """
    payload = {'code': code, 'message': message, 'obj': obj}
    return json.dumps(payload, only_name=only_name, indent=4)
def _calculate_for_a_regime(ttri, regime_type, sdate, edate, stime, etime, target_days=(1, 2, 3), except_dates=(), remove_holiday=True, **kwargs): """ :type ttri: pyticas_tetres.ttypes.TTRouteInfo :type regime_type: int :type sdate: datetime.date :type edate: datetime.date :type stime: datetime.time :type etime: datetime.time :type target_days: tuple[int] :rtype: bool """ # Regime Filter ext_filter = _ext_filter(regime_type) lock = kwargs.get('lock', nonop_with()) extractor.extract_tt(ttri.id, sdate, edate, stime, etime, [ext_filter], target_days=target_days, remove_holiday=remove_holiday, except_dates=except_dates) # print('# ', ext_filter.label) da = TODReliabilityDataAccess() # delete existings ttwis = [ttwi for ttwi in da.list_by_route(ttri.id, regime_type)] ttwi_ids = [v.id for v in ttwis] with lock: is_deleted = da.delete_items(ttwi_ids) if not is_deleted or not da.commit(): return False tod_res = [] cursor = datetime.datetime.combine(datetime.date.today(), stime) cursor += datetime.timedelta(seconds=cfg.TT_DATA_INTERVAL) edatetime = datetime.datetime.combine(datetime.date.today(), etime) dict_data = [] while cursor <= edatetime: ctime = cursor.strftime('%H:%M:00') res = [ extdata for extdata in ext_filter.whole_data if ctime == extdata.tti.time.strftime('%H:%M:00') ] ttr_res = reliability.calculate(ttri, res) tod_res.append(ttr_res) dict_data.append({ 'regime_type': regime_type, 'route_id': ttri.id, 'hour': cursor.hour, 'minute': cursor.minute, 'result': json.dumps(ttr_res), }) cursor += datetime.timedelta(seconds=cfg.TT_DATA_INTERVAL) with lock: is_inserted = da.bulk_insert(dict_data) if not is_inserted or not da.commit(): return False return True