def _worker_process_to_calculate_tod_reliabilities(idx, queue, lck, data_path, db_info):
    """ Worker-process body: pull (route id, date) tasks off `queue` and compute
    time-of-day travel-time reliabilities for each, until a None date arrives.

    :type idx: int  -- worker index, used only in log messages
    :type queue: multiprocessing queue yielding (ttr_id, target_date, num, total)
    :type lck: lock passed through to the calculation (presumably for DB sync -- confirm)
    :type data_path: str
    :type db_info: dict
    """
    import gc
    from pyticas.tool import tb
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra

    logger = getLogger(__name__)

    # initialize: per-process TICAS/infra/DB setup (each worker owns its own connection)
    logger.debug('[TOD Reliability Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)

    logger.debug('[TOD Reliability Worker %d] is ready' % (idx))
    while True:
        ttr_id, target_date, num, total = queue.get()
        if target_date is None:
            # poison pill: terminate this worker process
            exit(1)
        try:
            logger.debug('[TOD Reliability Worker %d] (%d/%d) calculating for route=%s at %s' % (
                idx, num, total, ttr_id, target_date.strftime('%Y-%m-%d')))
            traveltime_info.calculate_TOD_reliabilities(ttr_id, target_date, lock=lck)
            # free memory between tasks; each day's calculation can be large
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def delete_range(self, corridor_route, corridor_dir, start_time, end_time, **kwargs):
    """ Delete rows of a corridor whose created/updated/closed timestamps fall
    inside [start_time, end_time].

    :type corridor_route: str
    :type corridor_dir: str
    :type start_time: datetime.datetime
    :type end_time: datetime.datetime
    :rtype: bool
    """
    show_error = kwargs.get('print_exception', False)
    model_cls = self.da_base.dbModel
    qry = self.da_base.session.query(model_cls)
    try:
        # a row matches when any of its three timestamps is inside the window
        time_window = or_(
            and_(model_cls.cdts >= start_time, model_cls.cdts <= end_time),
            and_(model_cls.udts >= start_time, model_cls.udts <= end_time),
            and_(model_cls.xdts >= start_time, model_cls.xdts <= end_time),
        )
        (qry.filter(model_cls.road == corridor_route)
            .filter(model_cls.direction == corridor_dir)
            .filter(time_window)
            .delete(synchronize_session=False))
        return True
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return False
def update(self, id, field_data, **kwargs):
    """ Update a row by primary key.

    :param id: primary-key value of the row to update
    :param field_data: dict of column-name -> new value (keys must match model fields)
    :return: True on success, False when the UPDATE raised
    """
    print_exception = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    if not id:
        raise ValueError('`id` must be provided')

    def _to_db_value(key, value):
        # datetime columns may arrive as 'YYYY-MM-DD HH:MM:SS' strings
        if key in self.dt_attrs and not isinstance(value, datetime.datetime):
            return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
        # route objects are stored as JSON text
        if key in self.route_attrs and isinstance(value, Route):
            return json.dumps(value)
        # enum members are stored by their underlying value
        # (first element when the enum value is a tuple)
        if key in self.enum_attrs:
            return value.value[0] if isinstance(value.value, tuple) else value.value
        return value

    converted = {key: _to_db_value(key, value) for key, value in field_data.items()}
    try:
        self.session.query(self.dbModel).filter_by(id=id).update(converted)
        return True
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def delete_range(self, ttri_id, sdt, edt, **kwargs):
    """ Delete rows linked to the travel-time records of a route within a time range.

    :type ttri_id: int
    :type sdt: datetime.datetime
    :type edt: datetime.datetime
    """
    show_error = kwargs.get('print_exception', False)
    item_ids = kwargs.get('item_ids', None)
    try:
        # ids of the route's travel-time rows inside [sdt, edt]
        tt_ids = (self.session.query(self.ttModel.id)
                  .filter(self.ttModel.route_id == ttri_id)
                  .filter(self.ttModel.time <= edt)
                  .filter(self.ttModel.time >= sdt))

        qry = self.session.query(self.dbModel).filter(self.dbModel.tt_id.in_(tt_ids.subquery()))

        # optionally narrow by the model's "operating condition" id column
        if item_ids and hasattr(self.dbModel, 'oc_field'):
            oc_column = getattr(self.dbModel, self.dbModel.oc_field)
            qry = qry.filter(oc_column.in_(item_ids))

        qry.delete(synchronize_session=False)
        return True
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return False
def delete_range(self, usaf, wban, start_time, end_time, **kwargs):
    """ Delete weather rows for a station (usaf/wban) within a time range.

    All filters are optional: a None usaf/wban skips the station filter, and the
    time filter is applied only when both bounds are given.

    :type usaf: str
    :type wban: str
    :type start_time: datetime.datetime
    :type end_time: datetime.datetime
    :rtype: bool
    """
    show_error = kwargs.get('print_exception', False)
    model_cls = self.da_base.dbModel
    try:
        qry = self.da_base.session.query(model_cls)
        if usaf is not None:
            qry = qry.filter(model_cls.usaf == usaf)
        if wban is not None:
            qry = qry.filter(model_cls.wban == wban)
        if start_time is not None and end_time is not None:
            qry = qry.filter(and_(model_cls.dtime >= start_time, model_cls.dtime <= end_time))
        qry.delete(synchronize_session=False)
        return True
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return False
def commit(self, **kwargs):
    """ Commit the current DB session.

    :return: True on success, False when the commit raised
    """
    try:
        self.session.commit()
    except Exception as ex:
        if kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION):
            tb.traceback(ex)
        return False
    return True
def run(**kwargs):
    """ Monthly scheduled-task entry point: run the travel-time data check.

    Any failure is logged and swallowed so the scheduler keeps running.
    """
    try:
        _01_check_tt_data.run()
    except Exception as ex:
        tb.traceback(ex)
        getLogger(__name__).warning('Exception occured while performing monthly task')
def run(**kwargs):
    """ Weekly scheduled-task entry point: pre-calculate time-of-day travel-time
    reliabilities for the configured offset day.

    Any failure is logged and swallowed so the scheduler keeps running.
    """
    now = datetime.datetime.today()
    # process the day DAILY_JOB_OFFSET_DAYS before today
    day_to_process = now - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    try:
        _01_pre_calculation_of_tod_ttr.run(day_to_process)
    except Exception as ex:
        tb.traceback(ex)
        getLogger(__name__).warning('Exception occured while performing weekly task')
def _estimation_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """ Worker-process body: pull (route id, request, uid) tasks off `queue`, run a
    travel-time reliability estimation for each, and pack the results when the
    last task of a request uid finishes.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn

    logger = getLogger(__name__)

    # initialize: per-process TICAS/infra setup and three DB connections
    logger.debug('[EST WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)

    # db session is created here
    ttr_da = TTRouteDataAccess()

    logger.debug('[EST WORKER %d] is ready' % (id))
    while True:
        (a_route_id, eparam, uid) = queue.get()
        try:
            logger.debug('[EST WORKER %d] >>>>> start estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
            # work on a copy so the shared request object is never mutated
            _eparam = eparam.clone()
            try:
                # shift the start time slightly; a failure here is non-fatal
                _eparam.add_start_time_offset(offset=5)
            except Exception as e:
                logger.debug('Could not add five minutes offset to the starting time. Error: {}'.format(e))
            _eparam.travel_time_route = ttr_da.get_by_id(a_route_id)
            estimation.estimate(_eparam, uid)
            logger.debug('[EST WORKER %d] <<<<< end of estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
        except Exception as ex:
            tb.traceback(ex)
            logger.debug('[EST WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, uid))

        # the counter is decremented even when the task failed, so a uid can
        # never be left pending; the worker that drops it to zero packs results
        should_pack = False
        with lock:
            counters[uid] = counters[uid] - 1
            if counters[uid] <= 0:
                del counters[uid]
                should_pack = True

        if should_pack:
            logger.debug('[EST WORKER %d] >>> make compressed file (uid=%s)' % (id, uid))
            _pack_result(uid)
            logger.debug('[EST WORKER %d] <<< end of making compressed file (uid=%s)' % (id, uid))
def _worker_process_to_specific_categorization(idx, queue, lck, data_path, db_info, **kwargs):
    """ Worker-process body: pull (route id, period) tasks off `queue` and run only
    the categorizers named in kwargs['categorizer_names'] over each route/day.

    :type idx: int
    :type queue: multiprocessing queue yielding (ttr_id, prd, num, total)
    :type lck: lock passed through to each categorizer
    :type data_path: str
    :type db_info: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone

    logger = getLogger(__name__)

    # initialize: per-process TICAS/infra/DB setup
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    # map the requested categorizer names to their modules
    # (an unknown name yields None here and would fail at call time)
    categorizers = []
    categorizer_names = kwargs.get("categorizer_names")
    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone
    }
    for categorizer_name in categorizer_names:
        categorizers.append(categorizer_map.get(categorizer_name))

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # poison pill: close the DB session and terminate this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug('[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug('[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                idx, num, total, ttri.name, ttri.id, prd.get_date_string()))

            # load the day's travel-time records once, then feed them to every categorizer
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()

            for cidx, categorizer in enumerate(categorizers):
                n_inserted = categorizer.categorize(ttri, prd, tt_data_list, lock=lck)

            # free memory between tasks
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path, db_info):
    """ Worker-process body: pull (route id, period) tasks off `queue`, compute the
    day's travel times for the route, then run every categorizer over the results.

    :type idx: int
    :type queue: multiprocessing queue yielding (ttr_id, prd, num, total)
    :type lck: lock passed through to the travel-time calculation and categorizers
    :type data_path: str
    :type db_info: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)

    # initialize: per-process TICAS/infra/DB setup
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    # fixed categorizer order: weather, incident, workzone, specialevent, snowmgmt
    categorizers = [weather, incident, workzone, specialevent, snowmgmt]
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # poison pill: close the DB session and terminate this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug('[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug('[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                idx, num, total, ttri.name, ttri.id, prd.get_date_string()))

            # travel-time calculation reuses this worker's route-DA session
            is_inserted = traveltime.calculate_a_route(prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                # categorization still proceeds on whatever data already exists
                logger.warning('[TT-Categorization Worker %d] - fail to add travel time data' % idx)

            # load the day's travel-time records once, then feed them to every categorizer
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()

            for cidx, categorizer in enumerate(categorizers):
                n_inserted = categorizer.categorize(ttri, prd, tt_data_list, lock=lck)

            # free memory between tasks
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def get_model_by_id(self, id, **kwargs):
    """ Return the DB model row whose primary key equals `id`.

    :type id: int
    :rtype: dbModel  -- None when not found or on query error
    """
    show_error = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    try:
        pk_column = getattr(self.dbModel, self.primary_key)
        return self.session.query(self.dbModel).filter(pk_column == id).first()
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return None
def get_model_by_name(self, name, **kwargs):
    """ Return the DB model row whose `name` column equals `name`
    (use only if the model has a `name` column).

    :type name: str
    :rtype: dbModel  -- None when not found or on query error
    """
    print_exception = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    try:
        return self.session.query(self.dbModel).filter(self.dbModel.name == name).first()
    except Exception as ex:
        if print_exception:
            # BUG FIX: previously called tb.traceback(print_exception) -- passing
            # the boolean flag instead of the exception, so the real traceback
            # was never reported
            tb.traceback(ex)
        return None
def write(uid, eparam, operating_conditions, whole, yearly, monthly, daily):
    """ Write the "reliabilities by indices (whole time period)" workbook.

    :type uid: str
    :type eparam: pyticas_tetres.ttypes.EstimationRequestInfo
    :type operating_conditions: list[pyticas_tetres.rengine.filter.ftypes.ExtFilterGroup]
    :type whole: list[dict]
    :type yearly: list[(list[dict], list[int])]
    :type monthly: list[(list[dict], list[[int, int]])]
    :type daily: list[(list[dict], list[datetime.date])]
    """
    output_dir = util.output_path(
        '%s/%s - %s' % (uid, eparam.travel_time_route.corridor, eparam.travel_time_route.name))

    # result file
    output_file = os.path.join(output_dir, 'reliabilities-by-indices (whole-time-period).xlsx')
    wb = xlsxwriter.Workbook(output_file)

    def _best_effort(sheet_writer, *args):
        # each sheet is written best-effort: a failure is logged and the
        # remaining sheets are still produced
        try:
            sheet_writer(*args)
        except Exception as ex:
            getLogger(__name__).warning(
                'Exception occured when writing data table : %s' % tb.traceback(ex, f_print=False))

    _best_effort(report_helper.write_operating_condition_info_sheet, eparam, wb)
    _best_effort(_write_whole_result_sheet, eparam, operating_conditions, wb, whole)
    if yearly:
        _best_effort(_write_yearly_result_sheet, eparam, operating_conditions, wb, yearly)
    if monthly:
        _best_effort(_write_monthly_result_sheet, eparam, operating_conditions, wb, monthly)
    if daily:
        _best_effort(_write_daily_result_sheet, eparam, operating_conditions, wb, daily)

    wb.close()
def _retrieve_data_from_db(route_id, operating_conditions, sdate, edate, start_time, end_time, target_days, remove_holiday, **kwargs): """ :type route_id: int :type operating_conditions: list[pyticas_tetres.rengine.filter.ExtFilterGroup] :type sdate: datetime.datetime :type edate: datetime.datetime :type start_time: datetime.time :type end_time: datetime.time :type target_days: list[int] :type remove_holiday: bool """ # logger = getLogger(__name__) prd = period.Period(sdate, edate, cfg.TT_DATA_INTERVAL) # proc_start_time = time.time() # logger.debug('>>>> retrieving data for %s' % prd.get_date_string()) year = sdate.year da_tt = tt.TravelTimeDataAccess(year) # generator traveltimes = da_tt.list_by_period(route_id, prd, start_time=start_time, end_time=end_time, weekdays=target_days, as_model=True) """:type: list[pyticas_tetres.db.model.TravelTime] """ for ttm in traveltimes: dt = str2datetime(ttm.time) if remove_holiday and period.is_holiday(dt.date()): continue _tt_weathers = list(ttm._tt_weathers) _tt_incidents = list(ttm._tt_incidents) _tt_workzones = list(ttm._tt_workzones) _tt_specialevents = list(ttm._tt_specialevents) _tt_snowmanagements = list(ttm._tt_snowmanagements) if not _tt_weathers: getLogger(__name__).warning('No weather data for route(%d) at %s' % (route_id, dt.strftime('%Y-%m-%d %H:%M'))) continue extdata = ExtData(ttm, _tt_weathers[0], _tt_incidents, _tt_workzones, _tt_specialevents, _tt_snowmanagements) for fidx, ef in enumerate(operating_conditions): try: ef.check(extdata) except Exception as ex: tb.traceback(ex) continue
def run():
    """ Daily scheduled task: compute travel times, load weather and incident
    data, and tag (categorize) them for every day that still needs processing.
    """
    # NOTE(review): sys.modules keys are fully dotted module paths, so
    # '_01_tt' is effectively never present and the import always runs;
    # the local import is required anyway to bind the names in this scope
    if '_01_tt' not in sys.modules:
        from pyticas_tetres.sched.daily_tasks import _01_tt, _02_load_weather, _03_load_incident, _04_tagging

    periods = []
    # faverolles 1/16/2020 NOTE: always starts at datetime.today
    today = datetime.datetime.today()
    target_day = today - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    last_day_with_tt_data = _find_last_date(today.year)

    if last_day_with_tt_data and last_day_with_tt_data <= target_day:
        # catch up: process every day from the last day having data up to the target day
        periods = period.create_periods(last_day_with_tt_data.date(), target_day.date(),
                                        '00:00:00', '23:59:00', cfg.TT_DATA_INTERVAL)
    else:
        # no backlog info: process just the single target day
        prd = period.create_period(target_day.strftime("%Y-%m-%d 00:00:00"),
                                   target_day.strftime("%Y-%m-%d 23:59:00"),
                                   cfg.TT_DATA_INTERVAL)
        periods.append(prd)

    try:
        # prepend days that previously failed to complete, according to the log
        non_completed_dates_and_routes = _check_logs()
        periods = [_prd for _route_id, _prd in non_completed_dates_and_routes] + periods
    except Exception as ex:
        getLogger(__name__).warning(
            'error occured when checking daily-processing log : %s' % tb.traceback(ex, f_print=False))

    # de-duplicate (set order is arbitrary, so processing order is not guaranteed)
    periods = list(set(periods))

    if len(periods) > N_LIMIT_OF_DAYS_TO_PROCESS:
        getLogger(__name__).warning(
            'too many days to process. please use data loader program to process the long-time periods')
        return

    for prd in periods:
        getLogger(__name__).info('>> running daily task for %s' % prd.get_date_string())
        try:
            _01_tt.run(prd)
            _02_load_weather.run(prd)
            _03_load_incident.run(prd)
            _04_tagging.run(prd)
        except Exception as ex:
            # one failing day must not stop the remaining days
            tb.traceback(ex)
            getLogger(__name__).warning('Exception occured while performing daily task')
def _moe(moe_func, moe_name, **kwargs):
    """ Run a MOE calculation for the posted route/periods and return the result
    workbook base64-encoded in the response.

    :type moe_func: callable  -- (route, list[Period]) -> results
    :type moe_name: str  -- used only in error responses
    :return: success response with base64 xlsx content, or an error response
    """
    est_file = None
    try:
        route_json = request.form.get('route', None)
        periods = request.form.get('periods', None)
        if not route_json or not periods:
            return prot.response_error('Invalid Parameter')

        r = json2route(route_json)
        period_list = []
        for prdinfo in json.loads(periods):
            prd = period.create_period(
                (prdinfo['start_year'], prdinfo['start_month'], prdinfo['start_date'],
                 prdinfo['start_hour'], prdinfo['start_min']),
                (prdinfo['end_year'], prdinfo['end_month'], prdinfo['end_date'],
                 prdinfo['end_hour'], prdinfo['end_min']),
                prdinfo['interval']
            )
            period_list.append(prd)

        tmp_dir = Infra.get_infra().get_path('moe_tmp', create=True)
        uid = str(uuid.uuid4())
        est_file = os.path.join(tmp_dir, '%s.xlsx' % uid)

        res = moe_func(r, period_list)
        write = kwargs.get('write_function', writer.write)
        write(est_file, r, res, **kwargs)

        encoded = None
        with open(est_file, 'rb') as f:
            xlsx_content = f.read()
            encoded = base64.b64encode(xlsx_content)
        if not encoded:
            return prot.response_error('ERROR : %s' % moe_name)
        return prot.response_success(obj=encoded.decode('utf-8'))
    except Exception as ex:
        tb.traceback(ex)
        return prot.response_error('ERROR : %s' % moe_name)
    finally:
        # BUG FIX: the temp xlsx was previously removed only on the success
        # path, leaking a file per failed request; always clean it up
        if est_file and os.path.exists(est_file):
            try:
                os.remove(est_file)
            except OSError:
                pass
def delete_all_for_a_route(self, ttr_id, **kwargs):
    """ Delete every row belonging to the given travel-time route.

    :type ttr_id: int
    :rtype: bool
    """
    print_exception = kwargs.get('print_exception', False)
    try:
        dbModel = self.da_base.dbModel
        # BUG FIX: Table.delete() does not accept `synchronize_session` -- that
        # keyword belongs to Query.delete(). Passing it here raised an
        # ArgumentError/TypeError, so the delete silently never ran.
        stmt = dbModel.__table__.delete().where(dbModel.route_id == ttr_id)
        self.execute(stmt)
        return True
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def delete_range(self, start_time=None, end_time=None, action_types=None, handled=None, target_datatypes=None,
                 **kwargs):
    """ Delete action-log rows matching the given (all optional) filters.

    :param start_time: e.g. 2013-12-04 12:00:00
    :type start_time: Union(str, datetime.datetime)
    :param end_time: e.g. 2013-12-04 12:00:00
    :type end_time: Union(str, datetime.datetime)
    :type action_types: list[str]
    :type handled: bool  -- None means "do not filter on handled"
    :type target_datatypes: list[str]
    :rtype: bool  -- True on success, False when the DELETE raised
    """
    print_exception = kwargs.get('print_exception', False)
    # accept both strings and datetime objects for the time bounds
    if isinstance(start_time, str):
        start_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
    if isinstance(end_time, str):
        end_time = datetime.datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')

    dbModel = model.ActionLog
    session = self.da_base.session
    qry = session.query(dbModel)
    if start_time:
        qry = qry.filter(dbModel.reg_date >= start_time)
    if end_time:
        qry = qry.filter(dbModel.reg_date <= end_time)
    if action_types:
        cond = or_(*[dbModel.action_type == action_type for action_type in action_types])
        qry = qry.filter(cond)
    # BUG FIX: `if handled:` silently dropped an explicit handled=False, making
    # it impossible to target un-handled rows; only skip the filter on None
    if handled is not None:
        qry = qry.filter(dbModel.handled == handled)
    if target_datatypes:
        cond = or_(*[dbModel.target_datatype == target_datatype for target_datatype in target_datatypes])
        qry = qry.filter(cond)
    try:
        qry.delete(synchronize_session=False)
        return True
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def get_by_station(self, year, station_id, **kwargs):
    """ Return the normal-function info for a station in the given winter season.

    :type year: int
    :rtype: itypes.NormalFunctionInfo  -- None when missing or on error
    """
    try:
        md = (self.da_base.session.query(model.NormalFunction)
              .filter(model.NormalFunction.station_id == station_id,
                      model.NormalFunction._winterseason.has(year=year))
              .one())
        return self.da_base.to_info(md)
    except NoResultFound:
        # no normal function registered for this station/season
        return None
    except Exception as ex:
        from pyticas.tool import tb
        tb.traceback(ex)
        return None
def delete(self, pkey, **kwargs):
    """ Delete the row whose primary key equals `pkey`.

    :type pkey: int
    :return: True on success, False when the DELETE raised
    """
    show_error = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    try:
        pk_column = getattr(self.dbModel, self.primary_key)
        (self.session.query(self.dbModel)
             .filter(pk_column == pkey)
             .delete(synchronize_session=False))
        return True
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return False
def delete_all_for_a_route(self, ttri_id, **kwargs):
    """ Delete every row referencing the given travel-time route id.

    :type ttri_id: int
    :rtype: bool
    """
    show_error = kwargs.get('print_exception', False)
    try:
        model_cls = self.da_base.dbModel
        (self.da_base.session.query(model_cls)
             .filter(model_cls.route_id == ttri_id)
             .delete(synchronize_session=False))
        return True
    except Exception as ex:
        if show_error:
            tb.traceback(ex)
        return False
def run(target_date):
    """ Calculate time-of-day reliabilities for every travel-time route.

    :type target_date: datetime.datetime
    """
    da = TTRouteDataAccess()
    routes = da.list()
    da.close_session()

    for ttri in routes:
        try:
            traveltime_info.calculate_TOD_reliabilities(ttri.id, target_date)
        except Exception as ex:
            # one failing route must not stop the others
            tb.traceback(ex)
            getLogger(__name__).warning('Fail to calculate TOD reliabilities for route=%d' % ttri.id)
def _handler_ttroute(da, item, action_log): """ :type da: pyticas_tetres.da.route.TTRouteDataAccess :type item: pyticas_tetres.ttypes.TTRouteInfo :type action_log: pyticas_tetres.ttypes.ActionLogInfo """ # 1. calculate travel time # 2. categorize (all) try: from pyticas_tetres.util.traffic_file_checker import has_traffic_files start = datetime.date(cfg.DATA_ARCHIVE_START_YEAR, 1, 1) last_day = datetime.date.today() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS) start_date_str, end_date_str = start.strftime('%Y-%m-%d'), last_day.strftime('%Y-%m-%d') if not has_traffic_files(start_date_str, end_date_str): return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str) except Exception as e: getLogger(__name__).warning( 'Exception occured while checking if traffic files exist during handling travel time routes. Error: {}'.format( e)) daily_periods = _get_all_daily_periods() cnt = 0 for prd in daily_periods: try: inserted_ids = traveltime.calculate_a_route(prd, item) if inserted_ids: categorization.categorize(item, prd) cnt += len(inserted_ids) except Exception as ex: getLogger(__name__).warning( 'Exception occured when handling route changes : %s' % tb.traceback(ex, f_print=False)) return cnt > 0
def _handler_ttroute(da, item, action_log):
    """ Calculate travel times and categorize them for every daily period of a
    changed/added travel-time route.

    :type da: pyticas_tetres.da.route.TTRouteDataAccess
    :type item: pyticas_tetres.ttypes.TTRouteInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    :return: True when at least one travel-time record was inserted
    """
    # 1. calculate travel time
    # 2. categorize (all)
    # faverolles 1/16/2020 NOTE: always starts at datetime.today
    daily_periods = _get_all_daily_periods()
    cnt = 0
    try:
        for prd in daily_periods:
            inserted_ids = traveltime.calculate_a_route(prd, item)
            if inserted_ids:
                categorization.categorize(item, prd)
                # BUG FIX: the old `inserted_ids is not False` guard crashed with
                # TypeError (len(None)) when calculate_a_route() returned None;
                # count only under the same truthiness check used above
                cnt += len(inserted_ids)
        return cnt > 0
    except Exception as ex:
        getLogger(__name__).warning(
            'Exception occured when handling route changes : %s' % tb.traceback(ex, f_print=False))
        return False
def estimate(num, tsi, edata):
    """ Run the NCRT determination pipeline for a single target station and
    return the (possibly updated) ESTData.

    :type num: int
    :type tsi: pyticas_ncrtes.itypes.TargetStationInfo
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :rtype: pyticas_ncrtes.core.etypes.ESTData
    """
    if not _check_edata(tsi, edata):
        # input data is not usable; return it unchanged
        return edata

    logger = getLogger(__name__)
    logger.info('>>> Determine NCRT Process for Target Station %s' % (edata.target_station.station_id))

    try:
        _pre_process_before_determination(edata)
        has_recovered_region = wnffs_finder.find(edata)
        if not has_recovered_region:
            # no recovered region found: chart what we have and return the
            # wn_uk-based result instead of running the NCRT finder
            _chart(edata, edata.ratios, edata.lsratios, edata.sratios)
            return wn_uk.make(edata)
        ncrt_finder.find(edata)
        _chart(edata, edata.ratios, edata.lsratios, edata.sratios)
    except Exception as ex:
        # pipeline failures are logged; the partially-filled edata is still returned
        logger.error(tb.traceback(ex, f_print=False))

    logger.info('<<< End of NCRT Determination Process for Target Station %s' % (edata.target_station.station_id))
    return edata
def _handler_specialevent(da, item, action_log):
    """ Re-categorize travel-time data for the days covered by a special event.

    :type da: pyticas_tetres.da.specialevent.SpecialEventDataAccess
    :type item: pyticas_tetres.ttypes.SpecialEventInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    # widen the event window by the arrival/departure margins
    start_dt = item.str2datetime(item.start_time) - datetime.timedelta(minutes=SE_ARRIVAL_WINDOW)
    end_dt = item.str2datetime(item.end_time) + datetime.timedelta(minutes=SE_DEPARTURE_WINDOW2)
    periods = _get_daily_periods(start_dt, end_dt)
    try:
        for prd in periods:
            _categorize_for_a_day(prd, categorization.specialevent, specialevents=[item])
        return True
    except Exception as ex:
        getLogger(__name__).warning(
            'Exception occured when handling specialevent changes : %s' % tb.traceback(ex, f_print=False))
        return False
def insert(self, data, **kwargs):
    """ insert data

    :param data: dataInfoType
    :return: dbModel data on success, False when session.add() raised
    """
    print_exception = kwargs.get('print_exception', DEFAULT_PRINT_EXCEPTION)
    model_data = self.to_model(data)
    try:
        self.session.add(model_data)
        # NOTE(review): model_data.id is typically not populated until the
        # session is flushed/committed, so data.id is likely None at this
        # point -- verify whether callers depend on an id being set here
        data.id = model_data.id
        return model_data
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def delete_if_route_is_deleted(self, **kwargs):
    """ Delete rows whose referenced TTRoute row no longer exists (orphan cleanup).

    :rtype: bool
    """
    print_exception = kwargs.get('print_exception', False)
    sess = self.get_session()
    dbModel = self.da_base.dbModel
    try:
        # query pairing this table's rows with still-existing TTRoute rows
        # NOTE(review): `ex` is reused below as the exception name -- shadowing
        ex = sess.query(model.TTRoute).filter(dbModel.route_id == model.TTRoute.id)
        if ex.count():
            # delete rows for which no matching TTRoute exists (NOT EXISTS)
            sess.query(dbModel).filter(~ex.exists()).delete(synchronize_session=False)
        return True
    except Exception as ex:
        if print_exception:
            tb.traceback(ex)
        return False
def tetres_admin_xlsx_content_from_route():
    """ Flask handler: render the posted route as an xlsx file and return its
    content base64-encoded in the response.
    """
    route_content = request.form.get('route')
    r = json2route(route_content)
    try:
        tmp_dir = Infra.get_infra().get_path('tmp', create=True)
        filepath = os.path.join(tmp_dir, '%s.xlsx' % str(uuid.uuid4()))

        # build a default route configuration when the route does not carry one
        if not r.cfg:
            r.cfg = route_config.create_route_config(r.rnodes)

        rc.writer.write(filepath, r)
        with open(filepath, 'rb') as f:
            encoded = base64.b64encode(f.read())
        return prot.response_success(obj=encoded.decode('utf-8'))
    except Exception as ex:
        tb.traceback(ex)
        return prot.response_fail('fail to write route')