Example 1
    def insert_all(self):

        if self.requires_auth and not admin_auth.check_auth():
            return admin_auth.authenticate()

        json_str = request.form.get('data')
        json_str_list = python_json.loads(json_str)

        # a db session is created here and shared with all other database access modules
        da_instance = self.da_class()

        obj_list = []
        dict_list = []
        for idx, json_data in enumerate(json_str_list):
            obj = self.json2obj(json_data)
            dict_list.append(obj.get_dict())
            obj_list.append(obj)

        inserted_ids = da_instance.bulk_insert(dict_list)
        if not inserted_ids or not da_instance.commit():
            da_instance.close_session()
            return prot.response_fail("Fail to add data into database")

        tablename = da_instance.get_tablename()
        for obj_id in inserted_ids:
            insertedObj = da_instance.get_model_by_id(obj_id)
            self.add_actionlog(ActionLogDataAccess.INSERT,
                               tablename,
                               obj_id,
                               ActionLogDataAccess.data_description(self.datatype, insertedObj),
                               handled=False)

        da_instance.close_session()
        return prot.response_success()
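The comment inside `insert_all` about sharing the db session refers to a pattern made explicit in Example 7 below: an ActionLogDataAccess can be built on top of an existing session instead of opening its own. A minimal sketch of that pattern, using names that appear in these examples (not a verbatim excerpt from the project):

    # sketch of the shared-session pattern assumed by these examples
    wzDA = WorkZoneDataAccess()                                # opens a new DB session
    logDA = ActionLogDataAccess(session=wzDA.get_session())    # reuses the same session
    # writes made through either object now belong to one transaction
    wzDA.commit()
    wzDA.close_session()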
Example 2
    def tetres_snowroute_update():
        id = request.form.get('id')
        route_json = request.form.get('data')

        snowRouteDA = SnowRouteDataAccess()

        ex = snowRouteDA.get_by_id(id)
        if not ex:
            snowRouteDA.close_session()
            return prot.response_invalid_request()

        info = json2snri(route_json)
        is_updated = snowRouteDA.update(
            id, {
                'name': info.name,
                'description': info.description,
                'prj_id': info.prj_id
            })
        if not is_updated:
            return prot.response_fail('fail to update database')

        if not snowRouteDA.commit():
            return prot.response_fail('fail to update database (commit fail)')

        tetresApi.add_actionlog(ActionLogDataAccess.INSERT,
                                snowRouteDA.get_tablename(),
                                id,
                                ActionLogDataAccess.data_description(
                                    ActionLogDataAccess.DT_SNOWROUTE, info),
                                handled=True,
                                dbsession=snowRouteDA.get_session())

        return prot.response_success(id)
Example 3
    def insert(self):

        if self.requires_auth and not admin_auth.check_auth():
            return admin_auth.authenticate()

        # a db session is created here and shared with all other database access modules
        da_instance = self.da_class()

        json_data = request.form.get('data')
        obj = self.json2obj(json_data)

        # obj.id = da_instance.da_base.get_next_pk()
        model_data = da_instance.insert(obj)

        if model_data is False or not da_instance.commit():
            da_instance.close_session()
            return prot.response_fail("Fail to insert data")

        inserted_id = model_data.id

        self.add_actionlog(ActionLogDataAccess.INSERT,
                           da_instance.get_tablename(),
                           model_data.id,
                           ActionLogDataAccess.data_description(self.datatype, model_data),
                           handled=False,
                           dbsession=da_instance.get_session())

        da_instance.close_session()

        return prot.response_success(obj=inserted_id)
Example 4
    def tetres_actionlog_list():
        limit = request.form.get('limit', 100)

        da = ActionLogDataAccess()
        data_list = da.list(limit=limit, order_by=('id', 'desc'))
        da.close_session()

        da_config = ConfigDataAccess()
        running_id_item = da_config.get_by_name(
            cfg.OPT_NAME_ACTIONLOG_ID_IN_PROCESSING)
        da_config.close_session()

        if running_id_item and running_id_item.content:
            running_id = int(running_id_item.content)
            for item in data_list:
                if item.id == running_id:
                    item.status = ActionLogDataAccess.STATUS_RUNNING

        return prot.response_success({'list': data_list})
Example 5
def _initialize_actionlog_status():
    """ Delete running action log id from `config` table and delete

    :return:
    :rtype:
    """
    da_config = ConfigDataAccess()
    da_config.insert_or_update(cfg.OPT_NAME_ACTIONLOG_ID_IN_PROCESSING, '')
    da_config.commit()
    da_config.close_session()

    now = datetime.datetime.now()

    da_actionlog = ActionLogDataAccess()
    running_items = da_actionlog.list(
        status=ActionLogDataAccess.STATUS_RUNNING)
    for item in running_items:
        da_actionlog.update(
            item.id, {
                'status': ActionLogDataAccess.STATUS_STOPPED,
                'status_updated_date': now
            })
    da_actionlog.commit()
    da_actionlog.close_session()
Example 6
    def _wz_insert_from_wz(wzi):
        """
        :type wzi: WorkZoneInfo
        """
        if not isinstance(wzi,
                          WorkZoneInfo) or not wzi.route1 or not wzi.route2:
            return prot.response_invalid_request()

        wzDA = WorkZoneDataAccess()

        wzi.route1.name = 'route1 - %s' % wzi.route1.rnodes[0].corridor.name
        wzi.route1.desc = ''
        wzi.route2.name = 'route2 - %s' % wzi.route2.rnodes[0].corridor.name
        wzi.route2.desc = ''
        # wzi.id = wzDA.da_base.get_next_pk()

        wzm = wzDA.insert(wzi)
        if wzm is False or not wzDA.commit():
            return prot.response_fail('fail to save workzone route data (1)')

        wzi.id = wzm.id

        inserted = _wz_insert_feature(wzi)

        if inserted:

            inserted_id = wzi.id

            tetres_api.add_actionlog(ActionLogDataAccess.INSERT,
                                     wzDA.get_tablename(),
                                     inserted_id,
                                     ActionLogDataAccess.data_description(
                                         ActionLogDataAccess.DT_WORKZONE, wzi),
                                     handled=False,
                                     dbsession=wzDA.get_session())

            wzDA.close_session()
            return prot.response_success(obj=inserted_id)
        else:
            # if failed to add features
            wzDA.delete(wzm.id)
            wzDA.close_session()
            return prot.response_fail('fail to save workzone route data (2)')
Example 7
def add(action_type,
        datatype,
        tablename,
        target_id,
        data_desc,
        handled=False,
        dbsession=None,
        commit=True):
    """

    :type action_type: str
    :type datatype: str
    :type tablename: str
    :type target_id: int
    :type data_desc: str
    :type handled: bool
    :type dbsession: sqlalchemy.orm.Session
    :type commit: bool
    :rtype: Union[pyticas_tetres.db.model.ActionLog, bool]
    """
    if dbsession:
        da_actionlog = ActionLogDataAccess(session=dbsession)
    else:
        da_actionlog = ActionLogDataAccess()

    a = ActionLogInfo()
    a.action_type = action_type
    a.target_datatype = datatype
    a.target_table = tablename
    a.target_id = int(target_id) if target_id else None
    a.data_desc = data_desc
    a.handled = handled
    a.user_ip = request.remote_addr
    a.handled_date = None if not handled else datetime.datetime.now().strftime(
        '%Y-%m-%d %H:%M:%S')

    # print('Action Log ====================')
    # import pprint
    # pprint.pprint(a.get_dict())
    # print('================================')

    inserted = da_actionlog.insert(a, print_exception=True)

    if commit or not dbsession:
        da_actionlog.commit()
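Given the parameter types spelled out in the docstring above, a hedged usage sketch of `add` might look like the following; `snowRouteDA`, `info`, and the ActionLogDataAccess constants are assumed from the other examples here, not defined in this snippet:

    # hypothetical caller of add(); assumes a Flask request context is active
    snowRouteDA = SnowRouteDataAccess()
    snrm = snowRouteDA.insert(info)        # `info` is an already-built route info object
    if snrm is not False and snowRouteDA.commit():
        add(ActionLogDataAccess.INSERT,
            ActionLogDataAccess.DT_SNOWROUTE,
            snowRouteDA.get_tablename(),
            snrm.id,
            ActionLogDataAccess.data_description(ActionLogDataAccess.DT_SNOWROUTE, snrm),
            handled=False,
            dbsession=snowRouteDA.get_session())   # the log rides on the caller's session
    snowRouteDA.close_session()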
Example 8
    def tetres_snowroute_insert():
        route_json = request.form.get('data')
        info = json2snri(route_json)

        snowRouteDA = SnowRouteDataAccess()

        ex = snowRouteDA.get_by_name(info.name)
        if ex:
            snowRouteDA.close_session()
            return prot.response_fail('already exist')

        route2 = route.opposite_route(info.route1)
        cfg2 = info.route1.cfg.clone()
        rc.route_config.reverse(cfg2)
        route2.cfg = cfg2
        info.route2 = route2
        info.route1.name = 'route1 - %s' % info.route1.rnodes[0].corridor.name
        info.route1.desc = ''
        info.route2.name = 'route2 - %s' % info.route2.rnodes[0].corridor.name
        info.route2.desc = ''

        snrm = snowRouteDA.insert(info)
        if not snrm:
            snowRouteDA.close_session()
            return prot.response_fail('Fail to insert data')

        snowRouteDA.commit()

        tetresApi.add_actionlog(ActionLogDataAccess.INSERT,
                                snowRouteDA.get_tablename(),
                                snrm.id,
                                ActionLogDataAccess.data_description(
                                    ActionLogDataAccess.DT_SNOWROUTE, snrm),
                                handled=False,
                                dbsession=snowRouteDA.get_session())

        inserted_id = snrm.id

        snowRouteDA.close_session()

        return prot.response_success(obj=inserted_id)
Example 9
    def update(self):
        if self.requires_auth and not admin_auth.check_auth():
            return admin_auth.authenticate()

        id = request.form.get('id')
        json_data = request.form.get('data')

        # a db session is created here and shared with all other database access modules
        da_instance = self.da_class()

        obj = self.json2obj(json_data)
        exobj = da_instance.get_by_id(id)
        if not exobj:
            da_instance.close_session()
            return prot.response_fail("item does not exist (id={})".format(id))

        is_updated = da_instance.update(id, obj.get_dict())
        if not is_updated:
            da_instance.close_session()
            return prot.response_fail("fail to update (id={})".format(id))

        if not da_instance.commit():
            return prot.response_fail("fail to update (id={})".format(id))

        callback = getattr(self, 'on_update_success', None)
        if callback:
            callback(obj, da_instance.get_session())

        exobj = da_instance.get_by_id(id)
        self.add_actionlog(ActionLogDataAccess.UPDATE,
                           da_instance.get_tablename(),
                           id,
                           ActionLogDataAccess.data_description(self.datatype, exobj),
                           self._is_handled(exobj, obj),
                           dbsession=da_instance.get_session())

        da_instance.close_session()

        return prot.response_success(obj=id)
Example 10
    def delete(self):
        if self.requires_auth and not admin_auth.check_auth():
            return admin_auth.authenticate()

        json_str = request.form.get('ids')
        ids = python_json.loads(json_str)

        # a db session is created here and shared with all other database access modules
        da_instance = self.da_class()

        deleted_objs = [da_instance.get_by_id(id) for id in ids]
        deleted_objs = [v for v in deleted_objs if v]
        ex_ids = [v.id for v in deleted_objs if v]
        is_deleted = da_instance.delete_items(ex_ids, print_exception=True)
        if not is_deleted:
            da_instance.close_session()
            return prot.response_fail("fail to delete items")

        if not da_instance.commit():
            return prot.response_fail("fail to delete items")

        callback = getattr(self, 'on_delete_success', None)
        if callback:
            callback(deleted_objs, da_instance.get_session())

        for idx, obj_id in enumerate(ex_ids):
            deletedObj = deleted_objs[idx]
            self.add_actionlog(ActionLogDataAccess.DELETE,
                               da_instance.get_tablename(),
                               obj_id,
                               ActionLogDataAccess.data_description(self.datatype, deletedObj),
                               handled=True,
                               dbsession=da_instance.get_session())

        da_instance.close_session()

        return prot.response_success(ids)
Example 11
    def tetres_workzone_update():
        wz_id = request.form.get('id')
        wz_json = request.form.get('data')

        wzDA = WorkZoneDataAccess()

        exWZObj = wzDA.get_by_id(wz_id)
        if not exWZObj:
            wzDA.close_session()
            return prot.response_invalid_request()

        info = json2wzi(wz_json)
        route2 = route.opposite_route(info.route1)
        cfg2 = info.route1.cfg.clone()
        rc.route_config.reverse(cfg2)
        route2.cfg = cfg2
        info.route2 = route2
        info.route1.name = 'route1 - %s' % info.route1.rnodes[0].corridor.name
        info.route1.desc = ''
        info.route2.name = 'route2 - %s' % info.route2.rnodes[0].corridor.name
        info.route2.desc = ''

        if not isinstance(info.route2, Route):
            wzDA.close_session()
            return prot.response_fail(
                'fail to load_data route configuration file')

        wzgDA = WZGroupDataAccess(session=wzDA.get_session())

        is_updated = wzDA.update(wz_id, info.get_dict())
        if not is_updated or not wzDA.commit():
            wzDA.rollback()
            wzDA.close_session()
            return prot.response_fail('fail to update database (1)')

        is_updated = wzgDA.update_years(exWZObj.wz_group_id)
        if not is_updated or not wzgDA.commit():
            wzgDA.rollback()
            wzgDA.close_session()
            return prot.response_fail('fail to update database (2)')

        updatedWZObj = wzDA.get_by_id(wz_id)

        inserted = _wz_insert_feature(updatedWZObj)
        if not inserted:
            wzDA.close_session()
            return prot.response_fail('fail to update database (3)')

        # commit here
        # if not wzDA.commit():
        #     return prot.response_fail('fail to update database (4)')

        tetres_api.add_actionlog(ActionLogDataAccess.UPDATE,
                                 wzDA.get_tablename(),
                                 wz_id,
                                 ActionLogDataAccess.data_description(
                                     ActionLogDataAccess.DT_WORKZONE,
                                     updatedWZObj),
                                 handled=_should_be_set_as_handled(
                                     exWZObj, updatedWZObj),
                                 dbsession=wzDA.get_session())

        wzDA.close_session()

        return prot.response_success(wz_id)
Example 12
def run():
    # faverolles 1/16/2020 NOTE: Fired when admin client issues
    #   /tetres/adm/syscfg/update
    """
    :rtype:
    """
    logger = getLogger(__name__)

    OPT_NAME = cfg.OPT_NAME_ACTIONLOG_ID_IN_PROCESSING
    da_config = ConfigDataAccess()
    running_config = da_config.insert_or_update(OPT_NAME, '')
    if running_config is False or not da_config.commit():
        logger.warning('cannot update "%s" option' % OPT_NAME)
        return
    running_config_id = running_config.id

    da_actionlog = ActionLogDataAccess()
    action_logs = da_actionlog.list(start_time=None, handled=False)

    handlers = {
        ActionLogDataAccess.DT_TTROUTE: _handler_ttroute,
        ActionLogDataAccess.DT_INCIDENT: _handler_incident,
        ActionLogDataAccess.DT_WORKZONE: _handler_workzone,
        ActionLogDataAccess.DT_SPECIALEVENT: _handler_specialevent,
        ActionLogDataAccess.DT_SNOWMGMT: _handler_snowmanagement,
        ActionLogDataAccess.DT_SNOWROUTE: _handler_snowroute,
        ActionLogDataAccess.DT_SNOWEVENT: _handler_snowevent,
        ActionLogDataAccess.DT_SYSTEMCONFIG: _handler_systemconfig,
    }

    logger.debug('>>> Doing post-processing of admin actions')

    handled = []

    for action_log in action_logs:

        logger.debug('  - processing for %s ' % action_log)

        key = '%s - %s' % (action_log.target_table, action_log.target_id)

        da_actionlog.update(action_log.id, {'status': ActionLogDataAccess.STATUS_RUNNING,
                                            'status_updated_date': datetime.datetime.now()})

        da_config.update(running_config_id, {'content': action_log.id})
        if not da_config.commit():
            da_config.rollback()
            logger.warning('cannot update "%s" option' % OPT_NAME)

        # skip if the item is already handled (user can modify the same data several times)
        if key in handled and action_log.action_type in [ActionLogDataAccess.INSERT, ActionLogDataAccess.UPDATE]:
            logger.debug('    : skip : already processed')
            da_actionlog.update(action_log.id, {'handled': True,
                                                'handled_date': datetime.datetime.now(),
                                                'status': ActionLogDataAccess.STATUS_DONE,
                                                'status_updated_date': datetime.datetime.now()
                                                })
            da_actionlog.commit()

            da_config.update(running_config_id, {'content': ''})
            da_config.commit()
            continue

        # find an instance of the data access module
        da = tablefinder.get_da_instance_by_tablename(action_log.target_table)

        if not da:
            da_actionlog.update(action_log.id, {'status': ActionLogDataAccess.STATUS_FAIL,
                                                'reason': 'Database Access module is not found',
                                                'status_updated_date': datetime.datetime.now()})
            da_actionlog.commit()

            da_config.update(running_config_id, {'content': ''})
            da_config.commit()

            logger.warning('    : skip : cannot find database access module (tablename=%s)' % action_log.target_table)
            continue

        # retrieve target item
        item = da.get_data_by_id(action_log.target_id)

        # if item is deleted...
        if not item:
            logger.debug('    : skip : item is not found')
            da_actionlog.update(action_log.id, {'handled': True,
                                                'handled_date': datetime.datetime.now(),
                                                'status': ActionLogDataAccess.STATUS_DONE,
                                                'reason': 'target data is not found',
                                                'status_updated_date': datetime.datetime.now()
                                                })
            da_actionlog.commit()

            da_config.update(running_config_id, {'content': ''})
            da_config.commit()

            continue

        # proceed by data type
        handler = handlers.get(action_log.target_datatype)
        if not handler:
            da_actionlog.update(action_log.id, {'status': ActionLogDataAccess.STATUS_FAIL,
                                                'reason': 'handler for the data not found',
                                                'status_updated_date': datetime.datetime.now()})
            da_actionlog.commit()

            da_config.update(running_config_id, {'content': ''})
            da_config.commit()

            logger.debug('    : skip : handler is not found')
            continue
        try:
            reason = ''
            is_handled = handler(da, item, action_log)
            if isinstance(is_handled, tuple):
                is_handled, reason = is_handled[0], is_handled[1]
        except Exception as ex:
            tb.traceback(ex)
            da_actionlog.update(action_log.id, {'status': ActionLogDataAccess.STATUS_FAIL,
                                                'reason': 'exception occurred while processing data',
                                                'status_updated_date': datetime.datetime.now()})
            da_actionlog.commit()

            da_config.update(running_config_id, {'content': ''})
            da_config.commit()

            continue

        if is_handled:
            da_actionlog.update(action_log.id, {'handled': True,
                                                'handled_date': datetime.datetime.now(),
                                                'status': ActionLogDataAccess.STATUS_DONE,
                                                'status_updated_date': datetime.datetime.now()
                                                })
            da_actionlog.commit()
            # `key` is "<table> - <id>", so compare the action type, not the key
            if action_log.action_type != ActionLogDataAccess.DELETE:
                handled.append(key)
        else:
            da_actionlog.update(action_log.id, {'status': ActionLogDataAccess.STATUS_FAIL,
                                                'reason': reason if reason else 'target data is not handled',
                                                'status_updated_date': datetime.datetime.now()
                                                })
            da_actionlog.commit()

        da_config.update(running_config_id, {'content': ''})
        da_config.commit()

        if not da_actionlog.commit():
            da_actionlog.rollback()
            da_actionlog.close_session()
            logger.debug('  - fail to update %s ' % action_log)
            return

        logger.debug('     : end of processing for %s ' % action_log)

    da_actionlog.close_session()
    da_config.close_session()

    logger.debug('<<< End of post-processing of admin actions')
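Each entry in the `handlers` dict above is called as `handler(da, item, action_log)` and is expected to return either a bool or a `(bool, reason)` tuple, as the try-block shows. A minimal sketch of a handler satisfying that contract (the name and body are hypothetical, not one of the real handlers such as `_handler_ttroute`):

    def _handler_example(da, item, action_log):
        # `da` is the data-access module for the target table,
        # `item` is the target record, `action_log` is the log entry being processed
        try:
            # datatype-specific recalculation would go here
            return True
        except Exception as ex:
            return False, 'post-processing failed: %s' % ex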
Example 13
def put_task_to_actionlog(prev_syscfg):
    """

    :type prev_syscfg: pyticas_tetres.ttypes.SystemConfigInfo
    """
    da_config = ConfigDataAccess()
    syscfg = da_config.get_by_name(cfg.OPT_NAME_SYSCONFIG)
    if not syscfg:
        getLogger(__name__).warning('Cannot find the updated system configuration from `config` table')
        da_config.close_session()
        return

    should_run_actionlog_handler = False
    da_actionlog = ActionLogDataAccess()

    is_data_archive_start_year_extended = False
    is_data_archive_start_year_shrinked = False

    if cfg.DATA_ARCHIVE_START_YEAR != prev_syscfg.data_archive_start_year:
        if cfg.DATA_ARCHIVE_START_YEAR < prev_syscfg.data_archive_start_year:
            is_data_archive_start_year_extended = True
        elif cfg.DATA_ARCHIVE_START_YEAR > prev_syscfg.data_archive_start_year:
            is_data_archive_start_year_shrinked = True

    # cancel already-posted action logs
    if is_data_archive_start_year_shrinked or is_data_archive_start_year_extended:
        ex_logs = da_actionlog.search(
            searches=[('target_datatype', ActionLogDataAccess.DT_SYSTEMCONFIG), ('handled', False)],
            op='and',
            cond='match',
            as_model=True)
        for a_log in ex_logs:
            a_log.handled = True
            a_log.handled_date = datetime.datetime.now()
            a_log.status = 'Canceled due to another action'
            a_log.status_updated_date = datetime.datetime.now()

        da_actionlog.commit()

    if is_data_archive_start_year_extended:
        should_run_actionlog_handler = True
        actionlog.add(ActionLogDataAccess.UPDATE,
                      ActionLogDataAccess.DT_SYSTEMCONFIG,
                      Config.__tablename__,
                      syscfg.id,
                      'DATA_ARCHIVE_START_YEAR_EXTENDED: %d -> %d' % (
                          prev_syscfg.data_archive_start_year, cfg.DATA_ARCHIVE_START_YEAR),
                      handled=False)

    elif is_data_archive_start_year_shrinked:
        # add log for re-calculation
        should_run_actionlog_handler = True
        actionlog.add(ActionLogDataAccess.UPDATE,
                      ActionLogDataAccess.DT_SYSTEMCONFIG,
                      Config.__tablename__,
                      syscfg.id,
                      'DATA_ARCHIVE_START_YEAR_SHRINKED: %d -> %d' % (
                          prev_syscfg.data_archive_start_year, cfg.DATA_ARCHIVE_START_YEAR),
                      handled=False)

    if (cfg.INCIDENT_DOWNSTREAM_DISTANCE_LIMIT != prev_syscfg.incident_downstream_distance_limit
            or cfg.INCIDENT_UPSTREAM_DISTANCE_LIMIT != prev_syscfg.incident_upstream_distance_limit):
        should_run_actionlog_handler = True
        actionlog.add(ActionLogDataAccess.UPDATE,
                      ActionLogDataAccess.DT_SYSTEMCONFIG,
                      Config.__tablename__,
                      syscfg.id,
                      ActionLogDataAccess.DT_INCIDENT,
                      handled=False)

    if (cfg.WZ_DOWNSTREAM_DISTANCE_LIMIT != prev_syscfg.workzone_downstream_distance_limit
            or cfg.WZ_UPSTREAM_DISTANCE_LIMIT != prev_syscfg.workzone_upstream_distance_limit):
        should_run_actionlog_handler = True
        actionlog.add(ActionLogDataAccess.UPDATE,
                      ActionLogDataAccess.DT_SYSTEMCONFIG,
                      Config.__tablename__,
                      syscfg.id,
                      ActionLogDataAccess.DT_WORKZONE,
                      handled=False)

    if (cfg.SE_ARRIVAL_WINDOW != prev_syscfg.specialevent_arrival_window
            or cfg.SE_DEPARTURE_WINDOW1 != prev_syscfg.specialevent_departure_window1
            or cfg.SE_DEPARTURE_WINDOW2 != prev_syscfg.specialevent_departure_window2):
        should_run_actionlog_handler = True
        actionlog.add(ActionLogDataAccess.UPDATE,
                      ActionLogDataAccess.DT_SYSTEMCONFIG,
                      Config.__tablename__,
                      syscfg.id,
                      ActionLogDataAccess.DT_SPECIALEVENT,
                      handled=False)

    # restart scheduler
    if (cfg.DAILY_JOB_START_TIME != prev_syscfg.daily_job_start_time
            or cfg.DAILY_JOB_OFFSET_DAYS != prev_syscfg.daily_job_offset_days
            or cfg.WEEKLY_JOB_START_WEEKDAY != prev_syscfg.weekly_job_start_day
            or cfg.WEEKLY_JOB_START_TIME != prev_syscfg.weekly_job_start_time
            or cfg.MONTHLY_JOB_START_DAY != prev_syscfg.monthly_job_start_date
            or cfg.MONTHLY_JOB_START_TIME != prev_syscfg.monthly_job_start_time):
        scheduler.restart()

        if cfg.DAILY_JOB_START_TIME != prev_syscfg.daily_job_start_time:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'DAILY_JOB_START_TIME is updated : %s -> %s' % (
                          prev_syscfg.daily_job_start_time, cfg.DAILY_JOB_START_TIME),
                          handled=True)

        if cfg.DAILY_JOB_OFFSET_DAYS != prev_syscfg.daily_job_offset_days:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'DAILY_JOB_OFFSET_DAYS is updated: %s -> %s' % (
                          prev_syscfg.daily_job_offset_days, cfg.DAILY_JOB_OFFSET_DAYS),
                          handled=True)

        if cfg.WEEKLY_JOB_START_WEEKDAY != prev_syscfg.weekly_job_start_day:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'WEEKLY_JOB_START_WEEKDAY is updated: %s -> %s' % (
                          prev_syscfg.weekly_job_start_day, cfg.WEEKLY_JOB_START_WEEKDAY),
                          handled=True)

        if cfg.WEEKLY_JOB_START_TIME != prev_syscfg.weekly_job_start_time:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'WEEKLY_JOB_START_TIME is updated: %s -> %s' % (
                          prev_syscfg.weekly_job_start_time, cfg.WEEKLY_JOB_START_TIME),
                          handled=True)

        if cfg.MONTHLY_JOB_START_DAY != prev_syscfg.monthly_job_start_date:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'MONTHLY_JOB_START_DAY is updated: %s -> %s' % (
                          prev_syscfg.monthly_job_start_date, cfg.MONTHLY_JOB_START_DAY),
                          handled=True)

        if cfg.MONTHLY_JOB_START_TIME != prev_syscfg.monthly_job_start_time:
            actionlog.add(ActionLogDataAccess.UPDATE,
                          ActionLogDataAccess.DT_SYSTEMCONFIG,
                          Config.__tablename__,
                          syscfg.id,
                          'MONTHLY_JOB_START_TIME is updated: %s -> %s' % (
                          prev_syscfg.monthly_job_start_time, cfg.MONTHLY_JOB_START_TIME),
                          handled=True)

    if not should_run_actionlog_handler:
        unhandled = da_actionlog.list(target_datatypes=[ActionLogDataAccess.DT_SYSTEMCONFIG], handled=False)
        if unhandled:
            should_run_actionlog_handler = True

    da_actionlog.close_session()
    da_config.close_session()

    # add actionlog handler to the task queue in the worker process
    if should_run_actionlog_handler:
        getLogger(__name__).debug('System configurations are updated and the handler process is posted')
        worker.add_task(actionlog_proc.run)
    else:
        getLogger(__name__).debug('System configurations are updated and the handler process is NOT posted')
Example 14
        print("Missing traffic files for the given time range.")
        print(
            "Please check if you have put the traffic files in the proper directory structure."
        )
        print("Failed to calculate moe for the given time range.")
        exit(1)

    print('')
    print('!! Data during the given time period will be deleted.')
    res = input('!! Do you want to proceed with the data loading process? [N/y] : ')
    if res.lower() not in ['y', 'ye', 'yes']:
        print('\nAborted!')
        exit(1)

    filename = '_initial_data_maker.log'
    da_actionlog = ActionLogDataAccess()
    action_logs = da_actionlog.list(handled=False,
                                    action_types=["insert"],
                                    target_datatypes=["tt_route"])
    route_ids = [int(action_log.target_id) for action_log in action_logs]
    with open(filename, 'w') as f:
        f.write('started at ' +
                datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n')

    from pyticas_tetres.systasks import initial_data_maker

    try:
        if route_ids:
            initial_data_maker._calculate_tt_and_categorize(
                sdate,
                edate,