def run_topics_rules(
        topic_name: Optional[str] = None,
        frequency: Optional[MonitorRuleStatisticalInterval] = None,
        process_date: Optional[str] = None,
        tenant_id: Optional[TenantId] = None,
        principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> None:
    principal_service = ask_principal_service(principal_service, tenant_id)

    if is_not_blank(topic_name):
        schema = get_topic_service(principal_service).find_schema_by_name(
            topic_name, principal_service.get_tenant_id())
        if schema is None:
            raise_404(f'Topic[name={topic_name}] not found.')
        topic_id = schema.get_topic().topicId
    else:
        topic_id = None

    if is_not_blank(process_date):
        parsed, parsed_date = is_date(process_date, ask_all_date_formats())
        if not parsed:
            raise_400(f'Given process date[{process_date}] cannot be parsed.')
        process_date = parsed_date
    else:
        process_date = get_current_time_in_seconds()

    process_date = truncate_time(process_date)
    now = truncate_time(get_current_time_in_seconds())
    if process_date.year > now.year:
        raise_400(f'Given process date[{process_date}] cannot be in future.')
    if process_date.year == now.year and process_date.month > now.month:
        raise_400(f'Given process date[{process_date}] cannot be in future.')
    if process_date.year == now.year and process_date.month == now.month and process_date.day > now.day:
        raise_400(f'Given process date[{process_date}] cannot be in future.')

    if frequency == MonitorRuleStatisticalInterval.MONTHLY:
        # given process date is in this month, run previous month
        # otherwise, run the given month
        if process_date.year == now.year and process_date.month == now.month:
            process_date = to_previous_month(process_date)
        SelfCleaningMonitorRulesRunner(principal_service) \
            .run(process_date, topic_id, MonitorRuleStatisticalInterval.MONTHLY)
    elif frequency == MonitorRuleStatisticalInterval.WEEKLY:
        # given process date is in this week, run previous week
        # otherwise, run the given week
        if process_date.year == now.year and int(process_date.strftime('%U')) == int(now.strftime('%U')):
            process_date = to_previous_week(process_date)
        SelfCleaningMonitorRulesRunner(principal_service) \
            .run(process_date, topic_id, MonitorRuleStatisticalInterval.WEEKLY)
    elif frequency == MonitorRuleStatisticalInterval.DAILY:
        # given process date is today, run yesterday
        # otherwise, run the given day
        if process_date.year == now.year and process_date.month == now.month and process_date.day == now.day:
            process_date = to_yesterday(process_date)
        SelfCleaningMonitorRulesRunner(principal_service) \
            .run(process_date, topic_id, MonitorRuleStatisticalInterval.DAILY)
    else:
        raise_400(f'Given frequency[{frequency}] is not supported.')

def action() -> ConnectedSpaceWithSubjects:
    space_service = get_space_service(connected_space_service)
    space: Optional[Space] = space_service.find_by_id(space_id)
    if space is None:
        raise_400('Incorrect space id.')
    if space.tenantId != principal_service.get_tenant_id():
        raise_403()
    template_connected_spaces = find_template_connected_spaces_by_ids(
        connected_space_service, template_ids, space_id, space.tenantId)
    connected_space = ConnectedSpace(spaceId=space_id, name=name, isTemplate=False)
    connected_space_service.redress_storable_id(connected_space)
    connected_space.userId = principal_service.get_user_id()
    connected_space.tenantId = principal_service.get_tenant_id()
    connected_space.lastVisitTime = get_current_time_in_seconds()
    # noinspection PyTypeChecker
    connected_space: ConnectedSpace = connected_space_service.create(connected_space)
    subjects_with_reports = ArrayHelper(template_connected_spaces) \
        .map(lambda x: copy_to_connected_space(x, connected_space, connected_space_service)) \
        .flatten().to_list()
    connected_space_with_subjects = ConnectedSpaceWithSubjects(**connected_space.dict())
    connected_space_with_subjects.subjects = subjects_with_reports
    return connected_space_with_subjects

class CompetitiveWorker(Storable):
    ip: Optional[str] = get_host_ip()
    processId: Optional[str] = str(getpid())
    dataCenterId: Optional[int] = None
    workerId: Optional[int] = None
    registeredAt: Optional[datetime] = get_current_time_in_seconds()
    lastBeatAt: Optional[datetime] = None

def action(connected_space: ConnectedSpace) -> ConnectedSpace:
    space_id = connected_space.spaceId
    if is_blank(space_id):
        raise_400('Space id is required.')
    space_service = get_space_service(connected_space_service)
    space: Optional[Space] = space_service.find_by_id(space_id)
    if space is None:
        raise_400('Incorrect space id.')
    if space.tenantId != principal_service.get_tenant_id():
        raise_403()
    connected_space.userId = principal_service.get_user_id()
    connected_space.tenantId = principal_service.get_tenant_id()
    connected_space.lastVisitTime = get_current_time_in_seconds()
    if connected_space_service.is_storable_id_faked(connected_space.connectId):
        connected_space_service.redress_storable_id(connected_space)
        # noinspection PyTypeChecker
        connected_space: ConnectedSpace = connected_space_service.create(connected_space)
    else:
        # noinspection PyTypeChecker
        existing_connected_space: Optional[ConnectedSpace] = \
            connected_space_service.find_by_id(connected_space.connectId)
        if existing_connected_space is not None:
            if existing_connected_space.tenantId != connected_space.tenantId:
                raise_403()
            if existing_connected_space.userId != connected_space.userId:
                raise_403()
        # noinspection PyTypeChecker
        connected_space: ConnectedSpace = connected_space_service.update(connected_space)
    return connected_space

def action() -> ClientPat:
    pat = PersonalAccessToken(
        token=token_urlsafe(16),
        userId=principal_service.get_user_id(),
        username=principal_service.get_user_name(),
        tenantId=principal_service.get_tenant_id(),
        note=params.note,
        expired=params.expired,
        permissions=[],
        createdAt=get_current_time_in_seconds()
    )
    pat_service.create(pat)
    return ClientPat(patId=pat.patId, token=pat.token, note=pat.note)

def action() -> LastSnapshot:
    last_snapshot = last_snapshot_service.find_by_user_id(
        principal_service.get_user_id(), principal_service.get_tenant_id())
    if last_snapshot is None:
        last_snapshot = build_empty_last_snapshot(
            principal_service.get_tenant_id(), principal_service.get_user_id())
    else:
        last_snapshot.lastVisitTime = get_current_time_in_seconds()
        last_snapshot_service.update(last_snapshot)
    return last_snapshot

def action() -> LastSnapshot:
    last_snapshot.userId = principal_service.get_user_id()
    last_snapshot.tenantId = principal_service.get_tenant_id()
    last_snapshot.lastVisitTime = get_current_time_in_seconds()
    if last_snapshot.favoritePin is None:
        last_snapshot.favoritePin = False
    existing_last_snapshot = last_snapshot_service.find_by_user_id(
        principal_service.get_user_id(), principal_service.get_tenant_id())
    if existing_last_snapshot is None:
        last_snapshot_service.create(last_snapshot)
    else:
        last_snapshot_service.update(last_snapshot)
    return last_snapshot

def copy_to_connected_space(
        template_connected_space: ConnectedSpace, connected_space: ConnectedSpace,
        connected_space_service: ConnectedSpaceService
) -> List[SubjectWithReports]:
    subject_service = get_subject_service(connected_space_service)
    subjects: List[Subject] = subject_service.find_by_connect_id(template_connected_space.connectId)
    report_service = get_report_service(connected_space_service)
    reports: List[Report] = report_service.find_by_connect_id(template_connected_space.connectId)
    report_map: Dict[SubjectId, List[Report]] = ArrayHelper(reports).group_by(lambda x: x.subjectId)
    now = get_current_time_in_seconds()

    def copy_and_create_report(report: Report, subject_id: SubjectId) -> Report:
        report_service.redress_storable_id(report)
        report.subjectId = subject_id
        report.connectId = connected_space.connectId
        report.userId = connected_space.userId
        report.tenantId = connected_space.tenantId
        report.lastVisitTime = now
        # remove thumbnail
        report.simulateThumbnail = None
        # noinspection PyTypeChecker
        return report_service.create(report)

    def copy_and_create_subject(subject: Subject) -> SubjectWithReports:
        my_reports = report_map.get(subject.subjectId)
        subject_service.redress_storable_id(subject)
        subject.connectId = connected_space.connectId
        subject.userId = connected_space.userId
        subject.tenantId = connected_space.tenantId
        subject.lastVisitTime = now
        # noinspection PyTypeChecker
        subject: Subject = subject_service.create(subject)
        subject_with_reports = SubjectWithReports(**subject.dict())
        if my_reports is not None and len(my_reports) != 0:
            subject_with_reports.reports = ArrayHelper(my_reports) \
                .map(lambda x: copy_and_create_report(x, subject.subjectId)) \
                .to_list()
        else:
            subject_with_reports.reports = []
        return subject_with_reports

    return ArrayHelper(subjects).map(lambda x: copy_and_create_subject(x)).to_list()

def build_index(self, topic: Topic) -> None:
    if not ask_engine_index_enabled():
        return
    # noinspection PyTypeChecker
    factor_index_list: List[FactorIndex] = self.storage.find(EntityFinder(
        name=FACTOR_INDEX_ENTITY_NAME,
        shaper=FACTOR_INDEX_ENTITY_SHAPER,
        criteria=[
            EntityCriteriaExpression(left=ColumnNameLiteral(columnName='topic_id'), right=topic.topicId)
        ]
    ))
    index_map: Dict[FactorId, FactorIndex] = ArrayHelper(factor_index_list) \
        .to_map(lambda x: x.factorId, lambda x: x)
    now = get_current_time_in_seconds()
    current_index_list = ArrayHelper(topic.factors).map(
        lambda x: self.build_factor_index(x, topic, now)).to_list()
    entity_id_helper = EntityIdHelper(
        name=FACTOR_INDEX_ENTITY_NAME,
        shaper=FACTOR_INDEX_ENTITY_SHAPER,
        idColumnName='factor_index_id'
    )
    for factor_index in current_index_list:
        if factor_index.factorId in index_map:
            old_factor_index = index_map[factor_index.factorId]
            factor_index.factorIndexId = old_factor_index.factorIndexId
            factor_index.createdAt = old_factor_index.createdAt
            self.storage.update_one(factor_index, entity_id_helper)
        else:
            self.storage.insert_one(factor_index, entity_id_helper)
    current_factor_ids = ArrayHelper(current_index_list).map(lambda x: x.factorId).to_list()
    to_remove_list: List[FactorIndex] = ArrayHelper(list(index_map.values())) \
        .filter(lambda x: x.factorId not in current_factor_ids).to_list()
    if len(to_remove_list) != 0:
        self.storage.delete(EntityDeleter(
            name=FACTOR_INDEX_ENTITY_NAME,
            shaper=FACTOR_INDEX_ENTITY_SHAPER,
            criteria=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='topic_id'), right=topic.topicId),
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='factor_index_id'),
                    operator=EntityCriteriaOperator.IN,
                    right=ArrayHelper(to_remove_list).map(lambda x: x.factorIndexId).to_list())
            ]
        ))

def create_run_constant_segment(
        variable: MightAVariable
) -> Callable[[PipelineVariables, PrincipalService], Any]:
    prefix = variable.text
    variable_name = variable.variable
    if variable_name == VariablePredefineFunctions.NEXT_SEQ.value:
        return create_snowflake_generator(prefix)
    elif variable_name == VariablePredefineFunctions.NOW.value:
        return lambda variables, principal_service: get_current_time_in_seconds()
    elif variable_name.startswith(VariablePredefineFunctions.YEAR_DIFF.value):
        return create_date_diff(prefix, variable_name, VariablePredefineFunctions.YEAR_DIFF)
    elif variable_name.startswith(VariablePredefineFunctions.MONTH_DIFF.value):
        return create_date_diff(prefix, variable_name, VariablePredefineFunctions.MONTH_DIFF)
    elif variable_name.startswith(VariablePredefineFunctions.DAY_DIFF.value):
        return create_date_diff(prefix, variable_name, VariablePredefineFunctions.DAY_DIFF)
    elif variable_name.startswith(VariablePredefineFunctions.DATE_FORMAT.value):
        return create_date_format(prefix, variable_name)
    elif variable_name.startswith(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value):
        if variable_name == VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value:
            if is_blank(prefix):
                return create_previous_trigger_data()
            else:
                raise DataKernelException(
                    f'Previous trigger data is a dict, cannot prefix by a string[{prefix}].')
        length = len(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value)
        if len(variable_name) < length + 2 or variable_name[length:length + 1] != '.':
            raise DataKernelException(f'Constant[{variable_name}] is not supported.')
        return create_from_previous_trigger_data(prefix, variable_name[length + 1:])
    else:
        return create_get_from_variables_with_prefix(prefix, variable_name)

async def find_topic_profile(
        topic_id: Optional[TopicId] = None, date: Optional[str] = None,
        principal_service: PrincipalService = Depends(get_admin_principal)
) -> Optional[TopicProfile]:
    if is_blank(topic_id):
        raise_400('Topic id is required.')
    parsed, query_date = is_date(date, ask_all_date_formats())
    if not parsed:
        query_date = get_current_time_in_seconds()
    start_time = query_date.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=None)
    end_time = query_date.replace(hour=23, minute=59, second=59, microsecond=999999, tzinfo=None)
    return TopicProfileService(principal_service).find(topic_id, start_time, end_time)

def action(report: Report) -> Report:
    report.userId = principal_service.get_user_id()
    report.tenantId = principal_service.get_tenant_id()
    report.lastVisitTime = get_current_time_in_seconds()
    if report.simulating is None:
        report.simulating = False
    if report_service.is_storable_id_faked(report.reportId):
        subject_id = report.subjectId
        if is_blank(subject_id):
            raise_400('Subject id is required.')
        subject_service = get_subject_service(report_service)
        existing_subject: Optional[Subject] = subject_service.find_by_id(subject_id)
        if existing_subject is None:
            raise_400('Incorrect subject id.')
        elif existing_subject.tenantId != report.tenantId or existing_subject.userId != report.userId:
            raise_403()
        else:
            report.connectId = existing_subject.connectId
        report_service.redress_storable_id(report)
        # noinspection PyTypeChecker
        report: Report = report_service.create(report)
    else:
        # noinspection PyTypeChecker
        existing_report: Optional[Report] = report_service.find_by_id(report.reportId)
        if existing_report is not None:
            if existing_report.tenantId != report.tenantId:
                raise_403()
            if existing_report.userId != report.userId:
                raise_403()
            report.subjectId = existing_report.subjectId
            report.connectId = existing_report.connectId
        # noinspection PyTypeChecker
        report: Report = report_service.update(report)
    return report

def update_with_lock_by_id(self, data: Dict[str, Any]) -> Tuple[int, EntityCriteria]:
    """
    no storage connect and close, it must be done outside
    """
    data_entity_helper = self.get_data_entity_helper()
    current_version = data_entity_helper.find_version(data)
    current_update_time = data_entity_helper.find_update_time(data)
    # increase version
    data_entity_helper.assign_version(data, current_version + 1)
    data_entity_helper.assign_update_time(data, get_current_time_in_seconds())
    criteria = self.build_id_criteria(data)
    updated_count = self.get_storage().update_only(
        data_entity_helper.get_entity_updater(criteria, data_entity_helper.serialize_to_storage(data)))
    if updated_count == 0:
        # rollback version
        data_entity_helper.assign_version(data, current_version)
        data_entity_helper.assign_update_time(data, current_update_time)
    return updated_count, criteria

def insert(self, data: Dict[str, Any]) -> Dict[str, Any]:
    """
    assign id and version, audit columns
    """
    data_entity_helper = self.get_data_entity_helper()
    data_entity_helper.assign_id_column(data, self.get_snowflake_generator().next_id())
    data_entity_helper.assign_version(data, 1)
    now = get_current_time_in_seconds()
    data_entity_helper.assign_tenant_id(data, self.get_principal_service().get_tenant_id())
    data_entity_helper.assign_insert_time(data, now)
    data_entity_helper.assign_update_time(data, now)
    storage = self.get_storage()
    try:
        storage.connect()
        storage.insert_one(data, data_entity_helper.get_entity_helper())
        return data
    finally:
        storage.close()

def action(dashboard: Dashboard) -> Dashboard:
    dashboard.userId = principal_service.get_user_id()
    dashboard.tenantId = principal_service.get_tenant_id()
    dashboard.lastVisitTime = get_current_time_in_seconds()
    # noinspection DuplicatedCode
    if dashboard_service.is_storable_id_faked(dashboard.dashboardId):
        dashboard_service.redress_storable_id(dashboard)
        validate_reports(dashboard, dashboard_service, principal_service)
        # noinspection PyTypeChecker
        dashboard: Dashboard = dashboard_service.create(dashboard)
    else:
        # noinspection PyTypeChecker
        existing_dashboard: Optional[Dashboard] = dashboard_service.find_by_id(dashboard.dashboardId)
        if existing_dashboard is not None:
            if existing_dashboard.tenantId != dashboard.tenantId:
                raise_403()
            if existing_dashboard.userId != dashboard.userId:
                raise_403()
        validate_reports(dashboard, dashboard_service, principal_service)
        # noinspection PyTypeChecker
        dashboard: Dashboard = dashboard_service.update(dashboard)
    return dashboard

def acquire_alive_worker_ids(self) -> List[int]:
    self.storage.begin()
    try:
        rows = self.storage.find_distinct_values(EntityDistinctValuesFinder(
            name=SNOWFLAKE_WORKER_ID_TABLE,
            shaper=COMPETITIVE_WORKER_SHAPER,
            # workers last beat at in 1 day, means still alive
            criteria=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='data_center_id'), right=self.dataCenterId),
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='last_beat_at'),
                    operator=EntityCriteriaOperator.GREATER_THAN,
                    right=(get_current_time_in_seconds() + timedelta(days=-1)))
            ],
            distinctColumnNames=['worker_id']
        ))
        return ArrayHelper(rows).map(lambda x: x.workerId).to_list()
    finally:
        self.storage.close()

def update_by_id_and_version(
        self, data: Dict[str, Any], additional_criteria: Optional[EntityCriteria] = None
) -> Tuple[int, EntityCriteria]:
    """
    version + 1, assign audit columns. rollback version when update nothing.
    given data must contain all columns
    """
    data_entity_helper = self.get_data_entity_helper()
    criteria = self.build_id_version_criteria(data)
    if additional_criteria is not None:
        criteria = [*criteria, *additional_criteria]
    current_version = data_entity_helper.find_version(data)
    current_update_time = data_entity_helper.find_update_time(data)
    # increase version
    data_entity_helper.assign_version(data, current_version + 1)
    # set update time
    data_entity_helper.assign_update_time(data, get_current_time_in_seconds())
    storage = self.get_storage()
    try:
        storage.connect()
        updated_count = storage.update_only(
            updater=data_entity_helper.get_entity_updater(
                criteria, data_entity_helper.serialize_to_storage(data)),
            peace_when_zero=True)
        if updated_count == 0:
            # rollback version
            data_entity_helper.assign_version(data, current_version)
            # rollback update time
            data_entity_helper.assign_update_time(data, current_update_time)
        return updated_count, criteria
    finally:
        storage.close()

def declare_myself(self, worker: CompetitiveWorker) -> None:
    self.storage.begin()
    try:
        updated_count = self.storage.update_only(EntityUpdater(
            name=SNOWFLAKE_WORKER_ID_TABLE,
            shaper=COMPETITIVE_WORKER_SHAPER,
            criteria=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='data_center_id'), right=self.dataCenterId),
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='worker_id'), right=worker.workerId)
            ],
            update={'last_beat_at': get_current_time_in_seconds()}
        ))
        if updated_count == 0:
            raise WorkerDeclarationException(
                f'Failed to declare worker[dataCenterId={worker.dataCenterId}, workerId={worker.workerId}], '
                f'certain data not found in storage.')
        self.storage.commit_and_close()
    except Exception as e:
        self.storage.rollback_and_close()
        raise e

def action(subject: Subject) -> Subject:
    subject.userId = principal_service.get_user_id()
    subject.tenantId = principal_service.get_tenant_id()
    subject.lastVisitTime = get_current_time_in_seconds()
    if subject_service.is_storable_id_faked(subject.subjectId):
        connect_id = subject.connectId
        if is_blank(connect_id):
            raise_400('Connected space id is required.')
        connected_space_service: ConnectedSpaceService = get_connected_space_service(subject_service)
        existing_connected_space: Optional[ConnectedSpace] = connected_space_service.find_by_id(connect_id)
        if existing_connected_space is None:
            raise_400('Incorrect connected space id.')
        elif existing_connected_space.tenantId != subject.tenantId or existing_connected_space.userId != subject.userId:
            raise_403()
        subject_service.redress_storable_id(subject)
        # noinspection PyTypeChecker
        subject: Subject = subject_service.create(subject)
    else:
        # noinspection PyTypeChecker
        existing_subject: Optional[Subject] = subject_service.find_by_id(subject.subjectId)
        if existing_subject is not None:
            if existing_subject.tenantId != subject.tenantId:
                raise_403()
            if existing_subject.userId != subject.userId:
                raise_403()
            subject.connectId = existing_subject.connectId
        # noinspection PyTypeChecker
        subject: Subject = subject_service.update(subject)
    return subject

def first_declare_myself(self, worker: CompetitiveWorker) -> None:
    self.storage.begin()
    try:
        existing_workers = self.storage.find(EntityFinder(
            name=SNOWFLAKE_WORKER_ID_TABLE,
            shaper=COMPETITIVE_WORKER_SHAPER,
            criteria=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='data_center_id'), right=worker.dataCenterId),
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='worker_id'), right=worker.workerId)
            ]
        ))
        workers_count = len(existing_workers)
        if workers_count == 0:
            # worker not exists
            worker.lastBeatAt = get_current_time_in_seconds()
            # handle insert failed when other process already did it, may raise exception
            try:
                self.storage.insert_one(
                    worker,
                    EntityHelper(name=SNOWFLAKE_WORKER_ID_TABLE, shaper=COMPETITIVE_WORKER_SHAPER))
            except Exception as e:
                getLogger(__name__).error(e, exc_info=True, stack_info=True)
                raise WorkerFirstDeclarationException(
                    f'Failed to declare worker[dataCenterId={worker.dataCenterId}, workerId={worker.workerId}], '
                    f'there might be an existing one in storage.')
        elif workers_count == 1:
            # noinspection PyTypeChecker
            existing_worker: CompetitiveWorker = existing_workers[0]
            if StorageBasedWorkerIdGenerator.is_abandoned(existing_worker):
                # worker last beat before 1 day, treat it as abandoned
                # replace it
                worker.lastBeatAt = get_current_time_in_seconds()
                updated_count = self.storage.update_only(EntityUpdater(
                    name=SNOWFLAKE_WORKER_ID_TABLE,
                    shaper=COMPETITIVE_WORKER_SHAPER,
                    criteria=[
                        EntityCriteriaExpression(
                            left=ColumnNameLiteral(columnName='data_center_id'), right=self.dataCenterId),
                        EntityCriteriaExpression(
                            left=ColumnNameLiteral(columnName='worker_id'), right=worker.workerId),
                        EntityCriteriaExpression(
                            left=ColumnNameLiteral(columnName='last_beat_at'),
                            operator=EntityCriteriaOperator.LESS_THAN_OR_EQUALS,
                            right=(get_current_time_in_seconds() + timedelta(days=-1)))
                    ],
                    update={
                        'ip': worker.ip,
                        'process_id': worker.processId,
                        'registered_at': worker.registeredAt,
                        'last_beat_at': worker.lastBeatAt
                    }
                ))
                # handle update failed when other process already did it, may raise exception
                if updated_count == 0:
                    # no worker had been updated, which means declaration is failed
                    raise WorkerFirstDeclarationException(
                        f'Failed to declare worker[dataCenterId={worker.dataCenterId}, workerId={worker.workerId}], '
                        f'there might be an alive one or not exists in storage.')
            else:
                # the only worker is still alive
                raise WorkerFirstDeclarationException(
                    f'Worker[dataCenterId={worker.dataCenterId}, workerId={worker.workerId}, lastBeatAt={worker.lastBeatAt}] '
                    f'still alive.')
        else:
            # multiple workers found
            raise WorkerFirstDeclarationException(
                f'Multiple workers[dataCenterId={worker.dataCenterId}, workerId={worker.workerId}, count={workers_count}] '
                f'determined.')
        # commit data
        self.storage.commit_and_close()
    except Exception as e:
        # rollback data
        self.storage.rollback_and_close()
        # rethrow exception
        raise e

def now(self) -> datetime:
    """
    get current time in seconds
    """
    return get_current_time_in_seconds()

def create(self, lock: TopicSnapshotJobLock) -> TopicSnapshotJobLock:
    lock.lockId = self.generate_lock_id()
    lock.createdAt = get_current_time_in_seconds()
    self.storage.insert_one(lock, self.get_entity_helper())
    return lock

def run() -> None:
    process_date = to_yesterday(get_current_time_in_seconds())
    run_monitor_rules(process_date, MonitorRuleStatisticalInterval.DAILY)

def run() -> None:
    process_date = to_previous_week(get_current_time_in_seconds())
    run_monitor_rules(process_date, MonitorRuleStatisticalInterval.WEEKLY)

def create(self, a_pat: PersonalAccessToken) -> PersonalAccessToken:
    a_pat.patId = self.generate_pat_id()
    a_pat.createdAt = get_current_time_in_seconds()
    self.storage.insert_one(a_pat, self.get_entity_helper())
    return a_pat

def create_run_constant_segment(
        variable: MightAVariable, available_schemas: List[TopicSchema], allow_in_memory_variables: bool
) -> Tuple[Callable[[PipelineVariables, PrincipalService], Any], List[PossibleParameterType]]:
    prefix = variable.text
    has_prefix = len(prefix) != 0
    variable_name = variable.variable
    if variable_name == VariablePredefineFunctions.NEXT_SEQ.value:
        return \
            create_snowflake_generator(prefix), \
            [PossibleParameterType.STRING if has_prefix else PossibleParameterType.NUMBER]
    elif variable_name == VariablePredefineFunctions.NOW.value:
        if has_prefix:
            value = f'{prefix}{get_current_time_in_seconds().strftime("%Y-%m-%d %H:%M:%S")}'
            return lambda variables, principal_service: value, [PossibleParameterType.STRING]
        else:
            return \
                lambda variables, principal_service: get_current_time_in_seconds(), \
                [PossibleParameterType.DATETIME]
    elif variable_name.startswith(VariablePredefineFunctions.YEAR_DIFF.value):
        return \
            create_date_diff(
                prefix, variable_name, VariablePredefineFunctions.YEAR_DIFF,
                available_schemas, allow_in_memory_variables), \
            [PossibleParameterType.STRING if has_prefix else PossibleParameterType.NUMBER]
    elif variable_name.startswith(VariablePredefineFunctions.MONTH_DIFF.value):
        return \
            create_date_diff(
                prefix, variable_name, VariablePredefineFunctions.MONTH_DIFF,
                available_schemas, allow_in_memory_variables), \
            [PossibleParameterType.STRING if has_prefix else PossibleParameterType.NUMBER]
    elif variable_name.startswith(VariablePredefineFunctions.DAY_DIFF.value):
        return \
            create_date_diff(
                prefix, variable_name, VariablePredefineFunctions.DAY_DIFF,
                available_schemas, allow_in_memory_variables), \
            [PossibleParameterType.STRING if has_prefix else PossibleParameterType.NUMBER]
    elif variable_name.startswith(VariablePredefineFunctions.DATE_FORMAT.value):
        return \
            create_date_format(prefix, variable_name, available_schemas, allow_in_memory_variables), \
            [PossibleParameterType.STRING]
    elif variable_name.endswith(VariablePredefineFunctions.LENGTH.value):
        return \
            create_char_length(prefix, variable_name, available_schemas, allow_in_memory_variables), \
            [PossibleParameterType.STRING if has_prefix else PossibleParameterType.NUMBER]

    if allow_in_memory_variables:
        if variable_name.startswith(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value):
            if variable_name == VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value:
                raise DataKernelException(
                    f'Previous trigger data is a dict, cannot be used for storage. '
                    f'Current constant segment is [{prefix}{{{variable_name}}}].')
            length = len(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value)
            if len(variable_name) < length + 2 or variable_name[length:length + 1] != '.':
                raise DataKernelException(f'Constant[{variable_name}] is not supported.')
            return \
                create_from_previous_trigger_data(prefix, variable_name[length + 1:]), \
                [PossibleParameterType.STRING if has_prefix else PossibleParameterType.ANY_VALUE]
        else:
            return \
                create_get_from_variables_with_prefix(prefix, variable_name), \
                [PossibleParameterType.STRING if has_prefix else PossibleParameterType.ANY_VALUE]
    else:
        # recover to original string
        return create_static_str(f'{prefix}{{{variable_name}}}'), [PossibleParameterType.STRING]

def run() -> None:
    process_date = to_yesterday(get_current_time_in_seconds())
    run_job(scheduler.schedulerId, process_date)

def is_abandoned(worker: CompetitiveWorker) -> bool:
    return (get_current_time_in_seconds() - worker.lastBeatAt).days >= 1

def run() -> None:
    process_date = to_previous_month(get_current_time_in_seconds())
    run_job(scheduler.schedulerId, process_date)

def test_date(variable_name: str) -> Tuple[bool, Optional[date]]:
    if variable_name == VariablePredefineFunctions.NOW:
        return True, get_current_time_in_seconds()
    else:
        return is_date(variable_name, ask_all_date_formats())