async def fetch_topic_by_name(
		query_name: Optional[str] = None, tenant_id: Optional[TenantId] = None,
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> List[Topic]:
	if is_blank(query_name):
		raise_400('Name criteria is required.')
	if principal_service.is_tenant_admin():
		if is_not_blank(tenant_id) and tenant_id != principal_service.get_tenant_id():
			raise_400('Tenant id is incorrect.')
		else:
			tenant_id = principal_service.get_tenant_id()
	if principal_service.is_super_admin() and is_blank(tenant_id):
		raise_400('Tenant id is required.')

	topic_service = get_topic_service(principal_service)

	def action() -> List[Topic]:
		topic_index_service = get_topic_index_service(topic_service)
		factor_index_list = topic_index_service.find(query_name, tenant_id)
		if len(factor_index_list) == 0:
			return []
		topic_ids: List[TopicId] = []
		for factor_index in factor_index_list:
			if factor_index.topicId not in topic_ids:
				topic_ids.append(factor_index.topicId)
		return topic_service.find_by_ids(topic_ids, tenant_id)

	return trans_readonly(topic_service, action)

def find_enum(import_items: ImportEnumItems) -> Tuple[Enum, List[EnumItem]]:
	enum_id = import_items.enumId
	name = import_items.name
	if is_blank(enum_id) and is_blank(name):
		raise_400('At least one of enumeration id and name must be provided.')
	enumeration = None
	if is_not_blank(enum_id):
		# both provided, find by id
		enumeration = enum_service.find_by_id(enum_id)
	elif is_not_blank(name):
		enumeration = enum_service.find_by_name(name, principal_service.get_tenant_id())
	if enumeration is not None:
		# found
		if enumeration.tenantId != principal_service.get_tenant_id():
			raise_404(f'Enumeration[id={enum_id}, name={name}] not found.')
	elif is_not_blank(name):
		# not found, but name is given, create one
		enumeration = Enum(
			enumId=enum_id, name=name, tenantId=principal_service.get_tenant_id(), items=[])
		enum_service.create(enumeration)
	else:
		raise_404(f'Enumeration[id={enum_id}, name={name}] not found.')
	return enumeration, import_items.items

def action(topic_id: TopicId, factor_id: FactorId) -> ParameterCondition:
	if is_blank(bucket_id):
		raise IndicatorKernelException('Bucket of achievement indicator not declared.')
	if is_blank(bucket_segment_name):
		raise IndicatorKernelException('Bucket segment name of achievement indicator not declared.')
	bucket = ask_bucket(bucket_id, self.principalService)
	segment = ArrayHelper(bucket.segments).find(lambda x: x.name == bucket_segment_name)
	if segment is None:
		raise IndicatorKernelException(f'Bucket segment[name={bucket_segment_name}] not found.')
	if isinstance(bucket, NumericSegmentsHolder):
		include = bucket.include
		return self.fake_numeric_segment_to_condition(include, segment)(topic_id, factor_id)
	elif isinstance(bucket, CategorySegmentsHolder):
		return self.fake_category_segment_to_condition(segment, bucket.segments)(topic_id, factor_id)
	else:
		bucket_data = bucket.to_dict()
		if bucket_data.get('include') is not None:
			return self.fake_numeric_segment_to_condition(
				bucket_data.get('include'), segment)(topic_id, factor_id)
		else:
			# noinspection PyTypeChecker
			return self.fake_category_segment_to_condition(segment, bucket.segments)(topic_id, factor_id)

def fake_tenant_admin(
		tenant_id: TenantId,
		user_id: Optional[UserId] = None, user_name: Optional[str] = None) -> PrincipalService:
	return PrincipalService(User(
		userId='1' if is_blank(user_id) else user_id,
		userName='******' if is_blank(user_name) else user_name,
		tenantId='-1' if is_blank(tenant_id) else tenant_id,
		role=UserRole.ADMIN
	))

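# A minimal usage sketch (values are hypothetical, not from the original source): build a tenant-admin
# principal for code that must run under a specific tenant without a real signed-in user.
# principal = fake_tenant_admin(tenant_id='1')
# assert principal.is_tenant_admin() and principal.get_tenant_id() == '1'
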
def handle_scheduler(
		scheduler: TopicSnapshotScheduler,
		source_topic: Topic, task_topic: Topic,
		topic_service: TopicService, scheduler_service: TopicSnapshotSchedulerService,
		principal_service: PrincipalService) -> Optional[Callable[[], None]]:
	target_topic_id = scheduler.targetTopicId
	if is_blank(target_topic_id):
		# incorrect scheduler, ignored
		return None

	should_save_scheduler = False

	target_topic: Optional[Topic] = topic_service.find_by_id(target_topic_id)
	if target_topic is None:
		# create target topic when not found
		target_topic = create_snapshot_target_topic(scheduler, source_topic)
		target_topic, target_topic_tail = ask_save_topic_action(topic_service, principal_service)(target_topic)
		scheduler.targetTopicId = target_topic.topicId
		should_save_scheduler = True
	else:
		# rebuild target topic
		target_topic = rebuild_snapshot_target_topic(target_topic, source_topic)
		target_topic, target_topic_tail = ask_save_topic_action(topic_service, principal_service)(target_topic)

	pipeline_service = get_pipeline_service(topic_service)
	pipeline_id = scheduler.pipelineId
	if is_blank(pipeline_id):
		# pipeline not declared, create it
		pipeline = create_snapshot_pipeline(task_topic, target_topic)
		pipeline = ask_save_pipeline_action(pipeline_service, principal_service)(pipeline)
		scheduler.pipelineId = pipeline.pipelineId
		should_save_scheduler = True
	else:
		pipeline: Optional[Pipeline] = pipeline_service.find_by_id(pipeline_id)
		if pipeline is None:
			# create pipeline when not found
			pipeline = create_snapshot_pipeline(task_topic, target_topic)
			pipeline = ask_save_pipeline_action(pipeline_service, principal_service)(pipeline)
			scheduler.pipelineId = pipeline.pipelineId
			should_save_scheduler = True
		else:
			# rebuild pipeline
			pipeline = rebuild_snapshot_pipeline(pipeline, task_topic, target_topic)
			ask_save_pipeline_action(pipeline_service, principal_service)(pipeline)

	if should_save_scheduler:
		scheduler_service.update(scheduler)

	return target_topic_tail

def create_particular(self, params: Dict[str, Any]) -> Encryptor:
	key = params.get('key')
	if is_blank(key) or len(key) != 32:
		raise DataKernelException(f'Parameter key[{key}] should be 32 digits.')
	iv = params.get('iv')
	if is_blank(iv) or len(iv) != 16:
		raise DataKernelException(f'Parameter iv[{iv}] should be 16 digits.')
	return AESEncryptor(key, iv)

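# A minimal usage sketch (hypothetical: `factory` stands for an instance of the class defining
# create_particular, and the key/iv literals are placeholders): the factory expects a 32-character
# key and a 16-character iv, matching the checks above.
# encryptor = factory.create_particular({'key': '0' * 32, 'iv': '0' * 16})
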
async def fetch_topic_data(
		topic_name: Optional[str] = None, topic_id: Optional[TopicId] = None,
		tenant_id: Optional[TenantId] = None,
		criteria: Optional[TopicPageable] = None,
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> DataPage:
	if is_blank(topic_name) and is_blank(topic_id):
		raise_400('Topic id or name is required.')
	tenant_id = validate_tenant_id(tenant_id, principal_service)
	principal_service = fake_to_tenant(principal_service, tenant_id)
	if is_not_blank(topic_id):
		schema = get_topic_service(principal_service).find_schema_by_id(topic_id, tenant_id)
	else:
		schema = get_topic_schema(topic_name, tenant_id, principal_service)
	storage = ask_topic_storage(schema, principal_service)
	service = ask_topic_data_service(schema, storage, principal_service)
	pageable = Pageable(
		pageNumber=1 if criteria is None or criteria.pageNumber is None or criteria.pageNumber <= 0
		else criteria.pageNumber,
		pageSize=100 if criteria is None or criteria.pageSize is None or criteria.pageSize <= 0
		else criteria.pageSize)
	if criteria is None or is_blank(criteria.jointType) or criteria.filters is None:
		page = service.page_and_unwrap(None, pageable)
	else:
		parsed_criteria = parse_condition_for_storage(criteria, [schema], principal_service, False)
		empty_variables = PipelineVariables(None, None, None)
		page = service.page_and_unwrap(
			[parsed_criteria.run(empty_variables, principal_service)], pageable)

	def id_to_str(row: Dict[str, Any]) -> Dict[str, Any]:
		if TopicDataColumnNames.ID.value in row:
			copy = row.copy()
			copy[TopicDataColumnNames.ID.value] = str(row[TopicDataColumnNames.ID.value])
			return copy
		else:
			return row

	page.data = ArrayHelper(page.data).map(id_to_str).to_list()
	return page

async def update_connected_space_name_by_id(
		connect_id: Optional[ConnectedSpaceId], name: Optional[str],
		principal_service: PrincipalService = Depends(get_console_principal)
) -> None:
	"""
	renaming a connected space does not increase the optimistic lock version
	"""
	if is_blank(connect_id):
		raise_400('Connected space id is required.')

	connected_space_service = get_connected_space_service(principal_service)

	# noinspection DuplicatedCode
	def action() -> None:
		existing_one = connected_space_service.find_tenant_and_user(connect_id)
		if existing_one is None:
			raise_404()
		existing_tenant_id, existing_user_id = existing_one
		if existing_tenant_id != principal_service.get_tenant_id():
			raise_403()
		elif existing_user_id != principal_service.get_user_id():
			raise_403()
		# noinspection PyTypeChecker
		connected_space_service.update_name(
			connect_id, name, principal_service.get_user_id(), principal_service.get_tenant_id())

	trans(connected_space_service, action)

async def delete_connected_space_by_id(
		connect_id: Optional[ConnectedSpaceId],
		principal_service: PrincipalService = Depends(get_console_principal)
) -> None:
	if is_blank(connect_id):
		raise_400('Connected space id is required.')

	connected_space_service = get_connected_space_service(principal_service)

	# noinspection DuplicatedCode
	def action() -> None:
		# noinspection PyTypeChecker
		existing_connected_space: Optional[ConnectedSpace] = connected_space_service.find_by_id(connect_id)
		if existing_connected_space is None:
			raise_404()
		if existing_connected_space.tenantId != principal_service.get_tenant_id():
			raise_403()
		if not principal_service.is_tenant_admin() \
				and existing_connected_space.userId != principal_service.get_user_id():
			raise_403()
		connected_space_service.delete(connect_id)
		subject_service: SubjectService = get_subject_service(connected_space_service)
		subject_service.delete_by_connect_id(connect_id)
		report_service: ReportService = get_report_service(connected_space_service)
		report_service.delete_by_connect_id(connect_id)

	trans(connected_space_service, action)

def try_to_import_monitor_rule(
		monitor_rule: MonitorRule, monitor_rule_service: MonitorRuleService, do_update: bool
) -> MonitorRuleImportDataResult:
	if is_blank(monitor_rule.ruleId):
		monitor_rule_service.redress_storable_id(monitor_rule)
		monitor_rule_service.create(monitor_rule)
	else:
		existing_monitor_rule: Optional[MonitorRule] = monitor_rule_service.find_by_id(monitor_rule.ruleId)
		if existing_monitor_rule is None:
			monitor_rule_service.create(monitor_rule)
		elif do_update:
			if not for_same_location(monitor_rule, existing_monitor_rule):
				# has same id, but not for same location
				existing_monitor_rule: Optional[MonitorRule] = monitor_rule_service.find_by_location(
					monitor_rule.code, monitor_rule.topicId, monitor_rule.factorId,
					existing_monitor_rule.tenantId)
				if existing_monitor_rule is None:
					# same location rule not found, redress the rule id and create
					monitor_rule_service.create(monitor_rule)
				else:
					# use the original rule id and update
					monitor_rule.ruleId = existing_monitor_rule.ruleId
					monitor_rule_service.update(monitor_rule)
			else:
				monitor_rule_service.update(monitor_rule)
		else:
			return MonitorRuleImportDataResult(
				monitorRuleId=monitor_rule.ruleId, name=monitor_rule.code, passed=False,
				reason='Monitor rule already exists.')

	return MonitorRuleImportDataResult(
		monitorRuleId=monitor_rule.ruleId, name=monitor_rule.code, passed=True)

async def init_tenant(
		tenant_id: Optional[TenantId],
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> None:
	if is_blank(tenant_id):
		if principal_service.is_super_admin():
			raise_400('Tenant id is required.')
		elif principal_service.is_tenant_admin():
			tenant_id = principal_service.get_tenant_id()
	else:
		if principal_service.get_tenant_id() != tenant_id and principal_service.is_tenant_admin():
			raise_400(f'Tenant[{tenant_id}] does not match principal.')
		elif principal_service.is_super_admin():
			tenant: Optional[Tenant] = get_tenant_service(principal_service).find_by_id(tenant_id)
			if tenant is None:
				raise_404(f'Tenant[id={tenant_id}] not found.')

	meta_tenant_service = get_meta_tenant_service(principal_service)

	def action() -> None:
		topics = ask_pipeline_monitor_topics()
		create_topics_and_pipelines(
			topics, lambda source_topics: ask_pipeline_monitor_pipelines(source_topics),
			tenant_id, meta_tenant_service, principal_service)
		topics = ask_dqc_topics()
		create_topics_and_pipelines(
			topics, lambda source_topics: ask_dqc_pipelines(source_topics),
			tenant_id, meta_tenant_service, principal_service)

	trans(meta_tenant_service, action)

async def patch_topic_data(
		topic_name: Optional[str] = None,
		patch_type: Optional[PipelineTriggerType] = PipelineTriggerType.MERGE,
		tenant_id: Optional[TenantId] = None,
		data=Body(...),
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> None:
	"""
	a data patch does not trigger any pipeline
	"""
	if is_blank(topic_name):
		raise_400('Topic name is required.')
	if patch_type is None:
		patch_type = PipelineTriggerType.MERGE
	if patch_type == PipelineTriggerType.INSERT_OR_MERGE:
		raise_400('Patch type can be one of insert/merge/delete.')
	tenant_id = validate_tenant_id(tenant_id, principal_service)
	principal_service = fake_to_tenant(principal_service, tenant_id)
	schema = get_topic_schema(topic_name, tenant_id, principal_service)
	storage = ask_topic_storage(schema, principal_service)
	service = ask_topic_data_service(schema, storage, principal_service)
	if patch_type == PipelineTriggerType.INSERT:
		service.trigger_by_insert(data)
	elif patch_type == PipelineTriggerType.MERGE:
		service.trigger_by_merge(data)
	elif patch_type == PipelineTriggerType.DELETE:
		service.trigger_by_delete(data)
	else:
		raise DataKernelException(f'Patch type [{patch_type}] is not supported.')

def action(user: User) -> User:
	# crypt password
	pwd = user.password
	if is_not_blank(pwd):
		user.password = crypt_password(pwd)
	if user.isActive is None:
		user.isActive = True

	if user_service.is_storable_id_faked(user.userId):
		if principal_service.is_super_admin() and check_user_group:
			if user.groupIds is not None and len(user.groupIds) != 0:
				# when a super admin creates a user, no user group is allowed
				raise_400('No user group allowed for creating user by super admin.')
		user_service.redress_storable_id(user)
		user_group_ids = ArrayHelper(user.groupIds).distinct().to_list()
		user.groupIds = user_group_ids
		# noinspection PyTypeChecker
		user: User = user_service.create(user)
		# synchronize user to user groups
		sync_user_to_groups(user_service, user.userId, user_group_ids, user.tenantId)
	else:
		# noinspection PyTypeChecker
		existing_user: Optional[User] = user_service.find_by_id(user.userId)
		if existing_user is not None:
			if existing_user.tenantId != user.tenantId:
				raise_403()
			elif is_blank(user.password):
				# keep original password
				user.password = existing_user.password
		if principal_service.is_super_admin() and check_user_group:
			# when a super admin updates a user, simply keep the existing user groups
			user.groupIds = existing_user.groupIds
		else:
			user_group_ids = ArrayHelper(user.groupIds).distinct().to_list()
			user.groupIds = user_group_ids
		user_group_ids = user.groupIds
		# noinspection PyTypeChecker
		user: User = user_service.update(user)
		if principal_service.is_tenant_admin():
			# remove user from user groups, in case user groups are removed
			removed_user_group_ids = ArrayHelper(existing_user.groupIds).difference(user_group_ids).to_list()
			remove_user_from_groups(user_service, user.userId, removed_user_group_ids, user.tenantId)
			# synchronize user to user groups
			sync_user_to_groups(user_service, user.userId, user_group_ids, user.tenantId)

	# remove password
	clear_pwd(user)
	return user

def action() -> QueryTenantDataPage:
	if is_blank(query_name):
		# noinspection PyTypeChecker
		return tenant_service.find_by_text(None, pageable)
	else:
		# noinspection PyTypeChecker
		return tenant_service.find_by_text(query_name, pageable)

async def update_pipeline_enabled_by_id(
		pipeline_id: Optional[PipelineId], enabled: Optional[bool],
		principal_service: PrincipalService = Depends(get_admin_principal)
) -> None:
	"""
	enabling/disabling a pipeline does not increase the optimistic lock version
	"""
	if is_blank(pipeline_id):
		raise_400('Pipeline id is required.')
	if enabled is None:
		raise_400('Enabled is required.')

	pipeline_service = get_pipeline_service(principal_service)

	def action() -> None:
		existing_tenant_id: Optional[TenantId] = pipeline_service.find_tenant_id(pipeline_id)
		if existing_tenant_id is None:
			raise_404()
		elif existing_tenant_id != principal_service.get_tenant_id():
			raise_403()
		# noinspection PyTypeChecker
		pipeline: Pipeline = pipeline_service.update_enablement(
			pipeline_id, enabled, principal_service.get_tenant_id())
		post_update_pipeline_enablement(pipeline, pipeline_service)

	trans(pipeline_service, action)

def get_truncation_count(self) -> Optional[int]:
	chart = self.get_report().chart
	if chart is not None and chart.settings is not None and chart.settings.truncation is not None:
		if is_blank(chart.settings.truncation.count):
			return None
		return chart.settings.truncation.count
	return None

def sync_topic_structure_storage(
		topic: Topic, original_topic: Optional[Topic], principal_service: PrincipalService) -> None:
	if ask_trino_enabled():
		sync_for_trino(topic, original_topic, principal_service)
	if not ask_sync_topic_to_storage():
		return

	if original_topic is None:
		create_topic_structure(topic, principal_service)
	elif ask_replace_topic_to_storage():
		drop_topic_structure(original_topic, principal_service)
		create_topic_structure(topic, principal_service)
	elif topic.dataSourceId != original_topic.dataSourceId:
		# not in same data source, leave original as is
		create_topic_structure(topic, principal_service)
	elif is_blank(original_topic.dataSourceId):
		# no data source declared in original, typically no storage entity exists
		# simply create the new one
		create_topic_structure(topic, principal_service)
	elif beautify_name(topic) != beautify_name(original_topic):
		# name changed, leave original as is
		create_topic_structure(topic, principal_service)
	else:
		# same name, same data source, update it
		update_topic_structure(topic, original_topic, principal_service)

def ask_topic_storage(
		topic_or_schema: Union[Topic, TopicSchema], principal_service: PrincipalService) -> TopicDataStorageSPI:
	topic = topic_or_schema if isinstance(topic_or_schema, Topic) else topic_or_schema.get_topic()
	data_source_id = topic.dataSourceId
	if is_blank(data_source_id):
		raise DataKernelException(
			f'Data source is not defined for topic[id={topic.topicId}, name={topic.name}]')
	build = CacheService.data_source().get_builder(data_source_id)
	if build is not None:
		return build()

	data_source = get_data_source_service(principal_service).find_by_id(data_source_id)
	if data_source is None:
		raise DataKernelException(
			f'Data source not declared for topic'
			f'[id={topic.topicId}, name={topic.name}, dataSourceId={data_source_id}]')

	build = build_topic_data_storage(data_source)
	CacheService.data_source().put_builder(data_source_id, build)
	return build()

async def load_user_by_id(
		user_id: Optional[UserId] = None,
		principal_service: PrincipalService = Depends(get_any_principal)
) -> User:
	if is_blank(user_id):
		raise_400('User id is required.')
	if not principal_service.is_admin():
		# console user cannot visit other users
		if user_id != principal_service.get_user_id():
			raise_403()

	user_service = get_user_service(principal_service)

	def action() -> User:
		# noinspection PyTypeChecker
		user: User = user_service.find_by_id(user_id)
		if user is None:
			raise_404()
		# check tenant id
		if not principal_service.is_super_admin():
			# tenant id must match current principal's, except current is super admin
			if user.tenantId != principal_service.get_tenant_id():
				raise_404()
		# remove password
		clear_pwd(user)
		return user

	return trans_readonly(user_service, action)

async def delete_user_by_id_by_super_admin(
		user_id: Optional[UserId] = None,
		principal_service: PrincipalService = Depends(get_super_admin_principal)
) -> User:
	if not ask_tuple_delete_enabled():
		raise_404('Not Found')
	if is_blank(user_id):
		raise_400('User id is required.')

	user_service = get_user_service(principal_service)

	def action() -> User:
		# noinspection PyTypeChecker
		user: User = user_service.delete(user_id)
		if user is None:
			raise_404()
		user_group_ids = user.groupIds
		if user_group_ids is not None and len(user_group_ids) != 0:
			user_group_ids = ArrayHelper(user_group_ids).filter(lambda x: is_not_blank(x)).to_list()
			remove_user_from_groups(user_service, user.userId, user_group_ids, user.tenantId)
		return user

	return trans(user_service, action)

async def update_dashboard_name_by_id(
		dashboard_id: Optional[DashboardId], name: Optional[str],
		principal_service: PrincipalService = Depends(get_console_principal)
) -> None:
	"""
	renaming a dashboard does not increase the optimistic lock version
	"""
	if is_blank(dashboard_id):
		raise_400('Dashboard id is required.')

	dashboard_service = get_dashboard_service(principal_service)

	# noinspection DuplicatedCode
	def action() -> None:
		existing_one = dashboard_service.find_tenant_and_user(dashboard_id)
		if existing_one is None:
			raise_404()
		existing_tenant_id, existing_user_id = existing_one
		if existing_tenant_id != principal_service.get_tenant_id():
			raise_403()
		elif existing_user_id != principal_service.get_user_id():
			raise_403()
		# noinspection PyTypeChecker
		dashboard_service.update_name(
			dashboard_id, name, principal_service.get_user_id(), principal_service.get_tenant_id())

	trans(dashboard_service, action)

def validate_user(
		a_tuple: UserBasedTuple, user_service: UserService, principal_service: PrincipalService) -> None:
	if not principal_service.is_admin():
		raise_403()
	if is_blank(a_tuple.userId):
		if principal_service.is_super_admin():
			raise_400('User id is required.')
		elif principal_service.is_tenant_admin():
			a_tuple.userId = principal_service.get_user_id()
		else:
			raise_403()
	else:
		if a_tuple.userId == principal_service.get_user_id():
			if principal_service.is_super_admin():
				raise_400(f'Incorrect user id[{a_tuple.userId}].')
		else:
			user: Optional[User] = user_service.find_by_id(a_tuple.userId)
			if user is None:
				raise_400('User id is required.')
			if principal_service.is_super_admin():
				if user.tenantId == principal_service.get_tenant_id():
					raise_400(f'Incorrect user id[{a_tuple.userId}].')
			elif principal_service.is_tenant_admin():
				if user.tenantId != principal_service.get_tenant_id():
					raise_400(f'Incorrect user id[{a_tuple.userId}].')

async def load_enum_by_id(
		enum_id: Optional[EnumId] = None,
		principal_service: PrincipalService = Depends(get_console_principal)
) -> Enum:
	if is_blank(enum_id):
		raise_400('Enumeration id is required.')

	enum_service = get_enum_service(principal_service)

	def action() -> Enum:
		# noinspection PyTypeChecker
		an_enum: Enum = enum_service.find_by_id(enum_id)
		if an_enum is None:
			raise_404()
		# tenant id must match current principal's
		if an_enum.tenantId != principal_service.get_tenant_id():
			raise_404()
		enum_item_service = get_enum_item_service(enum_service)
		enum_list: List[EnumItem] = enum_item_service.find_by_enum_id(enum_id)
		if ArrayHelper(enum_list).some(lambda x: x.tenantId != principal_service.get_tenant_id()):
			raise_500(
				None,
				f'Items of enumeration[enumId={an_enum.enumId}] has incorrect data, '
				'check and correct it at meta storage manually.')
		if enum_list is None:
			an_enum.items = []
		else:
			an_enum.items = enum_list
		return an_enum

	return trans_readonly(enum_service, action)

async def delete_subject_by_id(
		subject_id: Optional[SubjectId],
		principal_service: PrincipalService = Depends(get_console_principal)
) -> None:
	if is_blank(subject_id):
		raise_400('Subject id is required.')

	subject_service = get_subject_service(principal_service)

	# noinspection DuplicatedCode
	def action() -> None:
		# noinspection PyTypeChecker
		existing_subject: Optional[Subject] = subject_service.find_by_id(subject_id)
		if existing_subject is None:
			raise_404()
		if existing_subject.tenantId != principal_service.get_tenant_id():
			raise_403()
		if not principal_service.is_tenant_admin() \
				and existing_subject.userId != principal_service.get_user_id():
			raise_403()
		subject_service.delete(subject_id)
		report_service: ReportService = get_report_service(subject_service)
		report_service.delete_by_subject_id(subject_id)

	trans(subject_service, action)

async def fetch_topic_data_count(
		topic_id: Optional[TopicId] = None, tenant_id: Optional[TenantId] = None,
		criteria: Optional[ParameterJoint] = None,
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> List[str]:
	if is_blank(topic_id):
		raise_400('Topic id is required.')
	tenant_id = validate_tenant_id(tenant_id, principal_service)
	principal_service = fake_to_tenant(principal_service, tenant_id)
	schema = get_topic_service(principal_service).find_schema_by_id(topic_id, tenant_id)
	storage = ask_topic_storage(schema, principal_service)
	service = ask_topic_data_service(schema, storage, principal_service)
	if criteria is None:
		rows = service.find_distinct_values(None, [TopicDataColumnNames.ID.value], False)
	else:
		parsed_criteria = parse_condition_for_storage(criteria, [schema], principal_service, False)
		empty_variables = PipelineVariables(None, None, None)
		rows = service.find_distinct_values(
			[parsed_criteria.run(empty_variables, principal_service)],
			[TopicDataColumnNames.ID.value], False)
	return ArrayHelper(rows).map(lambda x: str(x.get(TopicDataColumnNames.ID.value))).to_list()

def try_to_import_connected_space(
		connected_space: ConnectedSpaceWithSubjects,
		connected_space_service: ConnectedSpaceService, do_update: bool
) -> ConnectedSpaceImportDataResult:
	if is_blank(connected_space.connectId):
		connected_space_service.redress_storable_id(connected_space)
		connected_space_service.create(connected_space)
	else:
		existing_connected_space: Optional[ConnectedSpace] = \
			connected_space_service.find_by_id(connected_space.connectId)
		if existing_connected_space is None:
			connected_space_service.create(connected_space)
		elif do_update:
			connected_space_service.update(connected_space)
		else:
			return ConnectedSpaceImportDataResult(
				connectId=connected_space.connectId, name=connected_space.name, passed=False,
				reason='Connected space already exists.')

	def set_connect_id(subject_or_report: Union[SubjectWithReports, Report], connect_id: ConnectedSpaceId) -> None:
		subject_or_report.connectId = connect_id

	ArrayHelper(connected_space.subjects) \
		.each(lambda x: set_connect_id(x, connected_space.connectId)) \
		.map(lambda x: x.reports) \
		.flatten() \
		.filter(lambda x: x is not None) \
		.each(lambda x: set_connect_id(x, connected_space.connectId))

	return ConnectedSpaceImportDataResult(
		connectId=connected_space.connectId, name=connected_space.name, passed=True)

def action(connected_space: ConnectedSpace) -> ConnectedSpace:
	space_id = connected_space.spaceId
	if is_blank(space_id):
		raise_400('Space id is required.')
	space_service = get_space_service(connected_space_service)
	space: Optional[Space] = space_service.find_by_id(space_id)
	if space is None:
		raise_400('Incorrect space id.')
	if space.tenantId != principal_service.get_tenant_id():
		raise_403()

	connected_space.userId = principal_service.get_user_id()
	connected_space.tenantId = principal_service.get_tenant_id()
	connected_space.lastVisitTime = get_current_time_in_seconds()
	if connected_space_service.is_storable_id_faked(connected_space.connectId):
		connected_space_service.redress_storable_id(connected_space)
		# noinspection PyTypeChecker
		connected_space: ConnectedSpace = connected_space_service.create(connected_space)
	else:
		# noinspection PyTypeChecker
		existing_connected_space: Optional[ConnectedSpace] = \
			connected_space_service.find_by_id(connected_space.connectId)
		if existing_connected_space is not None:
			if existing_connected_space.tenantId != connected_space.tenantId:
				raise_403()
			if existing_connected_space.userId != connected_space.userId:
				raise_403()
		# noinspection PyTypeChecker
		connected_space: ConnectedSpace = connected_space_service.update(connected_space)

	return connected_space

async def delete_catalog_by_id_by_super_admin(
		catalog_id: Optional[CatalogId] = None,
		principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> Catalog:
	if is_blank(catalog_id):
		raise_400('Catalog id is required.')

	catalog_service = get_catalog_service(principal_service)

	def action() -> Catalog:
		if principal_service.is_super_admin():
			# noinspection PyTypeChecker
			catalog: Optional[Catalog] = catalog_service.delete(catalog_id)
		else:
			existing_catalog = catalog_service.find_by_id(catalog_id)
			if existing_catalog is None:
				catalog = existing_catalog
			elif existing_catalog.tenantId != principal_service.get_tenant_id():
				catalog = None
			else:
				catalog = catalog_service.delete(catalog_id)
		if catalog is None:
			raise_404()
		return catalog

	return trans(catalog_service, action)

async def find_updated_topics(
		lastModified: LastModified,
		principal_service: PrincipalService = Depends(get_admin_principal)
) -> List[Topic]:
	if lastModified is None or is_blank(lastModified.at):
		return []
	parsed, last_modified_at = is_date(lastModified.at, ask_all_date_formats())
	if not parsed:
		return []
	if not isinstance(last_modified_at, datetime):
		last_modified_at = datetime(
			year=last_modified_at.year, month=last_modified_at.month, day=last_modified_at.day,
			hour=0, minute=0, second=0, microsecond=0, tzinfo=None)

	topic_service = get_topic_service(principal_service)

	def action() -> List[Topic]:
		return topic_service.find_modified_after(last_modified_at, principal_service.get_tenant_id())

	return trans_readonly(topic_service, action)

async def load_achievement_by_id(
		achievement_id: Optional[AchievementId],
		principal_service: PrincipalService = Depends(get_console_principal)
) -> Achievement:
	if is_blank(achievement_id):
		raise_400('Achievement id is required.')

	achievement_service = get_achievement_service(principal_service)

	# noinspection DuplicatedCode
	def action() -> Achievement:
		# noinspection PyTypeChecker
		achievement: Achievement = achievement_service.find_by_id(achievement_id)
		if achievement is None:
			raise_404()
		# user id must match current principal's
		if achievement.userId != principal_service.get_user_id():
			raise_404()
		# tenant id must match current principal's
		if achievement.tenantId != principal_service.get_tenant_id():
			raise_404()
		return achievement

	return trans_readonly(achievement_service, action)