def find_by_topic_id(self, topic_id: TopicId) -> List[Pipeline]:
    pipeline_ids = CacheService.pipelines_by_topic().get(topic_id)
    if pipeline_ids is not None:
        pipelines = ArrayHelper(pipeline_ids) \
            .map(lambda x: self.find_by_id(x)) \
            .filter(lambda x: x is not None).to_list()
        if len(pipelines) != len(pipeline_ids):
            loaded = ArrayHelper(pipelines).map(lambda x: x.pipelineId).to_list()
            raise Exception(f'Expect pipelines[{pipeline_ids}], but got [{loaded}] only.')
        return pipelines

    storage_service = PipelineStorageService(
        ask_meta_storage(), ask_snowflake_generator(), self.principalService)
    storage_service.begin_transaction()
    try:
        # noinspection PyTypeChecker
        pipelines: List[Pipeline] = storage_service.find_by_topic_id(
            topic_id, self.principalService.get_tenant_id())
        if len(pipelines) == 0:
            CacheService.pipelines_by_topic().declare_no_pipelines(topic_id)
            return pipelines
        return ArrayHelper(pipelines).each(lambda x: CacheService.pipeline().put(x)).to_list()
    finally:
        storage_service.close_transaction()
def action(space: Space) -> Space:
    if space_service.is_storable_id_faked(space.spaceId):
        space_service.redress_storable_id(space)
        user_group_ids = ArrayHelper(space.groupIds).distinct().to_list()
        space.groupIds = user_group_ids
        topic_ids = ArrayHelper(space.topicIds).distinct().to_list()
        space.topicIds = topic_ids
        # check topics
        validate_topics(space_service, topic_ids, space.tenantId)
        # noinspection PyTypeChecker
        space: Space = space_service.create(space)
        # synchronize space to user groups
        sync_space_to_groups(space_service, space.spaceId, user_group_ids, space.tenantId)
    else:
        # noinspection PyTypeChecker,DuplicatedCode
        existing_space: Optional[Space] = space_service.find_by_id(space.spaceId)
        if existing_space is not None:
            if existing_space.tenantId != space.tenantId:
                raise_403()
            user_group_ids = ArrayHelper(space.groupIds).distinct().to_list()
            space.groupIds = user_group_ids
            topic_ids = ArrayHelper(space.topicIds).distinct().to_list()
            space.topicIds = topic_ids
            # check topics
            validate_topics(space_service, topic_ids, space.tenantId)
            # noinspection PyTypeChecker
            space: Space = space_service.update(space)
            # remove space from user groups, in case user groups are removed
            removed_user_group_ids = ArrayHelper(existing_space.groupIds).difference(user_group_ids).to_list()
            remove_space_from_groups(space_service, space.spaceId, removed_user_group_ids, space.tenantId)
            # synchronize space to user groups
            sync_space_to_groups(space_service, space.spaceId, user_group_ids, space.tenantId)
    return space
def translate_to_array_row(self, row: Dict[str, Any]) -> List[Any]:
    return [
        *ArrayHelper(self.get_report().indicators).map_with_index(
            lambda x, index: row.get(self.as_indicator_name(x, index))).to_list(),
        *ArrayHelper(self.get_report().dimensions).map_with_index(
            lambda x, index: row.get(self.as_dimension_name(x, index))).to_list()
    ]
def get_result_columns(self) -> List[str]:
    return [
        *ArrayHelper(self.get_report().indicators).map_with_index(
            lambda x, index: self.as_indicator_name(x, index)).to_list(),
        *ArrayHelper(self.get_report().dimensions).map_with_index(
            lambda x, index: self.as_dimension_name(x, index)).to_list()
    ]
def find_by_measure_method(
        self, measure_methods: List[Tuple[MeasureMethod, Optional[EnumId]]],
        tenant_id: Optional[TenantId]) -> List[Bucket]:
    criteria = []
    non_enum_measure_methods = ArrayHelper(measure_methods) \
        .filter(lambda x: x[0] != MeasureMethod.ENUM and x[1] is None) \
        .map(lambda x: x[0]).to_list()
    enum_measure_methods = ArrayHelper(measure_methods) \
        .filter(lambda x: x[0] == MeasureMethod.ENUM and x[1] is not None) \
        .map(lambda x: x[1]).to_list()
    if len(non_enum_measure_methods) != 0 and len(enum_measure_methods) != 0:
        criteria.append(EntityCriteriaJoint(
            conjunction=EntityCriteriaJointConjunction.OR,
            children=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='measure'),
                    operator=EntityCriteriaOperator.IN,
                    right=non_enum_measure_methods),
                EntityCriteriaJoint(
                    conjunction=EntityCriteriaJointConjunction.AND,
                    children=[
                        EntityCriteriaExpression(
                            left=ColumnNameLiteral(columnName='measure'),
                            right=MeasureMethod.ENUM),
                        EntityCriteriaExpression(
                            left=ColumnNameLiteral(columnName='enum_id'),
                            operator=EntityCriteriaOperator.IN,
                            right=enum_measure_methods)
                    ])
            ]))
    elif len(non_enum_measure_methods) != 0:
        criteria.append(EntityCriteriaExpression(
            left=ColumnNameLiteral(columnName='measure'),
            operator=EntityCriteriaOperator.IN,
            right=non_enum_measure_methods))
    elif len(enum_measure_methods) != 0:
        criteria.append(EntityCriteriaJoint(
            conjunction=EntityCriteriaJointConjunction.AND,
            children=[
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='measure'),
                    right=MeasureMethod.ENUM),
                EntityCriteriaExpression(
                    left=ColumnNameLiteral(columnName='enum_id'),
                    operator=EntityCriteriaOperator.IN,
                    right=enum_measure_methods)
            ]))
    if tenant_id is not None and len(tenant_id.strip()) != 0:
        criteria.append(EntityCriteriaExpression(
            left=ColumnNameLiteral(columnName='tenant_id'), right=tenant_id))
    # noinspection PyTypeChecker
    return self.storage.find(self.get_entity_finder(criteria=criteria))
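# Illustration only of how the incoming pairs are split above. The enum id is a
# made-up value, and MeasureMethod.YEAR is used only as an example non-enum method:
example_measure_methods = [
    (MeasureMethod.YEAR, None),      # non-enum entry -> matched via measure IN (...)
    (MeasureMethod.ENUM, 'enum-1'),  # enum entry -> matched via measure = ENUM AND enum_id IN (...)
]
# find_by_measure_method would derive:
#   non_enum_measure_methods == [MeasureMethod.YEAR]
#   enum_measure_methods == ['enum-1']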
def load_subjects_and_reports(
        connected_space: ConnectedSpace, connected_space_service: ConnectedSpaceService,
        should_update_last_visit_time: bool) -> ConnectedSpaceWithSubjects:
    connect_id = connected_space.connectId
    subject_service = get_subject_service(connected_space_service)
    subjects: List[Subject] = subject_service.find_by_connect_id(connect_id)
    report_service = get_report_service(connected_space_service)
    reports: List[Report] = report_service.find_by_connect_id(connect_id)
    # noinspection DuplicatedCode
    report_map: Dict[SubjectId, List[Report]] = ArrayHelper(reports).group_by(lambda x: x.subjectId)
    connected_space_with_subjects = ConnectedSpaceWithSubjects(**connected_space.dict())
    connected_space_with_subjects.subjects = ArrayHelper(subjects) \
        .map(lambda x: to_subject_with_reports(x, report_map.get(x.subjectId))).to_list()
    if should_update_last_visit_time:
        # update last visit time
        update_last_visit_time(connected_space_service, connected_space.connectId, connected_space)
        ArrayHelper(subjects) \
            .each(lambda x: update_last_visit_time(subject_service, x.subjectId, x))
        ArrayHelper(reports) \
            .each(lambda x: update_last_visit_time(report_service, x.reportId, x))
    return connected_space_with_subjects
def action() -> List[Space]:
    tenant_id: TenantId = principal_service.get_tenant_id()
    user_id = principal_service.get_user_id()
    # noinspection PyTypeChecker
    user: User = get_user_service(space_service).find_by_id(user_id)
    user_group_ids = user.groupIds
    if user_group_ids is None or len(user_group_ids) == 0:
        return []
    user_group_ids = ArrayHelper(user_group_ids).filter(lambda x: is_not_blank(x)).to_list()
    if len(user_group_ids) == 0:
        return []
    user_group_service = get_user_group_service(space_service)
    user_groups = user_group_service.find_by_ids(user_group_ids, tenant_id)

    def gather_space_ids(distinct_space_ids: List[SpaceId], user_group: UserGroup) -> List[SpaceId]:
        given_space_ids = user_group.spaceIds
        if given_space_ids is None or len(given_space_ids) == 0:
            return distinct_space_ids
        given_space_ids = ArrayHelper(given_space_ids).filter(lambda x: is_not_blank(x)).to_list()
        for space_id in given_space_ids:
            if space_id not in distinct_space_ids:
                distinct_space_ids.append(space_id)
        return distinct_space_ids

    space_ids = ArrayHelper(user_groups).reduce(gather_space_ids, [])
    return space_service.find_by_ids(space_ids, tenant_id)
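# Plain-Python equivalent of the reduce above, with made-up ids, showing that
# space ids are de-duplicated while keeping first-appearance order:
example_groups = [['s1', 's2'], ['s2', 's3'], []]
example_space_ids = []
for ids in example_groups:
    for space_id in ids:
        if space_id not in example_space_ids:
            example_space_ids.append(space_id)
# example_space_ids == ['s1', 's2', 's3']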
def action(user: User) -> User:
    # crypt password
    pwd = user.password
    if is_not_blank(pwd):
        user.password = crypt_password(pwd)
    if user.isActive is None:
        user.isActive = True

    if user_service.is_storable_id_faked(user.userId):
        if principal_service.is_super_admin() and check_user_group:
            if user.groupIds is not None and len(user.groupIds) != 0:
                # for super admin create user, there is no user group allowed
                raise_400('No user group allowed for creating user by super admin.')
        user_service.redress_storable_id(user)
        user_group_ids = ArrayHelper(user.groupIds).distinct().to_list()
        user.groupIds = user_group_ids
        # noinspection PyTypeChecker
        user: User = user_service.create(user)
        # synchronize user to user groups
        sync_user_to_groups(user_service, user.userId, user_group_ids, user.tenantId)
    else:
        # noinspection PyTypeChecker
        existing_user: Optional[User] = user_service.find_by_id(user.userId)
        if existing_user is not None:
            if existing_user.tenantId != user.tenantId:
                raise_403()
            elif is_blank(user.password):
                # keep original password
                user.password = existing_user.password

            if principal_service.is_super_admin() and check_user_group:
                # for super admin update user, simply keep user group
                user.groupIds = existing_user.groupIds
            else:
                user_group_ids = ArrayHelper(user.groupIds).distinct().to_list()
                user.groupIds = user_group_ids
            user_group_ids = user.groupIds
            # noinspection PyTypeChecker
            user: User = user_service.update(user)
            if principal_service.is_tenant_admin():
                # remove user from user groups, in case user groups are removed
                removed_user_group_ids = ArrayHelper(existing_user.groupIds).difference(user_group_ids).to_list()
                remove_user_from_groups(user_service, user.userId, removed_user_group_ids, user.tenantId)
                # synchronize user to user groups
                sync_user_to_groups(user_service, user.userId, user_group_ids, user.tenantId)

    # remove password
    clear_pwd(user)
    return user
def action(an_indicator: Indicator) -> Indicator:
    if indicator_service.is_storable_id_faked(an_indicator.indicatorId):
        indicator_service.redress_storable_id(an_indicator)
        user_group_ids = ArrayHelper(an_indicator.groupIds).distinct().to_list()
        an_indicator.groupIds = user_group_ids
        # noinspection PyTypeChecker
        an_indicator: Indicator = indicator_service.create(an_indicator)
        # synchronize indicator to user groups
        sync_indicator_to_groups(
            indicator_service, an_indicator.indicatorId, user_group_ids, an_indicator.tenantId)
    else:
        # noinspection PyTypeChecker
        existing_indicator: Optional[Indicator] = indicator_service.find_by_id(an_indicator.indicatorId)
        if existing_indicator is not None:
            if existing_indicator.tenantId != an_indicator.tenantId:
                raise_403()
            user_group_ids = ArrayHelper(an_indicator.groupIds).distinct().to_list()
            an_indicator.groupIds = user_group_ids
            # noinspection PyTypeChecker
            an_indicator: Indicator = indicator_service.update(an_indicator)
            # remove indicator from user groups, in case user groups are removed
            removed_user_group_ids = ArrayHelper(existing_indicator.groupIds).difference(user_group_ids).to_list()
            remove_indicator_from_groups(
                indicator_service, an_indicator.indicatorId, removed_user_group_ids, an_indicator.tenantId)
            # synchronize indicator to user groups
            sync_indicator_to_groups(
                indicator_service, an_indicator.indicatorId, user_group_ids, an_indicator.tenantId)
    return an_indicator
def action(enumeration: Enum) -> Enum:
    if enum_service.is_storable_id_faked(enumeration.enumId):
        enum_service.redress_storable_id(enumeration)
        if enumeration.items is None:
            enumeration.items = []
        # noinspection PyTypeChecker
        enumeration: Enum = enum_service.create(enumeration)
        enum_item_service = get_enum_item_service(enum_service)
        ArrayHelper(enumeration.items).each(
            lambda x: create_enum_item(enum_item_service, x, enumeration))
    else:
        # noinspection PyTypeChecker
        existing_enum: Optional[Enum] = enum_service.find_by_id(enumeration.enumId)
        if existing_enum is not None:
            if existing_enum.tenantId != enumeration.tenantId:
                raise_403()
            if enumeration.items is None:
                enumeration.items = []
            # noinspection PyTypeChecker
            enumeration: Enum = enum_service.update(enumeration)
            enum_item_service = get_enum_item_service(enum_service)
            enum_item_service.delete_by_enum_id(enumeration.enumId)
            ArrayHelper(enumeration.items).each(
                lambda x: create_enum_item(enum_item_service, x, enumeration))
    return enumeration
def action() -> SubjectForIndicator:
    subject: Optional[Subject] = subject_service.find_by_id(subject_id)
    if subject is None:
        raise_404()
    if subject.tenantId != principal_service.get_tenant_id():
        raise_403()
    connected_space_service = get_connected_space_service(subject_service)
    connected_space: Optional[ConnectedSpace] = connected_space_service.find_by_id(subject.connectId)
    if connected_space is None:
        raise IndicatorKernelException(f'Connected space not found for subject[id={subject_id}].')
    space_service = get_space_service(subject_service)
    space: Optional[Space] = space_service.find_by_id(connected_space.spaceId)
    if space is None:
        raise IndicatorKernelException(f'Space not found for subject[id={subject_id}].')
    topic_service = get_topic_service(subject_service)
    topics = topic_service.find_by_ids(space.topicIds, principal_service.get_tenant_id())
    topic_map: Dict[TopicId, Topic] = ArrayHelper(topics).to_map(lambda x: x.topicId, lambda x: x)
    all_topic_ids = space.topicIds
    all_topics = ArrayHelper(all_topic_ids).map(lambda x: topic_map[x]).to_list()
    return SubjectForIndicator(**subject.dict(), topics=all_topics)
def find_distinct_values(self, finder: EntityDistinctValuesFinder) -> EntityList:
    document = self.find_document(finder.name)
    where = build_criteria_for_statement([document], finder.criteria)
    if len(finder.distinctColumnNames) != 1 or not finder.distinctValueOnSingleColumn:
        def add_column(columns: Dict[str, int], column_name: str) -> Dict[str, int]:
            columns[column_name] = 1
            return columns

        project = ArrayHelper(finder.distinctColumnNames).reduce(add_column, {})
        sort = build_sort_for_statement(finder.sort)
        results = self.connection.find_with_project(document, project, where, sort)
    else:
        results = self.connection.find_distinct(document, finder.distinctColumnNames[0], where)
    return ArrayHelper(results) \
        .map(self.remove_object_id) \
        .map(finder.shaper.deserialize) \
        .to_list()
def build_criteria_joint(tables: List[Table], joint: EntityCriteriaJoint):
    conjunction = joint.conjunction
    if conjunction == EntityCriteriaJointConjunction.AND:
        return and_(*ArrayHelper(joint.children).map(lambda x: build_criteria_statement(tables, x)).to_list())
    elif conjunction == EntityCriteriaJointConjunction.OR:
        return or_(*ArrayHelper(joint.children).map(lambda x: build_criteria_statement(tables, x)).to_list())
    else:
        raise UnsupportedCriteriaException(f'Unsupported criteria joint conjunction[{conjunction}].')
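# A minimal sketch of the target shape using SQLAlchemy core directly; the table
# and columns below are hypothetical and only illustrate how nested joints map to
# and_()/or_() as build_criteria_joint does:
from sqlalchemy import Column, MetaData, String, Table, and_, or_, select

example_metadata = MetaData()
example_users = Table(
    'users', example_metadata,
    Column('tenant_id', String), Column('name', String))
# an OR joint whose second child is an AND joint renders as:
example_stmt = select(example_users).where(or_(
    example_users.c.name == 'a',
    and_(example_users.c.tenant_id == '1', example_users.c.name == 'b')))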
def __init__(self, catalog: str, schema: str, topic: Topic):
    self.catalog = catalog
    self.schema = schema
    self.topic = topic
    self.factor_map = ArrayHelper(topic.factors).to_map(lambda x: x.factorId, lambda x: x)
    self.entity_name = f'{catalog}.{schema}.{as_table_name(topic.name)}'
    self.alias = self.entity_name
def action() -> Tuple[List[Topic], Callable[[], None]]:
    validate_tenant_based_tuples(topics, get_user_service(topic_service), principal_service)
    save = ask_save_topic_action(topic_service, principal_service, True)
    # noinspection PyTypeChecker
    results = ArrayHelper(topics).map(lambda x: save(x)).to_list()
    saved_topics = ArrayHelper(results).map(lambda x: x[0]).to_list()
    tails = ArrayHelper(results).map(lambda x: x[1]).to_list()
    return saved_topics, bundling_tails(tails)
def parse_encrypt_factors(topic: Topic) -> List[EncryptFactorGroup]:
    groups = ArrayHelper(topic.factors) \
        .filter(lambda x: x.encrypt is not None and x.encrypt != FactorEncryptMethod.NONE) \
        .map(lambda x: EncryptFactor(x)) \
        .filter(lambda x: is_not_blank(x.factorName)) \
        .group_by(lambda x: x.names[0])
    return ArrayHelper(list(groups.items())) \
        .map(lambda x: EncryptFactorGroup(name=x[0], factors=x[1])).to_list()
def parse_date_or_time_factors(topic: Topic) -> List[DateOrTimeFactorGroup]:
    groups = ArrayHelper(topic.factors) \
        .filter(is_date_or_time) \
        .map(lambda x: DateOrTimeFactor(x)) \
        .filter(lambda x: is_not_blank(x.factorName)) \
        .group_by(lambda x: x.names[0])
    return ArrayHelper(list(groups.items())) \
        .map(lambda x: DateOrTimeFactorGroup(name=x[0], factors=x[1])).to_list()
def run(self, variables: PipelineVariables, principal_service: PrincipalService) -> bool:
    if self.jointType == ParameterJointType.OR:
        return ArrayHelper(self.filters).some(lambda x: x.run(variables, principal_service))
    else:
        # AND, or joint type not given
        return ArrayHelper(self.filters).every(lambda x: x.run(variables, principal_service))
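# The two branches mirror the built-ins: the OR branch behaves like any(),
# the AND/default branch like all(). Illustration with made-up checks:
example_checks = [lambda: True, lambda: False]
any(check() for check in example_checks)  # True  -- at least one filter passes
all(check() for check in example_checks)  # False -- every filter must pass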
def action(topic: Topic) -> Tuple[Topic, Callable[[], None]]:
    if topic_service.is_storable_id_faked(topic.topicId):
        topic_service.redress_storable_id(topic)
        redress_factor_ids(topic, topic_service)
        # noinspection PyTypeChecker
        topic: Topic = topic_service.create(topic)
        tail = sync_topic_structure(topic, None, principal_service)
    else:
        # noinspection PyTypeChecker
        existing_topic: Optional[Topic] = topic_service.find_by_id(topic.topicId)
        if existing_topic is not None:
            if existing_topic.tenantId != topic.tenantId:
                raise_403()
        redress_factor_ids(topic, topic_service)
        # noinspection PyTypeChecker
        topic: Topic = topic_service.update(topic)
        if handle_snapshots:
            scheduler_service = get_snapshot_scheduler_service(topic_service)
            schedulers = scheduler_service.find_by_topic(topic.topicId)
            if len(schedulers) != 0:
                # first find the task topic
                task_topic_name = as_snapshot_task_topic_name(topic)
                task_topic = topic_service.find_by_name_and_tenant(task_topic_name, topic.tenantId)
                if task_topic is None:
                    # create task topic
                    task_topic = create_snapshot_task_topic(topic)
                else:
                    # rebuild task topic
                    task_topic = rebuild_snapshot_task_topic(task_topic, topic)
                # save task topic
                task_topic, tail_task_topic = ask_save_topic_action(
                    topic_service, principal_service)(task_topic)
                # handle target topic and pipelines for each scheduler
                tails = ArrayHelper(schedulers) \
                    .map(lambda x: handle_scheduler(
                        x, topic, task_topic, topic_service, scheduler_service, principal_service)) \
                    .filter(lambda x: x is not None) \
                    .to_list()
                tail = combine_tail_actions(ArrayHelper(tails).grab(tail_task_topic).to_list())
            else:
                tail = sync_topic_structure(topic, existing_topic, principal_service)
        else:
            tail = sync_topic_structure(topic, existing_topic, principal_service)
    post_save_topic(topic, topic_service)
    return topic, tail
def __init__(self, name: str, factors: List[DateOrTimeFactor]):
    self.name = name
    # in reality, zero or one factor.
    # if there is one, its name is the same as the group's, and will not contain the group anymore
    self.factors = ArrayHelper(factors).filter(lambda x: len(x.names) == 1).to_list()
    groups = ArrayHelper(factors).filter(lambda x: len(x.names) > 1) \
        .each(lambda x: x.pop_first_name()) \
        .group_by(lambda x: x.names[0])
    self.groups = ArrayHelper(list(groups.items())) \
        .map(lambda x: DateOrTimeFactorGroup(name=x[0], factors=x[1])).to_list()
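# Illustration with made-up dotted factor names: a group receives factors whose
# first name segment equals the group name; single-segment names stay on the
# group itself, longer names drop that segment and are regrouped by the next one.
example_names = [['birth'], ['birth', 'date'], ['birth', 'place', 'city']]
example_own = [n for n in example_names if len(n) == 1]  # [['birth']] -> self.factors
example_children = {}
for rest in (n[1:] for n in example_names if len(n) > 1):
    example_children.setdefault(rest[0], []).append(rest)
# example_children == {'date': [['date']], 'place': [['place', 'city']]}
# each (name, factors) pair then becomes a nested DateOrTimeFactorGroup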
def fake_aggregate_columns(
        self, table_columns: List[FreeColumn]) -> Tuple[bool, List[FreeAggregateColumn]]:
    aggregated = ArrayHelper(table_columns) \
        .some(lambda x: x.arithmetic is not None and x.arithmetic != FreeAggregateArithmetic.NONE)
    return aggregated, [] if not aggregated else ArrayHelper(table_columns).map_with_index(
        lambda x, index: FreeAggregateColumn(
            name=f'column_{index + 1}',
            arithmetic=x.arithmetic,
            alias=x.alias)).to_list()
def find(self, topic_id: TopicId, start_time: datetime, end_time: datetime) -> Optional[TopicProfile]:
    schema = get_topic_schema(topic_id, self.principalService)
    if is_raw_topic(schema.get_topic()):
        raise DqcException(f'Raw topic[name={schema.get_topic().name}] is not supported for profiling.')
    storage = ask_topic_storage(schema, self.principalService)
    service = ask_topic_data_service(schema, storage, self.principalService)
    criteria = [
        EntityCriteriaExpression(
            left=ColumnNameLiteral(columnName=TopicDataColumnNames.TENANT_ID.value),
            right=self.principalService.get_tenant_id()),
        EntityCriteriaExpression(
            left=ColumnNameLiteral(columnName=TopicDataColumnNames.UPDATE_TIME.value),
            operator=EntityCriteriaOperator.GREATER_THAN_OR_EQUALS,
            right=start_time),
        EntityCriteriaExpression(
            left=ColumnNameLiteral(columnName=TopicDataColumnNames.UPDATE_TIME.value),
            operator=EntityCriteriaOperator.LESS_THAN_OR_EQUALS,
            right=end_time)
    ]
    data = service.find(criteria)
    columns = [
        TopicDataColumnNames.ID.value,
        *ArrayHelper(schema.get_topic().factors).map(lambda x: x.name).to_list(),
        TopicDataColumnNames.TENANT_ID.value,
        TopicDataColumnNames.INSERT_TIME.value,
        TopicDataColumnNames.UPDATE_TIME.value
    ]

    def row_to_list(row: Dict[str, Any]) -> List[Any]:
        return ArrayHelper(columns).map(lambda x: row.get(x)).to_list()

    data_frame = build_data_frame(ArrayHelper(data).map(row_to_list).to_list(), columns)
    data_frame = convert_data_frame_type_by_topic(data_frame, schema.get_topic())
    data_frame.drop([
        TopicDataColumnNames.TENANT_ID,
        TopicDataColumnNames.UPDATE_TIME,
        TopicDataColumnNames.INSERT_TIME,
        TopicDataColumnNames.AGGREGATE_ASSIST,
        TopicDataColumnNames.ID,
        TopicDataColumnNames.VERSION
    ], axis=1, inplace=True, errors='ignore')
    if data_frame.empty or len(data_frame.index) == 1:
        return None
    else:
        logger.info(f'memory_usage {data_frame.memory_usage(deep=True).sum()} bytes')
        profile = ProfileReport(
            data_frame, title=f'{schema.get_topic().name} data profile report', minimal=True)
        json_data = profile.to_json()
        json_constants_map = {
            '-Infinity': float('-Infinity'),
            'Infinity': float('Infinity'),
            'NaN': None,
        }
        return loads(json_data, parse_constant=lambda x: json_constants_map[x])
def fake_time_range_to_dataset_filter(self) -> Optional[ParameterJoint]:
    time_range_factor_id = self.inspection.timeRangeFactorId
    if is_blank(time_range_factor_id):
        return None
    time_range_factor = self.find_factor(time_range_factor_id, lambda: 'Time range factor not declared.')
    time_ranges = ArrayHelper(self.inspection.timeRanges) \
        .filter(lambda x: x is not None and x.value is not None).to_list()
    if len(time_ranges) == 0:
        # no ranges given
        return None
    operator = ParameterExpressionOperator.EQUALS if len(time_ranges) == 1 else ParameterExpressionOperator.IN
    right = time_ranges[0].value if len(time_ranges) == 1 \
        else ArrayHelper(time_ranges).map(lambda x: x.value).join(',')
    time_range_measure = self.inspection.timeRangeMeasure
    if self.has_year_or_month(time_range_factor):
        if time_range_measure == MeasureMethod.YEAR:
            compute_type = ParameterComputeType.YEAR_OF
        elif time_range_measure == MeasureMethod.MONTH:
            compute_type = ParameterComputeType.MONTH_OF
        else:
            raise IndicatorKernelException(
                f'Measure method[{time_range_measure}] for factor type[{time_range_factor.type}] is not supported.')
        joint = ParameterJoint(
            jointType=ParameterJointType.AND,
            filters=[
                ParameterExpression(
                    left=ComputedParameter(
                        kind=ParameterKind.COMPUTED,
                        type=compute_type,
                        parameters=[
                            TopicFactorParameter(
                                kind=ParameterKind.TOPIC,
                                topicId=self.topic.topicId,
                                factorId=time_range_factor_id)
                        ]
                    ),
                    operator=operator,
                    right=ConstantParameter(kind=ParameterKind.CONSTANT, value=str(right))
                )
            ]
        )
    else:
        joint = ParameterJoint(
            jointType=ParameterJointType.AND,
            filters=[
                ParameterExpression(
                    left=TopicFactorParameter(
                        kind=ParameterKind.TOPIC,
                        topicId=self.topic.topicId,
                        factorId=time_range_factor_id),
                    operator=operator,
                    right=ConstantParameter(kind=ParameterKind.CONSTANT, value=str(right))
                )
            ]
        )
    return joint
def build_criteria_joint(self, joint: EntityCriteriaJoint) -> str:
    conjunction = joint.conjunction
    if conjunction == EntityCriteriaJointConjunction.AND:
        return ArrayHelper(joint.children).map(lambda x: self.build_criteria_statement(x)) \
            .map(lambda x: f'({x})').join(' AND ')
    elif conjunction == EntityCriteriaJointConjunction.OR:
        return ArrayHelper(joint.children).map(lambda x: self.build_criteria_statement(x)) \
            .map(lambda x: f'({x})').join(' OR ')
    else:
        raise UnsupportedCriteriaException(f'Unsupported criteria joint conjunction[{conjunction}].')
def run(self, variables: PipelineVariables, principal_service: PrincipalService) -> EntityCriteriaJoint:
    if self.jointType == ParameterJointType.OR:
        return EntityCriteriaJoint(
            conjunction=EntityCriteriaJointConjunction.OR,
            children=ArrayHelper(self.filters).map(lambda x: x.run(variables, principal_service)).to_list()
        )
    else:
        # AND, or joint type not given
        return EntityCriteriaJoint(
            conjunction=EntityCriteriaJointConjunction.AND,
            children=ArrayHelper(self.filters).map(lambda x: x.run(variables, principal_service)).to_list()
        )
def parse_default_value_factors(topic: Topic) -> List[DefaultValueFactorGroup]:
    if ask_ignore_default_on_raw():
        return []
    groups = ArrayHelper(topic.factors) \
        .filter(lambda x: x.defaultValue is not None) \
        .map(lambda x: DefaultValueFactor(x)) \
        .filter(lambda x: is_not_blank(x.factorName)) \
        .group_by(lambda x: x.names[0])
    return ArrayHelper(list(groups.items())) \
        .map(lambda x: DefaultValueFactorGroup(name=x[0], factors=x[1])).to_list()
def run() -> None:
    snowflake_generator = ask_snowflake_generator()
    schedulers = find_enabled_jobs()
    # remove jobs which do not exist in enabled schedulers
    scheduler_ids = ArrayHelper(schedulers).map(lambda x: x.schedulerId).to_list()
    topic_snapshot_jobs.remove_jobs_but(scheduler_ids)
    # replace jobs
    ArrayHelper(schedulers) \
        .filter(lambda x: topic_snapshot_jobs.should_put_job(x.schedulerId, x.version)) \
        .map(lambda x: create_job(ioScheduler, x, snowflake_generator)) \
        .filter(lambda x: x is not None) \
        .each(lambda x: topic_snapshot_jobs.put_job(x[0].schedulerId, x[0].version, x[1]))
def action() -> List[TemplateConnectedSpace]:
    connected_spaces = connected_space_service.find_templates_by_space_id(
        space_id, principal_service.get_tenant_id())
    user_ids: List[UserId] = ArrayHelper(connected_spaces).map(lambda x: x.userId).distinct().to_list()
    user_service = get_user_service(connected_space_service)
    users: List[User] = user_service.find_by_ids(user_ids, principal_service.get_tenant_id())
    user_map: Dict[UserId, User] = ArrayHelper(users).to_map(lambda x: x.userId, lambda x: x)
    return ArrayHelper(connected_spaces).map(
        lambda x: to_template_connected_space(x, user_map)).to_list()
def is_list_on_variables(self, names: List[str]) -> bool:
    factor_name = self.variables_from.get(names[0])
    if is_blank(factor_name):
        return False
    else:
        factor_name = factor_name + '.' + ArrayHelper(names[1:]).join('.')
        factor: Optional[Factor] = ArrayHelper(self.topic.factors).find(lambda x: x.name == factor_name)
        if factor is None:
            return False
        else:
            return factor.type == FactorType.ARRAY
def is_list_on_trigger(self, names: List[str]) -> bool:
    if self.topic is None:
        # no topic declared, false
        return False
    name = ArrayHelper(names).join('.')
    factor: Optional[Factor] = ArrayHelper(self.topic.factors).find(lambda x: x.name == name)
    if factor is None:
        return False
    else:
        return factor.type == FactorType.ARRAY
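# Illustration with made-up factor names: a trigger path like ['items', 'quantity']
# is joined to 'items.quantity' and only counts as a list when the matched factor
# is declared with the ARRAY type.
example_factor_types = {'items': 'array', 'items.quantity': 'number'}
example_factor_types.get('.'.join(['items'])) == 'array'              # True
example_factor_types.get('.'.join(['items', 'quantity'])) == 'array'  # False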