    def aid_hierarchy(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        The given data might be modified in place; returns exactly the given object.
        """
        if self.should_aid_hierarchy():
            aid(data, [], ask_snowflake_generator())
        return data
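A minimal usage sketch of the contract stated in the docstring, assuming a hypothetical service instance exposing aid_hierarchy: the dict is aided in place, so the return value is the same object that was passed in.

payload = {'orderId': 1, 'items': [{'itemId': 1}]}
result = service.aid_hierarchy(payload)
assert result is payload  # same object; aid() presumably fills in generated ids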
	def find_schema_by_id(self, topic_id: TopicId, tenant_id: TenantId) -> Optional[TopicSchema]:
		if not self.principalService.is_super_admin():
			if self.principalService.get_tenant_id() != tenant_id:
				raise Exception('Forbidden')

		schema = CacheService.topic().get_schema(topic_id)
		if schema is not None:
			if schema.get_topic().tenantId != tenant_id:
				return None
			return schema

		storage_service = TopicStorageService(ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			topic: Topic = storage_service.find_by_id(topic_id)
			if topic is None:
				return None

			CacheService.topic().put(topic)
			schema = CacheService.topic().get_schema(topic.topicId)
			if schema is not None:
				if schema.get_topic().tenantId != tenant_id:
					return None
			return schema
		finally:
			storage_service.close_transaction()
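The schema lookup above follows the cache-first pattern that recurs in most of these examples: check the cache, fall back to meta storage inside a transaction, then populate the cache. A distilled sketch of that pattern, with hypothetical cache and storage_factory parameters standing in for CacheService and the concrete storage services:

def find_cached(entity_id, cache, storage_factory):
	# 1. serve from cache when possible
	entity = cache.get(entity_id)
	if entity is not None:
		return entity
	# 2. otherwise read from meta storage inside a transaction
	storage_service = storage_factory()
	storage_service.begin_transaction()
	try:
		entity = storage_service.find_by_id(entity_id)
		if entity is None:
			return None
		# 3. populate the cache before returning
		cache.put(entity)
		return entity
	finally:
		storage_service.close_transaction()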
Example #3
def create_jobs(ioScheduler: AsyncIOScheduler) -> None:
	schedulers = find_enabled_jobs()
	snowflake_generator = ask_snowflake_generator()
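	# create_job appears to return a (scheduler, job) pair, or None when no job is built,
	# hence the "is not None" filter and the x[0]/x[1] access in the chain below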
	ArrayHelper(schedulers) \
		.map(lambda x: create_job(ioScheduler, x, snowflake_generator)) \
		.filter(lambda x: x is not None) \
		.each(lambda x: topic_snapshot_jobs.put_job(x[0].schedulerId, x[0].version, x[1]))
Example #4
    def find_by_topic_id(self, topic_id: TopicId) -> List[Pipeline]:
        pipeline_ids = CacheService.pipelines_by_topic().get(topic_id)
        if pipeline_ids is not None:
            pipelines = ArrayHelper(pipeline_ids) \
             .map(lambda x: self.find_by_id(x)) \
             .filter(lambda x: x is not None).to_list()
            if len(pipelines) != len(pipeline_ids):
                loaded = ArrayHelper(pipelines).map(
                    lambda x: x.pipelineId).to_list()
                raise Exception(
                    f'Expect pipelines[{pipeline_ids}], but got [{loaded}] only.'
                )
            return pipelines

        storage_service = PipelineStorageService(ask_meta_storage(),
                                                 ask_snowflake_generator(),
                                                 self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            pipelines: List[Pipeline] = storage_service.find_by_topic_id(
                topic_id, self.principalService.get_tenant_id())
            if len(pipelines) == 0:
                CacheService.pipelines_by_topic().declare_no_pipelines(
                    topic_id)
                return pipelines

            return ArrayHelper(pipelines).each(
                lambda x: CacheService.pipeline().put(x)).to_list()
        finally:
            storage_service.close_transaction()
Example #5
    def __init__(self, schema: TopicSchema,
                 data_entity_helper: TopicDataEntityHelper,
                 storage: TopicDataStorageSPI,
                 principal_service: PrincipalService):
        super().__init__(schema, data_entity_helper)
        self.storage = storage
        self.principalService = principal_service
        self.snowflakeGenerator = ask_snowflake_generator()
Example #6
def register_topic_snapshot_job(scheduler: TopicSnapshotScheduler) -> None:
	scheduler_id = scheduler.schedulerId
	topic_snapshot_jobs.remove_job(scheduler_id)
	if not scheduler.enabled:
		return

	job = create_job(topic_snapshot_jobs.get_scheduler(), scheduler, ask_snowflake_generator())
	if job is not None:
		topic_snapshot_jobs.put_job(scheduler_id, scheduler.version, job[1])
async def handle_trigger_data(trigger_data: PipelineTriggerDataWithPAT) -> None:
	# TODO should log trigger data
	pat = trigger_data.pat
	if is_blank(pat):
		raise Exception('PAT not found.')
	principal_service = get_principal_by_pat(
		retrieve_authentication_manager(), pat, [UserRole.ADMIN, UserRole.SUPER_ADMIN])

	trace_id: PipelineTriggerTraceId = str(ask_snowflake_generator().next_id())
	await try_to_invoke_pipelines(trigger_data, trace_id, principal_service)
    def find_all(self) -> List[Tenant]:
        storage_service = TenantStorageService(
            ask_meta_storage(), ask_snowflake_generator(), self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            return storage_service.find_all()
        finally:
            storage_service.close_transaction()
	def find_should_monitored(self, tenant_id: TenantId) -> List[Topic]:
		storage_service = TopicStorageService(ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			topics = storage_service.find_all(tenant_id)
			# only business topics need to be monitored
			return ArrayHelper(topics).filter(lambda x: x.kind == TopicKind.BUSINESS).to_list()
		finally:
			storage_service.close_transaction()
Example #10
async def trigger_pipeline_async(
    trigger_data: PipelineTriggerData,
    principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> PipelineTriggerResult:
    trace_id = trigger_data.traceId if is_not_blank(trigger_data.traceId) \
        else str(ask_snowflake_generator().next_id())
    internal_data_id = await try_to_invoke_pipelines_async(
        trigger_data, trace_id, principal_service)
    return PipelineTriggerResult(received=True,
                                 traceId=trace_id,
                                 internalDataId=str(internal_data_id))
Example #11
	def run() -> None:
		snowflake_generator = ask_snowflake_generator()
		schedulers = find_enabled_jobs()
		# remove jobs which do not exist in the enabled schedulers
		scheduler_ids = ArrayHelper(schedulers).map(lambda x: x.schedulerId).to_list()
		topic_snapshot_jobs.remove_jobs_but(scheduler_ids)
		# replace jobs
		ArrayHelper(schedulers) \
			.filter(lambda x: topic_snapshot_jobs.should_put_job(x.schedulerId, x.version)) \
			.map(lambda x: create_job(ioScheduler, x, snowflake_generator)) \
			.filter(lambda x: x is not None) \
			.each(lambda x: topic_snapshot_jobs.put_job(x[0].schedulerId, x[0].version, x[1]))
def heart_beat_on_topics() -> None:
    topics = CacheService.topic().all()
    topic_service = TopicService(ask_meta_storage(), ask_snowflake_generator(),
                                 ask_super_admin())
    topic_service.begin_transaction()
    try:
        for topic in topics:
            loaded: Optional[Topic] = topic_service.find_by_id(topic.topicId)
            if loaded is None:
                CacheService.topic().remove(topic.topicId)
            elif loaded.lastModifiedAt > topic.lastModifiedAt or loaded.version > topic.version:
                CacheService.topic().put(loaded)
    finally:
        topic_service.close_transaction()
def heart_beat_on_pipelines() -> None:
    pipelines = CacheService.pipeline().all()
    pipeline_service = PipelineService(ask_meta_storage(),
                                       ask_snowflake_generator(),
                                       ask_super_admin())
    pipeline_service.begin_transaction()
    try:
        for pipeline in pipelines:
            loaded: Optional[Pipeline] = pipeline_service.find_by_id(
                pipeline.pipelineId)
            if loaded is None:
                CacheService.pipeline().remove(pipeline.pipelineId)
            elif loaded.lastModifiedAt > pipeline.lastModifiedAt or loaded.version > pipeline.version:
                CacheService.pipeline().put(loaded)
    finally:
        pipeline_service.close_transaction()
def heart_beat_on_tenants() -> None:
    tenants = CacheService.tenant().all()
    tenant_service = TenantService(ask_meta_storage(),
                                   ask_snowflake_generator(),
                                   ask_super_admin())
    tenant_service.begin_transaction()
    try:
        for tenant in tenants:
            loaded: Optional[Tenant] = tenant_service.find_by_id(
                tenant.tenantId)
            if loaded is None:
                CacheService.tenant().remove(tenant.tenantId)
            elif loaded.lastModifiedAt > tenant.lastModifiedAt or loaded.version > tenant.version:
                CacheService.tenant().put(loaded)
    finally:
        tenant_service.close_transaction()
def heart_beat_on_data_sources() -> None:
    data_sources = CacheService.data_source().all()
    data_source_service = DataSourceService(ask_meta_storage(),
                                            ask_snowflake_generator(),
                                            ask_super_admin())
    data_source_service.begin_transaction()
    try:
        for data_source in data_sources:
            loaded: Optional[DataSource] = data_source_service.find_by_id(
                data_source.dataSourceId)
            if loaded is None:
                CacheService.data_source().remove(data_source.dataSourceId)
            elif loaded.lastModifiedAt > data_source.lastModifiedAt or loaded.version > data_source.version:
                CacheService.data_source().put(loaded)
    finally:
        data_source_service.close_transaction()
def heart_beat_on_external_writers() -> None:
    external_writers = CacheService.external_writer().all()
    external_writer_service = ExternalWriterService(ask_meta_storage(),
                                                    ask_snowflake_generator(),
                                                    ask_super_admin())
    external_writer_service.begin_transaction()
    try:
        for external_writer in external_writers:
            loaded: Optional[ExternalWriter] = \
                external_writer_service.find_by_id(external_writer.writerId)
            if loaded is None:
                CacheService.external_writer().remove(external_writer.writerId)
            elif loaded.lastModifiedAt > external_writer.lastModifiedAt or loaded.version > external_writer.version:
                CacheService.external_writer().put(loaded)
    finally:
        external_writer_service.close_transaction()
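The five heartbeat functions above differ only in the cache, the meta service and the id accessor they use. A hypothetical generic helper, sketched under those assumptions only to make the shared refresh logic explicit:

def heart_beat_on(cached_items, service, cache, get_id) -> None:
    # compare every cached item against meta storage: evict deleted ones,
    # replace stale ones (newer lastModifiedAt or higher version wins)
    service.begin_transaction()
    try:
        for item in cached_items:
            loaded = service.find_by_id(get_id(item))
            if loaded is None:
                cache.remove(get_id(item))
            elif loaded.lastModifiedAt > item.lastModifiedAt or loaded.version > item.version:
                cache.put(loaded)
    finally:
        service.close_transaction()

For instance, heart_beat_on_topics would amount to heart_beat_on(CacheService.topic().all(), topic_service, CacheService.topic(), lambda x: x.topicId).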
Example #17
async def exchange_user(principal_service: PrincipalService = Depends(get_any_principal)) -> Optional[User]:
	"""
	returns current principal
	"""
	user_id = principal_service.get_user_id()
	user_service = UserService(ask_meta_storage(), ask_snowflake_generator(), principal_service)
	user_service.begin_transaction()
	try:
		# noinspection PyTypeChecker
		user: User = user_service.find_by_id(user_id)
		if user is None:
			return None
		else:
			del user.password
			return user
	finally:
		user_service.close_transaction()
    def find_by_id(self, tenant_id: TenantId) -> Optional[Tenant]:
        tenant = CacheService.tenant().get(tenant_id)
        if tenant is not None:
            return tenant

        storage_service = TenantStorageService(ask_meta_storage(),
                                               ask_snowflake_generator(),
                                               self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            tenant: Tenant = storage_service.find_by_id(tenant_id)
            if tenant is None:
                return None

            CacheService.tenant().put(tenant)
            return tenant
        finally:
            storage_service.close_transaction()
	def find_by_id(self, topic_id: TopicId) -> Optional[Topic]:
		topic = CacheService.topic().get(topic_id)
		if topic is not None:
			if topic.tenantId != self.principalService.get_tenant_id():
				raise DataKernelException(
					f'Topic[id={topic_id}] does not belong to current tenant[id={self.principalService.get_tenant_id()}].')
			return topic

		storage_service = TopicStorageService(ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			topic: Topic = storage_service.find_by_id(topic_id)
			if topic is None:
				return None

			CacheService.topic().put(topic)
			return topic
		finally:
			storage_service.close_transaction()
Example #20
def trigger_pipeline(detected: MonitorRuleDetected,
                     principal_service: PrincipalService) -> None:
    schema = find_topic_schema('dqc_raw_rule_result', principal_service)
    trace_id: PipelineTriggerTraceId = str(ask_snowflake_generator().next_id())
    asynchronized = ask_monitor_result_pipeline_async()

    # noinspection PyUnusedLocal
    def handle_monitor_log(monitor_log: PipelineMonitorLog,
                           is_asynchronized: bool) -> None:
        logger.info(monitor_log)

    pipeline_trigger = PipelineTrigger(trigger_topic_schema=schema,
                                       trigger_type=PipelineTriggerType.INSERT,
                                       trigger_data=detected.to_dict(),
                                       trace_id=trace_id,
                                       principal_service=principal_service,
                                       asynchronized=asynchronized,
                                       handle_monitor_log=handle_monitor_log)
    if asynchronized:
        ensure_future(pipeline_trigger.invoke())
    else:
        run(pipeline_trigger.invoke())
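The final branch simply chooses between the two asyncio entry points used bare above (so presumably imported from asyncio). A standalone illustration of that dispatch, with a placeholder coroutine:

from asyncio import ensure_future, run

async def invoke() -> None:
    ...  # stands in for pipeline_trigger.invoke()

def dispatch(asynchronized: bool) -> None:
    if asynchronized:
        # fire-and-forget; assumes an event loop is already running
        ensure_future(invoke())
    else:
        # no running loop: block until the coroutine completes
        run(invoke())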
Example #21
	def find_by_id(self, data_source_id: DataSourceId) -> Optional[DataSource]:
		data_source = CacheService.data_source().get(data_source_id)
		if data_source is not None:
			if data_source.tenantId != self.principalService.get_tenant_id():
				raise DataKernelException(
					f'Data source[id={data_source_id}] does not belong to '
					f'current tenant[id={self.principalService.get_tenant_id()}].')
			return data_source

		storage_service = DataSourceStorageService(
			ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			data_source: DataSource = storage_service.find_by_id(data_source_id)
			if data_source is None:
				return None

			CacheService.data_source().put(data_source)
			return data_source
		finally:
			storage_service.close_transaction()
Example #22
    def find_by_id(self, pipeline_id: PipelineId) -> Optional[Pipeline]:
        pipeline = CacheService.pipeline().get(pipeline_id)
        if pipeline is not None:
            if pipeline.tenantId != self.principalService.get_tenant_id():
                raise DataKernelException(
                    f'Pipeline[id={pipeline_id}] does not belong to '
                    f'current tenant[id={self.principalService.get_tenant_id()}].'
                )
            return pipeline

        storage_service = PipelineStorageService(ask_meta_storage(),
                                                 ask_snowflake_generator(),
                                                 self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            pipeline: Pipeline = storage_service.find_by_id(pipeline_id)
            if pipeline is None:
                return None

            CacheService.pipeline().put(pipeline)
            return pipeline
        finally:
            storage_service.close_transaction()
Example #23
	def find_by_id(self, writer_id: ExternalWriterId) -> Optional[ExternalWriter]:
		external_writer = CacheService.external_writer().get(writer_id)
		if external_writer is not None:
			if external_writer.tenantId != self.principalService.get_tenant_id():
				raise DataKernelException(
					f'External writer[id={writer_id}] does not belong to '
					f'current tenant[id={self.principalService.get_tenant_id()}].')
			register_external_writer(external_writer)
			return external_writer

		storage_service = ExternalWriterStorageService(
			ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			external_writer: ExternalWriter = storage_service.find_by_id(writer_id)
			if external_writer is None:
				return None

			CacheService.external_writer().put(external_writer)
			register_external_writer(external_writer)
			return external_writer
		finally:
			storage_service.close_transaction()
    def find_by_name(self, subject_name: str) -> Optional[Subject]:
        storage_service = SubjectStorageService(ask_meta_storage(),
                                                ask_snowflake_generator(),
                                                self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            subject: Subject = storage_service.find_by_name(subject_name)
            if subject is None:
                return None
            if subject.tenantId != self.principalService.get_tenant_id():
                raise InquiryKernelException(
                    f'Subject[name={subject_name}] does not belong to '
                    f'current tenant[id={self.principalService.get_tenant_id()}].'
                )
            if not self.principalService.is_admin() \
                    and subject.userId != self.principalService.get_user_id():
                raise InquiryKernelException(
                    f'Subject[name={subject_name}] does not belong to '
                    f'current user[id={self.principalService.get_user_id()}].')

            return subject
        finally:
            storage_service.close_transaction()
def get_topic_service(principal_service: PrincipalService) -> TopicService:
    return TopicService(ask_meta_storage(), ask_snowflake_generator(),
                        principal_service)
def get_inspection_service(principal_service: PrincipalService) -> InspectionService:
	return InspectionService(ask_meta_storage(), ask_snowflake_generator(), principal_service)
Example #27
def get_pipeline_service(
        principal_service: PrincipalService) -> PipelineService:
    return PipelineService(ask_meta_storage(), ask_snowflake_generator(),
                           principal_service)
Example #28
def get_topic_snapshot_scheduler_service(principal_service: PrincipalService) -> TopicSnapshotSchedulerService:
	return TopicSnapshotSchedulerService(ask_meta_storage(), ask_snowflake_generator(), principal_service)
Example #29
def get_subject_service(principal_service: PrincipalService) -> SubjectService:
    return SubjectService(ask_meta_storage(), ask_snowflake_generator(),
                          principal_service)
Example #30
def get_connected_space_graphic_service(
        principal_service: PrincipalService) -> ConnectedSpaceGraphicService:
    return ConnectedSpaceGraphicService(ask_meta_storage(),
                                        ask_snowflake_generator(),
                                        principal_service)
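A hypothetical caller-side sketch of how these factories are consumed: resolve a principal first (e.g. via Depends(get_any_principal) as in the earlier route examples), build the per-request service from it, and wrap the call in a transaction the same way the other examples do. Only methods already shown on TopicService above are used here.

def load_topic(topic_id: TopicId, principal_service: PrincipalService) -> Optional[Topic]:
    topic_service = get_topic_service(principal_service)
    topic_service.begin_transaction()
    try:
        # noinspection PyTypeChecker
        return topic_service.find_by_id(topic_id)
    finally:
        topic_service.close_transaction()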