def test_multiple_any_config(self):
    """A MeetAllEventCondition with wildcard event_type and namespace
    matches on key/value/sender only; a wrong sender or key yields NONE."""
    condition = MeetAllEventCondition().add_event(
        event_key="key_1",
        event_value="value_1",
        event_type="*",
        namespace="*",
        sender="1-job-name")
    rules = [SchedulingRule(condition, action=JobAction.START)]
    handler = AIFlowHandler(config=json_utils.dumps(rules))

    def make_event(key, sender):
        return BaseEvent(key=key,
                         value='value_1',
                         namespace='default',
                         sender=sender,
                         create_time=round(time.time() * 1000))

    # Fully matching event -> START.
    action, ts = handler.handle_event(make_event('key_1', '1-job-name'), None)
    self.assertEqual(SchedulingAction.START, action)

    # Wrong sender -> no action.
    action, ts = handler.handle_event(make_event('key_1', 'aa'), ts)
    self.assertEqual(SchedulingAction.NONE, action)

    # Wrong key -> no action.
    action, ts = handler.handle_event(make_event('key_1_1', '1-job-name'), ts)
    self.assertEqual(SchedulingAction.NONE, action)
 def test_list_all_events_with_id_range(self):
     """Listing with a start/end version range excludes the start version."""
     first = self.client.send_event(BaseEvent(key="key", value="value1"))
     self.client.send_event(BaseEvent(key="key", value="value2"))
     last = self.client.send_event(BaseEvent(key="key", value="value3"))
     listed = self.client.list_all_events(start_version=first.version,
                                          end_version=last.version)
     self.assertEqual(2, len(listed))
    def test_list_events(self):
        """list_events honours the client's default namespace plus the
        version / event_type / sender filters."""
        self.client._default_namespace = "a"
        self.client._sender = 's'
        first = self.client.send_event(BaseEvent(key="key", value="value1"))

        # Three more events land in namespace "b"; one carries event_type "a".
        self.client._default_namespace = "b"
        self.client.send_event(
            BaseEvent(key="key", value="value2", event_type="a"))
        self.client.send_event(BaseEvent(key="key", value="value3"))
        self.client.send_event(BaseEvent(key="key2", value="value3"))

        # Multi-key listing after the first version sees the later three.
        listed = self.client.list_events(["key", "key2"],
                                         version=first.version)
        self.assertEqual(3, len(listed))
        self.assertEqual('s', listed[0].sender)

        # Namespace "a" holds only the very first event.
        self.client._default_namespace = "a"
        self.assertEqual(1, len(self.client.list_events("key")))

        # Namespace "b" holds two "key" events.
        self.client._default_namespace = "b"
        self.assertEqual(2, len(self.client.list_events("key")))

        # Filter by event_type.
        self.assertEqual(
            1, len(self.client.list_events("key", event_type="a")))

        # Filter by sender: both "key" events in "b" came from 's'.
        self.assertEqual(2, len(self.client.list_events("key", sender='s')))
        self.assertEqual(0, len(self.client.list_events("key", sender='p')))
 def test_all_listen_events(self):
     """list_all_events(start_time) returns events created at or after it."""
     self.client.send_event(BaseEvent(key="key", value="value1"))
     marker = self.client.send_event(BaseEvent(key="key", value="value2"))
     self.client.send_event(BaseEvent(key="key", value="value3"))
     listed = self.client.list_all_events(marker.create_time)
     self.assertEqual(2, len(listed))
Exemple #5
0
 def run_ai_flow_function(self):
     """Driver routine for an event-based workflow test.

     Waits until task '1-job-name' of dag 'workflow_1' has at least one
     TaskExecution, emits the two events downstream jobs wait on, waits for
     all three task executions to appear, then stops the scheduler.
     """
     client = NotificationClient(server_uri="localhost:{}".format(
         self.port),
                                 default_namespace="default",
                                 sender='1-job-name')
     while True:
         with create_session() as session:
             tes = session.query(TaskExecution).filter(
                 TaskExecution.dag_id == 'workflow_1',
                 TaskExecution.task_id == '1-job-name').all()
             if len(tes) > 0:
                 # Give the first task a moment to make progress before
                 # firing the events that trigger the dependent jobs.
                 time.sleep(5)
                 client.send_event(
                     BaseEvent(key='key_1',
                               value='value_1',
                               event_type='UNDEFINED'))
                 client.send_event(
                     BaseEvent(key='key_2',
                               value='value_2',
                               event_type='UNDEFINED'))
                 # Poll until all three task executions of the dag exist.
                 while True:
                     with create_session() as session_2:
                         tes_2 = session_2.query(TaskExecution).filter(
                             TaskExecution.dag_id == 'workflow_1').all()
                         if len(tes_2) == 3:
                             break
                         else:
                             time.sleep(1)
                 break
             else:
                 time.sleep(1)
     # Small grace period before asking the scheduler to shut down.
     time.sleep(3)
     client.send_event(StopSchedulerEvent(job_id=0).to_event())
 def test_inner_event(self):
     """SchedulerInnerEventUtil recognises and converts inner event types."""
     self.assertTrue(
         SchedulerInnerEventUtil.is_inner_event(
             BaseEvent('', '', 'STOP_SCHEDULER')))
     self.assertFalse(
         SchedulerInnerEventUtil.is_inner_event(BaseEvent('', '', 'TEST')))
     self.assertEqual(
         SchedulerInnerEventType.STOP_SCHEDULER,
         SchedulerInnerEventUtil.event_type(
             BaseEvent('', '', 'STOP_SCHEDULER')))
     inner = SchedulerInnerEventUtil.to_inner_event(
         BaseEvent('1', '', 'STOP_SCHEDULER'))
     self.assertTrue(isinstance(inner, StopSchedulerEvent))
    def test_two_rules_trigger_will_clear_all_event_life(self):
        """Once one rule fires, event state is cleared for every rule, so an
        event already consumed cannot later complete the second rule."""
        start_rule = SchedulingRule(
            MeetAnyEventCondition().add_event(
                event_key="key_1", event_value="value_1",
                sender="1-job-name"),
            action=JobAction.START)
        stop_rule = SchedulingRule(
            MeetAllEventCondition()
            .add_event(event_key="key_1", event_value="value_1",
                       sender="1-job-name")
            .add_event(event_key='key_2', event_value='value_2',
                       sender="1-job-name"),
            action=JobAction.STOP)
        handler = AIFlowHandler(
            config=json_utils.dumps([start_rule, stop_rule]))

        def build_event(key, value):
            return BaseEvent(key=key,
                             value=value,
                             namespace='default',
                             sender='1-job-name',
                             create_time=round(time.time() * 1000))

        event1 = build_event('key_1', 'value_1')
        event2 = build_event('key_2', 'value_2')

        # key_2 alone satisfies neither rule.
        action, ts = handler.handle_event(event2, None)
        self.assertEqual(SchedulingAction.NONE, action)
        # key_1 fires the MeetAny START rule, clearing all event state.
        action, ts = handler.handle_event(event1, ts)
        self.assertEqual(SchedulingAction.START, action)
        # key_2 again: state was cleared, so MeetAll is still unsatisfied.
        action, ts = handler.handle_event(event2, ts)
        self.assertEqual(SchedulingAction.NONE, action)
    def test_listen_events(self):
        """start_listen_event delivers only events matching the key and the
        client's default namespace that arrive after the given version."""
        event_list = []

        class TestWatch(EventWatcher):
            # Collects every delivered event into the shared list.
            def __init__(self, event_list) -> None:
                super().__init__()
                self.event_list = event_list

            def process(self, events: List[BaseEvent]):
                self.event_list.extend(events)

        self.client._default_namespace = "a"
        event1 = self.client.send_event(BaseEvent(key="key", value="value1"))
        self.client.start_listen_event(key="key",
                                       watcher=TestWatch(event_list),
                                       version=event1.version)
        self.client.send_event(BaseEvent(key="key", value="value2"))
        self.client.send_event(BaseEvent(key="key", value="value3"))

        # Sent under a different (default) namespace: the watcher must not
        # receive it and it must not appear in the namespace-"a" listing.
        self.client._default_namespace = None
        self.client.send_event(BaseEvent(key="key", value="value4"))

        self.client._default_namespace = "a"
        # Stopping the listener first ensures the watcher has finished
        # processing before the counts are checked.
        self.client.stop_listen_event("key")
        events = self.client.list_events("key", version=event1.version)
        self.assertEqual(2, len(events))
        self.assertEqual(2, len(event_list))
Exemple #9
0
 def test_list_events(self):
     """Listing with version=1 skips the first event; no version lists all."""
     for val in ("value1", "value2", "value3"):
         self.client.send_event(BaseEvent(key="key", value=val))
     self.assertEqual(2, len(self.client.list_events("key", version=1)))
     self.assertEqual(3, len(self.client.list_events("key")))
Exemple #10
0
 def test_all_listen_events(self):
     """list_all_events(start_time) returns only the events created at or
     after the given timestamp (here: the 2nd and 3rd events).

     Fix: removed a leftover '#####start time' debug print that polluted
     test output.
     """
     self.client.send_event(BaseEvent(key="key", value="value1"))
     event = self.client.send_event(BaseEvent(key="key", value="value2"))
     # The second event's create_time is the inclusive lower bound.
     start_time = event.create_time
     event = self.client.send_event(BaseEvent(key="key", value="value3"))
     events = self.client.list_all_events(start_time)
     self.assertEqual(2, len(events))
Exemple #11
0
 def test_list_events(self):
     """Listing after the first event's version excludes it; listing without
     a version returns all three events.

     Fix: removed a leftover '######first version' debug print that polluted
     test output.
     """
     event = self.client.send_event(BaseEvent(key="key", value="value1"))
     first_version = event.version
     event = self.client.send_event(BaseEvent(key="key", value="value2"))
     event = self.client.send_event(BaseEvent(key="key", value="value3"))
     events = self.client.list_events("key", version=first_version)
     self.assertEqual(2, len(events))
     events = self.client.list_events("key")
     self.assertEqual(3, len(events))
Exemple #12
0
 def add_event(self, event: BaseEvent):
     """Store *event* in memory, assigning it a unique id, a create_time and
     a per-key monotonically increasing version, then return it."""
     self.max_id += 1
     event.id = self.max_id
     event.create_time = time.time_ns()
     # The version is the number of events stored under this key so far.
     self.kv[event.key] = self.kv.get(event.key, 0) + 1
     event.version = self.kv[event.key]
     self.store.append(event)
     return event
Exemple #13
0
    def test_event_type_any(self):
        """A config with event_type "*" matches events of any type; a config
        with a concrete event_type matches that type only."""
        def build_config(event_type):
            return {"action": "START",
                    "condition": "NECESSARY",
                    "event_key": "key_1",
                    "event_type": event_type,
                    "event_value": "value_1",
                    "life": "ONCE",
                    "namespace": "aa",
                    "sender": "1-job-name",
                    "value_condition": "EQUAL"}

        def build_event(event_type, offset=0):
            return BaseEvent(key='key_1',
                             value='value_1',
                             event_type=event_type,
                             namespace='aa',
                             sender='1-job-name',
                             create_time=int(time.time() * 1000) + offset)

        # Wildcard config matches event_type 'aa' ...
        handler = AIFlowHandler(config=json.dumps([build_config("*")]))
        ts = AiFlowTs()
        action, ts = handler.handle_event(build_event('aa'), ts)
        self.assertEqual(SchedulingAction.START, action)

        # ... and also a different event_type 'bb'.
        action, ts = handler.handle_event(build_event('bb', offset=1), ts)
        self.assertEqual(SchedulingAction.START, action)

        # A concrete event_type 'aa' does not match an event typed 'bb'.
        handler = AIFlowHandler(config=json.dumps([build_config("aa")]))
        ts = AiFlowTs()
        action, ts = handler.handle_event(build_event('bb'), ts)
        self.assertEqual(SchedulingAction.NONE, action)
Exemple #14
0
 def add_event(self, event: BaseEvent):
     """Persist *event* as a MongoEvent and copy the storage-assigned
     create_time, version and auto-increment id back onto it."""
     mongo_event = MongoEvent(server_id=self.server_id,
                              create_time=time.time_ns(),
                              event_type=event.event_type,
                              key=event.key,
                              value=event.value)
     mongo_event.save()
     # Reload so fields computed by the store (version, id) are populated.
     mongo_event.reload()
     event.create_time = mongo_event.create_time
     event.version = mongo_event.version
     event.id = mongo_event.auto_increase_id
     return event
Exemple #15
0
 def run_one_task_function(self):
     """Trigger 'task_1' of dag 'single' twice via events, wait for both
     task executions to appear, then stop the scheduler."""
     self.wait_for_running()
     self.client.send_event(BaseEvent(key='a', value='a'))
     # Space the two triggers out so they produce distinct executions.
     time.sleep(5)
     self.client.send_event(BaseEvent(key='a', value='a'))
     # Poll until the task has been executed at least twice.
     while True:
         with create_session() as session:
             tes = session.query(TaskExecution).filter(
                 TaskExecution.dag_id == 'single',
                 TaskExecution.task_id == 'task_1').all()
             if len(tes) >= 2:
                 break
             else:
                 time.sleep(1)
     self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
    def test_event_handler_schedule_time(self):
        """After one matching event, the fired rule's schedule_time equals
        the event time; an unfired rule only updates its latest_time."""
        any_rule = SchedulingRule(
            MeetAnyEventCondition().add_event(
                event_key="key_1", event_value="value_1",
                sender="1-job-name"),
            action=JobAction.START)
        all_rule = SchedulingRule(
            MeetAllEventCondition()
            .add_event(event_key="key_1", event_value="value_1",
                       sender="1-job-name")
            .add_event(event_key='key_2', event_value='value_2',
                       sender="1-job-name"),
            action=JobAction.STOP)
        handler = AIFlowHandler(
            config=json_utils.dumps([any_rule, all_rule]))

        event1_time = round(time.time() * 1000)
        event1 = BaseEvent(key='key_1',
                           value='value_1',
                           namespace='default',
                           sender='1-job-name',
                           create_time=event1_time)
        action, ts = handler.handle_event(event1, None)
        # Rule 0 fired: both schedule_time and latest_time advance.
        self.assertEqual(event1_time, ts.rule_states[0].schedule_time)
        self.assertEqual(event1_time, ts.rule_states[0].latest_time)
        # Rule 1 saw the event but did not fire: only latest_time advances.
        self.assertNotEqual(event1_time, ts.rule_states[1].schedule_time)
        self.assertEqual(event1_time, ts.rule_states[1].latest_time)
Exemple #17
0
    def process(self, execution_context: ExecutionContext,
                input_list: List) -> List:
        """Deploy the latest validated model version and publish its files.

        Deprecates the currently deployed version (if any), promotes the
        latest validated version to DEPLOYED, notifies consumers via a
        START_PREDICTION event, and copies the deployed model files into the
        configured artifact directory.
        """
        model_meta: af.ModelMeta = execution_context.config.get('model_info')
        model_name = model_meta.name
        # NOTE(review): assumes a validated version always exists; if none
        # does, validated_model may be None and the update below would
        # fail — confirm upstream guarantees.
        validated_model = af.get_latest_validated_model_version(model_name)
        # Deprecate deployed model
        deployed_model_version = af.get_deployed_model_version(model_name)
        if deployed_model_version is not None:
            af.update_model_version(
                model_name=model_name,
                model_version=deployed_model_version.version,
                current_stage=ModelVersionStage.DEPRECATED)
        af.update_model_version(model_name=model_name,
                                model_version=validated_model.version,
                                current_stage=ModelVersionStage.DEPLOYED)

        af.get_ai_flow_client().send_event(
            BaseEvent(key='START_PREDICTION', value=validated_model.version))
        print(validated_model.version)

        # Copy deployed model to deploy_model_dir

        deployed_model_dir = af.get_artifact_by_name(self.artifact).uri
        if not os.path.exists(deployed_model_dir):
            os.makedirs(deployed_model_dir)
        # Clear any previously published model files or directories.
        for file in os.listdir(deployed_model_dir):
            file_path = os.path.join(deployed_model_dir, file)
            if os.path.isfile(file_path):
                os.remove(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path, True)
        # Re-fetch: the deployed version changed in the promotion above.
        deployed_model_version = af.get_deployed_model_version(model_name)
        shutil.copy(deployed_model_version.model_path, deployed_model_dir)
        return []
Exemple #18
0
 def run_event_task_function(self):
     """Once 'task_1' of dag 'event_dag' has started, send the 'start' event
     that triggers 'task_2', wait for it to appear, then stop the
     scheduler."""
     client = NotificationClient(server_uri="localhost:{}".format(
         self.port),
                                 default_namespace="")
     while True:
         with create_session() as session:
             tes = session.query(TaskExecution).filter(
                 TaskExecution.dag_id == 'event_dag',
                 TaskExecution.task_id == 'task_1').all()
             if len(tes) > 0:
                 # Let task_1 make some progress before firing the event.
                 time.sleep(5)
                 client.send_event(
                     BaseEvent(key='start',
                               value='',
                               event_type='',
                               namespace=''))
                 # Poll until task_2 has at least one execution.
                 while True:
                     with create_session() as session_2:
                         tes_2 = session_2.query(TaskExecution).filter(
                             TaskExecution.dag_id == 'event_dag',
                             TaskExecution.task_id == 'task_2').all()
                         if len(tes_2) > 0:
                             break
                         else:
                             time.sleep(1)
                 break
             else:
                 time.sleep(1)
     client.send_event(StopSchedulerEvent(job_id=0).to_event())
Exemple #19
0
    def test_dag_run_event_manager(self):
        """DagRunEventManager routes events to the dag run's handler and
        mails back one EventHandleResult per handled event, and keeps
        working after the run's executor has been idle for a while."""
        mailbox = Mailbox()
        event_manager = DagRunEventManager(mailbox)
        event_manager.start()

        self.create_task_state(dag_run=self._dag_run,
                               task_id='operator_toggle_handler')
        event = BaseEvent("test_event", "test_event", namespace="default")

        # Two copies of the same event: the toggle handler alternates
        # START / STOP.
        event_manager.handle_event(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id), event)
        event_manager.handle_event(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id), event)

        handle_event = mailbox.get_message()
        message = EventHandleResult.from_event(handle_event)
        assert message == EventHandleResult(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id),
            "operator_toggle_handler", SchedulingAction.START)

        handle_event = mailbox.get_message()
        message = EventHandleResult.from_event(handle_event)
        assert message == EventHandleResult(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id),
            "operator_toggle_handler", SchedulingAction.STOP)

        # After an idle pause a new event is still handled (the handler
        # toggles back to START).
        time.sleep(2)
        event_manager.handle_event(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id), event)
        handle_event = mailbox.get_message()
        message = EventHandleResult.from_event(handle_event)
        assert message == EventHandleResult(
            DagRunId(self._dag_run.dag_id, self._dag_run.run_id),
            "operator_toggle_handler", SchedulingAction.START)
        event_manager.end()
 def deleteModelVersion(self, request, context):
     """Delete the requested model version and broadcast a MODEL_DELETED
     notification event carrying the deleted version's fields."""
     model_meta_param = ModelVersion.from_proto(request)
     self.model_repo_store.delete_model_version(model_meta_param)
     notification = BaseEvent(model_meta_param.model_name,
                              json.dumps(model_meta_param.__dict__),
                              ModelVersionEventType.MODEL_DELETED)
     self.notification_client.send_event(notification)
     return _wrap_response(request.model_meta)
Exemple #21
0
 def test_two_config_2(self):
     """A SUFFICIENT condition fires on its own even though a second,
     NECESSARY config for another key is still unmet."""
     def build_config(condition, key, value):
         return {"action": "START",
                 "condition": condition,
                 "event_key": key,
                 "event_type": "UNDEFINED",
                 "event_value": value,
                 "life": "ONCE",
                 "namespace": "default",
                 "sender": "1-job-name",
                 "value_condition": "EQUAL"}

     configs = [build_config("SUFFICIENT", "key_1", "value_1"),
                build_config("NECESSARY", "key_2", "value_2")]
     handler = AIFlowHandler(config=json.dumps(configs))
     event = BaseEvent(key='key_1',
                       value='value_1',
                       namespace='default',
                       sender='1-job-name',
                       create_time=round(time.time() * 1000))
     # Only the sufficient condition's event arrives, yet START triggers.
     action, ts = handler.handle_event(event, AiFlowTs())
     self.assertEqual(SchedulingAction.START, action)
    def test_dag_run_event_manager_release_runner(self):
        """An idle event-executor runner is released: after the idle window,
        handling an event for a second dag run leaves only that run's runner
        registered."""
        dag_run1 = self._dag_run
        _, dag_run2 = self.init_dag_and_dag_run(
            '../../dags/test_task_event_handler_dag.py', 'test_event_handler',
            timezone.datetime(2017, 1, 2))
        self.create_task_state(dag_run1, 'operator_toggle_handler')
        self.create_task_state(dag_run2, 'operator_toggle_handler')

        event = BaseEvent("test_event", "test_event", namespace="default")
        mailbox = Mailbox()

        event_manager = DagRunEventManager(mailbox=mailbox)
        event_manager.handle_event(DagRunId(dag_run1.dag_id, dag_run1.run_id),
                                   event)

        # Wait past the runner idle timeout so run 1's runner is released.
        time.sleep(5)
        # NOTE(review): pairs dag_run1.dag_id with dag_run2.run_id — this is
        # only correct if both runs share the same dag_id (they appear to
        # come from the same dag file); otherwise it should presumably read
        # dag_run2.dag_id. Confirm.
        event_manager.handle_event(DagRunId(dag_run1.dag_id, dag_run2.run_id),
                                   event)
        assert (DagRunId(
            dag_run2.dag_id,
            dag_run2.run_id)) in event_manager._event_executor_runners
        assert (DagRunId(
            dag_run1.dag_id,
            dag_run1.run_id)) not in event_manager._event_executor_runners

        event_manager.end()
    def test_task_event_executor_runner(self):
        """Two queued copies of the same event toggle the handler: the first
        yields START, the second STOP."""
        event = BaseEvent("test_event", "test_event", namespace="default")

        self.create_task_state(dag_run=self._dag_run,
                               task_id='operator_toggle_handler')

        mailbox = Mailbox()
        dag_run_id = DagRunId(self._dag_run.dag_id, self._dag_run.run_id)
        executor_runner = DagRunEventExecutorRunner(mailbox, dag_run_id, 10)
        executor_runner.put_event(event)
        executor_runner.put_event(event)
        executor_runner.run()

        # The mailbox yields results in handling order: START then STOP.
        for expected in (SchedulingAction.START, SchedulingAction.STOP):
            message = EventHandleResult.from_event(
                SchedulerInnerEventUtil.to_inner_event(
                    mailbox.get_message()))
            assert message == EventHandleResult(
                dag_run_id, "operator_toggle_handler", expected)
Exemple #24
0
        def run_task_function(client: NotificationClient):
            """Build and submit the test workflow, start an execution, fire
            the event task_5 depends on once task_2 has run, and wait for
            the dag run to finish."""
            with af.global_config_file(workflow_config_file()):
                with af.config('task_2'):
                    executor_1 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                with af.config('task_5'):
                    executor_2 = af.user_define_operation(
                        af.PythonObjectExecutor(SimpleExecutor()))
                # task_5 (executor_2) runs only after key_1/value_1 arrives,
                # from any sender.
                af.user_define_control_dependency(src=executor_2,
                                                  dependency=executor_1,
                                                  namespace='test',
                                                  event_key='key_1',
                                                  event_value='value_1',
                                                  sender='*')
                workflow_info = af.workflow_operation.submit_workflow(
                    workflow_name)

            af.workflow_operation.start_new_workflow_execution(workflow_name)
            flag = True
            while True:
                with create_session() as session:
                    tes = session.query(TaskExecution).filter(
                        TaskExecution.dag_id == 'test_project.test_workflow',
                        TaskExecution.task_id == 'task_2').all()
                    # Fire the unblocking event exactly once, after task_2
                    # has its first execution.
                    if 1 == len(tes) and flag:
                        client.send_event(
                            BaseEvent(key='key_1', value='value_1'))
                        flag = False
                    dag_run = session.query(DagRun).filter(
                        DagRun.dag_id == 'test_project.test_workflow').first()
                    if dag_run is not None and dag_run.state in State.finished:
                        break
                    else:
                        time.sleep(1)
    def handle_event(self, event: BaseEvent,
                     task_state: object) -> Tuple[SchedulingAction, object]:
        """Feed *event* into this task's scheduling rules and return the
        resulting (SchedulingAction, new_state) pair.

        The incoming *task_state* is never mutated: a deep copy is updated
        and returned, so the caller owns the state transition. A None
        *task_state* starts from a fresh AiFlowTs.
        """
        rules = self._parse_configs(self.config)
        # Task-status events carry an Airflow task state; translate it to
        # the AIFlow status value the rules are written against. Copy first
        # so the caller's event object is left untouched.
        if SchedulerInnerEventType.TASK_STATUS_CHANGED.value == event.event_type:
            event = copy.deepcopy(event)
            event.value = _airflow_task_state_to_aiflow_status(event.value)
        if task_state is None:
            task_state = AiFlowTs(len(rules))
        af_ts = copy.deepcopy(task_state)
        af_ts.add_event(event)
        actions = self._check_rules(rules, af_ts)

        # pick the first triggered action
        for action in actions:
            if action is not None:
                scheduling_action = job_action_to_scheduling_action(action)
                logging.debug(
                    "AIFlowHandler {} handle event {} triggered action: {}".
                    format(self.config, event, scheduling_action))
                return scheduling_action, af_ts

        logging.debug(
            "AIFlowHandler {} handle event {} no action triggered return action {}"
            .format(self.config, event, SchedulingAction.NONE))
        return SchedulingAction.NONE, af_ts
Exemple #26
0
    async def _send_event(self, request):
        """Persist an incoming event and wake every coroutine waiting on the
        event's (key, namespace) pair, plus global write listeners.

        Returns a SendEventsResponse carrying the stored event with the
        storage-assigned fields filled in.

        Fix: the per-key condition was registered via
        ``dict.update({<set containing one (key, value) tuple>})``, which
        only works because dict.update accepts any iterable of pairs;
        replaced with a plain subscript assignment.
        """
        event_proto = request.event
        # Empty proto strings mean "unset": normalise them to None.
        event = BaseEvent(
            key=event_proto.key,
            value=event_proto.value,
            event_type=None
            if event_proto.event_type == "" else event_proto.event_type,
            context=None if event_proto.context == "" else event_proto.context,
            namespace=None
            if event_proto.namespace == "" else event_proto.namespace)
        uuid = request.uuid
        key = event.key
        namespace = event.namespace
        # Lock conditions dict for get/check/update of key
        await self.lock.acquire()
        if self.notification_conditions.get((key, namespace)) is None:
            self.notification_conditions[(key, namespace)] = \
                asyncio.Condition()
        # Release lock after check/update key of notification conditions dict
        self.lock.release()
        async with self.notification_conditions.get(
            (key, namespace)), self.write_condition:
            event: BaseEvent = self.storage.add_event(event, uuid)
            self.notification_conditions.get((key, namespace)).notify_all()
            self.write_condition.notify_all()

        result_event_proto = event_to_proto(event)
        return notification_service_pb2.SendEventsResponse(
            event=result_event_proto,
            return_code=notification_service_pb2.ReturnStatus.SUCCESS,
            return_msg='')
Exemple #27
0
def event_proto_to_event(event_proto):
    """Convert an event protobuf message into a BaseEvent."""
    fields = ('id', 'key', 'value', 'event_type', 'version', 'create_time')
    return BaseEvent(**{name: getattr(event_proto, name) for name in fields})
Exemple #28
0
    def schedule_task_function(self):
        """Drive the event-based-scheduler test dag through a full cycle.

        Sends 'start' once both tasks are SCHEDULED, waits for them to be
        RUNNING, sends 'stop' followed by 'restart', waits until
        sleep_1000_secs is KILLED while python_sleep keeps RUNNING, then
        stops the scheduler.
        """
        stopped = False
        while not stopped:
            with create_session() as session:
                ti_sleep_1000_secs = self._get_task_instance(
                    EVENT_BASED_SCHEDULER_DAG, 'sleep_1000_secs', session)
                ti_python_sleep = self._get_task_instance(
                    EVENT_BASED_SCHEDULER_DAG, 'python_sleep', session)
                if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.SCHEDULED and \
                   ti_python_sleep and ti_python_sleep.state == State.SCHEDULED:
                    # Both tasks are scheduled: kick them off.
                    self.client.send_event(
                        BaseEvent(key='start',
                                  value='',
                                  event_type='',
                                  namespace='test_namespace'))

                    # Wait until both tasks are actually running.
                    while not stopped:
                        ti_sleep_1000_secs.refresh_from_db()
                        ti_python_sleep.refresh_from_db()
                        if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.RUNNING and \
                           ti_python_sleep and ti_python_sleep.state == State.RUNNING:
                            time.sleep(10)
                            break
                        else:
                            time.sleep(1)
                    # Ask the dag to stop and immediately restart its tasks.
                    self.client.send_event(
                        BaseEvent(key='stop',
                                  value='',
                                  event_type=UNDEFINED_EVENT_TYPE,
                                  namespace='test_namespace'))
                    self.client.send_event(
                        BaseEvent(key='restart',
                                  value='',
                                  event_type=UNDEFINED_EVENT_TYPE,
                                  namespace='test_namespace'))
                    # Done once sleep_1000_secs was killed while python_sleep
                    # survived the stop/restart cycle.
                    while not stopped:
                        ti_sleep_1000_secs.refresh_from_db()
                        ti_python_sleep.refresh_from_db()
                        if ti_sleep_1000_secs and ti_sleep_1000_secs.state == State.KILLED and \
                           ti_python_sleep and ti_python_sleep.state == State.RUNNING:
                            stopped = True
                        else:
                            time.sleep(1)
                else:
                    time.sleep(1)
        self.client.send_event(StopSchedulerEvent(job_id=0).to_event())
Exemple #29
0
 def add_event(self, event: BaseEvent, uuid: str):
     """Persist *event* (tagged with *uuid*) as a MongoEvent and copy the
     storage-assigned create_time and version back onto it."""
     mongo_event = MongoEvent(server_id=self.server_id,
                              create_time=int(time.time() * 1000),
                              event_type=event.event_type,
                              key=event.key,
                              value=event.value,
                              context=event.context,
                              namespace=event.namespace,
                              uuid=uuid)
     mongo_event.save()
     # Reload so the store-computed version field is populated.
     mongo_event.reload()
     event.create_time = mongo_event.create_time
     event.version = mongo_event.version
     return event
Exemple #30
0
def event_proto_to_event(event_proto):
    """Convert an event protobuf message into a BaseEvent."""
    fields = ('key', 'value', 'event_type', 'version', 'create_time',
              'context', 'namespace')
    return BaseEvent(**{name: getattr(event_proto, name) for name in fields})