Example #1
File: server.py Project: pipetree/pipetree
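Worker-side job handler: it rebuilds the pipeline stage from the job payload, loads each input artifact's payload from the cache, runs the stage, and persists the resulting artifacts.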
    async def _run_job(self, job):
        # Get stage from pipeline
        pf = PipelineStageFactory()
        config = PipelineStageConfig(job['stage_name'], job['stage_config'])
        stage = pf.create_pipeline_stage(config)

        # Load input artifact payloads from cache
        loaded_artifacts = []
        for artifact in job['artifacts']:
            art_obj = Artifact(stage._config)
            art_obj.meta_from_dict(artifact)
            loaded = self._backend.load_artifact(art_obj)
            if loaded is None:
                self._log("Could not find payload for artifact")
                raise Exception("Could not find payload for artifact")
            loaded_artifacts.append(loaded)

        # Execute the task
        exec_task = self._executor.create_task(stage, loaded_artifacts)
        result = await exec_task.generate_artifacts()

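        # Timestamp the generated artifacts, record the dependency hash
        # of their inputs, and persist them through the backend.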
        for art in result:
            art._creation_time = float(time.time())
            art._dependency_hash = Artifact.dependency_hash(loaded_artifacts)
            self._backend.save_artifact(art)
        self._backend.log_pipeline_stage_run_complete(
            config, Artifact.dependency_hash(loaded_artifacts))

        return result
Example #2
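Test fixture: builds a PipelineStageConfig for an ExecutorPipelineStage and instantiates a PipelineStageFactory.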
    def setUp(self):
        self.data = {
            "inputs": [],
            "execute": "tests.functional.module.executor_function.function",
            "type": "ExecutorPipelineStage"
        }
        self.config = PipelineStageConfig('WriteBytes', self.data)
        self.factory = PipelineStageFactory()
Example #3
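Test helper: creates a stage from a generated pipeline config and saves every artifact it yields to the backend before the test runs.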
    def pregenerate_artifacts(self, backend):
        pf = PipelineStageFactory()
        config = PipelineStageConfig("StageA",
                                     self.generate_pipeline_config()["StageA"])
        stage = pf.create_pipeline_stage(config)
        arts = []
        for art in stage.yield_artifacts():
            backend.save_artifact(art)
            arts.append(art)
        return arts
Example #4
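Integration test: enqueues a task through RemoteSQSExecutor, then polls the SQS task queue and checks that the message carries the expected stage-config and dependency hashes.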
    def test_create_tasks(self):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        executor = RemoteSQSExecutor(
            aws_profile="testing",
            task_queue_name=self.test_queue_task_name,
            result_queue_name=self.test_queue_result_name,
            loop=loop)

        pf = PipelineStageFactory()
        stage = pf.create_pipeline_stage(self.stage_config)
        input_artifacts = [Artifact(self.stage_config)]
        executor.create_task(stage, input_artifacts)

        async def process_loop(executor, stage, input_artifacts):
            exit_loop = False
            while not exit_loop:
                await asyncio.sleep(2.0)
                for message in executor._task_queue.receive_messages(
                        MessageAttributeNames=[
                            'stage_config_hash', 'dependency_hash'
                        ]):
                    print("Retrieved message")
                    print(message.body)
                    print(message.message_attributes)
                    if message.message_attributes is None:
                        self.fail("Message attributes absent")

                    m_config_hash = (message.message_attributes
                                     .get('stage_config_hash')
                                     .get('StringValue'))
                    m_dependency_hash = (message.message_attributes
                                         .get('dependency_hash')
                                         .get('StringValue'))
                    config_hash = stage._config.hash()
                    dependency_hash = Artifact.dependency_hash(input_artifacts)

                    self.assertEqual(config_hash, m_config_hash)
                    self.assertEqual(dependency_hash, m_dependency_hash)
                    message.delete()
                    exit_loop = True
            for task in asyncio.Task.all_tasks():
                task.cancel()
            raise CancelledError

        try:
            loop.run_until_complete(
                asyncio.wait([
                    executor._process_queue(),
                    process_loop(executor, stage, input_artifacts)
                ]))
        except CancelledError:
            print('CancelledError raised: closing event loop.')
Example #5
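A leaner variant of the job handler in Example #1: it loads the input artifacts and executes the stage, but leaves saving the results to the caller.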
    async def _run_job(self, job):
        # Get stage from pipeline
        pf = PipelineStageFactory()
        config = PipelineStageConfig(job['stage_name'], job['stage_config'])
        stage = pf.create_pipeline_stage(config)

        # Load artifact payloads from cache
        loaded_artifacts = []
        for artifact in job['artifacts']:
            art_obj = Artifact(stage._config)
            art_obj.meta_from_dict(artifact)
            print(art_obj._pipeline_stage)
            loaded = self._backend.load_artifact(art_obj)
            if loaded is None:
                raise Exception("Could not find payload for artifact")
            loaded_artifacts.append(loaded)

        # Execute the task
        exec_task = self._executor.create_task(stage, loaded_artifacts)
        result = await exec_task.generate_artifacts()
        return result
Example #6
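Test fixture: instantiates the factory inside an isolated filesystem.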
    def setUp(self):
        self.filename = 'file.json'
        self.factory = PipelineStageFactory()
        self.fs = isolated_filesystem()
        self.fs.__enter__()
Example #7
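End-to-end test: runs a local RemoteSQSExecutor against a RemoteSQSServer for ten seconds, then loads the remotely generated artifact and verifies its payload.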
    def test_executor_server_integration(self):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        executor = RemoteSQSExecutor(
            aws_profile=self.test_profile,
            task_queue_name=self.test_queue_task_name,
            result_queue_name=self.test_queue_result_name,
            s3_bucket_name=self.test_bucket_name,
            dynamodb_artifact_table_name=self.test_dynamodb_artifact_table_name,
            dynamodb_stage_run_table_name=self.test_dynamodb_stage_run_name,
            loop=loop)

        server = RemoteSQSServer(
            aws_profile=self.test_profile,
            aws_region=self.test_region,
            s3_bucket_name=self.test_bucket_name,
            task_queue_name=self.test_queue_task_name,
            result_queue_name=self.test_queue_result_name,
            dynamodb_artifact_table_name=self.test_dynamodb_artifact_table_name,
            dynamodb_stage_run_table_name=self.test_dynamodb_stage_run_name,
            loop=loop)

        # Create a task whose inputs are the artifacts the stage itself yields.
        pf = PipelineStageFactory()
        stage = pf.create_pipeline_stage(self.stage_config)
        input_artifacts = []
        for art in stage.yield_artifacts():
            input_artifacts.append(art)
        executor.create_task(stage, input_artifacts)

        # Save input artifacts so they're available for the remote server
        executor._backend.save_artifact(input_artifacts[0])

        # Run our local RemoteExecutor and the remote RemoteSQSServer
        # for 10 seconds.
        async def timeout():
            await asyncio.sleep(10.0)
            for task in asyncio.Task.all_tasks():
                task.cancel()
            raise CancelledError

        try:
            loop.run_until_complete(
                asyncio.wait([
                    executor._process_queue(),
                    server._process_tasks(),
                    server._executor_server._listen_to_queue(),
                    timeout()
                ]))
        except CancelledError:
            print('CancelledError raised: closing event loop.')

        # Load our remotely generated artifact(s) and ensure they
        # have the correct payload.
        arts = executor._backend.find_pipeline_stage_run_artifacts(
            self.stage_config, Artifact.dependency_hash(input_artifacts))

        loaded = []
        for art in arts:
            loaded.append(executor._backend.load_artifact(art))

        self.assertEqual(1, len(loaded))
        self.assertEqual(loaded[0].item.payload['param_a'],
                         "string parameter value")
Example #8
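Constructor that pairs a stage factory with a pipeline config loader.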
    def __init__(self):
        self._stage_factory = PipelineStageFactory()
        self._loader = PipelineConfigLoader()
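
Taken together, the examples follow one pattern: build a PipelineStageConfig from a stage name and a settings dict, hand it to PipelineStageFactory.create_pipeline_stage(), and then either iterate the stage's artifacts or wrap the stage in an executor task. Below is a minimal sketch of that pattern; the import paths and the dotted "execute" path are assumptions for illustration, not verified against the pipetree package layout.

    # NOTE: the import paths below are assumptions inferred from the class
    # names in the examples above, not verified against pipetree's modules.
    from pipetree.config import PipelineStageConfig
    from pipetree.stage import PipelineStageFactory

    # A stage config is a name plus a settings dict; the "type" and
    # "execute" keys mirror Example #2.
    config = PipelineStageConfig("StageA", {
        "inputs": [],
        "execute": "mymodule.my_function",  # hypothetical dotted path
        "type": "ExecutorPipelineStage",
    })

    # The factory turns the config into a runnable stage object.
    factory = PipelineStageFactory()
    stage = factory.create_pipeline_stage(config)

    # Stages yield artifacts (Examples #3 and #7 iterate them this way).
    for artifact in stage.yield_artifacts():
        print(artifact)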