Example #1
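Builds a dataflow that chains dffml's get_single operation onto model_predict, seeds it with the names of the model's output definitions, and exports it to YAML with a config loader; the run itself is skipped pending the TODO about nested model config options.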
 async def test_run(self):
     self.required_plugins("dffml-config-yaml", "dffml-model-scratch")
     # Load get_single and model_predict
     get_single = Operation.load("get_single")
     model_predict = list(load("dffml.operation.model:model_predict"))[0]
     # Create new dataflow from operations
     dataflow = DataFlow.auto(get_single, model_predict)
     # Add the seed inputs
     dataflow.seed.append(
         Input(
             value=[
                 definition.name
                 for definition in model_predict.op.outputs.values()
             ],
             definition=get_single.inputs["spec"],
         ))
     # Write out the dataflow
     dataflow_yaml = pathlib.Path(self.mktempfile() + ".yaml")
     async with BaseConfigLoader.load("yaml").withconfig({}) as configloader:
         async with configloader() as loader:
             dataflow_yaml.write_bytes(await loader.dumpb(
                 dataflow.export(linked=True)))
     # TODO Figure out how nested model config options will work
     # print(dataflow_yaml.read_text())
     return
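To read the exported file back into a DataFlow, the same loader machinery works in reverse. A minimal sketch reusing the ConfigLoaders pattern from Example #2 below; the import paths are assumptions and may differ across DFFML versions:

    import asyncio

    from dffml.df.types import DataFlow
    from dffml.configloader.configloader import ConfigLoaders

    async def reload_dataflow(path: str) -> DataFlow:
        # ConfigLoaders picks a loader from the file extension; .yaml files
        # need the dffml-config-yaml plugin (see required_plugins above)
        async with ConfigLoaders() as cfgl:
            _, exported = await cfgl.load_file(path)
        return DataFlow._fromdict(**exported)

    # dataflow = asyncio.run(reload_dataflow("dataflow.yaml"))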
Example #2
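Generates a dataflow from the dffml CLI, loads it back in Python, rewires print_output's data input to come from dffml.mapping.create's mapping output, writes it back out, then runs it over two records and checks the printed mappings.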
 async def test_dataflow_run_cli_example(self):
     # Write out override dataflow
     created = self.mktempfile() + ".yaml"
     with open(created, "w") as fileobj:
         with contextlib.redirect_stdout(fileobj):
             await CLI.cli(
                 "dataflow",
                 "create",
                 "dffml.mapping.create",
                 "print_output",
                 "-configloader",
                 "yaml",
             )
     # Load the generated dataflow
     async with ConfigLoaders() as cfgl:
         _, exported = await cfgl.load_file(created)
         dataflow = DataFlow._fromdict(**exported)
     # Modify the dataflow
     dataflow.flow["print_output"].inputs["data"] = [{
         "dffml.mapping.create":
         "mapping"
     }]
     # Write back modified dataflow
     async with BaseConfigLoader.load("yaml").withconfig({}) as configloader:
         async with configloader() as loader:
             with open(created, "wb") as fileobj:
                 fileobj.write(
                     await loader.dumpb(dataflow.export(linked=True))
                 )
     # Run the dataflow
     with contextlib.redirect_stdout(self.stdout):
         await CLI.cli(
             "dataflow",
             "run",
             "records",
             "all",
             "-no-echo",
             "-record-def",
             "value",
             "-inputs",
             "hello=key",
             "-dataflow",
             created,
             "-sources",
             "m=memory",
             "-source-records",
             "world",
             "user",
         )
     self.assertEqual(self.stdout.getvalue(),
                      "{'hello': 'world'}\n{'hello': 'user'}\n")
Example #3
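From a DataFlowSource context: __aenter__ accepts the configured dataflow either as a DataFlow object or as a path string, deserializing the latter with the config loader matching the file extension before entering the orchestrator.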
    async def __aenter__(self) -> "DataFlowSourceContext":
        # Enter the wrapped source's context first
        self.sctx = await self.parent.source().__aenter__()

        # If the dataflow was configured as a file path rather than an
        # in-memory DataFlow, deserialize it with the matching config loader
        if isinstance(self.parent.config.dataflow, str):
            dataflow_path = pathlib.Path(self.parent.config.dataflow)
            config_type = dataflow_path.suffix.replace(".", "")
            config_cls = BaseConfigLoader.load(config_type)
            async with config_cls.withconfig({}) as configloader:
                async with configloader() as loader:
                    exported = await loader.loadb(dataflow_path.read_bytes())
                self.parent.config.dataflow = DataFlow._fromdict(**exported)

        # Enter the orchestrator's context with the (now loaded) dataflow
        self.octx = await self.parent.orchestrator(
            self.parent.config.dataflow
        ).__aenter__()

        return self
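Pulled out of the class, the path-to-DataFlow branch reads as a standalone helper. A sketch using only the names the method above already references; module paths are assumed:

    import pathlib

    from dffml.df.types import DataFlow
    from dffml.configloader.configloader import BaseConfigLoader

    async def dataflow_from_path(path: str) -> DataFlow:
        dataflow_path = pathlib.Path(path)
        # The loader entrypoint name is the file extension: ".yaml" -> "yaml"
        config_type = dataflow_path.suffix.replace(".", "")
        config_cls = BaseConfigLoader.load(config_type)
        async with config_cls.withconfig({}) as configloader:
            async with configloader() as loader:
                exported = await loader.loadb(dataflow_path.read_bytes())
        return DataFlow._fromdict(**exported)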