def _test_to_script_or_to_markdown(self, to_method, template_lines):
    """Shared assertions for ``to_script``/``to_markdown`` job creation.

    ``to_method`` is the bound method under test; ``template_lines`` are
    code snippets specific to that output format which must appear in the
    generated job code.
    """
    # These methods are so similar that it makes sense to have a helper
    # that can test either one instead of duplicating a bunch of code.
    workflow = Workflow.from_markdown(self.markdown_fp)

    # Create two input artifacts on disk; artifact3.qtf is only a target
    # path for the job's output and is not created here.
    artifact_fp1 = os.path.join(self.test_dir.name, 'artifact1.qtf')
    artifact_fp2 = os.path.join(self.test_dir.name, 'artifact2.qtf')
    Artifact.save([-1, 42, 0, 43, 43], DummyType, None, artifact_fp1)
    Artifact.save([1, 2, 100], DummyType, None, artifact_fp2)
    artifact_fp3 = os.path.join(self.test_dir.name, 'artifact3.qtf')

    job = to_method(
        workflow,
        input_artifact_filepaths={
            'input1': artifact_fp1,
            'input2': artifact_fp2
        },
        parameter_references={
            'param1': 99,
            'param2': -999,
        },
        output_artifact_filepaths={'concatenated_inputs': artifact_fp3})

    # Snippets that must appear in the generated code, grouped by the
    # section of the job they belong to.
    provenance_lines = [
        "provenance = Provenance(",
        "parameters={",
        "'param2': -999"
    ]
    setup_lines = [
        "input1 = Artifact(%r).data" % artifact_fp1,
        "input2 = Artifact(%r).data" % artifact_fp2,
        "param1 = 99",
        "param2 = -999"
    ]
    teardown_lines = [
        "Artifact.save(concatenated_inputs, DummyType, provenance, "
        "%r)" % artifact_fp3
    ]

    for expected_lines in (provenance_lines, setup_lines, template_lines,
                           teardown_lines):
        for expected_line in expected_lines:
            self.assertIn(expected_line, job.code)

    # The job records a fresh random (version 4) UUID plus the exact
    # filepath/parameter mappings it was created with.
    self.assertIsInstance(job.uuid, uuid.UUID)
    self.assertEqual(job.uuid.version, 4)
    self.assertEqual(job.input_artifact_filepaths, {
        'input1': artifact_fp1,
        'input2': artifact_fp2
    })
    self.assertEqual(job.parameter_references, {
        'param1': 99,
        'param2': -999
    })
    self.assertEqual(job.output_artifact_filepaths,
                     {'concatenated_inputs': artifact_fp3})
def test_constructor_no_provenance(self):
    """Loading a saved artifact restores data/type and a v4 UUID even
    when it was saved without provenance."""
    expected_data = [-1, 42, 0, 43, 43]
    artifact_fp = os.path.join(self.test_dir.name, 'artifact.qtf')
    Artifact.save(expected_data, DummyType, None, artifact_fp)

    loaded = Artifact(artifact_fp)

    self.assertEqual(loaded.data, expected_data)
    self.assertEqual(loaded.type, DummyType)
    self.assertEqual(loaded.provenance, None)
    self.assertIsInstance(loaded.uuid, uuid.UUID)
    self.assertEqual(loaded.uuid.version, 4)
def test_save(self):
    """Saving an artifact produces a tar archive with the expected
    internal member layout."""
    artifact_fp = os.path.join(self.test_dir.name, 'artifact.qtf')
    Artifact.save([-1, 42, 0, 43, 43], DummyType, self.dummy_provenance,
                  artifact_fp)

    # Inspect the on-disk container: it is a tar archive whose member
    # names follow a fixed "artifact/..." layout.
    with tarfile.open(artifact_fp, mode='r') as tar:
        archived_names = set(tar.getnames())

    self.assertEqual(
        archived_names,
        {'artifact', 'artifact/metadata.yaml', 'artifact/README.md',
         'artifact/data', 'artifact/data/data.txt'})
def retrieve_artifact_info(qclient, artifact_id, artifact_type):
    """Download a Qiita artifact's files and repackage them as a local
    ``.qtf`` artifact file.

    Returns the path of a temporary artifact file; the caller owns (and
    should eventually remove) that file.
    """
    endpoint = "/qiita_db/artifacts/%s/filepaths/" % artifact_id
    filepaths = qclient.get(endpoint)['filepaths']

    # Convert the raw filepaths into in-memory artifact data via the
    # input translator registered for this artifact type.
    translators = get_artifact_translators_lookup()
    to_artifact_data, _, _ = translators[str(artifact_type)]
    data = to_artifact_data(filepaths)

    # Reserve a temp filename; Artifact.save writes the file contents.
    fd, artifact_fp = mkstemp(suffix='.qtf')
    close(fd)
    # TODO: None is the provenance
    Artifact.save(data, artifact_type, None, artifact_fp)
    return artifact_fp
def setUp(self):
    """Build a workflow fixture plus two input artifacts on disk."""
    # TODO standardize temporary directories created by QIIME
    self.test_dir = tempfile.TemporaryDirectory(prefix='qiime2-temp-')

    workflow_inputs = {
        'input1': DummyType,
        'input2': DummyType,
        'param1': Int,
        'param2': Int,
    }
    # Output order matters to the workflow, hence the OrderedDict.
    workflow_outputs = collections.OrderedDict(
        [('concatenated_inputs', DummyType)])
    self.workflow = Workflow.from_function(
        dummy_function,
        inputs=workflow_inputs,
        outputs=workflow_outputs,
        name='Concatenate things',
        doc="Let's concatenate some things!")

    # Two pre-saved input artifacts; artifact3.qtf is only a target path
    # for outputs and is not created here.
    self.artifact_fp1 = os.path.join(self.test_dir.name, 'artifact1.qtf')
    self.artifact_fp2 = os.path.join(self.test_dir.name, 'artifact2.qtf')
    self.artifact_fp3 = os.path.join(self.test_dir.name, 'artifact3.qtf')
    Artifact.save([-1, 42, 0, 43, 43], DummyType, None, self.artifact_fp1)
    Artifact.save([1, 2, 100], DummyType, None, self.artifact_fp2)
def _test_to_script_or_to_markdown(self, to_method, template_lines):
    """Shared driver for the ``to_script``/``to_markdown`` tests.

    The two methods under test behave almost identically, so one
    parameterized helper covers both instead of duplicating assertions.
    """
    workflow = Workflow.from_markdown(self.markdown_fp)

    # Lay down two input artifacts plus a path for the output artifact
    # (the output file itself is not created here).
    in_fp1 = os.path.join(self.test_dir.name, 'artifact1.qtf')
    in_fp2 = os.path.join(self.test_dir.name, 'artifact2.qtf')
    out_fp = os.path.join(self.test_dir.name, 'artifact3.qtf')
    Artifact.save([-1, 42, 0, 43, 43], DummyType, None, in_fp1)
    Artifact.save([1, 2, 100], DummyType, None, in_fp2)

    input_fps = {'input1': in_fp1, 'input2': in_fp2}
    params = {'param1': 99, 'param2': -999}
    output_fps = {'concatenated_inputs': out_fp}

    job = to_method(workflow,
                    input_artifact_filepaths=input_fps,
                    parameter_references=params,
                    output_artifact_filepaths=output_fps)

    # Snippets that must appear somewhere in the generated job code:
    # provenance section, setup section, format-specific template lines,
    # then the teardown save call.
    expected_snippets = [
        "provenance = Provenance(",
        "parameters={",
        "'param2': -999",
        "input1 = Artifact(%r).data" % in_fp1,
        "input2 = Artifact(%r).data" % in_fp2,
        "param1 = 99",
        "param2 = -999",
    ]
    expected_snippets += template_lines
    expected_snippets.append(
        "Artifact.save(concatenated_inputs, DummyType, provenance, "
        "%r)" % out_fp)
    for snippet in expected_snippets:
        self.assertIn(snippet, job.code)

    # The job records a fresh random (version 4) UUID and echoes back
    # the exact mappings it was created with.
    self.assertIsInstance(job.uuid, uuid.UUID)
    self.assertEqual(job.uuid.version, 4)
    self.assertEqual(job.input_artifact_filepaths, input_fps)
    self.assertEqual(job.parameter_references, params)
    self.assertEqual(job.output_artifact_filepaths, output_fps)