def test_create_data_object():
    """DagRunner.create_data_object() should restore a DataObject previously
    written to the pickle cache, with its stored values intact.

    Two hacks are needed because Configuration validates that the cache file
    exists at construction time: a placeholder file is created before building
    the Configuration, then removed so the real cached object can be written.
    """
    filename = "dag_runner_create_data_object.pkl"

    # hack part 1: make sure this filename exists so that checks in
    # Configuration pass. Use a context manager so the handle is closed
    # immediately (the original left the file object dangling open).
    with open(filename, "w+"):
        pass

    config = {
        "metadata": {
            "data_object": {
                "read_from_cache": True,
                "read_filename": "dag_runner_create_data_object.pkl",
            }
        },
        "implementation_config": {
            "reader_config": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": [],
                }
            }
        },
    }
    configuration = Configuration(None, is_dict_config=True, dict_config=config)

    # hack part 2: now get rid of the placeholder
    if os.path.exists(filename):
        os.remove(filename)

    try:
        # now write the actual object to restore from
        data_object = DataObject(configuration)
        writer = CsvReader(configuration, "csv_reader")
        data_object.add(writer, "some_data")
        data_object.write_to_cache(filename)
        assert os.path.exists(filename)

        # now we get to the code to test
        runner = DagRunner(configuration)
        restored_data_object = runner.create_data_object()

        # run some checks
        assert isinstance(restored_data_object, DataObject)
        assert (
            restored_data_object.get(
                "csv_reader", rtype=DataObjectResponseType.VALUE.value
            )
            == "some_data"
        )
    finally:
        # cleanup runs even when an assertion above fails, so the cache
        # artifact cannot poison subsequent test runs
        if os.path.exists(filename):
            os.remove(filename)
def test_caching():
    """Round-trip a DataObject through the on-disk pickle cache and verify
    that a stored value survives the write/read cycle."""
    config = {
        "implementation_config": {
            "reader_config": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": [],
                }
            }
        }
    }
    configuration = Configuration(None, is_dict_config=True, dict_config=config)

    # build a data object holding a known value under the reader's key
    original = DataObject(configuration)
    reader = CsvReader(configuration, "csv_reader")
    original.add(reader, "some_data")

    # start from a clean slate on disk, then persist
    cache_path = "test_data_object_cache.pkl"
    if os.path.exists(cache_path):
        os.remove(cache_path)
    original.write_to_cache(cache_path)
    assert os.path.exists(cache_path)

    # reload and confirm the stored value is intact
    restored = DataObject.read_from_cache(cache_path)
    assert isinstance(restored, DataObject)
    assert (
        restored.get("csv_reader", rtype=DataObjectResponseType.VALUE.value)
        == "some_data"
    )

    # cleanup
    if os.path.exists(cache_path):
        os.remove(cache_path)