def test_input_iter_one():
    """With no input file or inline data, create_inputs falls back to
    ``args.iter`` and maps the producer PE to 1 iteration."""
    # BUG FIX: the original did ``args = argparse.Namespace`` (no call),
    # which set attributes on the Namespace *class* itself — shared,
    # mutable state that leaks into every other Namespace and test.
    args = argparse.Namespace()
    args.file = None
    args.data = None
    args.iter = 1

    graph = WorkflowGraph()
    prod = TestProducer()
    graph.add(prod)

    inputs = p.create_inputs(args, graph)
    tools.eq_(inputs[prod.id], 1)
def test_input_file():
    """create_inputs reads per-PE iteration counts from a JSON file
    given via ``args.file`` (here: 20 iterations for TestProducer)."""
    import tempfile

    # BUG FIX: use a Namespace *instance*, not the class — attribute
    # assignment on the class is shared global state across tests.
    args = argparse.Namespace()

    data = '{ "TestProducer": 20}'
    with tempfile.NamedTemporaryFile() as temp:
        try:
            temp.write(data)
        except TypeError:
            # Python 3: NamedTemporaryFile defaults to binary mode, so a
            # str write raises TypeError — encode and retry. (The original
            # used a bare ``except:``, which would mask unrelated errors.)
            temp.write(bytes(data, 'UTF-8'))
        temp.flush()
        temp.seek(0)

        args.file = temp.name
        args.data = None
        args.iter = 1

        graph = WorkflowGraph()
        prod = TestProducer()
        graph.add(prod)

        # Must run inside the ``with`` block: the named file is deleted
        # as soon as the context manager exits.
        inputs = p.create_inputs(args, graph)
        tools.eq_(inputs[prod.id], 20)
# NOTE(review): this chunk begins mid-expression — the list literal that the
# trailing ``]`` closes (presumably ``processes = [``) starts outside this view.
    waveform_reader, (plot_stream, {
        "source": "waveform_reader",
        "output_dir": "./output-images",
        "tag": "observed-image"
    })
]
# processes.append((fn, params))

# Build a single composite PE that applies each function in ``processes``
# in sequence.
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)
watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
downloadPE.name = "downloadPE"

# Wire the graph: the downloader fans out to both directory watchers;
# the waveform watcher feeds the processing chain, the XML watcher feeds
# the station-XML reader.
graph = WorkflowGraph()
graph.add(downloadPE)
graph.connect(downloadPE, 'output', watcher, "input")
graph.connect(downloadPE, 'output', watcher_xml, "input")
graph.connect(watcher, 'output', chain, "input")
graph.connect(watcher_xml, 'output', xmlr, "input")

# Provenance instrumentation is disabled in this variant (compare the
# sibling chunk where PROV_PATH / BULK_SIZE are active).
# injectProv(graph,SeismoPE)
# graph=attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'])
# Store to local path
#ProvenancePE.PROV_PATH = os.environ['PROV_PATH']
#
# Size of the provenance bulk before sent to storage or sensor
#ProvenancePE.BULK_SIZE = 20
#injectProv(graph, (SeismoPE,), save_mode=ProvenancePE.SAVE_MODE_FILE,
# Wrap the plain functions as workflow PEs.
waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)
downloadPE = SimpleFunctionPE(download_data)

# Pipeline stages: read waveforms, then plot them to ./output-images.
processes=[waveform_reader,(plot_stream,{"source":"waveform_reader","output_dir": "./output-images","tag": "observed-image"})]
#processes.append((fn, params))

# Compose the stages into one chained PE.
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)
watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
downloadPE.name = "downloadPE"

# Graph layout: downloader fans out to two directory watchers; the
# waveform watcher feeds the processing chain, the XML watcher feeds
# the station-XML reader.
graph = WorkflowGraph()
graph.add(downloadPE)
graph.connect(downloadPE, 'output', watcher, "input")
graph.connect(downloadPE, 'output', watcher_xml, "input")
graph.connect(watcher, 'output', chain, "input")
graph.connect(watcher_xml, 'output', xmlr, "input")
#injectProv(graph,SeismoPE)
#graph=attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'])
#Store to local path
# Provenance output directory — raises KeyError if PROV_PATH is unset in
# the environment (presumably set by the launcher; verify against caller).
ProvenancePE.PROV_PATH=os.environ['PROV_PATH']
#Size of the provenance bulk before sent to storage or sensor
ProvenancePE.BULK_SIZE=20
NPROC, "downloadPE": [{ "input": { "minimum_interstation_distance_in_m": 100, "channel_priorities": ["BH[E,N,Z]", "EH[E,N,Z]"], "location_priorities": ["", "00", "10"], "mseed_path": "./data", "stationxml_path": "./stations", "RECORD_LENGTH_IN_MINUTES": RECORD_LENGTH_IN_MINUTES, "ORIGIN_TIME": ETIME, "minlatitude": latitude_min, "maxlatitude": latitude_max, "minlongitude": longitude_min, "maxlongitude": longitude_min } }] } filename = "misfit_data/data_file_test.json" with open(filename, "w") as write_file: json.dump(d, write_file) print(os.getcwd()) print(os.listdir(os.getcwd() + '/misfit_data/SPECFEMDATA')) write_stream = WriteJSON() write_stream.name = "WJSON" graph = WorkflowGraph() graph.add(write_stream)
def testOnePE():
    """A lone TestProducer fed one empty input emits [1] on 'output'."""
    producer = TestProducer()
    workflow = WorkflowGraph()
    workflow.add(producer)
    outcome = simple_process.process(workflow, {producer: [{}]})
    tools.eq_({(producer.id, 'output'): [1]}, outcome)
def testOnePE():
    """Processing a graph that contains only a producer yields a single 1."""
    graph = WorkflowGraph()
    source = TestProducer()
    graph.add(source)
    expected = {(source.id, 'output'): [1]}
    tools.eq_(expected, simple_process.process(graph, {source: [{}]}))