Example #1
0
def create_processing_chain(proc):
    """Build an iterative processing chain from a list of step descriptions.

    Each entry in *proc* is a dict with a 'type' key naming a function on
    the ``mf`` module and a 'parameters' key holding its parameter dict.
    Returns the composite PE produced by ``create_iterative_chain`` using
    ``MisfitPreprocessingFunctionPE`` as the PE class.
    """
    processes = []
    for p in proc:
        fn_name = p['type']
        params = p['parameters']
        # print() function form: the original used the Python 2 print
        # statement, which is a syntax error on Python 3.
        print('adding %s(%s)' % (fn_name, params))
        fn = getattr(mf, fn_name)
        processes.append((fn, params))
    return create_iterative_chain(
        processes, FunctionPE_class=MisfitPreprocessingFunctionPE)
def create_processing_chain(proc):
    """Resolve each step description to an ``mf`` function and chain them.

    Every element of *proc* carries a 'type' (name of a function on the
    ``mf`` module) and a 'parameters' dict; the resolved (function,
    parameters) pairs are composed with ``create_iterative_chain`` using
    ``MisfitPreprocessingFunctionPE``.
    """
    resolved = [
        (getattr(mf, step['type']), step['parameters'])
        for step in proc
    ]
    return create_iterative_chain(
        resolved, FunctionPE_class=MisfitPreprocessingFunctionPE)
Example #3
0
def create_processing_chain(proc):
    """Build an iterative chain, numbering plot/store steps.

    Each entry in *proc* is a dict with a 'type' (function name on ``mf``)
    and 'parameters'. 'plot_stream' and 'store_stream' steps each get a
    consecutive 'seq_idx' parameter so repeated plot/store stages are
    distinguishable. Returns the composite produced by
    ``create_iterative_chain`` with ``MisfitPreprocessingFunctionPE``.
    """
    processes = []
    plot_seq_idx = 0
    store_seq_idx = 0
    for p in proc:
        fn_name = p['type']
        params = p['parameters']
        if fn_name == 'plot_stream':
            params.update({'seq_idx': plot_seq_idx})
            plot_seq_idx += 1
        if fn_name == 'store_stream':
            params.update({'seq_idx': store_seq_idx})
            store_seq_idx += 1
        # print() function form: the original used the Python 2 print
        # statement, which is a syntax error on Python 3.
        print('adding %s(%s)' % (fn_name, params))
        fn = getattr(mf, fn_name)
        processes.append((fn, params))
    return create_iterative_chain(
        processes, FunctionPE_class=MisfitPreprocessingFunctionPE)
Example #4
0
def create_processing_chain(proc):
    """Build an iterative chain, numbering plot/store steps.

    Duplicate of the variant above: resolves each step's 'type' against the
    ``mf`` module and injects a running 'seq_idx' into 'plot_stream' /
    'store_stream' parameter dicts before composing the chain.
    """
    processes = []
    plot_seq_idx = 0
    store_seq_idx = 0
    for p in proc:
        fn_name = p['type']
        params = p['parameters']
        if fn_name == 'plot_stream':
            params.update({'seq_idx': plot_seq_idx})
            plot_seq_idx += 1
        if fn_name == 'store_stream':
            params.update({'seq_idx': store_seq_idx})
            store_seq_idx += 1
        # print() function form: the original used the Python 2 print
        # statement, which is a syntax error on Python 3.
        print('adding %s(%s)' % (fn_name, params))
        fn = getattr(mf, fn_name)
        processes.append((fn, params))
    return create_iterative_chain(
        processes, FunctionPE_class=MisfitPreprocessingFunctionPE)
def test_dot_composite():
    """A graph nesting a composite chain renders a dot subgraph cluster."""

    def inc(a):
        return a + 1

    def dec(a):
        return a - 1

    composite = create_iterative_chain([inc, dec])
    producer = TestProducer()
    consumer = TestOneInOneOut()
    inner = WorkflowGraph()
    inner.connect(producer, "output", composite, "input")
    inner.connect(composite, "output", consumer, "input")
    inner.inputmappings = {"input": (producer, "input")}
    outer_producer = TestProducer()
    outer = WorkflowGraph()
    outer.connect(outer_producer, "output", inner, "input")
    rendered = draw(outer)
    tools.ok_("subgraph cluster_" in rendered)
def testCreateChain():
    """Chaining add(+1), mult(*3), is_odd over input 2 yields [False, True]."""

    def add(a, b):
        return a + b

    def mult(a, b):
        return a * b

    def is_odd(a):
        return a % 2 == 1

    steps = [(add, {"b": 1}), (mult, {"b": 3}), is_odd]
    composite = create_iterative_chain(steps)
    source = TestProducer()
    wf = WorkflowGraph()
    wf.connect(source, "output", composite, "input")
    wf.flatten()
    outcome = simple_process.process_and_return(wf, {source: 2})
    for result in outcome.values():
        tools.eq_({"output": [False, True]}, result)
Example #7
0
def testCreateChain():
    """Duplicate of the add/mult/is_odd chain test, single-quoted variant."""

    def add(a, b):
        return a + b

    def mult(a, b):
        return a * b

    def is_odd(a):
        return a % 2 == 1

    steps = [(add, {'b': 1}), (mult, {'b': 3}), is_odd]
    composite = create_iterative_chain(steps)
    source = TestProducer()
    wf = WorkflowGraph()
    wf.connect(source, 'output', composite, 'input')
    wf.flatten()
    outcome = simple_process.process_and_return(wf, {source: 2})
    for result in outcome.values():
        tools.eq_({'output': [False, True]}, result)
def test_dot_composite():
    """Duplicate: a nested composite chain shows up as a dot subgraph."""

    def inc(a):
        return a + 1

    def dec(a):
        return a - 1

    chain = create_iterative_chain([inc, dec])
    source = TestProducer()
    sink = TestOneInOneOut()
    inner = WorkflowGraph()
    inner.connect(source, 'output', chain, 'input')
    inner.connect(chain, 'output', sink, 'input')
    inner.inputmappings = {'input': (source, 'input')}
    top_source = TestProducer()
    top = WorkflowGraph()
    top.connect(top_source, 'output', inner, 'input')
    rendered = draw(top)
    tools.ok_('subgraph cluster_' in rendered)
Example #9
0
# Stage-in step lists: each entry pairs the `stagein` function with its
# parameter dict (local target directory, iRODS URL, data type).
stagein_syn=[
                   (stagein,{ 'target':os.environ['STAGED_DATA']+'/'+input_json['readJSONstgin'][0]['input']['synt_dir'],'irods':os.environ['IRODS_URL'],'type':'synt','rootpath':'~/verce/'})
            ]

stagein_xml=[
                   (stagein,{ 'target':os.environ['STAGED_DATA']+'/'+input_json['readJSONstgin'][0]['input']['stations_dir'],'irods':os.environ['IRODS_URL'],'type':'stationxml','format':'application/xml','rootpath':'~/verce/'})
            ]

# Workflow: a JSON reader fans out to three stream mappers, each feeding
# its own staging pipeline.
graph = WorkflowGraph()
read=ReadJSON()
read.name='readJSONstgin'
streamer0=StreamMapper('')
streamer1=StreamMapper('')
streamer2=StreamMapper('')

# NOTE(review): `stagein_raw` is not defined in this snippet — presumably
# declared alongside stagein_syn/stagein_xml elsewhere; confirm.
syn_staging_pipeline = create_iterative_chain(stagein_syn)
raw_staging_pipeline = create_iterative_chain(stagein_raw)
xml_staging_pipeline = create_iterative_chain(stagein_xml)

graph.connect(read, 'output_syn', streamer0, "input")
graph.connect(read, 'output_raw', streamer1, "input")
graph.connect(read, 'output_xml', streamer2, "input")
graph.connect(streamer0, 'output', syn_staging_pipeline, "input")
graph.connect(streamer1, 'output', raw_staging_pipeline, "input")
graph.connect(streamer2, 'output', xml_staging_pipeline, "input")

#injectProv(graph,ProvenancePE)
#attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False)

# Register the run, recording provenance via the bulk file recorder.
InitiateNewRun(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=input_json['runId'],w3c_prov=False,workflowName="preprocess_stagein",workflowId="")
#display(graph)
Example #10
0


# Stage-in step lists for the misfit preprocessing workflow: waveform data
# plus station XML metadata (each pairs `stagein` with its parameters).
stagein_data=[
                   (stagein,{ 'target':os.environ['STAGED_DATA']+'/'+input_json['readJSONstgin'][0]['input']['synt_dir'],'irods':os.environ['IRODS_URL'],'type':'waveform','rootpath':'~/verce/'})
            ]

stagein_xml=[
                   (stagein,{ 'target':os.environ['STAGED_DATA']+'/'+input_json['readJSONstgin'][0]['input']['stations_dir'],'irods':os.environ['IRODS_URL'],'type':'stationxml','format':'application/xml','rootpath':'~/verce/'})
            ]

# JSON reader fans out to two stream mappers, one per staging pipeline.
graph = WorkflowGraph()
read=ReadJSON()
read.name='readJSONstgin'
streamer0=StreamMapper('')
streamer1=StreamMapper('')

data_staging_pipeline = create_iterative_chain(stagein_data)
xml_staging_pipeline = create_iterative_chain(stagein_xml)

graph.connect(read, 'output_data', streamer0, "input")
graph.connect(read, 'output_xml', streamer1, "input")
graph.connect(streamer0, 'output', data_staging_pipeline, "input")
graph.connect(streamer1, 'output', xml_staging_pipeline, "input")
#injectProv(graph,ProvenancePE)
#attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False)

# Register the run, recording provenance via the bulk file recorder.
InitiateNewRun(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=input_json['runId'],w3c_prov=False,workflowName="misfit_process_stagein",workflowId="")
#display(graph)
 
Example #11
0
    'type':
    'stationxml',
    'format':
    'application/xml',
    'rootpath':
    '~/verce/'
})]

# Wire the stage-in workflow: JSON reader fans out syn/raw/xml outputs to
# three stream mappers, each feeding a staging pipeline.
# (The stagein_* lists are defined just above this snippet, truncated here.)
graph = WorkflowGraph()
read = ReadJSON()
read.name = 'readJSONstgin'
streamer0 = StreamMapper('')
streamer1 = StreamMapper('')
streamer2 = StreamMapper('')

syn_staging_pipeline = create_iterative_chain(stagein_syn)
raw_staging_pipeline = create_iterative_chain(stagein_raw)
xml_staging_pipeline = create_iterative_chain(stagein_xml)

graph.connect(read, 'output_syn', streamer0, "input")
graph.connect(read, 'output_raw', streamer1, "input")
graph.connect(read, 'output_xml', streamer2, "input")
graph.connect(streamer0, 'output', syn_staging_pipeline, "input")
graph.connect(streamer1, 'output', raw_staging_pipeline, "input")
graph.connect(streamer2, 'output', xml_staging_pipeline, "input")

#injectProv(graph,ProvenancePE)
#attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False)

InitiateNewRun(graph,
               ProvenanceRecorderToFileBulk,
    prov={'location':["file://"+socket.gethostname()+"/"+os.environ['STAGED_DATA']+"/"+data['mseed_path'],"file://"+socket.gethostname()+"/"+os.environ['STAGED_DATA']+"/"+data['stationxml_path']], 'format':'multipart/mixed', 'metadata':download_report}
    
     
    
    return {'_d4p_prov':prov,'_d4p_data':[os.environ['STAGED_DATA']+"/"+data['mseed_path'],os.environ['STAGED_DATA']+"/"+data['stationxml_path']]}   



# Wrap the reader/download functions as simple function PEs.
waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)
downloadPE = SimpleFunctionPE(download_data)

# Chain: read the waveform, then plot it to ./output-images.
processes=[waveform_reader,(plot_stream,{"source":"waveform_reader","output_dir": "./output-images","tag": "observed-image"})]

#processes.append((fn, params))
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)



# Two directory watchers pick up the downloaded files; index arguments 0/1
# presumably select which watched location each instance observes — confirm.
watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
downloadPE.name = "downloadPE"
graph = WorkflowGraph()
graph.add(downloadPE)

graph.connect(downloadPE, 'output', watcher, "input")
graph.connect(downloadPE, 'output', watcher_xml, "input")
graph.connect(watcher, 'output', chain, "input")
graph.connect(watcher_xml, 'output', xmlr, "input")

#injectProv(graph,SeismoPE)
Example #13
0

from dispel4py.workflow_graph import WorkflowGraph
# One producer feeds two identical preprocessing chains whose outputs are
# matched and then cross-correlated.
streamProducer = SimpleFunctionPE(stream_producer)
stats1 = SimpleFunctionPE(readstats) 
stats2 = SimpleFunctionPE(readstats) 
match_traces=MatchPE()
xcorrelation_traces= SimpleFunctionPE(xcorrelation, {'maxlag':1000}) 

# Preprocessing steps, applied in order; tuple entries carry the step's
# parameter dict.
pipeline = [
    (decimate, {'sps':4}), 
    detrend, 
    demean, 
    (filter, {'freqmin':0.01, 'freqmax':1., 'corners':4, 'zerophase':False}),
    spectralwhitening,
    readstats]
preprocess_trace_1 = create_iterative_chain(pipeline)
preprocess_trace_2 = create_iterative_chain(pipeline)

graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace_1, 'input')
graph.connect(streamProducer, 'output', preprocess_trace_2, 'input')
graph.connect(preprocess_trace_1, 'output', match_traces, 'input1')
graph.connect(preprocess_trace_2, 'output', match_traces, 'input2')
graph.connect(match_traces, 'output', xcorrelation_traces, 'input')
 


#from dispel4py import simple_process
#input_data = [ {'input' : sta1 },{'input' : sta2 }]
#simple_process.process(graph, input_data)
# Producer reading a stream for the given window/channel, and a sink
# writing the processed stream to file.
streamProducer = StreamProducer(t_start, t_finish, channel)
streamProducer.name = 'streamProducer'
streamToFile = StreamToFile()
streamToFile.name = 'StreamToFile'
# Preprocessing steps applied in order; tuple entries carry the step's
# parameter dict.
functions = [(decimate, {
    'sps': 4
}), detrend, demean,
             (remove_response, {
                 'pre_filt': (0.005, 0.006, 30.0, 35.0),
                 'units': 'VEL'
             }),
             (filter, {
                 'freqmin': 0.01,
                 'freqmax': 1.,
                 'corners': 4,
                 'zerophase': False
             }), (calc_norm, {
                 'norm': 'env',
                 'N': 15
             }), (whiten, {
                 'smooth': None
             }), (calc_fft, {
                 'type': 'float64',
                 'shift': 5000
             })]
preTask = create_iterative_chain(functions, PreTaskPE)
graph = WorkflowGraph()

# producer -> preprocessing chain -> file writer
graph.connect(streamProducer, StreamProducer.OUTPUT_NAME, preTask, 'input')
graph.connect(preTask, 'output', streamToFile, StreamToFile.INPUT_NAME)
Example #15
0
# Cross-correlation workflow with a final stack/plot stage; most
# preprocessing steps are disabled (commented out) leaving only readstats.
stats1 = SeismoSimpleFunctionPE(readstats)
stats2 = SeismoSimpleFunctionPE(readstats)
stackp = SeismoSimpleFunctionPE(stackPlot, {'s_rate': 4})

match_traces = MatchPE()
xcorrelation_traces = SeismoSimpleFunctionPE(xcorrelation, {'maxlag': 1000})

pipeline = [
    #(decimate, {'sps':4}),
    #detrend,
    #demean,
    #(filter, {'freqmin':0.01, 'freqmax':1., 'corners':4, 'zerophase':False}),
    #spectralwhitening,
    readstats
]
preprocess_trace_1 = create_iterative_chain(
    pipeline, FunctionPE_class=SeismoSimpleFunctionPE)
preprocess_trace_2 = create_iterative_chain(
    pipeline, FunctionPE_class=SeismoSimpleFunctionPE)
# NOTE(review): `streamProducer` is defined outside this snippet.
graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace_1, 'input')
graph.connect(streamProducer, 'output', preprocess_trace_2, 'input')
graph.connect(preprocess_trace_1, 'output', match_traces, 'input1')
graph.connect(preprocess_trace_2, 'output', match_traces, 'input2')
graph.connect(match_traces, 'output', xcorrelation_traces, 'input')
graph.connect(xcorrelation_traces, 'output', stackp, 'input')

InitiateNewRun(graph,
               ProvenanceRecorderToService,
               input={
                   "url": "http://test.verce.eu/stations",
                   "mime-type": "application/xml",
Example #16
0
        spec /= spec_ampl  # Do we need to do some smoothing here?
        ret = real(ifft(spec, nfft)[:n])

        st[trace].data = ret

    return st


from dispel4py.workflow_graph import WorkflowGraph

# Producer and station-extraction PEs, both provenance-aware.
streamProducer = ProvenancePE(stream_producer)
sta = ProvenancePE(extractStation)

# Four-step preprocessing chain built from ProvenancePE instances.
preprocess_trace = create_iterative_chain(
    [(decimate, {"sps": 4}), detrend, demean, spectralwhitening], FunctionPE_class=ProvenancePE
)

graph = WorkflowGraph()
graph.connect(streamProducer, "output", preprocess_trace, "input")
graph.connect(preprocess_trace, "output", sta, "input")
InitiateNewRun(
    graph,
    ProvenanceRecorderToService,
    input=[{"test": "1", "blah": "3"}],
    username="******",
    workflowId="173",
    description="test",
    system_id="xxxx",
    workflowName="postprocessing",
    runId="stami8",
            self.params["seedresp"]["filename"] = rf
        except KeyError:
            pass
        result = self.compute_fn(str1, **self.params)
        self.log("%s: done processing: %s" % (count, self.compute_fn.__name__))
        return [count, result, rf]


# Two output sinks: one for the plain preprocessed stream, one for the
# spectrally whitened version.
streamProducer = StreamProducer()
streamToFile = StreamToFile(ROOT_DIR + "OUTPUT/NOWHITE/%s_preprocessed.SAC")
streamToFile.name = "StreamToFileNonWhitened"
streamToFileWhitened = StreamToFile(ROOT_DIR + "OUTPUT/WHITE/%s_preprocessed.SAC")
streamToFileWhitened.name = "StreamToFileWhitened"
# Preprocessing steps applied in order; tuple entries carry parameters.
functions = [
    (decimate, {"sps": 4}),
    detrend,
    demean,
    (simulate, {"paz_remove": None, "pre_filt": (0.005, 0.006, 30.0, 35.0), "seedresp": {"units": "VEL"}}),
    (filter, {"freqmin": 0.01, "freqmax": 1.0, "corners": 4, "zerophase": False}),
    (mean_norm, {"N": 15}),
]
preTask = create_iterative_chain(functions, PreTaskPE)
whiten = PreTaskPE(spectralwhitening)
whiten.name = "PE_whiten"

# preTask output is written directly AND forwarded through the whitening
# step to the second sink.
graph = WorkflowGraph()
graph.connect(streamProducer, StreamProducer.OUTPUT_NAME, preTask, "input")
graph.connect(preTask, "output", streamToFile, StreamToFile.INPUT_NAME)
graph.connect(preTask, "output", whiten, IterativePE.INPUT_NAME)
graph.connect(whiten, IterativePE.OUTPUT_NAME, streamToFileWhitened, StreamToFile.INPUT_NAME)
Example #18
0
# Two independent producers, one preprocessing chain each, matched and
# cross-correlated. (streamProducer1 is created just above this snippet.)
streamProducer1.name = "SP1"
streamProducer2 = SimpleFunctionPE(stream_producer)
streamProducer2.name = "SP2"
stats1 = SimpleFunctionPE(readstats) 
stats2 = SimpleFunctionPE(readstats) 
match_traces=MatchPE()
xcorrelation_traces= SimpleFunctionPE(xcorrelation, {'maxlag':1000}) 

# Preprocessing steps applied in order; tuple entries carry parameters.
pipeline = [
    (decimate, {'sps':4}), 
    detrend, 
    demean, 
    (filter, {'freqmin':0.01, 'freqmax':1., 'corners':4, 'zerophase':False}),
    spectralwhitening,
    readstats]
preprocess_trace_1 = create_iterative_chain(pipeline)
preprocess_trace_2 = create_iterative_chain(pipeline)

graph = WorkflowGraph()
graph.connect(streamProducer1, 'output', preprocess_trace_1, 'input')
graph.connect(streamProducer2, 'output', preprocess_trace_2, 'input')
graph.connect(preprocess_trace_1, 'output', match_traces, 'input1')
graph.connect(preprocess_trace_2, 'output', match_traces, 'input2')
graph.connect(match_traces, 'output', xcorrelation_traces, 'input')


# dispel4py simple Solution_Session_3.py -d '{ "SP1" : [ { "input" : "../A25A.TA..BHZ.2011.025.00.00.00.000-2011.026.00.00.39.000.rm.scale-AUTO.SAC" } ], "SP2" : [ { "input" : "../BMN.LB..BHZ.2011.025.00.00.00.023-2011.026.00.00.38.998.rm.scale-AUTO.SAC"} ] }'



Example #19
0
# Cross-correlation + stack workflow; most preprocessing steps are
# disabled (commented out), leaving only readstats in the chain.
stats1 = SeismoSimpleFunctionPE(readstats)
stats2 = SeismoSimpleFunctionPE(readstats)
stackp =SeismoSimpleFunctionPE(stackPlot,{'s_rate':4})

match_traces=MatchPE()
xcorrelation_traces= SeismoSimpleFunctionPE(xcorrelation, {'maxlag':1000})


pipeline = [
    #(decimate, {'sps':4}), 
    #detrend, 
    #demean, 
    #(filter, {'freqmin':0.01, 'freqmax':1., 'corners':4, 'zerophase':False}),
    #spectralwhitening,
    readstats]
preprocess_trace_1 = create_iterative_chain(pipeline,FunctionPE_class=SeismoSimpleFunctionPE)
preprocess_trace_2 = create_iterative_chain(pipeline,FunctionPE_class=SeismoSimpleFunctionPE)
# NOTE(review): `streamProducer` is defined outside this snippet.
graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace_1, 'input')
graph.connect(streamProducer, 'output', preprocess_trace_2, 'input')
graph.connect(preprocess_trace_1, 'output', match_traces, 'input1')
graph.connect(preprocess_trace_2, 'output', match_traces, 'input2')
graph.connect(match_traces, 'output', xcorrelation_traces, 'input')
graph.connect(xcorrelation_traces, 'output', stackp, 'input')

# Register the run with the remote provenance recorder service.
InitiateNewRun(graph,ProvenanceRecorderToService,input={"url": "http://test.verce.eu/stations", "mime-type": "application/xml", "name": "stations"},username="******",workflowId="173",description="test22",system_id="xxxx",workflowName="postprocessing",runId="dispel4py_training004",w3c_prov=False)

#from dispel4py.visualisation import display


        st[trace].data = ret

    return st


# Now it's time to create the graph for preprocessing the traces.

from dispel4py.workflow_graph import WorkflowGraph
# Producer -> five-step preprocessing chain -> stats reader.
streamProducer = SimpleFunctionPE(stream_producer)
streamProducer.name = "streamProducer"
sta = SimpleFunctionPE(readstats)

preprocess_trace = create_iterative_chain([(decimate, {
    'sps': 4
}), detrend, demean,
                                           (filter, {
                                               'freqmin': 0.01,
                                               'freqmax': 1.,
                                               'corners': 4,
                                               'zerophase': False
                                           }), spectralwhitening])

graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace, 'input')
graph.connect(preprocess_trace, 'output', sta, 'input')

# Now execute the graph:
# Simple command for executing this workflow from a terminal.

# dispel4py simple Solution_Session_2.py -d '{ "streamProducer" : [ {"input": "../A25A.TA..BHZ.2011.025.00.00.00.000-2011.026.00.00.39.000.rm.scale-AUTO.SAC"}, {"input": "../BMN.LB..BHZ.2011.025.00.00.00.023-2011.026.00.00.38.998.rm.scale-AUTO.SAC"} ] }'
Example #21
0
    return 2 + data


def multiplyByFour(data):
    '''
    Return `data` multiplied by four.
    '''
    quadrupled = 4 * data
    return quadrupled


def divideByTwo(data):
    '''
    Return half of `data` (true division).
    '''
    half = data / 2
    return half


def subtract(data, n):
    '''
    Return the difference `data` - `n`.
    '''
    difference = data - n
    return difference


# Compose the four arithmetic steps into a single chain PE and feed it
# from a test producer. (`addTwo` is defined above this snippet.)
functions = [addTwo, multiplyByFour, divideByTwo, (subtract, {'n': 1})]
composite = create_iterative_chain(functions)
producer = TestProducer()

graph = WorkflowGraph()
graph.connect(producer, 'output', composite, 'input')
    return 2 + data


def multiplyByFour(data):
    """
    Return `data` multiplied by four.
    """
    quadrupled = 4 * data
    return quadrupled


def divideByTwo(data):
    """
    Return half of `data` (true division).
    """
    half = data / 2
    return half


def subtract(data, n):
    """
    Return the difference `data` - `n`.
    """
    difference = data - n
    return difference


# Compose the four arithmetic steps into a single chain PE and feed it
# from a test producer. (`addTwo` is defined above this snippet.)
functions = [addTwo, multiplyByFour, divideByTwo, (subtract, {"n": 1})]
composite = create_iterative_chain(functions)
producer = TestProducer()

graph = WorkflowGraph()
graph.connect(producer, "output", composite, "input")
Example #23
0
    prov={'location':locations, 'format':'application/octet-stream', 'error':stderrdata,'metadata':{'my_feature1':2,'my_feature2':'test'}}
    
    return {'_d4p_prov':prov,'_d4p_data':stdoutdata}


# Stage-in pipelines for mesh and model archives: extract the reference,
# stage it from iRODS to MODEL_PATH, then unpack it there.
stagein_mesh=[extractMesh,
                   (stagein,{ 'target':os.environ['MODEL_PATH'],'irods':os.environ['IRODS_URL'],'type':'mesh'}),
                   (unpack,{'location':os.environ['MODEL_PATH']})
                   ]

stagein_model=[extractModel,
                   (stagein,{ 'target':os.environ['MODEL_PATH'],'irods':os.environ['IRODS_URL'],'type':'model'}),
                   (unpack,{'location':os.environ['MODEL_PATH']})
                   ]


# One JSON reader feeds both staging pipelines.
graph = WorkflowGraph()
readf=ReadJSON()
mesh_staging_pipeline = create_iterative_chain(stagein_mesh)
model_staging_pipeline = create_iterative_chain(stagein_model)

graph.connect(readf, 'output', mesh_staging_pipeline, "input")
graph.connect(readf, 'output', model_staging_pipeline, "input")

# Attach provenance collection and a per-run file recorder.
injectProv(graph,ProvenancePE)
attachProvenanceRecorderPE(graph,ProvenanceRecorderToFile,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False)

#InitiateNewRun(graph,ProvenanceRecorderToService,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False,workflowName="model_stagein",workflowId="")
#display(graph)
 
Example #24
0
    os.environ['IRODS_URL'],
    'type':
    'stationxml',
    'format':
    'application/xml',
    'rootpath':
    '~/verce/'
})]

# Wire the stage-in workflow: JSON reader fans out data/xml outputs to two
# stream mappers, each feeding a staging pipeline. (The stagein_* lists
# are defined just above this snippet, truncated here.)
graph = WorkflowGraph()
read = ReadJSON()
read.name = 'readJSONstgin'
streamer0 = StreamMapper('')
streamer1 = StreamMapper('')

data_staging_pipeline = create_iterative_chain(stagein_data)
xml_staging_pipeline = create_iterative_chain(stagein_xml)

graph.connect(read, 'output_data', streamer0, "input")
graph.connect(read, 'output_xml', streamer1, "input")
graph.connect(streamer0, 'output', data_staging_pipeline, "input")
graph.connect(streamer1, 'output', xml_staging_pipeline, "input")
#injectProv(graph,ProvenancePE)
#attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'],w3c_prov=False)

InitiateNewRun(graph,
               ProvenanceRecorderToFileBulk,
               username=os.environ['USER_NAME'],
               runId=input_json['runId'],
               w3c_prov=False,
               workflowName="misfit_process_stagein",
# Wrap the reader/download functions as simple function PEs.
waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)
downloadPE = SimpleFunctionPE(download_data)

# Chain: read the waveform, then plot it to ./output-images.
processes = [
    waveform_reader,
    (plot_stream, {
        "source": "waveform_reader",
        "output_dir": "./output-images",
        "tag": "observed-image"
    })
]

# processes.append((fn, params))
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)

# Two directory watchers pick up the downloaded files; index arguments 0/1
# presumably select which watched location each instance observes — confirm.
watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
downloadPE.name = "downloadPE"
graph = WorkflowGraph()
graph.add(downloadPE)

graph.connect(downloadPE, 'output', watcher, "input")
graph.connect(downloadPE, 'output', watcher_xml, "input")
graph.connect(watcher, 'output', chain, "input")
graph.connect(watcher_xml, 'output', xmlr, "input")

# injectProv(graph,SeismoPE)
# graph=attachProvenanceRecorderPE(graph,ProvenanceRecorderToFileBulk,username=os.environ['USER_NAME'],runId=os.environ['RUN_ID'])
Example #26
0
        spec_ampl = sqrt(abs(multiply(spec, conjugate(spec))))

        spec /= spec_ampl  #Do we need to do some smoothing here?
        ret = real(ifft(spec, nfft)[:n])

        st[trace].data = ret

    return st


from dispel4py.workflow_graph import WorkflowGraph
# Producer and station-extraction PEs, both provenance-aware.
streamProducer = ProvenancePE(stream_producer)
sta = ProvenancePE(extractStation)

# Four-step preprocessing chain built from ProvenancePE instances.
preprocess_trace = create_iterative_chain([(decimate, {
    'sps': 4
}), detrend, demean, spectralwhitening],
                                          FunctionPE_class=ProvenancePE)

graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace, 'input')
graph.connect(preprocess_trace, 'output', sta, 'input')
InitiateNewRun(graph,
               ProvenanceRecorderToService,
               input=[{
                   'test': '1',
                   'blah': '3'
               }],
               username="******",
               workflowId="173",
               description="test",
               system_id="xxxx",