Example #1
    def __init__(self, *args, **kwargs):

        # Rebase this instance's class onto a dynamically created subclass
        # that also inherits from SimpleFunctionPE, then initialise both
        # the function wrapper and the provenance base.
        self.__class__ = type(str(self.__class__),
                              (self.__class__, SimpleFunctionPE), {})
        SimpleFunctionPE.__init__(self, *args, **kwargs)
        ProvenancePE.__init__(self, self.name, *args, **kwargs)
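The constructor above relies on a dynamic-mixin idiom: it rebases the instance's class at runtime so that the provenance-enabled PE also behaves as a SimpleFunctionPE. A minimal, self-contained sketch of the same idiom; the class names Mixin and Plain are illustrative, not taken from dispel4py:

class Mixin:
    def greet(self):
        return "hello from the mixin"


class Plain:
    def __init__(self):
        # Replace this instance's class with a dynamically created subclass
        # that also inherits from Mixin, so the mixin's methods become
        # available on the instance.
        self.__class__ = type(str(self.__class__), (self.__class__, Mixin), {})


p = Plain()
print(p.greet())  # -> "hello from the mixin"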
Example #2
def testSplitMerge():
    '''
    Creates the split/merge graph with 6 nodes: a split PE, three
    multiplier PEs, a merge PE and a file writer.
    '''
    graph = WorkflowGraph()
    split = splitPE(3)
    mult1 = SimpleFunctionPE(mult)
    mult2 = SimpleFunctionPE(mult)
    mult3 = SimpleFunctionPE(mult)
    merge = mergePE(3)
    test = fwritePE()

    graph.connect(split, 'output0', mult1, 'input')
    graph.connect(split, 'output1', mult2, 'input')
    graph.connect(split, 'output2', mult3, 'input')
    graph.connect(mult1, 'output', merge, 'input0')
    graph.connect(mult2, 'output', merge, 'input1')
    graph.connect(mult3, 'output', merge, 'input2')
    graph.connect(merge, 'output', test, 'input')

    return graph
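The wrapped function mult (and the split, merge and write PEs) are defined elsewhere in the test module. As a hypothetical stand-in, a mult that fits SimpleFunctionPE's single-input/single-output convention (demonstrated by testSimpleFunction in Example #7) could be as simple as:

def mult(data):
    # Hypothetical payload function: SimpleFunctionPE passes the value
    # received on 'input' as the first argument and emits the return
    # value on 'output'.
    return data * 2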
Example #3

    prov = {
        'location': [
            "file://" + socket.gethostname() + "/" + os.environ['STAGED_DATA'] +
            "/" + data['mseed_path'],
            "file://" + socket.gethostname() + "/" + os.environ['STAGED_DATA'] +
            "/" + data['stationxml_path']
        ],
        'format': 'multipart/mixed',
        'metadata': download_report
    }

    return {
        '_d4p_prov':
        prov,
        '_d4p_data': [
            os.environ['STAGED_DATA'] + "/" + data['mseed_path'],
            os.environ['STAGED_DATA'] + "/" + data['stationxml_path']
        ]
    }


waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)
downloadPE = SimpleFunctionPE(download_data)

processes = [
    waveform_reader,
    (plot_stream, {
        "source": "waveform_reader",
        "output_dir": "./output-images",
        "tag": "observed-image"
    })
]

# processes.append((fn, params))
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)
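create_iterative_chain turns the processes list into a pipeline of function PEs: each entry is wrapped (with its parameter dict, if given) and the PEs are connected output-to-input in order. Conceptually, the data flow is equivalent to this plain-Python sketch (an illustration, not the dispel4py implementation):

def apply_chain(processes, value):
    # Each entry is either a function or a (function, params) pair; the
    # output of one step becomes the input of the next.
    for entry in processes:
        fn, params = entry if isinstance(entry, tuple) else (entry, {})
        value = fn(value, **params)
    return value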
Example #4
            for x in self.indexmap1:

                # Write the output and specify an additional dependency:
                # the object read from the list used to produce the
                # cross product.
                self.write('output', (val, self.indexmap1[x]),
                           metadata={'res': str((val, self.indexmap1[x]))},
                           dep=['index1' + str(x)])

# Instantiates the Workflow Components        

sc = Source()
sc.name = 'PE_source'
divf = Div()
divf.name = 'PE_div'
crossp = CrossProd()
squaref = SimpleFunctionPE(square, {})
# The next line overrides the previous one, associating this PE with the
# 'mycluster' provenance cluster:
squaref = SimpleFunctionPE(square, {'prov_cluster': 'mycluster'})


#Initialise and compose the workflow graph
graph = WorkflowGraph()
graph.connect(sc, 'output', squaref, 'input')
graph.connect(sc, 'output', crossp, 'input1')
graph.connect(squaref, 'output', crossp, 'input2')
graph.connect(crossp, 'output', divf, 'input')


#Declare workflow inputs:
input_data = {"PE_source": [{"input": [10]}]}
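With the graph composed and its inputs declared, the workflow can be run with one of dispel4py's mappings. A minimal sketch, assuming the sequential (simple) mapping is available and that this code lives in an importable module; exact helper names can differ between dispel4py versions:

# In-process execution with the simple mapping:
from dispel4py.new.simple_process import process_and_return
results = process_and_return(graph, input_data)

# Or from the command line (my_workflow is a hypothetical module name):
#   dispel4py simple my_workflow -d '{"PE_source": [{"input": [10]}]}'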
Example #5

        spec = fft(data, nfft)
        spec_ampl = sqrt(abs(multiply(spec, conjugate(spec))))

        spec /= spec_ampl  # Do we need to do some smoothing here?
        ret = real(ifft(spec, nfft)[:n])

        st[trace].data = ret

    return st


# Now it's time to create the graph for preprocessing the traces.

from dispel4py.workflow_graph import WorkflowGraph
streamProducer = SimpleFunctionPE(stream_producer)
streamProducer.name = "streamProducer"
sta = SimpleFunctionPE(readstats)

preprocess_trace = create_iterative_chain([
    (decimate, {'sps': 4}),
    detrend,
    demean,
    (filter, {'freqmin': 0.01, 'freqmax': 1., 'corners': 4, 'zerophase': False}),
    spectralwhitening
])

graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace, 'input')
Example #6


sc1 = Collector()
sc1.name = 'collector'

sc2 = Match()
sc2.name = 'match'

sc3 = Writer("+")
sc3.name = 'writer'

sc4 = Visualiser()
sc4.name = 'vizu'

read = SimpleFunctionPE(reader)
comb = SimpleFunctionPE(combine, {"operator": "+"})

#processes=[readn,multn]
#chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)

#Initialise the graph
graph = WorkflowGraph()

#Common way of composing the graph
graph.connect(sc1, 'output_R', read, 'input')
graph.connect(read, 'output', sc2, 'input')
graph.connect(sc2, 'output_X', comb, 'input')
graph.connect(comb, 'output', sc3, 'var')
graph.connect(sc1, 'output_W', sc3, 'file')
Example #7
def testSimpleFunction():
    def add(a, b):
        return a+b
    simp = SimpleFunctionPE(add, {'b': 2})
    tools.eq_({'output': 3}, simp.process({'input': 1}))
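The test pins down the wrapper's contract: the value arriving on 'input' becomes the function's first positional argument, the params dict supplied at construction is passed as keyword arguments, and the return value is emitted on 'output'. A rough plain-Python restatement of that single step (a sketch, not the dispel4py source):

def simple_function_step(fn, params, inputs):
    # Mirrors what the test exercises: inputs['input'] -> first argument,
    # params -> keyword arguments, result -> {'output': ...}.
    return {'output': fn(inputs['input'], **params)}


assert simple_function_step(lambda a, b: a + b, {'b': 2}, {'input': 1}) == {'output': 3}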
        for station in r["data"]:
            download_report.append({"provider": r["client"],
                                    "provider_url": URL_MAPPINGS[r["client"]],
                                    "station": "%s.%s" % (station.network, station.station)})
   
    prov={'location':["file://"+socket.gethostname()+"/"+os.environ['STAGED_DATA']+"/"+data['mseed_path'],"file://"+socket.gethostname()+"/"+os.environ['STAGED_DATA']+"/"+data['stationxml_path']], 'format':'multipart/mixed', 'metadata':download_report}
    
     
    
    return {'_d4p_prov':prov,'_d4p_data':[os.environ['STAGED_DATA']+"/"+data['mseed_path'],os.environ['STAGED_DATA']+"/"+data['stationxml_path']]}   



waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)
downloadPE = SimpleFunctionPE(download_data)

processes = [
    waveform_reader,
    (plot_stream, {"source": "waveform_reader",
                   "output_dir": "./output-images",
                   "tag": "observed-image"})
]

#processes.append((fn, params))
chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)



watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
downloadPE.name = "downloadPE"
graph = WorkflowGraph()
graph.add(downloadPE)

graph.connect(downloadPE, 'output', watcher, "input")
Example #9

        # Return the output stream and the trace statistics (metadata).
        return {'output': st, 'output_stats': st[0].stats}


# This function prints the name of a station from the stats.


def name_station(stats):
    station_name = stats['station']
    print('Station: ' + station_name)
    return stats


from dispel4py.workflow_graph import WorkflowGraph

nameStation = SimpleFunctionPE(name_station)
streamProducer = StreamAndStatsProducer()
streamProducer.name = "streamProducer"
readStats = ReadStats()
graph = WorkflowGraph()
graph.connect(streamProducer, 'output', readStats, 'input')
# The following connection can only be made if streamProducer is a
# StreamAndStatsProducer object, which provides the 'output_stats' port.
graph.connect(streamProducer, 'output_stats', nameStation, 'input')


def detrend(st):
    st.detrend('simple')
    return st
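The preprocessing pipelines in these examples also reference a demean step that is not shown in this excerpt. A hypothetical companion in the same style as detrend above, assuming st is an obspy Stream (obspy's detrend supports a 'demean' type):

def demean(st):
    # Hypothetical stand-in: remove the mean from every trace in the stream.
    st.detrend('demean')
    return st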

Example #10
        IterativePE.__init__(self)
        self.index = index

    def _process(self, inputs):

        directory = inputs
        print("DIRECOTRY:%s " % str(directory))
        for dir_entry in os.listdir(directory[self.index]):

            dir_entry_path = os.path.join(directory[self.index], dir_entry)
            if os.path.isfile(dir_entry_path):
                self.write('output', dir_entry_path)


downloadPE = SimpleFunctionPE(download_data, {
    "add_end": 600,
    "add_start": 300
})  #fm
downloadPE.name = "downloadPE"
watcher = WatchDirectory(0)
watcher_xml = WatchDirectory(1)
waveformr = SimpleFunctionPE(waveform_reader)
xmlr = SimpleFunctionPE(stationxml_reader)

processes = [
    waveform_reader,
    (plot_stream, {
        "source": "waveform_reader",
        "output_dir": "./output-images",
        "tag": "observed-image"
    })
]
Example #11
from obspy.signal.cross_correlation import xcorr
import numpy

def xcorrelation(data, maxlag):
    st1 = data[1]
    st2 = data[2]
    tr1 = st1[0].data
    tr2 = st2[0].data
    tr1 = tr1/numpy.linalg.norm(tr1)
    tr2 = tr2/numpy.linalg.norm(tr2)
    # With full_xcorr=True, xcorr also returns the full cross-correlation
    # function; index 2 of the result tuple is that array.
    return xcorr(tr1, tr2, maxlag, full_xcorr=True)[2]


from dispel4py.workflow_graph import WorkflowGraph

streamProducer1 = SimpleFunctionPE(stream_producer)
streamProducer1.name = "SP1"
streamProducer2 = SimpleFunctionPE(stream_producer)
streamProducer2.name = "SP2"
stats1 = SimpleFunctionPE(readstats)
stats2 = SimpleFunctionPE(readstats)
match_traces = MatchPE()
xcorrelation_traces = SimpleFunctionPE(xcorrelation, {'maxlag': 1000})

pipeline = [
    (decimate, {'sps':4}), 
    detrend, 
    demean, 
    (filter, {'freqmin':0.01, 'freqmax':1., 'corners':4, 'zerophase':False}),
    spectralwhitening,
    readstats]
Example #12
###################################

collector = Collector()
collector.name = 'collector'

match = Match()
match.name = 'match'

#writer = Writer("-")
writer = Writer()
writer.name = 'writer'

sc4 = Visualiser()
sc4.name = 'vizu'

read = SimpleFunctionPE(reader)
#read=NetCDFReader()

#comb=SimpleFunctionPE(combine,{"operator":"-"})
comb = SimpleFunctionPE(combine)

#processes=[readn,multn]
#chain = create_iterative_chain(processes, FunctionPE_class=SimpleFunctionPE)

#Initialise the graph
graph = WorkflowGraph()

#Common way of composing the graph
graph.connect(collector, 'output_R', read, 'input')
graph.connect(read, 'output', match, 'input')
graph.connect(match, 'output_X', comb, 'input')
Example #13
from obspy.signal.cross_correlation import xcorr
import numpy


def xcorrelation(data, maxlag):
    st1 = data[1]
    st2 = data[2]
    tr1 = st1[0].data
    tr2 = st2[0].data
    tr1 = tr1 / numpy.linalg.norm(tr1)
    tr2 = tr2 / numpy.linalg.norm(tr2)
    return xcorr(tr1, tr2, maxlag, full_xcorr=True)[2]


from dispel4py.workflow_graph import WorkflowGraph

streamProducer = SimpleFunctionPE(stream_producer)
stats1 = SimpleFunctionPE(readstats)
stats2 = SimpleFunctionPE(readstats)
match_traces = MatchPE()
xcorrelation_traces = SimpleFunctionPE(xcorrelation, {'maxlag': 1000})

pipeline = [
    (decimate, {'sps': 4}),
    detrend,
    demean,
    (filter, {'freqmin': 0.01, 'freqmax': 1., 'corners': 4, 'zerophase': False}),
    spectralwhitening,
    readstats
]
preprocess_trace_1 = create_iterative_chain(pipeline)
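The excerpt ends after building the first preprocessing chain. Connecting it into a graph works exactly as for any other PE; a brief sketch mirroring the connection used in Example #5 (the graph object and the second chain are assumptions here, not part of the original code):

preprocess_trace_2 = create_iterative_chain(pipeline)

graph = WorkflowGraph()
graph.connect(streamProducer, 'output', preprocess_trace_1, 'input')
# The preprocessed streams would then be matched (match_traces) and passed on
# to xcorrelation_traces, but MatchPE's port names are not shown in this
# excerpt, so that wiring is omitted.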