Example #1
def create_pipeline(chain, name_prefix='SeismoStreamPE_', name_suffix=''):
    '''
    Creates a composite PE wrapping a pipeline that processes obspy streams.
    :param chain: list of functions that process obspy streams. Each function takes one input parameter, stream, and returns an output stream; an entry may also be a (function, params) tuple.
    :param name_prefix: prefix for the names of the PEs generated for each function
    :param name_suffix: suffix for the names of the PEs generated for each function
    :rtype: WorkflowGraph wrapping the pipeline, exposing one 'input' and one 'output'
    '''
    prev = None
    first = None
    graph = WorkflowGraph()

    for fn_desc in chain:
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}

        pe = SeismoStreamPE(fn, params)
        pe.name = name_prefix + fn.__name__ + name_suffix

        if prev:
            graph.connect(prev, SeismoStreamPE.OUTPUT_NAME, pe,
                          SeismoStreamPE.INPUT_NAME)
        else:
            first = pe
        prev = pe

    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings = {'input': (first, SeismoStreamPE.INPUT_NAME)}
    graph.outputmappings = {'output': (prev, SeismoStreamPE.OUTPUT_NAME)}

    return graph
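A brief usage sketch for create_pipeline (not part of the example above): the two stream-processing functions are hypothetical placeholders, and entries may be bare callables or (callable, params) tuples, mirroring the try/except in the loop.

def detrend_stream(stream):
    # hypothetical step: detrend in place and pass the stream on
    stream.detrend()
    return stream

def bandpass_stream(stream, freqmin, freqmax):
    # hypothetical step whose parameters come from a (function, params) tuple
    stream.filter('bandpass', freqmin=freqmin, freqmax=freqmax)
    return stream

pipeline = create_pipeline(
    [detrend_stream, (bandpass_stream, {'freqmin': 0.01, 'freqmax': 1.0})],
    name_suffix='_demo')
# The returned WorkflowGraph exposes one 'input' and one 'output', so it can
# be connected inside a larger graph like any ordinary PE.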
Example #2
def create_iterative_chain(functions,
                           FunctionPE_class=SimpleFunctionPE,
                           name_prefix='PE_',
                           name_suffix=''):

    prev = None
    first = None
    graph = WorkflowGraph()

    for fn_desc in functions:
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}

        # print 'adding %s to chain' % fn.__name__
        pe = FunctionPE_class()
        pe.compute_fn = fn
        pe.params = params
        pe.name = name_prefix + fn.__name__ + name_suffix

        if prev:
            graph.connect(prev, IterativePE.OUTPUT_NAME, pe,
                          IterativePE.INPUT_NAME)
        else:
            first = pe
        prev = pe

    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings = {'input': (first, IterativePE.INPUT_NAME)}
    graph.outputmappings = {'output': (prev, IterativePE.OUTPUT_NAME)}

    return graph
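For context, a minimal sketch of calling create_iterative_chain; the two functions are illustrative only, and the producer is the TestProducer helper that appears in the test examples further down.

def double(data):
    return data * 2

def add(data, offset):
    return data + offset

# Entries may be bare callables or (callable, params) tuples, matching the
# try/except above; 'offset' is a purely illustrative parameter.
comp = create_iterative_chain([double, (add, {'offset': 1})])

graph = WorkflowGraph()
prod = TestProducer()
graph.connect(prod, 'output', comp, 'input')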
Example #3
def parallel_aggregate(instPE, reducePE):
    composite = WorkflowGraph()
    reducePE.inputconnections[AggregatePE.INPUT_NAME]['grouping'] = 'global'
    reducePE.numprocesses = 1
    composite.connect(instPE, AggregatePE.OUTPUT_NAME, reducePE, AggregatePE.INPUT_NAME)
    composite.inputmappings = { 'input' : (instPE, AggregatePE.INPUT_NAME) }
    composite.outputmappings = { 'output' : (reducePE, AggregatePE.OUTPUT_NAME) }
    return composite
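To illustrate parallel_aggregate, the sketch below reuses the AverageParallelPE / AverageReducePE pair from the later examples, assuming both use the standard AggregatePE port names; the result is the same wiring that parallelAvg builds by hand.

# Map-reduce average built via the generic helper (assumes the average PEs
# share AggregatePE's INPUT_NAME and OUTPUT_NAME).
avg_composite = parallel_aggregate(AverageParallelPE(), AverageReducePE())
# The 'global' grouping plus numprocesses=1 routes every partial result to a
# single reducer instance, completing the map-reduce pattern.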
Example #4
def parallelAvg(index=0):
    composite = WorkflowGraph()
    parAvg = AverageParallelPE(index)
    reduceAvg = AverageReducePE()
    composite.connect(parAvg, parAvg.OUTPUT_NAME, reduceAvg, reduceAvg.INPUT_NAME)
    composite.inputmappings = { 'input' : (parAvg, parAvg.INPUT_NAME) }
    composite.outputmappings = { 'output' : (reduceAvg, reduceAvg.OUTPUT_NAME) }
    return composite
Example #5
def parallelStdDev(index=0):
    composite = WorkflowGraph()
    parStdDev = StdDevPE(index)
    reduceStdDev = StdDevReducePE()
    composite.connect(parStdDev, parStdDev.OUTPUT_NAME, reduceStdDev, reduceStdDev.INPUT_NAME)
    composite.inputmappings = { 'input' : (parStdDev, parStdDev.INPUT_NAME) }
    composite.outputmappings = { 'output' : (reduceStdDev, reduceStdDev.OUTPUT_NAME) }
    return composite
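Both parallelAvg and parallelStdDev return a two-node composite exposed through 'input'/'output' mappings, so either drops into a larger graph like a single PE; a short sketch using the TestProducer helper from the test examples below (the index argument presumably selects which element of each input record is aggregated).

graph = WorkflowGraph()
prod = TestProducer()
avg = parallelAvg()   # aggregate over element 0 of each record
graph.connect(prod, 'output', avg, 'input')
# parallelStdDev() is wired identically and can be swapped in wherever a
# standard deviation is wanted instead of a mean.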
Example #6
def parallel_aggregate(instPE, reducePE):
    composite = WorkflowGraph()
    reducePE.inputconnections[AggregatePE.INPUT_NAME]['grouping'] = 'global'
    reducePE.numprocesses = 1
    composite.connect(instPE, AggregatePE.OUTPUT_NAME, reducePE,
                      AggregatePE.INPUT_NAME)
    composite.inputmappings = {'input': (instPE, AggregatePE.INPUT_NAME)}
    composite.outputmappings = {'output': (reducePE, AggregatePE.OUTPUT_NAME)}
    return composite
Example #7
def parallelAvg(index=0):
    composite = WorkflowGraph()
    parAvg = AverageParallelPE(index)
    reduceAvg = AverageReducePE()
    composite.connect(parAvg, parAvg.OUTPUT_NAME, reduceAvg,
                      reduceAvg.INPUT_NAME)
    composite.inputmappings = {'input': (parAvg, parAvg.INPUT_NAME)}
    composite.outputmappings = {'output': (reduceAvg, reduceAvg.OUTPUT_NAME)}
    return composite
Example #8
def parallelAvg(index=0):
    '''
    Creates an AVG composite PE that can be parallelised using a map-reduce pattern.
    '''
    composite = WorkflowGraph()
    parAvg = AverageParallelPE(index)
    reduceAvg = AverageReducePE()
    composite.connect(parAvg, parAvg.OUTPUT_NAME, reduceAvg, reduceAvg.INPUT_NAME)
    composite.inputmappings = { 'input' : (parAvg, parAvg.INPUT_NAME) }
    composite.outputmappings = { 'output' : (reduceAvg, reduceAvg.OUTPUT_NAME) }
    return composite
Example #9
def parallelStdDev(index=0):
    composite = WorkflowGraph()
    parStdDev = StdDevPE(index)
    reduceStdDev = StdDevReducePE()
    composite.connect(parStdDev, parStdDev.OUTPUT_NAME, reduceStdDev,
                      reduceStdDev.INPUT_NAME)
    composite.inputmappings = {'input': (parStdDev, parStdDev.INPUT_NAME)}
    composite.outputmappings = {
        'output': (reduceStdDev, reduceStdDev.OUTPUT_NAME)
    }
    return composite
Example #10
def parallelStdDev(index=0):
    '''
    Creates a STDDEV composite PE that can be parallelised using a map-reduce pattern.
    '''
    composite = WorkflowGraph()
    parStdDev = StdDevPE(index)
    reduceStdDev = StdDevReducePE()
    composite.connect(parStdDev, parStdDev.OUTPUT_NAME, reduceStdDev, reduceStdDev.INPUT_NAME)
    composite.inputmappings = { 'input' : (parStdDev, parStdDev.INPUT_NAME) }
    composite.outputmappings = { 'output' : (reduceStdDev, reduceStdDev.OUTPUT_NAME) }
    return composite
Example #11
def parallelAvg(index=0):
    '''
    Creates an AVG composite PE that can be parallelised using a map-reduce pattern.
    '''
    composite = WorkflowGraph()
    parAvg = AverageParallelPE(index)
    reduceAvg = AverageReducePE()
    composite.connect(parAvg, parAvg.OUTPUT_NAME, reduceAvg,
                      reduceAvg.INPUT_NAME)
    composite.inputmappings = {'input': (parAvg, parAvg.INPUT_NAME)}
    composite.outputmappings = {'output': (reduceAvg, reduceAvg.OUTPUT_NAME)}
    return composite
Example #12
def parallelStdDev(index=0):
    '''
    Creates a STDDEV composite PE that can be parallelised using a map-reduce pattern.
    '''
    composite = WorkflowGraph()
    parStdDev = StdDevPE(index)
    reduceStdDev = StdDevReducePE()
    composite.connect(parStdDev, parStdDev.OUTPUT_NAME, reduceStdDev,
                      reduceStdDev.INPUT_NAME)
    composite.inputmappings = {'input': (parStdDev, parStdDev.INPUT_NAME)}
    composite.outputmappings = {
        'output': (reduceStdDev, reduceStdDev.OUTPUT_NAME)
    }
    return composite
Example #13
def create_iterative_chain(functions,
                           FunctionPE_class=SimpleFunctionPE,
                           name_prefix='PE_',
                           name_suffix=''):

    '''
    Creates a composite PE wrapping a chain of functions that process data
    iteratively. Each function accepts one input parameter, data, and returns
    an output data block (or None); an entry may also be a (function, params)
    tuple.
    :param functions: list of functions or (function, params) tuples to chain
    :param FunctionPE_class: PE class used to wrap each function
    :param name_prefix: prefix for the names of the generated PEs
    :param name_suffix: suffix for the names of the generated PEs
    :rtype: WorkflowGraph wrapping the chain, exposing one 'input' and one 'output'
    '''

    prev = None
    first = None
    graph = WorkflowGraph()

    for fn_desc in functions:
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}

        # print 'adding %s to chain' % fn.__name__
        pe = FunctionPE_class()
        pe.compute_fn = fn
        pe.params = params
        pe.name = name_prefix + fn.__name__ + name_suffix

        if prev:
            graph.connect(prev, IterativePE.OUTPUT_NAME,
                          pe, IterativePE.INPUT_NAME)
        else:
            first = pe
        prev = pe

    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings = {'input': (first, IterativePE.INPUT_NAME)}
    graph.outputmappings = {'output': (prev, IterativePE.OUTPUT_NAME)}

    return graph
Example #14
def createProcessingComposite(chain,
                              suffix='',
                              controlParameters={},
                              provRecorder=None):
    '''
    Creates a composite PE wrapping a pipeline that processes obspy streams.
    :param chain: list of functions that process obspy streams. Each function takes one input parameter, stream, and returns an output stream; an entry may also be a (function, params) tuple.
    :param suffix: suffix appended to the names of the generated PEs
    :param controlParameters: environment parameters for the processing elements
    :param provRecorder: optional PE whose 'metadata' input receives each processing PE's 'metadata' output
    :rtype: WorkflowGraph wrapping the pipeline, exposing one 'input' and one 'output'
    '''
    prev = None
    first = None
    graph = WorkflowGraph()

    for fn_desc in chain:
        pe = ObspyStreamPE()
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}

        pe.compute_fn = fn
        pe.name = 'ObspyStreamPE_' + fn.__name__ + suffix
        pe.controlParameters = controlParameters
        pe.appParameters = dict(params)
        pe.setCompute(fn, params)

        # connect the metadata output to the provenance recorder PE if there is one
        if provRecorder:
            graph.connect(pe, 'metadata', provRecorder, 'metadata')

        if prev:
            graph.connect(prev, OUTPUT_NAME, pe, INPUT_NAME)
        else:
            first = pe
        prev = pe

    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings = {'input': (first, INPUT_NAME)}
    graph.outputmappings = {'output': (prev, OUTPUT_NAME)}

    return graph
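A hedged sketch of invoking createProcessingComposite with provenance recording; the processing function and the recorder PE are hypothetical placeholders introduced only for this illustration.

def taper_stream(stream, max_percentage):
    # hypothetical obspy-style step configured via a (function, params) tuple
    stream.taper(max_percentage=max_percentage)
    return stream

recorder = ProvenanceRecorderPE()   # hypothetical PE with a 'metadata' input
composite = createProcessingComposite(
    [(taper_stream, {'max_percentage': 0.05})],
    suffix='_run1',
    controlParameters={'runId': 'demo'},
    provRecorder=recorder)
# Each ObspyStreamPE's 'metadata' output is connected to the recorder, while
# the data path is exposed through the composite's 'input' and 'output'.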
Example #15
def create_iterative_chain(functions,
                           FunctionPE_class=SimpleFunctionPE,
                           name_prefix='PE_',
                           name_suffix=''):
    '''
    Creates a composite PE wrapping a chain of functions that process data
    iteratively. Each function accepts one input parameter, data, and returns
    an output data block (or None); an entry may also be a (function, params)
    tuple.
    :param functions: list of functions or (function, params) tuples to chain
    :param FunctionPE_class: PE class used to wrap each function
    :param name_prefix: prefix for the names of the generated PEs
    :param name_suffix: suffix for the names of the generated PEs
    :rtype: WorkflowGraph wrapping the chain, exposing one 'input' and one 'output'
    '''

    prev = None
    first = None
    graph = WorkflowGraph()

    for fn_desc in functions:
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}

        # print 'adding %s to chain' % fn.__name__
        pe = FunctionPE_class()
        pe.compute_fn = fn
        pe.params = params
        pe.name = name_prefix + fn.__name__ + name_suffix

        if prev:
            graph.connect(prev, IterativePE.OUTPUT_NAME, pe,
                          IterativePE.INPUT_NAME)
        else:
            first = pe
        prev = pe

    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings = {'input': (first, IterativePE.INPUT_NAME)}
    graph.outputmappings = {'output': (prev, IterativePE.OUTPUT_NAME)}

    return graph
Example #16
def test_dot_composite():
    def inc(a):
        return a + 1

    def dec(a):
        return a - 1

    graph = WorkflowGraph()
    prod = TestProducer()
    comp = create_iterative_chain([inc, dec])
    cons = TestOneInOneOut()
    graph.connect(prod, "output", comp, "input")
    graph.connect(comp, "output", cons, "input")
    graph.inputmappings = {"input": (prod, "input")}
    root_prod = TestProducer()
    root_graph = WorkflowGraph()
    root_graph.connect(root_prod, "output", graph, "input")
    dot = draw(root_graph)
    tools.ok_("subgraph cluster_" in dot)
Example #17
def createProcessingComposite(chain, suffix='', controlParameters={}, provRecorder=None):
    '''
    Creates a composite PE wrapping a pipeline that processes obspy streams.
    :param chain: list of functions that process obspy streams. Each function takes one input parameter, stream, and returns an output stream; an entry may also be a (function, params) tuple.
    :param suffix: suffix appended to the names of the generated PEs
    :param controlParameters: environment parameters for the processing elements
    :param provRecorder: optional PE whose 'metadata' input receives each processing PE's 'metadata' output
    :rtype: WorkflowGraph wrapping the pipeline, exposing one 'input' and one 'output'
    '''
    prev = None
    first = None
    graph = WorkflowGraph()
    
    for fn_desc in chain:
        pe = ObspyStreamPE()
        try:
            fn = fn_desc[0]
            params = fn_desc[1]
        except TypeError:
            fn = fn_desc
            params = {}
	
        pe.compute_fn = fn
        pe.name = 'ObspyStreamPE_' + fn.__name__ + suffix
        pe.controlParameters = controlParameters
        pe.appParameters = dict(params)
        pe.setCompute(fn, params)
        
        # connect the metadata output to the provenance recorder PE if there is one
        if provRecorder:
            graph.connect(pe, 'metadata', provRecorder, 'metadata')
        
        if prev:
            graph.connect(prev, OUTPUT_NAME, pe, INPUT_NAME)
        else:
            first = pe
        prev = pe
            
    # Map inputs and outputs of the wrapper to the nodes in the subgraph
    graph.inputmappings =  { 'input'  : (first, INPUT_NAME) }
    graph.outputmappings = { 'output' : (prev, OUTPUT_NAME) }
    
    return graph
Example #18
def test_dot_composite():

    def inc(a):
        return a+1

    def dec(a):
        return a-1

    graph = WorkflowGraph()
    prod = TestProducer()
    comp = create_iterative_chain([inc, dec])
    cons = TestOneInOneOut()
    graph.connect(prod, 'output', comp, 'input')
    graph.connect(comp, 'output', cons, 'input')
    graph.inputmappings = {'input': (prod, 'input')}
    root_prod = TestProducer()
    root_graph = WorkflowGraph()
    root_graph.connect(root_prod, 'output', graph, 'input')
    dot = draw(root_graph)
    tools.ok_('subgraph cluster_' in dot)