Example #1

from lightflow.models import Parameters, Option, Dag
from lightflow.tasks import PythonTask
from lightflow.models.task_data import TaskData, MultiTaskData

# TODO : rename the callback to something else
#
from databroker import Broker
import matplotlib.pyplot as plt
import numpy as np

from SciStreams.workflows.one_image import one_image_dag

import numbers

parameters = Parameters([
    Option('data_folder', help='Specify data folder', type=str),
])

# TODO : put in config files in this repo
required_attributes = {'main': {}}
typesdict = {'float': float, 'int': int, 'number': numbers.Number, 'str': str}


# filter a streamdoc with certain attributes (set in the yml file)
# required_attributes, typesdict globals needed
def filter_attributes(attr, type='main'):
    '''
        Filter attributes.

        Note that this ultimately checks that attributes match what is seen in
        the yml file.
    '''
    # body sketch (assumed): each required key must be present with the right type
    reqattr = required_attributes[type]
    for key, val in reqattr.items():
        if key not in attr or not isinstance(attr[key], typesdict[val]):
            return False
    return True
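
# hypothetical usage (attribute values assumed): with the empty 'main'
# requirements above, any attribute dict passes the filter
print(filter_attributes({'sample_name': 'test'}))  # -> True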
Example #2

import pickle

from lightflow.models import Parameters, Option


# required parameters for the call
parameters = Parameters([
    Option('request', help='Specify a uid', type=dict),
])


def create_ret_func(scan_type, uid, process_type, data, metadata, requester):
    ret = {
        'type': scan_type,
        'uid': uid,
        'processing_ret': {
            'type': process_type,
            'data': data,
            'metadata': metadata
        }
    }

    return (requester.encode() + pickle.dumps(ret))
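
# hypothetical round-trip (argument values assumed): strip the requester
# prefix, then unpickle the payload
payload = create_ret_func('scan', 'abc123', 'circavg', {}, {}, 'client1')
print(pickle.loads(payload[len('client1'):]))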
Example #3

""" Demonstrate the use of workflow parameters

The values of the parameters are stored in the persistent data store and can be
retrieved from there as the code below shows.

"""

from lightflow.models import Parameters, Option, Dag
from lightflow.tasks import PythonTask

# This workflow takes four parameters, three optional and one mandatory. All parameters
# without a default value are considered mandatory. In the example below, if the
# 'filepath' parameter is not specified the workflow will not start and an error message
# will be printed on the command line. Additionally, each parameter can have a help text
# and a type. If a type is given, the user provided value is automatically converted
# to this type.
parameters = Parameters([
    Option('filepath', help='Specify a file path', type=str),
    Option('recursive', default=True, help='Run recursively', type=bool),
    Option('iterations', default=1, help='The number of iterations', type=int),
    Option('threshold', default=0.4, help='The threshold value', type=float)
])


# the callback function that prints the value of the filepath parameter
def print_filepath(data, store, signal, context):
    print('The filepath is:', store.get('filepath'))


# the callback function that prints the value of the iterations parameter
def print_iterations(data, store, signal, context):
    print('Number of iterations:', store.get('iterations'))


# create the main DAG
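# a sketch of the dag wiring (task names assumed, not from the original snippet)
d = Dag('main_dag')

filepath_task = PythonTask(name='print_filepath', callback=print_filepath)
iterations_task = PythonTask(name='print_iterations', callback=print_iterations)

# run the two print tasks one after the other
d.define({filepath_task: iterations_task})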
Example #4

""" Acquire some simple statistics about the files in a directory

This workflow traverses the specified directory recursively and counts the number of
files and accumulates their total size. Symbolic links are not counted.

"""
from lightflow.models import Parameters, Option, Dag
from lightflow_filesystem import WalkTask
from lightflow.tasks import PythonTask

# requires the path to the directory as an argument
parameters = Parameters([
    Option('path',
           help='The path for which the statistics should be acquired',
           type=str)
])


# set up the acquisition
def setup(data, store, signal, context):
    data['count'] = 0
    data['size'] = 0


# acquire some basic statistics for each file as long as it is not a symbolic link
def acquire_stats(entry, data, store, signal, context):
    if not entry.is_symlink():
        data['count'] += 1
        data['size'] += entry.stat(follow_symlinks=False).st_size

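
# a sketch of how the tasks might be wired; the WalkTask keyword arguments
# below are assumptions, not taken from the original snippet
d = Dag('stats_dag')

setup_task = PythonTask(name='setup', callback=setup)
walk_task = WalkTask(name='walk',
                     path=lambda data, data_store: data_store.get('path'),
                     callback=acquire_stats,
                     recursive=True)

d.define({setup_task: walk_task})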
""" Watch an EPICS PV and start a dag if its value changes

This workflow demonstrates how to start a dag when a PV changes and its new value is
within a given range.

"""

import math

from lightflow.models import Parameters, Option, Dag
from lightflow.tasks import PythonTask
from lightflow_epics import PvTriggerTask

# the workflow requires the PV name to be supplied as an argument.
parameters = Parameters([
    Option('pvname', help='The PV to monitor', type=str)
])


# the callback function for the startup task.
def startup(data, store, signal, context):
    print('Starting the PV monitoring...')


# the callback function that is called when the PV changes. If the new value is
# in the range 3+-2, the name and value of the PV is stored in the data, the dag
# 'pv_action_dag' is started and the data is supplied to this dag.
def pv_callback(data, store, signal, context, pvname=None, value=None, **kwargs):
    print('Checking PV {} with value {}'.format(pvname, value))
    if math.fabs(value - 3.0) < 2.0:
        data['pv_name'] = pvname
        data['pv_value'] = value
        # start the dag named 'pv_action_dag' and hand it the collected data
        signal.start_dag('pv_action_dag', data=data)
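

# a sketch (assumed) of the dag that the callback above starts; autostart=False
# keeps it from running until signal.start_dag is called
def print_pv(data, store, signal, context):
    print('PV {} changed to {}'.format(data['pv_name'], data['pv_value']))


pv_action_dag = Dag('pv_action_dag', autostart=False)
pv_action_dag.define({PythonTask(name='print_pv', callback=print_pv): None})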
Example #6

import logging
import os

from lightflow.models import Parameters, Option, Dag
from lightflow.tasks import PythonTask

def get_logger():
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler(os.path.expanduser('~/logs/stress_test.log'))
    fh.setLevel(logging.INFO)
    # attach a simple formatter to the file handler
    formatter = logging.Formatter('%(name)s: %(levelname)s %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    return logger


parameters = Parameters([
    Option('N', help='Specify number of frames to test', default=10, type=int),
])


def main_func(data, store, signal, context):
    store.set("foo", "bar")
    print("in main func")
    dag_names = list()
    N = store.get("N")
    print(N)
    #logger = get_logger()
    #logger.info("test")
    for i in range(N):
        print("iteration {} of {}".format(i, N))
        dag_names.append(signal.start_dag(sub_dag, data=data))
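    # assumed continuation: wait for all spawned dags to finish before returning
    signal.join_dags(dag_names)


# a minimal sketch (assumed) of the dags: 'sub_dag' is spawned N times by main_func
def sub_func(data, store, signal, context):
    print('running', context.task_name)


sub_dag = Dag('sub_dag', autostart=False)
sub_dag.define({PythonTask(name='sub_task', callback=sub_func): None})

main_dag = Dag('main_dag')
main_dag.define({PythonTask(name='main_task', callback=main_func): None})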
Example #7

from lightflow.models import Parameters, Option

# TODO : rename the callback to something else
#
from databroker import Broker
import matplotlib.pyplot as plt
import numpy as np
import time

# the primary dag that reads and processes the data
# ALL dags used need to be imported at top level
# TODO : sort this out
from SciStreams.workflows.primary import primary_dag
from SciStreams.workflows.one_image import one_image_dag

parameters = Parameters([
    Option('start_time', help='Specify start_time', type=str),
    Option('stop_time', help='Specify stop_time', type=str),
    Option('dbname', help='Specify database name', type=str),
    Option('max_images', help='Specify max images', type=int),
])

#from SciStreams.workflows.circavg import ciravg_dag

#dbname = 'cms'


def get(d, key, default):
    # note: behaves like the built-in d.get(key, default)
    if key in d:
        return d[key]
    else:
        return default

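# hypothetical usage (values assumed): read a field from a request dict,
# falling back to the default when the key is missing
request = {'start_time': '2017-01-01'}
print(get(request, 'start_time', None))  # -> '2017-01-01'
print(get(request, 'max_images', 100))   # -> 100
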
Example #8

from lightflow.models import Parameters, Option, Dag

parameters = Parameters([
    Option('required_arg'),
])

d = Dag('dag')
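
# a minimal sketch (assumed) that completes the dag: without a default value,
# 'required_arg' is mandatory and the workflow will not start unless it is
# supplied on the command line
from lightflow.tasks import PythonTask


def print_arg(data, store, signal, context):
    print('required_arg is:', store.get('required_arg'))


d.define({PythonTask(name='print_arg', callback=print_arg): None})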