# Example 1
import os
import pprint

from triggerflow.dags import DAG
from triggerflow.dags.operators import IBMCloudFunctionsCallAsyncOperator, IBMCloudFunctionsMapOperator, DummyOperator

# Build the water-consumption DAG and its static configuration.
water_consumption = DAG(dag_id='water_consumption')

# INPUT PARAMETERS: model constants plus the target bucket, which is
# read from the environment (raises KeyError if BUCKET is unset).
params = dict(
    AREA_OF_INFLUENCE=4000,
    BUCKET=os.environ['BUCKET'],
    SPLITS=5,
    r=-0.0056,
    zdet=2000,
    DAY_OF_YEAR=50,
)

# Cloud Object Storage endpoints; credentials come from the environment.
cos = dict(
    private_endpoint='https://s3.private.us-south.cloud-object-storage.appdomain.cloud',
    public_endpoint='https://s3.us-south.cloud-object-storage.appdomain.cloud',
    aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
    aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
)

# External CSV data source (siam.imida.es) and the terrain tile to process.
url = 'http://siam.imida.es/apex/f?p=101:47:493289053024037:CSV::::'
# Alternative tile set kept for reference:
# keys = ['PNOA_MDT05_ETRS89_HU30_0913_LID.asc', 'PNOA_MDT05_ETRS89_HU30_0952_LID.asc',
#         'PNOA_MDT05_ETRS89_HU30_0955_LID.asc', 'PNOA_MDT05_ETRS89_HU30_0977_LID.asc']
keys = ['PNOA_MDT05_ETRS89_HU30_0933_LID.asc']
# Example 2 (identical content to Example 1)
import os
import pprint

from triggerflow.dags import DAG
from triggerflow.dags.operators import IBMCloudFunctionsCallAsyncOperator, IBMCloudFunctionsMapOperator, DummyOperator

# Build the water-consumption DAG and its static configuration.
water_consumption = DAG(dag_id='water_consumption')

# INPUT PARAMETERS: model constants plus the target bucket, which is
# read from the environment (raises KeyError if BUCKET is unset).
params = dict(
    AREA_OF_INFLUENCE=4000,
    BUCKET=os.environ['BUCKET'],
    SPLITS=5,
    r=-0.0056,
    zdet=2000,
    DAY_OF_YEAR=50,
)

# Cloud Object Storage endpoints; credentials come from the environment.
cos = dict(
    private_endpoint='https://s3.private.us-south.cloud-object-storage.appdomain.cloud',
    public_endpoint='https://s3.us-south.cloud-object-storage.appdomain.cloud',
    aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
    aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
)

# External CSV data source (siam.imida.es) and the terrain tile to process.
url = 'http://siam.imida.es/apex/f?p=101:47:493289053024037:CSV::::'
# Alternative tile set kept for reference:
# keys = ['PNOA_MDT05_ETRS89_HU30_0913_LID.asc', 'PNOA_MDT05_ETRS89_HU30_0952_LID.asc',
#         'PNOA_MDT05_ETRS89_HU30_0955_LID.asc', 'PNOA_MDT05_ETRS89_HU30_0977_LID.asc']
keys = ['PNOA_MDT05_ETRS89_HU30_0933_LID.asc']
from triggerflow.dags import DAG
from triggerflow.dags.operators import IBMCloudFunctionsMapOperator

# Single-stage DAG that fans out one map task.
dag = DAG(dag_id='map')

# Number of parallel invocations produced by the map operator.
concurrency = 5

# Invoke the 'sleep' function `concurrency` times, each with sleep=5.
task = IBMCloudFunctionsMapOperator(
    task_id='map',
    function_name='sleep',
    function_package='triggerflow-tests',
    iter_data=('sleep', [5] * concurrency),
    dag=dag,
)
# Example 4
from triggerflow.dags import DAG
from triggerflow.dags.operators import IBMCloudFunctionsCallAsyncOperator
from triggerflow.dags.other.helpers import chain

# DAG made of `sequence_length` tasks executed strictly one after another.
dag = DAG(dag_id='sequence')

sequence_length = 40

# One 'sleep' invocation per position; task ids are '0' .. '39'.
tasks = [
    IBMCloudFunctionsCallAsyncOperator(
        task_id=str(position),
        function_name='sleep',
        function_package='triggerflow-tests',
        invoke_kwargs={'sleep': 5},
        dag=dag,
    )
    for position in range(sequence_length)
]

# Wire every task to its successor: 0 >> 1 >> ... >> 39.
chain(*tasks)
# Example 5
from triggerflow.dags import DAG
from triggerflow.dags.operators import (IBMCloudFunctionsCallAsyncOperator,
                                        IBMCloudFunctionsMapOperator)

# DAG Parameters
SIMULATIONS = 15
LOOPS = 50

# Instantiate the DAG object
dag = DAG(dag_id='PiEstimationMontecarlo')

# TASKS
sim = IBMCloudFunctionsMapOperator(task_id='MontecarloSimulation',
                                   function_name='pi_montecarlo',
                                   function_package='triggerflow_examples',
                                   invoke_kwargs={'loops': LOOPS},
                                   iter_data=('n',
                                              [n for n in range(SIMULATIONS)]),
                                   dag=dag)

avg = IBMCloudFunctionsCallAsyncOperator(
    task_id='Average',
    function_name='array_average',
    function_package='triggerflow_examples',
    invoke_kwargs={
        'array': [3.14 for _ in range(SIMULATIONS)]
    },  # NOTE: Passing values between tasks is not currently supported :(
    dag=dag)

# DEPENDENCIES
sim >> avg
# Example 6
from triggerflow.dags import DAG
from triggerflow.dags.operators import (IBMCloudFunctionsCallAsyncOperator,
                                        IBMCloudFunctionsMapOperator)

# DAG mixing call-async and map operators behind a common first task.
dag = DAG(dag_id='mixed')

# Entry task: single 'echo' invocation.
first_task = IBMCloudFunctionsCallAsyncOperator(
    task_id='first_task',
    function_name='echo',
    function_package='triggerflow-tests',
    invoke_kwargs={'sleep': 3},
    dag=dag,
)

# Branch 1: one long-running call-async invocation.
branch1_callasync = IBMCloudFunctionsCallAsyncOperator(
    task_id='branch1_callasync_task',
    function_name='echo',
    function_package='triggerflow-tests',
    invoke_kwargs={'sleep': 15},
    dag=dag,
)

# Branch 1: map over ten echo values, each sleeping 1 second.
branch1_map = IBMCloudFunctionsMapOperator(
    task_id='branch1_map_task',
    function_name='echo',
    function_package='triggerflow-tests',
    invoke_kwargs={'sleep': 1},
    iter_data=('echo', list(range(10))),
    dag=dag,
)

branch2_callasync = IBMCloudFunctionsCallAsyncOperator(
# Example 7
from triggerflow.dags import DAG
from triggerflow.dags.operators import (IBMCloudFunctionsCallAsyncOperator,
                                        IBMCloudFunctionsMapOperator)

# DAG with a single entry task fanning out into two map branches.
dag = DAG(dag_id='fault-tolerance')

# Entry task: one quick 'echo' invocation.
first_task = IBMCloudFunctionsCallAsyncOperator(
    task_id='first_task',
    function_name='echo',
    function_package='triggerflow-tests',
    invoke_kwargs={'sleep': 1},
    dag=dag,
)

# Branch 1: map over ten echo values, each invocation sleeping 2 seconds.
branch1_map = IBMCloudFunctionsMapOperator(
    task_id='branch1_map_task',
    function_name='echo',
    function_package='triggerflow-tests',
    invoke_kwargs={'sleep': 2},
    iter_data=('echo', list(range(10))),
    dag=dag,
)

# Branch 2: map with exponentially growing sleeps (1, 2, 4, 8, 16).
branch2_map = IBMCloudFunctionsMapOperator(
    task_id='branch2_map_task',
    function_name='echo',
    function_package='triggerflow-tests',
    iter_data=('sleep', [2 ** exponent for exponent in range(5)]),
    dag=dag,
)

# Both branches depend on the entry task.
first_task >> [branch1_map, branch2_map]
# Example 8
from pprint import pprint
from triggerflow.dags import DAG, DAGRun

# Rebuild a DAG from its JSON export and pretty-print its event-source config.
dag = DAG.import_from_json('ExampleDag.json')
eventsource_config = dag.get_json_eventsource()
pprint(eventsource_config)