Code Example #1
File: test_call.py  Project: yiranjia/cloudburst
    def _create_fn_schedule(self,
                            dag,
                            arg,
                            target,
                            fnames,
                            consistency=NORMAL):
        schedule = DagSchedule()
        schedule.id = 'id'
        schedule.dag.CopyFrom(dag)
        schedule.target_function = target
        schedule.consistency = consistency

        # The BEGIN trigger is sent by the scheduler.
        schedule.triggers.append('BEGIN')

        # We set all locations as thread ID 0.
        for fname in fnames:
            schedule.locations[fname] = self.ip + ':0'
        val = schedule.arguments[target].values.add()

        # Set the square function's argument.
        serializer.dump(arg, val, False)
        schedule.start_time = time.time()

        # Create a trigger corresponding to this DAG.
        trigger = DagTrigger()
        trigger.id = schedule.id
        trigger.target_function = schedule.target_function
        trigger.source = 'BEGIN'

        return schedule, {'BEGIN': trigger}
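This helper builds a DagSchedule for a single-function DAG together with the BEGIN trigger that the scheduler would normally send, so an executor can be exercised in isolation. A minimal sketch of how a test might call it follows; the dag object, the argument value 2, and the 'square' function name are hypothetical stand-ins for whatever the test suite actually constructs:

        # Hypothetical usage inside a test case; 'dag' is assumed to be a Dag
        # protobuf containing a single function named 'square'.
        schedule, triggers = self._create_fn_schedule(dag, 2, 'square',
                                                      ['square'])

        # The schedule is pinned to this test's IP at thread 0 and waits only
        # on the scheduler's BEGIN trigger.
        self.assertEqual(schedule.target_function, 'square')
        self.assertEqual(list(schedule.triggers), ['BEGIN'])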
Code Example #2
def _construct_trigger(sid, fname, result):
    trigger = DagTrigger()
    trigger.id = sid
    trigger.source = fname

    # Wrap scalar results in a tuple so every value is handled uniformly.
    if type(result) != tuple:
        result = (result, )

    # Serialize each result value into the trigger's arguments.
    trigger.arguments.values.extend(
        list(map(lambda v: serializer.dump(v, None, False), result)))
    return trigger
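_construct_trigger packages a completed function's result into a DagTrigger for its downstream functions. A hedged usage sketch follows; the schedule id, function name, and result value are invented, and the pusher socket shown in the comment is assumed to come from the surrounding executor code:

# Hypothetical values: the function 'square' finished with result 16 for
# the schedule with id 'abc123'.
trigger = _construct_trigger('abc123', 'square', 16)

# The executor would then push the serialized trigger to the downstream
# function's trigger address, along the lines of:
#     sckt = pusher_cache.get(trigger_address)
#     sckt.send(trigger.SerializeToString())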
Code Example #3
def call_dag(call, pusher_cache, dags, policy):
    dag, sources = dags[call.name]

    schedule = DagSchedule()
    schedule.id = str(uuid.uuid4())
    schedule.dag.CopyFrom(dag)
    schedule.start_time = time.time()
    schedule.consistency = call.consistency

    if call.response_address:
        schedule.response_address = call.response_address

    if call.output_key:
        schedule.output_key = call.output_key

    if call.client_id:
        schedule.client_id = call.client_id

    # For each function in the DAG, pick an executor and copy the call's
    # arguments for that function into the schedule.
    for fref in dag.functions:
        args = call.function_args[fref.name].values

        # Deserialize the arguments and keep only the KVS references, which
        # the placement policy can use to exploit data locality.
        refs = list(
            filter(lambda arg: type(arg) == CloudburstReference,
                   map(lambda arg: serializer.load(arg), args)))

        result = policy.pick_executor(refs, fref.name)
        # The policy could not find an executor with enough resources.
        if result is None:
            response = GenericResponse()
            response.success = False
            response.error = NO_RESOURCES
            return response

        ip, tid = result
        schedule.locations[fref.name] = ip + ':' + str(tid)

        # Copy the arguments for this function into the DAG schedule.
        arg_list = schedule.arguments[fref.name]
        arg_list.values.extend(args)

    # Send each executor a copy of the schedule, tagged with the function it
    # should run and the triggers that function waits on.
    for fref in dag.functions:
        loc = schedule.locations[fref.name].split(':')
        ip = utils.get_queue_address(loc[0], loc[1])
        schedule.target_function = fref.name

        triggers = sutils.get_dag_predecessors(dag, fref.name)
        if len(triggers) == 0:
            triggers.append('BEGIN')

        schedule.ClearField('triggers')
        schedule.triggers.extend(triggers)

        sckt = pusher_cache.get(ip)
        sckt.send(schedule.SerializeToString())

    # Fire a BEGIN trigger at each source function to start the DAG.
    for source in sources:
        trigger = DagTrigger()
        trigger.id = schedule.id
        trigger.source = 'BEGIN'
        trigger.target_function = source

        ip = sutils.get_dag_trigger_address(schedule.locations[source])
        sckt = pusher_cache.get(ip)
        sckt.send(trigger.SerializeToString())

    # Report success and the key under which the result will be available.
    response = GenericResponse()
    response.success = True
    if schedule.output_key:
        response.response_id = schedule.output_key
    else:
        response.response_id = schedule.id

    return response
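call_dag is the scheduler-side entry point for executing a registered DAG: it asks the placement policy for an executor for every function (failing fast with NO_RESOURCES if one cannot be placed), pushes a per-function copy of the schedule to each chosen executor's queue, fires BEGIN triggers at the DAG's source functions, and returns the key under which the caller can retrieve the result. A rough sketch of how a scheduler loop might invoke it appears below; the socket name and the assumption that the incoming message is the project's DagCall protobuf are illustrative, not taken from this file:

# Illustrative only: deserialize an incoming call, dispatch it, and return
# the GenericResponse telling the client where to find the result.
call = DagCall()
call.ParseFromString(dag_call_socket.recv())

response = call_dag(call, pusher_cache, dags, policy)
dag_call_socket.send(response.SerializeToString())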