def pin(pin_socket, pusher_cache, kvs, status, function_cache, runtimes,
        exec_counts, user_library, local):
    """Pin a function onto this executor in response to a scheduler request.

    Reads a pin request of the form ``'<response_ip>:<function_name>'`` from
    `pin_socket`, acknowledges (or rejects) it over a pusher socket obtained
    from `pusher_cache`, retrieves the function body from the KVS, and
    records it in `function_cache`, `status.functions`, and the per-function
    metadata maps `runtimes` / `exec_counts`.

    NOTE(review): a second, batching-aware `pin` is defined later in this
    file with the same name; at import time the later definition shadows
    this one. Confirm whether this version is still reachable.
    """
    msg = pin_socket.recv_string()
    splits = msg.split(':')
    resp_ip, name = splits[0], splits[1]

    sckt = pusher_cache.get(sutils.get_pin_accept_port(resp_ip))

    # We currently only allow one pinned function per container in non-local
    # mode, and we reject pins while the executor is not running.
    if (not local and
            ((len(function_cache) > 0 and name not in function_cache)
             or not status.running)):
        # Fix: the original called sutils.error.SerializeToString() twice and
        # discarded the first result; serialize once and send it.
        sckt.send(sutils.error.SerializeToString())
        return

    sckt.send(sutils.ok_resp)

    func = utils.retrieve_function(name, kvs, user_library)

    # The function must exist -- because otherwise the DAG couldn't be
    # registered -- so we keep trying to retrieve it.
    while not func:
        func = utils.retrieve_function(name, kvs, user_library)

    if name not in function_cache:
        function_cache[name] = func

    if name not in status.functions:
        status.functions.append(name)

    # Add metadata tracking for the newly pinned function.
    runtimes[name] = []
    exec_counts[name] = 0

    logging.info('Adding function %s to my local pinned functions.' % (name))
def pin(pin_socket, pusher_cache, kvs, status, function_cache, runtimes,
        exec_counts, user_library, local, batching):
    """Pin a function onto this executor (protobuf / batching-aware variant).

    Reads a serialized `PinFunction` message from `pin_socket`, replies to
    the requester over a pusher socket, retrieves the function body from the
    KVS, caches it, and updates per-function metadata. Returns the batching
    flag this executor should use from now on: the incoming `batching` value
    unchanged on rejection, or `pin_msg.batching` on success.

    Raises:
        RuntimeError: if the pinned function requests batching while more
            than one function is already pinned here.
    """
    serialized = pin_socket.recv()
    pin_msg = PinFunction()
    pin_msg.ParseFromString(serialized)

    sckt = pusher_cache.get(
        sutils.get_pin_accept_port(pin_msg.response_address))
    name = pin_msg.name

    # We currently only allow one pinned function per container in non-local
    # mode.
    if not local:
        if len(function_cache) > 0 and name not in function_cache:
            # Fix: the original called sutils.error.SerializeToString() twice
            # and discarded the first result; serialize once and send it.
            sckt.send(sutils.error.SerializeToString())
            return batching

    func = utils.retrieve_function(name, kvs, user_library)

    # The function must exist -- because otherwise the DAG couldn't be
    # registered -- so we keep trying to retrieve it.
    while not func:
        func = utils.retrieve_function(name, kvs, user_library)

    if name not in function_cache:
        # Fix: removed a leftover debug print. Function bodies stored as raw
        # bytes are deserialized before caching so callers always see a
        # callable. cloudpickle is imported lazily, as in the original.
        if isinstance(func, bytes):
            import cloudpickle
            func = cloudpickle.loads(func)
        function_cache[name] = func

    if name not in status.functions:
        status.functions.append(name)

    # Add metadata tracking for the newly pinned functions.
    runtimes[name] = []
    exec_counts[name] = 0

    logging.info('Adding function %s to my local pinned functions.' % (name))

    # Batching is only valid when this executor serves a single function.
    if pin_msg.batching and len(status.functions) > 1:
        raise RuntimeError('There is more than one pinned function (we are' +
                           ' operating in local mode), and the function' +
                           ' attempting to be pinned has batching enabled. This' +
                           ' is not allowed -- you can only use batching in' +
                           ' cluster mode or in local mode with one function.')

    sckt.send(sutils.ok_resp)
    return pin_msg.batching
def exec_function(exec_socket, kvs, user_library, cache, function_cache):
    """Execute a single `FunctionCall` received on `exec_socket`.

    Deserializes the call and its arguments, resolves the function (cache
    first, then KVS), executes it under the requested consistency mode, and
    writes the (possibly error) result back to the KVS under
    `call.response_key`.
    """
    call = FunctionCall()
    call.ParseFromString(exec_socket.recv())

    fargs = [serializer.load(arg) for arg in call.arguments.values]

    if call.name in function_cache:
        f = function_cache[call.name]
    else:
        f = utils.retrieve_function(call.name, kvs, user_library,
                                    call.consistency)

    # Fix: `dependencies` is consumed by the causal dump_lattice branch
    # below, but was previously only bound inside the successful causal
    # execution path. A missing function (or an execution error) under
    # causal consistency therefore crashed with a NameError instead of
    # reporting the error result. Bind it up front.
    dependencies = {}

    if not f:
        logging.info('Function %s not found! Returning an error.' %
                     (call.name))
        sutils.error.error = FUNC_NOT_FOUND
        result = ('ERROR', sutils.error.SerializeToString())
    else:
        function_cache[call.name] = f
        try:
            if call.consistency == NORMAL:
                result = _exec_func_normal(kvs, f, fargs, user_library, cache)
                logging.info('Finished executing %s: %s!' % (call.name,
                                                             str(result)))
            else:
                result = _exec_func_causal(kvs, f, fargs, user_library,
                                           dependencies=dependencies)
        except Exception as e:
            logging.exception('Unexpected error %s while executing function.' %
                              (str(e)))
            sutils.error.error = EXECUTION_ERROR
            result = ('ERROR: ' + str(e), sutils.error.SerializeToString())

    # Serialize the result into the lattice type matching the consistency
    # mode and write it back to the KVS for the caller to pick up.
    if call.consistency == NORMAL:
        result = serializer.dump_lattice(result)
        succeed = kvs.put(call.response_key, result)
    else:
        result = serializer.dump_lattice(result, MultiKeyCausalLattice,
                                         causal_dependencies=dependencies)
        succeed = kvs.causal_put(call.response_key, result)

    if not succeed:
        logging.info(f'Unsuccessful attempt to put key {call.response_key} ' +
                     'into the KVS.')
def exec_function(exec_socket, kvs, user_library, cache, function_cache,
                  has_ephe=False):
    """Execute a single `FunctionCall` received on `exec_socket`.

    Variant that supports an ephemeral KVS used for coordination: when
    `has_ephe` is True, the call's response key is exposed to the function
    as `user_library.session` and the function is expected to write its own
    result, so no KVS put is performed here.

    NOTE(review): this definition shadows the earlier `exec_function` in
    this file; confirm the earlier version is no longer needed.
    """
    call = FunctionCall()
    call.ParseFromString(exec_socket.recv())

    fargs = [serializer.load(arg) for arg in call.arguments.values]

    if call.name in function_cache:
        f = function_cache[call.name]
    else:
        f = utils.retrieve_function(call.name, kvs, user_library,
                                    call.consistency)

    # Fix: `dependencies` is consumed by the causal dump_lattice branch
    # below, but was previously only bound inside the successful causal
    # execution path; bind it up front so error results under causal
    # consistency don't raise a NameError.
    dependencies = {}

    if not f:
        logging.info('Function %s not found! Returning an error.' %
                     (call.name))
        sutils.error.error = FUNC_NOT_FOUND
        result = ('ERROR', sutils.error.SerializeToString())
    else:
        function_cache[call.name] = f

        # We set the session as the response key from scheduler.
        # It should be uuid and identical.
        if has_ephe:
            user_library.session = call.response_key

        try:
            if call.consistency == NORMAL:
                result = _exec_func_normal(kvs, f, fargs, user_library, cache)
                logging.info('Finished executing %s: %s!' % (call.name,
                                                             str(result)))
            else:
                result = _exec_func_causal(kvs, f, fargs, user_library,
                                           dependencies=dependencies)
        except Exception as e:
            logging.exception('Unexpected error %s while executing function.' %
                              (str(e)))
            sutils.error.error = EXECUTION_ERROR
            result = ('ERROR: ' + str(e), sutils.error.SerializeToString())

    # When we use ephe kvs for coordination, we do not write the results to
    # anna. Instead, we presume the function will put the result manually.
    # Fix: the `succeed` check is scoped inside this branch -- when
    # has_ephe is True no put happens and `succeed` would be unbound.
    if not has_ephe:
        if call.consistency == NORMAL:
            result = serializer.dump_lattice(result)
            succeed = kvs.put(call.response_key, result)
        else:
            result = serializer.dump_lattice(result, MultiKeyCausalLattice,
                                             causal_dependencies=dependencies)
            succeed = kvs.causal_put(call.response_key, result)

        if not succeed:
            logging.info(
                f'Unsuccessful attempt to put key {call.response_key} ' +
                'into the KVS.')