def upload_funcs(ctx, host="localhost", port=None, peridx=False):
    """
    Upload all the genomics functions

    If set to peridx=True, we will be deploying one function per index chunk,
    this may make it easier to schedule functions with the state associated
    with their index chunk.
    """
    # When uploading genomics, we are uploading the mapper entrypoint as a
    # normal function, but the worker functions are all from the same source
    # file

    # Upload the entrypoint function
    upload(ctx, "gene", "mapper")

    # Upload the worker functions (one for each index chunk)
    host, port = get_upload_host_port(host, port)

    if peridx:
        # Upload one function per index
        args = [(idx, host, port) for idx in INDEX_CHUNKS]

        # Context manager ensures the worker pool is terminated and joined
        # (the original leaked the Pool)
        with Pool(os.cpu_count()) as p:
            p.starmap(_do_func_upload, args)
    else:
        # Just upload one function that will be agnostic to index
        file_path = join(
            EXPERIMENTS_ROOT, "third-party/gem3-mapper/wasm_bin/gem-mapper"
        )
        url = "http://{}:{}/f/gene/mapper_index".format(host, port)
        curl_file(url, file_path)
def _upload_function(
    user,
    func,
    py=False,
    file=None,
    local_copy=False,
):
    """
    Upload a single function to the upload server.

    Python functions (py=True) are either copied straight into the shared
    storage tree (local_copy=True) or uploaded over HTTP; anything else is
    uploaded as a WASM binary.
    """
    host, port = get_upload_host_port()

    if py and local_copy:
        # Place the Python source directly into shared storage, clearing any
        # stale copy from the runtime tree first
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        if exists(runtime_dir):
            rmtree(runtime_dir)

        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(
            join(FUNC_DIR, user, "{}.py".format(func)),
            join(storage_dir, "function.py"),
        )
        return

    if py:
        src_path = join(PROJ_ROOT, "func", user, "{}.py".format(func))
        endpoint = "http://{}:{}/p/{}/{}".format(host, port, user, func)
    else:
        # Explicit file overrides the default WASM location
        src_path = file if file else join(WASM_DIR, user, func, "function.wasm")
        endpoint = "http://{}:{}/f/{}/{}".format(host, port, user, func)

    curl_file(endpoint, src_path)
def download(ctx, user, key, out_path):
    """ Downloads a state value to the given file """
    upload_host, upload_port = get_upload_host_port(None, None)
    download_binary_state(
        user, key, out_path, host=upload_host, port=upload_port
    )
def shared_file(ctx, in_path, shared_path):
    """ Uploads a shared file to Faasm """
    # Only the host is needed; the helper handles the rest of the endpoint
    upload_host = get_upload_host_port(None, None)[0]
    upload_shared_file(upload_host, in_path, shared_path, quiet=True)
def upload(ctx, user, key, in_path):
    """ Uploads data from file into state """
    # Only the host is needed for binary state upload
    upload_host = get_upload_host_port(None, None)[0]
    upload_binary_state(user, key, in_path, host=upload_host)
def upload(ctx, user, key, in_path):
    """ Uploads data from file into state """
    # Use the port returned by get_upload_host_port rather than hard-coding
    # 8002, keeping this consistent with the matching download task and with
    # whatever upload server is actually configured
    host, port = get_upload_host_port()

    print("Uploading state file at {} for user {}".format(in_path, user))

    url = "http://{}:{}/s/{}/{}".format(host, port, user, key)
    curl_file(url, in_path)
def state(ctx, host=None):
    """ Upload Tensorflow lite demo state """
    model_path = join(
        FUNC_DIR, "tf", "data", "mobilenet_v1_1.0_224.tflite"
    )

    upload_host, _ = get_upload_host_port(host, None)
    upload_binary_state("tf", "mobilenet_v1", model_path, host=upload_host)
def download(ctx, user, key, out_path):
    """ Downloads a state value to the given file """
    host, port = get_upload_host_port()
    print("Downloading state file {} for user {}".format(key, user))

    url = "http://{}:{}/s/{}/{}".format(host, port, user, key)

    # Pass the argument list directly with shell=False so the URL and output
    # path are handed to curl verbatim (no shell word-splitting or
    # interpolation of special characters in paths/keys)
    cmd = ["curl", "-X", "GET", url, "-o", out_path]
    print(" ".join(cmd))

    run(cmd, check=True)
def _upload_function(user, func, port=None, host=None, ibm=False, py=False,
                     ts=False, file=None, local_copy=False):
    """
    Upload a function, choosing the transport from the flags: Python local
    copy, Python HTTP upload, Typescript HTTP upload, IBM cloud storage, or
    plain WASM HTTP upload.
    """
    host, port = get_upload_host_port(host, port)

    if py and local_copy:
        # Copy the Python source straight into shared storage, removing any
        # stale copy from the runtime tree first
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        if exists(runtime_dir):
            rmtree(runtime_dir)

        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(
            join(FUNC_DIR, user, "{}.py".format(func)),
            join(storage_dir, "function.py"),
        )
        return

    if py:
        curl_file(
            "http://{}:{}/p/{}/{}".format(host, port, user, func),
            join(PROJ_ROOT, "func", user, "{}.py".format(func)),
        )
        return

    if ts:
        # Typescript builds live under a fixed "ts" user
        curl_file(
            "http://{}:{}/f/ts/{}".format(host, port, func),
            join(PROJ_ROOT, "typescript", "build", "{}.wasm".format(func)),
        )
        return

    # WASM upload: explicit file overrides the default build location
    func_file = file if file else join(WASM_DIR, user, func, "function.wasm")

    if ibm:
        print("Uploading {}/{} to IBM cloud storage".format(user, func))
        upload_file_to_ibm(
            func_file, RUNTIME_S3_BUCKET, _get_s3_key(user, func)
        )
    else:
        curl_file(
            "http://{}:{}/f/{}/{}".format(host, port, user, func),
            func_file,
        )
def upload(ctx, host=None, local_copy=False):
    """ Upload Tensorflow lite demo data """
    host, port = get_upload_host_port(host, None)

    source_data = join(EXPERIMENTS_FUNC_DIR, "tf", "data")
    dest_root = join(FAASM_SHARED_STORAGE_ROOT, "tfdata")

    if local_copy:
        makedirs(dest_root, exist_ok=True)

    for root, _dirs, files in walk(source_data):
        for filename in files:
            # Join against the directory currently being walked, not the walk
            # root, so files in nested subdirectories resolve to a real path
            file_path = join(root, filename)

            if local_copy:
                # shutil.copy instead of shelling out to "cp": no shell
                # involved, so paths with spaces/special chars are safe
                dest_file = join(dest_root, filename)
                copy(file_path, dest_file)
            else:
                shared_path = "tfdata/{}".format(filename)
                upload_shared_file(host, file_path, shared_path)
def upload(ctx, user, func, func_file, py=False, local_copy=False):
    """ Upload a function """
    host, port = get_upload_host_port()

    if py and local_copy:
        # Copy the Python source into shared storage, clearing any stale copy
        # from the runtime tree first
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        if exists(runtime_dir):
            rmtree(runtime_dir)

        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(func_file, join(storage_dir, "function.py"))
        return

    # HTTP upload: Python source goes to /p/, WASM goes to /f/
    prefix = "p" if py else "f"
    endpoint = "http://{}:{}/{}/{}/{}".format(host, port, prefix, user, func)
    curl_file(endpoint, func_file)
def download_output(ctx):
    """ Downloads the results from all the genomics functions """
    read_idxs, _ = get_reads_from_dir()
    index_chunks, _ = get_index_chunks_present_locally()

    output_dir = join(GENOMICS_OUTPUT_DIR, "faasm")
    if not exists(output_dir):
        makedirs(output_dir)

    host, port = get_upload_host_port(None, None)

    # One download task per (read chunk, index chunk) pair
    task_args = [
        (
            GENOMICS_USER,
            "map_out_{}_{}".format(read_idx, index_chunk),
            join(output_dir, "faasm_{}_{}.sam".format(read_idx, index_chunk)),
            host,
            port,
        )
        for read_idx in read_idxs
        for index_chunk in index_chunks
    ]

    # Context manager ensures the worker pool is terminated and joined
    # (the original leaked the Pool)
    with Pool(os.cpu_count()) as p:
        p.starmap(download_binary_state, task_args)
def mapping(ctx, download=False):
    """ Run genomics mapping using Faasm """
    read_idxs, _ = get_reads_from_dir()

    start_time = time()

    # Iterate through and make the calls to the worker
    call_ids = list()
    for read_idx in read_idxs:
        print("Mapping read chunk {}".format(read_idx))

        call_id = invoke_impl(
            "gene",
            "mapper",
            input="{}".format(read_idx),
            asynch=True,
            poll=False,
        )
        call_ids.append(call_id)

    # Poll for completion of each read. Use a set so a chunk is only counted
    # once (the original appended to a list on every poll after completion,
    # inflating the count and letting the loop exit early)
    completed_read_idxs = set()

    # Keep the invoke endpoint in its own variables so it is not clobbered by
    # the upload endpoint fetched in the download branch below
    invoke_host, invoke_port = get_invoke_host_port()
    print("Polling workers...")

    while len(completed_read_idxs) < len(read_idxs):
        for i, read_idx in enumerate(read_idxs):
            # Skip chunks that have already finished
            if read_idx in completed_read_idxs:
                continue

            sleep(1)

            # See whether this call is still running
            call_id = call_ids[i]
            result, output = status_call_impl(
                "gene", "mapper", call_id, invoke_host, invoke_port,
                quiet=True,
            )
            if result == STATUS_RUNNING:
                continue

            # Check for success or failure
            if result == STATUS_SUCCESS:
                print("Read chunk {} completed.".format(read_idx))

                # Download the results of this read
                if download:
                    print("Downloading output for read chunk {}.".format(
                        read_idx))
                    state_key = "output_read_{}".format(read_idx)

                    if not exists(GENOMICS_OUTPUT_DIR):
                        makedirs(GENOMICS_OUTPUT_DIR)

                    output_file = join(GENOMICS_OUTPUT_DIR, state_key)
                    upload_host, upload_port = get_upload_host_port(None, None)
                    download_binary_state(
                        "gene",
                        state_key,
                        output_file,
                        host=upload_host,
                        port=upload_port,
                    )
            elif result == STATUS_FAILED:
                # The original passed the format args to print() itself,
                # printing the raw template string
                print("Read chunk {} failed: {}".format(read_idx, output))

            # This chunk is done (success or failure)
            completed_read_idxs.add(read_idx)

    for call_id in call_ids:
        exec_graph(
            ctx,
            call_id=call_id,
            host=invoke_host,
            headless=True,
            output_file="/tmp/exec_graph_{}.png".format(call_id),
        )

    print("-----------------------------------------")
    print("FAASM MAPPING COMPLETE")
    print("Time: {:.2f}s".format(time() - start_time))
    print("-----------------------------------------")