def _upload_function(
    user, func, py=False, file=None, local_copy=False,
):
    """
    Upload a function for the given user, either as Python source or as a
    wasm binary.

    :param user: function owner/namespace
    :param func: function name
    :param py: treat the function as a Python source file
    :param file: explicit path to a wasm binary (overrides the default path)
    :param local_copy: with py, copy the source into shared storage rather
        than uploading over HTTP
    """
    host, port = get_upload_host_port()

    if py and local_copy:
        # Place the Python source directly into shared storage
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        # Clear any stale runtime copy and make sure storage exists
        if exists(runtime_dir):
            rmtree(runtime_dir)
        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(
            join(FUNC_DIR, user, "{}.py".format(func)),
            join(storage_dir, "function.py"),
        )
    elif py:
        # Push the Python source to the upload server
        source_path = join(PROJ_ROOT, "func", user, "{}.py".format(func))
        upload_url = "http://{}:{}/p/{}/{}".format(host, port, user, func)
        curl_file(upload_url, source_path)
    else:
        # Push the wasm binary; an explicit file path takes precedence
        wasm_path = file if file else join(WASM_DIR, user, func, "function.wasm")
        upload_url = "http://{}:{}/f/{}/{}".format(host, port, user, func)
        curl_file(upload_url, wasm_path)
def upload_funcs(ctx, host="localhost", port=None, peridx=False):
    """
    Upload all the genomics functions

    If set to peridx=True, we will be deploying one function per index chunk,
    this may make it easier to schedule functions with the state associated
    with their index chunk.

    :param ctx: invoke task context
    :param host: upload host (default localhost)
    :param port: upload port (resolved via get_upload_host_port when None)
    :param peridx: upload one worker function per index chunk
    """
    # When uploading genomics, we are uploading the mapper entrypoint as a normal
    # function, but the worker functions are all from the same source file

    # Upload the entrypoint function
    # NOTE(review): this passes 3 args, but the upload() definitions visible in
    # this file take at least 4 — confirm which upload() this resolves to
    upload(ctx, "gene", "mapper")

    # Upload the worker functions (one for each index chunk)
    host, port = get_upload_host_port(host, port)

    if peridx:
        # Upload one function per index
        args = [(idx, host, port) for idx in INDEX_CHUNKS]

        # Fix: use the pool as a context manager so its worker processes are
        # always terminated — the original never closed/joined the Pool
        with Pool(os.cpu_count()) as p:
            p.starmap(_do_func_upload, args)
    else:
        # Just upload one function that will be agnostic to index
        file_path = join(EXPERIMENTS_ROOT, "third-party/gem3-mapper/wasm_bin/gem-mapper")
        url = "http://{}:{}/f/gene/mapper_index".format(host, port)
        curl_file(url, file_path)
def _do_func_upload(idx, host, port):
    """
    Upload the per-index-chunk mapper function for a single index chunk.

    :param idx: index chunk identifier (appended to the function name)
    :param host: upload host
    :param port: upload port
    """
    func_name = "mapper_index{}".format(idx)
    print("Uploading function gene/{} to {}:{}".format(func_name, host, port))

    # Every per-index function is built from the same wasm binary
    wasm_path = join(
        EXPERIMENTS_ROOT, "third-party/gem3-mapper/wasm_bin/gem-mapper"
    )
    endpoint = "http://{}:{}/f/gene/{}".format(host, port, func_name)
    curl_file(endpoint, wasm_path)
def upload(ctx, user, key, in_path):
    """ Uploads data from file into state """
    # Only the host is taken from the resolved pair; state uploads always
    # target port 8002 (consistent with the other state-upload helpers here)
    host, _ = get_upload_host_port()

    print("Uploading state file at {} for user {}".format(in_path, user))

    state_url = "http://{}:8002/s/{}/{}".format(host, user, key)
    curl_file(state_url, in_path)
def upload_shared_file(host, local_path, shared_path, quiet=False):
    """
    Upload a local file to the shared-file endpoint on the given host.

    :param host: upload host
    :param local_path: path of the file on the local machine
    :param shared_path: destination path in the shared store (sent via the
        FilePath header)
    :param quiet: suppress progress output
    """
    url = "http://{}:8002/file/".format(host)
    local_filename = basename(local_path)

    # Fix: honour the quiet flag for the progress line too — previously it
    # was printed unconditionally even when quiet=True
    if not quiet:
        print("Uploading {} to {}".format(local_filename, shared_path))

    curl_file(url, local_path, headers={
        "FilePath": shared_path,
    }, quiet=quiet)
def upload_binary_state(user, key, binary_file, host=None, s3_bucket=None):
    """
    Upload a binary state file, either to an upload host or to an S3 bucket.

    :param user: state owner/namespace
    :param key: state key
    :param binary_file: local path of the binary file
    :param host: upload host (mutually alternative with s3_bucket)
    :param s3_bucket: target S3 bucket; takes precedence when provided
    """
    assert host or s3_bucket, "Must provide either host or S3 bucket"

    print("Uploading binary file at {} for user {}".format(binary_file, user))

    if not s3_bucket:
        # Direct upload to the host's state endpoint (fixed port 8002)
        curl_file(
            "http://{}:8002/s/{}/{}".format(host, user, key),
            binary_file,
        )
        return

    # S3 path: the object key is namespaced by user
    s3_key = "{}/{}".format(user, key)
    print("Uploading matrix binary to S3 {} -> {}/{}".format(
        key, s3_bucket, s3_key))
    upload_file_to_s3(binary_file, s3_bucket, s3_key)
def upload(ctx, user, func, func_file, py=False, local_copy=False):
    """ Upload a function """
    host, port = get_upload_host_port()

    if not py:
        # Wasm binary goes straight to the function endpoint
        curl_file(
            "http://{}:{}/f/{}/{}".format(host, port, user, func),
            func_file,
        )
        return

    if local_copy:
        # Place the Python source directly into shared storage
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        # Clear any stale runtime copy and make sure storage exists
        if exists(runtime_dir):
            rmtree(runtime_dir)
        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(func_file, join(storage_dir, "function.py"))
    else:
        # Push the Python source to the upload server
        curl_file(
            "http://{}:{}/p/{}/{}".format(host, port, user, func),
            func_file,
        )
def _upload_function(user, func, port=None, host=None, ibm=False, py=False,
                     ts=False, file=None, local_copy=False):
    """
    Upload a function in one of several modes:

    - py + local_copy: copy the Python source into shared storage
    - py: POST the Python source to the upload server
    - ts: POST the TypeScript build output
    - otherwise: POST the wasm binary, or push it to IBM cloud storage

    :param user: function owner/namespace
    :param func: function name
    :param port: upload port (resolved via get_upload_host_port when None)
    :param host: upload host (resolved via get_upload_host_port when None)
    :param ibm: push the wasm binary to IBM cloud storage instead of HTTP
    :param py: treat the function as a Python source file
    :param ts: treat the function as a TypeScript build artefact
    :param file: explicit wasm path, overriding the default build location
    :param local_copy: with py, copy into shared storage rather than upload
    """
    host, port = get_upload_host_port(host, port)

    if py and local_copy:
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        runtime_dir = join(FAASM_RUNTIME_ROOT, "pyfuncs", user, func)

        # Clear any stale runtime copy and make sure storage exists
        if exists(runtime_dir):
            rmtree(runtime_dir)
        if not exists(storage_dir):
            makedirs(storage_dir)

        copy(
            join(FUNC_DIR, user, "{}.py".format(func)),
            join(storage_dir, "function.py"),
        )
        return

    if py:
        source_path = join(PROJ_ROOT, "func", user, "{}.py".format(func))
        curl_file(
            "http://{}:{}/p/{}/{}".format(host, port, user, func),
            source_path,
        )
        return

    if ts:
        # TypeScript functions live under a shared "ts" namespace
        ts_path = join(PROJ_ROOT, "typescript", "build", "{}.wasm".format(func))
        curl_file("http://{}:{}/f/ts/{}".format(host, port, func), ts_path)
        return

    # Wasm upload: an explicit file path takes precedence over the default
    wasm_path = file if file else join(WASM_DIR, user, func, "function.wasm")

    if ibm:
        print("Uploading {}/{} to IBM cloud storage".format(user, func))
        upload_file_to_ibm(wasm_path, RUNTIME_S3_BUCKET, _get_s3_key(user, func))
    else:
        curl_file(
            "http://{}:{}/f/{}/{}".format(host, port, user, func),
            wasm_path,
        )
def upload_shared_file(host, local_path, shared_path, quiet=False):
    """
    Upload a local file to the shared-file endpoint on the given host.

    :param host: upload host
    :param local_path: path of the file on the local machine
    :param shared_path: destination path in the shared store (sent via the
        FilePath header)
    :param quiet: forwarded to curl_file to suppress its output — added for
        consistency with the other upload_shared_file variant in this
        project; the False default preserves the original call (assumes
        curl_file's own quiet default is False — TODO confirm)
    """
    url = "http://{}:8002/file/".format(host)
    curl_file(url, local_path, headers={
        "FilePath": shared_path,
    }, quiet=quiet)