Example #1
def upload(ctx, user, func, host=None,
           s3=False, ibm=False, subdir=None,
           py=False, ts=False, prebuilt=False):
    host, port = _get_host_port(host, None)

    if py:
        func_file = join(PROJ_ROOT, "func", user, "{}.py".format(func))

        url = "http://{}:{}/p/{}/{}".format(host, port, user, func)
        curl_file(url, func_file)
    elif ts:
        func_file = join(PROJ_ROOT, "typescript", "build", "{}.wasm".format(func))
        url = "http://{}:{}/f/ts/{}".format(host, port, func)
        curl_file(url, func_file)
    else:
        base_dir = WASM_DIR if prebuilt else FUNC_BUILD_DIR

        if subdir:
            func_file = join(base_dir, user, subdir, "{}.wasm".format(func))
        elif prebuilt:
            func_file = join(base_dir, user, func, "function.wasm")
        else:
            func_file = join(base_dir, user, "{}.wasm".format(func))

        if s3:
            print("Uploading {}/{} to S3".format(user, func))
            s3_key = _get_s3_key(user, func)
            upload_file_to_s3(func_file, RUNTIME_S3_BUCKET, s3_key)
        elif ibm:
            print("Uploading {}/{} to IBM cloud storage".format(user, func))
            ibm_key = _get_s3_key(user, func)
            upload_file_to_ibm(func_file, RUNTIME_S3_BUCKET, ibm_key)
        else:
            url = "http://{}:{}/f/{}/{}".format(host, port, user, func)
            curl_file(url, func_file)
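
Most of these examples rely on a curl_file helper to push files over HTTP. A minimal sketch of such a helper (hypothetical, using requests rather than shelling out to curl, and assuming the upload server accepts the raw file body):

import requests


def curl_file(url, file_path):
    # Hypothetical: PUT the raw file contents to the upload endpoint
    with open(file_path, "rb") as fh:
        response = requests.put(url, data=fh)

    response.raise_for_status()
    print("Uploaded {} to {}".format(file_path, url))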
Example #2
def upload_binary_state(user, key, binary_file, host=None, s3_bucket=None):
    assert host or s3_bucket, "Must provide either host or S3 bucket"

    print("Uploading binary file at {} for user {}".format(binary_file, user))

    if s3_bucket:
        s3_key = "{}/{}".format(user, key)
        print("Uploading matrix binary to S3 {} -> {}/{}".format(key, s3_bucket, s3_key))
        upload_file_to_s3(binary_file, s3_bucket, s3_key)
    else:
        url = "http://{}:8002/s/{}/{}".format(host, user, key)
        curl_file(url, binary_file)
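
Hypothetical calls (the user, key, file path, and bucket name are all illustrative):

# Upload state to a host's state endpoint
upload_binary_state("demo", "mat_a", "/tmp/mat_a.dat", host="localhost")

# Or push it straight to an S3 bucket
upload_binary_state("demo", "mat_a", "/tmp/mat_a.dat", s3_bucket="my-faasm-state")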
Example #3
def _do_s3_upload(tar_path, tar_dir, tar_name):
    # Compress
    print("Creating archive of data {}".format(tar_path))
    check_output("tar -cf {} {}".format(tar_path, tar_dir),
                 shell=True,
                 cwd=FAASM_DATA_DIR)

    # Upload
    print("Uploading archive to S3")
    upload_file_to_s3(tar_path, DATA_S3_BUCKET, tar_name)

    # Remove old tar
    print("Removing archive")
    check_output("rm {}".format(tar_path), shell=True)
Example #4
def backup_sysroot(ctx):
    print("Creating archive of Faasm sysroot")
    check_output("tar -cf {} llvm-sysroot".format(SYSROOT_TAR_NAME),
                 shell=True,
                 cwd=FAASM_LOCAL_DIR)

    # Upload
    print("Uploading archive to S3")
    upload_file_to_s3(SYSROOT_TAR_PATH,
                      MISC_S3_BUCKET,
                      SYSROOT_TAR_NAME,
                      public=True)

    # Remove old tar
    print("Removing archive")
    remove(SYSROOT_TAR_PATH)
Example #5
def backup_sysroot(ctx):
    tar_name = _get_sysroot_tar_name()
    tar_path = _get_sysroot_tar_path()

    print("Creating archive of Faasm sysroot")
    check_output("tar -cf {} llvm-sysroot".format(tar_name),
                 shell=True,
                 cwd=FAASM_LOCAL_DIR)

    # Upload
    print("Uploading archive to S3")
    upload_file_to_s3(tar_path, MISC_S3_BUCKET, tar_name, public=True)

    # Remove old tar
    print("Removing archive")
    remove(tar_path)
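
Example #5 is a variant of Example #4 that resolves the tar name and path via helpers rather than module-level constants. Hypothetical implementations (the version-stamping scheme and the TOOLCHAIN_VERSION constant are assumptions):

from os.path import join


def _get_sysroot_tar_name():
    # Hypothetical: stamp the archive with the current toolchain version
    return "faasm-sysroot-{}.tar".format(TOOLCHAIN_VERSION)


def _get_sysroot_tar_path():
    # Archives live under the local Faasm working directory
    return join(FAASM_LOCAL_DIR, _get_sysroot_tar_name())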
Example #6
def backup_toolchain(ctx):
    backup_sysroot(ctx)

    print("Creating archive of Faasm toolchain")
    check_output("tar -cf {} install".format(TOOLCHAIN_TAR_NAME),
                 shell=True,
                 cwd=TOOLCHAIN_ROOT)

    # Upload
    print("Uploading archive to S3")
    upload_file_to_s3(TOOLCHAIN_TAR_PATH,
                      MISC_S3_BUCKET,
                      TOOLCHAIN_TAR_NAME,
                      public=True)

    # Remove old tar
    print("Removing archive")
    remove(TOOLCHAIN_TAR_PATH)
Example #7
def backup_runtime_root(ctx):
    # Nuke the existing runtime root
    if exists(FAASM_RUNTIME_ROOT):
        print("Moving existing runtime root to {}".format(BACKUP_LOCATION))
        if exists(BACKUP_LOCATION):
            rmtree(BACKUP_LOCATION)

        call("mv {} {}".format(FAASM_RUNTIME_ROOT, BACKUP_LOCATION),
             shell=True)

    print("Creating new runtime root dir")
    makedirs(FAASM_RUNTIME_ROOT)

    # Run the Ansible set-up script
    ret = call("ansible-playbook runtime_fs.yml", cwd=ANSIBLE_ROOT, shell=True)
    if ret != 0:
        print("Running ansible script failed")
        return 1

    # Set up the Python runtime
    set_up_python_runtime(ctx)

    # Set up tensorflow data
    tf_upload_data(ctx, local_copy=True)

    # Create a tmp directory
    makedirs(join(FAASM_RUNTIME_ROOT, "tmp"))

    # Compress
    print("Creating archive of Faasm runtime root")
    check_output("tar -cf {} runtime_root".format(RUNTIME_TAR_PATH),
                 shell=True,
                 cwd=FAASM_LOCAL_DIR)

    # Upload
    print("Uploading archive to S3")
    upload_file_to_s3(RUNTIME_TAR_PATH,
                      MISC_S3_BUCKET,
                      RUNTIME_TAR_NAME,
                      public=True)

    # Remove old tar
    print("Removing archive")
    remove(RUNTIME_TAR_PATH)
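
Examples #3 through #7 repeat the same compress / upload / remove sequence. A consolidated helper (a refactoring sketch, not code from the project) could capture the pattern:

from os import remove
from subprocess import check_output


def _archive_and_upload(tar_name, tar_path, src_dir, cwd, bucket, public=False):
    # Compress the source directory, relative to the given working directory
    print("Creating archive of {}".format(src_dir))
    check_output("tar -cf {} {}".format(tar_name, src_dir), shell=True, cwd=cwd)

    # Upload the archive to S3, then delete the local copy
    print("Uploading archive to S3")
    upload_file_to_s3(tar_path, bucket, tar_name, public=public)

    print("Removing archive")
    remove(tar_path)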
Example #8
def _upload_function(user, func, port=None, host=None, s3=False, ibm=False, py=False, ts=False, file=None,
                     local_copy=False):
    host, port = get_upload_host_port(host, port)

    if py and local_copy:
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs", user, func)
        shared_dir = join(FAASM_SHARED_ROOT, "pyfuncs", user, func)

        if exists(shared_dir):
            rmtree(shared_dir)

        if not exists(storage_dir):
            makedirs(storage_dir)

        src_file = join(FUNC_DIR, user, "{}.py".format(func))
        dest_file = join(storage_dir, "function.py")
        copy(src_file, dest_file)
    elif py:
        func_file = join(PROJ_ROOT, "func", user, "{}.py".format(func))

        url = "http://{}:{}/p/{}/{}".format(host, port, user, func)
        curl_file(url, func_file)
    elif ts:
        func_file = join(PROJ_ROOT, "typescript", "build", "{}.wasm".format(func))
        url = "http://{}:{}/f/ts/{}".format(host, port, func)
        curl_file(url, func_file)
    else:
        if file:
            func_file = file
        else:
            func_file = join(WASM_DIR, user, func, "function.wasm")

        if s3:
            print("Uploading {}/{} to S3".format(user, func))
            s3_key = _get_s3_key(user, func)
            upload_file_to_s3(func_file, RUNTIME_S3_BUCKET, s3_key)
        elif ibm:
            print("Uploading {}/{} to IBM cloud storage".format(user, func))
            ibm_key = _get_s3_key(user, func)
            upload_file_to_ibm(func_file, RUNTIME_S3_BUCKET, ibm_key)
        else:
            url = "http://{}:{}/f/{}/{}".format(host, port, user, func)
            curl_file(url, func_file)
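
Hypothetical calls (user and function names are illustrative):

# Upload a prebuilt wasm function over HTTP (host and port fall back to defaults)
_upload_function("demo", "echo")

# Copy a Python function's source straight into shared storage
_upload_function("demo", "hello", py=True, local_copy=True)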
Example #9
def _do_upload_all(host=None,
                   port=None,
                   upload_s3=False,
                   py=False,
                   prebuilt=False,
                   local_copy=False):
    to_upload = []

    if py:
        dir_to_walk = FUNC_DIR
    else:
        dir_to_walk = WASM_DIR if prebuilt else FUNC_BUILD_DIR

    extension = ".py" if py else ".wasm"
    url_part = "p" if py else "f"

    if upload_s3 and py:
        raise RuntimeError("Not yet implemented python and S3 upload")

    if local_copy and not py:
        raise RuntimeError("Not yet implemented local copy for non-python")
    elif local_copy:
        # Only create the shared storage dir when actually copying locally
        storage_dir = join(FAASM_SHARED_STORAGE_ROOT, "pyfuncs")
        if not exists(storage_dir):
            makedirs(storage_dir)

    # Walk the function directory tree
    for root, dirs, files in os.walk(dir_to_walk):
        # Strip original dir from root
        rel_path = root.replace(dir_to_walk, "")
        rel_path = rel_path.strip("/")

        path_parts = rel_path.split("/")
        path_parts = [p for p in path_parts if p]
        if not path_parts:
            continue

        if path_parts[0] not in DIRS_TO_INCLUDE:
            continue

        user = path_parts[0]

        for f in files:
            if f.endswith(extension):
                if prebuilt:
                    func = path_parts[1]
                else:
                    func = f.replace(extension, "")

                func_file = join(root, f)

                if upload_s3:
                    print("Uploading {}/{} to S3".format(user, func))
                    s3_key = _get_s3_key(user, func)
                    upload_file_to_s3(func_file, RUNTIME_S3_BUCKET, s3_key)
                elif local_copy:
                    # Copy files directly into place
                    func_storage_dir = join(storage_dir, user, func)
                    if not exists(func_storage_dir):
                        makedirs(func_storage_dir)

                    dest_file = join(func_storage_dir, "function.py")
                    call("cp {} {}".format(func_file, dest_file), shell=True)
                else:
                    print("Uploading {}/{} to host {}".format(
                        user, func, host))
                    url = "http://{}:{}/{}/{}/{}".format(
                        host, port, url_part, user, func)
                    to_upload.append((url, func_file))

    # Drop out if already done local copy
    if local_copy:
        return

    # Pool of uploaders (leave one core free, but always use at least one)
    n_workers = max(1, multiprocessing.cpu_count() - 1)
    p = multiprocessing.Pool(n_workers)
    p.starmap(curl_file, to_upload)
    p.close()
    p.join()
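
A hypothetical invocation (the host and port are illustrative; 8002 matches the upload port seen in Example #2):

# Upload every prebuilt wasm function to a local upload server
_do_upload_all(host="localhost", port=8002, prebuilt=True)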
Example #10
def _upload_lambda_to_s3(s3_key, zip_file_path):
    print("Uploading lambda {} to S3".format(s3_key))
    upload_file_to_s3(zip_file_path, RUNTIME_S3_BUCKET, s3_key)
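
Several examples above also depend on _get_s3_key. A minimal sketch (an assumption, mirroring the user/key format used for state in Example #2):

def _get_s3_key(user, func):
    # Hypothetical: key functions by user and function name
    return "{}/{}".format(user, func)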