Example #1
def process(
    pfile: "Process file with the plugin+command instructions",
    remote: "Execute the commands in a remote server" = False,
    uri: "URI to the remote SuRVoS API Server" = default_uri,
):
    import os
    import yaml

    uri = uri if remote else None

    print(pfile)

    if not os.path.isabs(pfile):
        fworkflows = os.path.join(os.getcwd(), pfile)
    else:
        fworkflows = pfile

    with open(fworkflows) as f:
        workflows = yaml.safe_load(f.read())

    for workflow in workflows:
        name = workflow.pop("name", "Workflow")
        plugin = workflow.pop("plugin")
        command = workflow.pop("command")

        print("Running workflow:", name)

    run_command(plugin, command, workflow, remote=remote)
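A minimal sketch of the workflow file that `process` expects, inferred from how it pops "name", "plugin" and "command" and forwards the remaining keys as parameters; the plugin/command names and parameters below are hypothetical placeholders.

# Hypothetical workflows.yaml contents: a list of steps, each with a plugin,
# a command, and any extra parameters that run_command should receive.
workflows_yaml = """
- name: Blur the raw volume
  plugin: features
  command: gaussian_blur
  src: __data__
  dst: 001_gaussian_blur
"""

with open("workflows.yaml", "w") as f:
    f.write(workflows_yaml)

process("workflows.yaml")  # local run; pass remote=True (and a uri) for a server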
Example #2
def run_server(
    command: "Command to execute in `plugin.action` format.",
    server: "URI to the remote SuRVoS API Server",
    *args: "Extra keyword arguments in the form of `key=value` "
    "required by plugin/commands. Please refer to SuRVoS API.",
):

    """
    Run a plugin/command from terminal. If `server` is `None` it will use
    the local SuRVoS installation.
    """

    # workaround issue with hug function args
    if server == "server=0:0":  # use server=0:0 for local
        server = None

    plugin, command = command.split(".")
    args = [k.split("=", 1) for k in args]  # split on the first '=' only, so values may contain '='
    params = {k: v for k, v in args}

    print(f"Running command: {plugin} {command} on {server}")
    result = run_command(plugin, command, uri=server, **params)[0]

    if isinstance(result, dict):
        result = format_yaml(result)

    logger.info(result)
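A hypothetical invocation of `run_server`, based only on its signature and docstring: the "server=0:0" sentinel forces a local run, and the trailing key=value strings (parameter names here are made up) become keyword arguments for the plugin command.

run_server(
    "features.gaussian_blur",  # plugin.action format
    "server=0:0",              # sentinel checked above: run locally
    "workspace=test_ws",       # extra key=value params, hypothetical names
    "src=__data__",
)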
Example #3
def add_feature(feature_vol, new_name, workspace_name):
    result = survos.run_command(
        "features", "create", uri=None, workspace=workspace_name, feature_type="raw"
    )
    new_feature_id = result[0]["id"]
    result = survos.run_command(
        "features",
        "rename",
        uri=None,
        feature_id=new_feature_id,
        new_name=new_name,
        workspace=workspace_name,
    )

    src = DataModel.g.dataset_uri(new_feature_id, group="features")

    with DatasetManager(src, out=src, dtype="float32", fillvalue=0) as DM:
        out_dataset = DM.out
        out_dataset[:] = feature_vol

    print(f"Created new feature with id: {new_feature_id}")
Example #4
    def test_feature_shape(self, datamodel):
        DataModel = datamodel
        src = DataModel.g.dataset_uri("__data__", None)
        dst = DataModel.g.dataset_uri("001_gaussian_blur", group="features")

        survos.run_command("features",
                           "gaussian_blur",
                           uri=None,
                           src=src,
                           dst=dst)

        with DatasetManager(src, out=dst, dtype="float32", fillvalue=0) as DM:
            print(DM.sources[0].shape)
            src_dataset = DM.sources[0]
            dst_dataset = DM.out
            src_arr = src_dataset[:]
            dst_arr = dst_dataset[:]

        assert dst_arr.shape == (4, 4, 4)
        assert np.max(dst_arr) <= 1.0
        assert np.min(dst_arr) >= 0.0
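The `datamodel` argument above is a pytest fixture; Example #8 at the bottom of this page shows its body. A plausible wiring, assuming pytest is the test runner:

import pytest

@pytest.fixture
def datamodel():
    ...  # build the 4x4x4 test volume and temp workspace as in Example #8,
         # then return DataModel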
Example #5
def add_anno(anno_vol, new_name, workspace_name):
    result = survos.run_command(
        "annotations",
        "add_level",
        uri=None,
        workspace=workspace_name,
    )
    new_anno_id = result[0]["id"]
    
    src = DataModel.g.dataset_uri(new_anno_id, group="annotations")

    with DatasetManager(src, out=src, dtype="int32", fillvalue=0) as DM:
        out_dataset = DM.out
        out_dataset[:] = anno_vol

    print(f"Created new annotation with id: {new_anno_id}")
Example #6
def roi_ws(img_volume, ws_name):
    tmpvol_fullpath = os.path.join("tmp", "tmpvol.h5")  # portable path; original used a hard-coded Windows separator

    with h5py.File(tmpvol_fullpath, "w") as hf:
        hf.create_dataset("data", data=img_volume)

    survos.run_command("workspace", "create", workspace=ws_name)
    logger.info(f"Created workspace {ws_name}")

    survos.run_command(
        "workspace",
        "add_data",
        workspace=ws_name,
        data_fname=tmpvol_fullpath,
        dtype="float32",
    )

    response = survos.run_command(
        "workspace",
        "add_dataset",
        workspace=ws_name,
        dataset_name=ws_name + "_dataset",
        dtype="float32",
    )

    DataModel.g.current_workspace = ws_name

    survos.run_command("features",
                       "create",
                       uri=None,
                       workspace=ws_name,
                       feature_type="raw")
    src = DataModel.g.dataset_uri("__data__", None)
    dst = DataModel.g.dataset_uri("001_raw", group="features")
    with DatasetManager(src, out=dst, dtype="float32", fillvalue=0) as DM:
        print(DM.sources[0].shape)
        orig_dataset = DM.sources[0]
        dst_dataset = DM.out
        src_arr = orig_dataset[:]
        dst_dataset[:] = src_arr

    return response
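A minimal sketch of driving `roi_ws` with an in-memory volume; the local "tmp" directory must exist because the function writes its temporary HDF5 file there, and the workspace name is a placeholder:

import numpy as np

vol = np.random.random((64, 64, 64)).astype(np.float32)
response = roi_ws(vol, "roi_test_ws")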
Example #7
def init_ws(workspace_params):
    ws_name = workspace_params["workspace_name"]
    dataset_name = workspace_params["dataset_name"]
    datasets_dir = workspace_params["datasets_dir"]
    fname = workspace_params["vol_fname"]

    image_path = os.path.join(datasets_dir, fname)
    logger.info(
        f"Initialising workspace {ws_name} with image volume {image_path}")
    _, suffix = os.path.splitext(image_path)
    if suffix in [".h5", ".hdf5"]:
        original_data = h5py.File(image_path, "r")
    elif suffix in [".tif", ".tiff"]:
        original_data = None
        img_volume = io.imread(image_path)
    elif suffix in [".rec", ".mrc"]:
        original_data = mrcfile.mmap(image_path, "r+")
        img_volume = original_data.data
    if "group_name" in workspace_params:
        group_name = workspace_params["group_name"]
        logger.info("Extracting dataset and then group")
        img_volume = original_data[dataset_name]
        img_volume = img_volume[group_name]
    elif isinstance(original_data, h5py.Group):
        logger.info("Extracting dataset")
        try:
            img_volume = original_data[dataset_name]
        except KeyError as e:
            raise WorkspaceException(
                f"Internal HDF5 dataset: '{dataset_name}' does not exist!"
            ) from e

    logger.info(f"Loaded vol of size {img_volume.shape}")
    if "roi_limits" in workspace_params:
        x_start, x_end, y_start, y_end, z_start, z_end = map(
            int, workspace_params["roi_limits"])
        logger.info(f"Cropping data to predefined ROI z:{z_start}-{z_end},"
                    f"y:{y_start}-{y_end}, x:{x_start}-{x_end}")
        img_volume = img_volume[z_start:z_end, y_start:y_end, x_start:x_end]
    img_volume = preprocess(img_volume)

    if "precrop_coords" in workspace_params:
        precrop_coords = workspace_params["precrop_coords"]
        if "precrop_vol_size" in workspace_params:
            precrop_vol_size = workspace_params["precrop_vol_size"]

            if workspace_params["entities_name"] is not None:
                entities_name = workspace_params["entities_name"]

            # NOTE: entities_df is not defined in this snippet; it is expected to
            # come from the surrounding module (e.g. loaded via entities_name).
            img_volume, entities_df = precrop(img_volume, entities_df,
                                              precrop_coords, precrop_vol_size)

    if "downsample_by" in workspace_params:
        downby = int(workspace_params["downsample_by"])
        logger.info(f"Downsampling data by a factor of {downby}")
        img_volume = img_volume[::downby, ::downby, ::downby]

    tmpvol_fullpath = os.path.abspath(
        os.path.join(tempfile.gettempdir(),
                     os.urandom(24).hex() + ".h5"))
    logger.info(tmpvol_fullpath)

    with h5py.File(tmpvol_fullpath, "w") as hf:
        hf.create_dataset("data", data=img_volume)

    # survos.load_settings()

    # result = Launcher.g.run("workspace", "create", workspace=ws_name)

    survos.run_command("workspace", "create", workspace=ws_name)

    # result = Launcher.g.run("workspace", "add_data", workspace=ws_name,
    #                        data_fname=tmpvol_fullpath,
    #                        dtype="float32")

    survos.run_command(
        "workspace",
        "add_data",
        workspace=ws_name,
        data_fname=tmpvol_fullpath,
    )

    logger.info(
        f"Added data to workspace from {os.path.join(datasets_dir, fname)}")

    os.remove(tmpvol_fullpath)
    # result = Launcher.g.run("workspace", "add_dataset",
    #                        workspace=ws_name,
    #                        dataset_name=dataset_name,
    #                        dtype="float32"
    # )
    response = survos.run_command(
        "workspace",
        "add_dataset",
        workspace=ws_name,
        dataset_name=dataset_name,
        dtype="float32",
    )

    DataModel.g.current_workspace = ws_name

    # response = Launcher.g.run("features", "create",
    #                       workspace=ws_name,
    #                       feature_type="raw")

    survos.run_command("features",
                       "create",
                       uri=None,
                       workspace=ws_name,
                       feature_type="raw")

    src = DataModel.g.dataset_uri("__data__", None)
    dst = DataModel.g.dataset_uri("001_raw", group="features")
    with DatasetManager(src, out=dst, dtype="float32", fillvalue=0) as DM:
        print(DM.sources[0].shape)
        orig_dataset = DM.sources[0]
        dst_dataset = DM.out
        src_arr = orig_dataset[:]
        dst_dataset[:] = src_arr

    return (_, response)
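A sketch of the `workspace_params` dict that `init_ws` reads, with the keys inferred from the lookups in the function body; all file names and values are hypothetical, and the optional keys can be omitted:

workspace_params = {
    "workspace_name": "my_ws",
    "dataset_name": "data",
    "datasets_dir": "/path/to/datasets",
    "vol_fname": "volume.h5",
    # optional:
    "roi_limits": (0, 64, 0, 64, 0, 64),  # x_start, x_end, y_start, y_end, z_start, z_end
    "downsample_by": 2,
}

init_ws(workspace_params)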
Example #8
def datamodel():
    # make test vol
    map_fullpath = "./tmp/testvol_4x4x4b.h5"

    testvol = np.array([
        [
            [0.1761602, 0.6701295, 0.13151232, 0.95726678],
            [0.4795476, 0.48114134, 0.0410548, 0.29893265],
            [0.49127266, 0.70298447, 0.42751211, 0.08101552],
            [0.73805652, 0.83111601, 0.36852477, 0.38732476],
        ],
        [
            [0.2847222, 0.96054574, 0.25430756, 0.35403861],
            [0.54439093, 0.65897414, 0.1959487, 0.90714872],
            [0.84462152, 0.90754182, 0.02455657, 0.26180662],
            [0.1711208, 0.40122666, 0.54562598, 0.01419861],
        ],
        [
            [0.59280376, 0.42706895, 0.86637913, 0.87831645],
            [0.57991401, 0.31989204, 0.85869799, 0.6333411],
            [0.21539274, 0.63780214, 0.64204493, 0.74425482],
            [0.1903691, 0.81962537, 0.31774673, 0.34812628],
        ],
        [
            [0.40880077, 0.595773, 0.28856063, 0.19316746],
            [0.03195766, 0.62475541, 0.50762591, 0.34700798],
            [0.98913461, 0.07883111, 0.96534233, 0.57697606],
            [0.71496714, 0.70764578, 0.92294417, 0.91300531],
        ],
    ])

    with h5py.File(map_fullpath, "w") as hf:
        hf.create_dataset("data", data=testvol)

    tmp_ws_name = "testworkspace_tmp1"
    print(DataModel.g.CHROOT)

    result = survos.run_command("workspace",
                                "get",
                                uri=None,
                                workspace=tmp_ws_name)

    if not isinstance(result[0], dict):
        logger.debug("Creating temp workspace")
        survos.run_command("workspace",
                           "create",
                           uri=None,
                           workspace=tmp_ws_name)
    else:
        logger.debug("tmp exists, deleting and recreating")
        survos.run_command("workspace",
                           "delete",
                           uri=None,
                           workspace=tmp_ws_name)
        logger.debug("workspace deleted")
        survos.run_command("workspace",
                           "create",
                           uri=None,
                           workspace=tmp_ws_name)
        logger.debug("workspace recreated")

    # add data to workspace
    survos.run_command(
        "workspace",
        "add_data",
        uri=None,
        workspace=tmp_ws_name,
        data_fname=map_fullpath,
        dtype="float32",
    )

    DataModel.g.current_workspace = tmp_ws_name

    return DataModel
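Outside pytest the same function can be called directly; a minimal sketch, assuming the SuRVoS imports used above are available:

DataModel = datamodel()
print(DataModel.g.current_workspace)  # "testworkspace_tmp1"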