def handle_get_manifest(table_id, node_id):
    if len(request.data) > 0:
        data = json.loads(request.data)
    else:
        data = {}

    if "start_layer" in data:
        start_layer = int(data["start_layer"])
    else:
        start_layer = None

    verify = request.args.get('verify', False)
    verify = verify in ['True', 'true', '1', True]

    # TODO: Read this from config
    MESH_MIP = 2

    cg = app_utils.get_cg(table_id)
    seg_ids = meshgen_utils.get_highest_child_nodes_with_meshes(
        cg,
        np.uint64(node_id),
        stop_layer=2,
        start_layer=start_layer,
        verify_existence=verify)

    filenames = [meshgen_utils.get_mesh_name(cg, s, MESH_MIP) for s in seg_ids]

    return jsonify(fragments=filenames)
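A minimal client-side sketch of calling this handler over HTTP, assuming a hypothetical URL for the manifest route (the actual URL rule is not shown in these examples):

import json
import requests

# Hypothetical endpoint and node ID, for illustration only.
url = "https://example.org/segmentation/table/my_table/manifest/648518346349538235"
payload = {"start_layer": 4}  # optional body parameter read by the handler

resp = requests.post(url, data=json.dumps(payload), params={"verify": "true"})
resp.raise_for_status()
print(resp.json()["fragments"])  # list of mesh fragment filenames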
Example #2
def handle_root(table_id, atomic_id):
    current_app.table_id = table_id

    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    # Convert seconds since epoch to UTC datetime
    try:
        timestamp = float(request.args.get("timestamp", time.time()))
        timestamp = datetime.fromtimestamp(timestamp, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "Timestamp parameter is not a valid unix timestamp")

    stop_layer = request.args.get("stop_layer", None)
    if stop_layer is not None:
        try:
            stop_layer = int(stop_layer)
        except (TypeError, ValueError):
            raise cg_exceptions.BadRequest("stop_layer is not an integer")

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    root_id = cg.get_root(np.uint64(atomic_id),
                          stop_layer=stop_layer,
                          time_stamp=timestamp)

    # Return root ID
    return root_id
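The handler returns a np.uint64, which is not directly JSON serializable; a minimal sketch of how a route wrapper might expose it, assuming a hypothetical blueprint and URL rule:

from flask import Blueprint, jsonify

bp = Blueprint("cg_api", __name__)  # hypothetical blueprint

@bp.route("/table/<table_id>/node/<atomic_id>/root", methods=["GET"])
def root_endpoint(table_id, atomic_id):
    root_id = handle_root(table_id, atomic_id)
    # Cast np.uint64 to str (or int) before serializing.
    return jsonify({"root_id": str(root_id)})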
Example #3
def handle_rollback(table_id):
    if table_id in ["fly_v26", "fly_v31"]:
        raise cg_exceptions.InternalServerError(
            "Rollback not supported for this chunkedgraph table.")

    current_app.table_id = table_id

    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id
    target_user_id = request.args["user_id"]

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    user_operations = all_user_operations(table_id)
    operation_ids = user_operations["operation_id"]
    timestamps = user_operations["timestamp"]
    operations = list(zip(operation_ids, timestamps))
    operations.sort(key=lambda op: op[1])

    for operation in operations:
        operation_id = operation[0]
        try:
            ret = cg.undo_operation(user_id=target_user_id,
                                    operation_id=operation_id)
        except cg_exceptions.LockingError as e:
            raise cg_exceptions.InternalServerError(
                "Could not acquire root lock for undo operation.")
        except (cg_exceptions.PreconditionError,
                cg_exceptions.PostconditionError) as e:
            raise cg_exceptions.BadRequest(str(e))

        if ret.new_lvl2_ids.size > 0:
            trigger_remesh(table_id, ret.new_lvl2_ids, is_priority=False)

    return user_operations
Example #4
def handle_redo(table_id):
    if table_id in ["fly_v26", "fly_v31"]:
        raise cg_exceptions.InternalServerError(
            "Redo not supported for this chunkedgraph table.")

    current_app.table_id = table_id

    data = json.loads(request.data)
    is_priority = request.args.get('priority', True, type=str2bool)
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    current_app.logger.debug(data)

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    operation_id = np.uint64(data["operation_id"])

    try:
        ret = cg.redo(user_id=user_id, operation_id=operation_id)
    except cg_exceptions.LockingError as e:
        raise cg_exceptions.InternalServerError(
            "Could not acquire root lock for redo operation.")
    except (cg_exceptions.PreconditionError,
            cg_exceptions.PostconditionError) as e:
        raise cg_exceptions.BadRequest(str(e))

    current_app.logger.debug(("after redo:", ret.new_root_ids))
    current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))

    if ret.new_lvl2_ids.size > 0:
        trigger_remesh(table_id, ret.new_lvl2_ids, is_priority=is_priority)

    return ret
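The priority flag is parsed with request.args.get('priority', True, type=str2bool); the str2bool helper itself is not shown here, but a minimal sketch of what such a parser might look like:

def str2bool(value):
    # Hypothetical boolean parser; the helper actually used by the service
    # may accept a different set of spellings.
    return str(value).lower() in ("true", "1", "t", "yes")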
Example #5
def handle_preview_meshes(table_id, node_id):
    if len(request.data) > 0:
        data = json.loads(request.data)
    else:
        data = {}

    node_id = np.uint64(node_id)

    cg = app_utils.get_cg(table_id)

    if "seg_ids" in data:
        seg_ids = data["seg_ids"]

        chunk_id = cg.get_chunk_id(node_id)
        supervoxel_ids = [cg.get_node_id(seg_id, chunk_id)
                          for seg_id in seg_ids]
    else:
        supervoxel_ids = None

    meshgen.mesh_lvl2_preview(cg, node_id, supervoxel_ids=supervoxel_ids,
                              cv_path=None, cv_mesh_dir=None, mip=2,
                              simplification_factor=999999,
                              max_err=40, parallel_download=1, verbose=True,
                              cache_control='no-cache')
    return Response(status=200)
Example #6
def handle_valid_frags(table_id, node_id):
    cg = app_utils.get_cg(table_id)

    seg_ids = meshgen_utils.get_highest_child_nodes_with_meshes(
        cg, np.uint64(node_id), stop_layer=1, verify_existence=True)

    return app_utils.tobinary(seg_ids)
Example #7
def all_user_operations(table_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id
    target_user_id = request.args["user_id"]

    try:
        start_time = float(request.args.get("start_time", 0))
        start_time = datetime.fromtimestamp(start_time, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "start_time parameter is not a valid unix timestamp")

    # Call ChunkedGraph
    cg_instance = app_utils.get_cg(table_id)

    log_rows = cg_instance.read_log_rows(start_time=start_time)

    valid_entry_ids = []
    timestamp_list = []

    entry_ids = np.sort(list(log_rows.keys()))
    for entry_id in entry_ids:
        entry = log_rows[entry_id]
        user_id = entry[column_keys.OperationLogs.UserID]

        if user_id == target_user_id:
            valid_entry_ids.append(entry_id)
            timestamp = entry["timestamp"]
            timestamp_list.append(timestamp)

    return {"operation_id": valid_entry_ids, "timestamp": timestamp_list}
Example #8
def tabular_change_log_recent(table_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    try:
        start_time = float(request.args.get("start_time", 0))
        start_time = datetime.fromtimestamp(start_time, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "start_time parameter is not a valid unix timestamp")

    # Call ChunkedGraph
    cg_instance = app_utils.get_cg(table_id)

    log_rows = cg_instance.read_log_rows(start_time=start_time)

    timestamp_list = []
    user_list = []

    entry_ids = np.sort(list(log_rows.keys()))
    for entry_id in entry_ids:
        entry = log_rows[entry_id]

        timestamp = entry["timestamp"]
        timestamp_list.append(timestamp)

        user_id = entry[column_keys.OperationLogs.UserID]
        user_list.append(user_id)

    return pd.DataFrame.from_dict({
        "operation_id": entry_ids,
        "timestamp": timestamp_list,
        "user_id": user_list
    })
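The handler returns a pandas DataFrame, so the surrounding route has to serialize it; a minimal sketch assuming CSV output is acceptable (the real serialization is handled elsewhere):

from flask import Response

def to_csv_response(df):
    # Stream the change log back as CSV; df is the DataFrame built above.
    return Response(df.to_csv(index=False), mimetype="text/csv")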
Example #9
def tabular_change_log(table_id, root_id, get_root_ids, filtered):
    if get_root_ids:
        current_app.request_type = "tabular_changelog_wo_ids"
    else:
        current_app.request_type = "tabular_changelog"

    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    segment_history = cg_history.SegmentHistory(cg, int(root_id))

    tab = segment_history.get_tabular_changelog(with_ids=get_root_ids,
                                                filtered=filtered)

    try:
        tab["user_name"] = get_usernames(
            np.array(tab["user_id"], dtype=int).squeeze(),
            current_app.config['AUTH_TOKEN'])
    except Exception:
        current_app.logger.error(
            f"Could not retrieve user names for {root_id}")

    return tab
Example #10
def handle_roots(table_id, is_binary=False):
    current_app.request_type = "roots"
    current_app.table_id = table_id

    if is_binary:
        node_ids = np.frombuffer(request.data, np.uint64)
    else:
        node_ids = np.array(json.loads(request.data)["node_ids"],
                            dtype=np.uint64)
    # Convert seconds since epoch to UTC datetime
    try:
        timestamp = float(request.args.get("timestamp", time.time()))
        timestamp = datetime.fromtimestamp(timestamp, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "Timestamp parameter is not a valid unix timestamp")

    stop_layer = request.args.get("stop_layer", None)
    if stop_layer is not None:
        stop_layer = int(stop_layer)

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    root_ids = cg.get_roots(node_ids,
                            stop_layer=stop_layer,
                            time_stamp=timestamp)

    return root_ids
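When is_binary is set, the request body is the raw uint64 buffer of node IDs; a hedged client-side sketch, assuming a hypothetical URL and that the response carries the root IDs in the same binary layout:

import numpy as np
import requests

node_ids = np.array([72057594037927936, 72057594037927937], dtype=np.uint64)

url = "https://example.org/table/my_table/roots_binary"  # hypothetical endpoint
resp = requests.post(url, data=node_ids.tobytes(),
                     params={"timestamp": 1600000000})
root_ids = np.frombuffer(resp.content, dtype=np.uint64)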
Example #11
def handle_info(table_id):
    cg = app_utils.get_cg(table_id)

    dataset_info = cg.dataset_info
    app_info = {"app": {"supported_api_versions": list(__api_versions__)}}
    combined_info = {**dataset_info, **app_info}

    return jsonify(combined_info)
Example #12
def handle_get_manifest(table_id, node_id):
    current_app.request_type = "manifest"
    current_app.table_id = table_id

    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    if len(request.data) > 0:
        data = json.loads(request.data)
    else:
        data = {}

    if "bounds" in request.args:
        bounds = request.args["bounds"]
        bounding_box = np.array([b.split("-") for b in bounds.split("_")],
                                dtype=int).T
    else:
        bounding_box = None

    verify = request.args.get("verify", False)
    verify = verify in ["True", "true", "1", True]

    cg = app_utils.get_cg(table_id)

    if "start_layer" in data:
        start_layer = int(data["start_layer"])
    else:
        start_layer = cg.get_chunk_layer(np.uint64(node_id))

    if "flexible_start_layer" in data:
        flexible_start_layer = int(data["flexible_start_layer"])
    else:
        flexible_start_layer = None

    seg_ids = meshgen_utils.get_highest_child_nodes_with_meshes(
        cg,
        np.uint64(node_id),
        stop_layer=2,
        start_layer=start_layer,
        bounding_box=bounding_box,
        verify_existence=verify,
        flexible_start_layer=flexible_start_layer)

    filenames = [meshgen_utils.get_mesh_name(cg, s) for s in seg_ids]

    resp = {"fragments": filenames}

    if "return_seg_id_layers" in data:
        if app_utils.toboolean(data["return_seg_id_layers"]):
            resp["seg_id_layers"] = cg.get_chunk_layers(seg_ids)

    if "return_seg_chunk_coordinates" in data:
        if app_utils.toboolean(data["return_seg_chunk_coordinates"]):
            resp["seg_chunk_coordinates"] = [
                cg.get_chunk_coordinates(seg_id) for seg_id in seg_ids
            ]

    return resp
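For reference, the bounds query parameter parsed above is three dash-separated ranges joined by underscores; a small standalone sketch with made-up numbers:

import numpy as np

bounds = "100-200_300-400_10-20"  # "<x_min>-<x_max>_<y_min>-<y_max>_<z_min>-<z_max>"
bounding_box = np.array([b.split("-") for b in bounds.split("_")], dtype=int).T
# -> [[100, 300, 10],
#     [200, 400, 20]]   shape (2, 3): min corner and max corner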
Example #13
def handle_root_main(table_id, atomic_id, timestamp):
    current_app.request_type = "root"

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    root_id = cg.get_root(np.uint64(atomic_id), time_stamp=timestamp)

    # Return binary
    return app_utils.tobinary(root_id)
Example #14
def handle_root():
    atomic_id = np.uint64(json.loads(request.data)[0])

    # Call ChunkedGraph
    cg = app_utils.get_cg()
    root_id = cg.get_root(atomic_id)

    # Return binary
    return app_utils.tobinary(root_id)
Example #15
def handle_get_layer2_graph(table_id, node_id):
    current_app.request_type = "get_lvl2_graph"
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    cg = app_utils.get_cg(table_id)
    edge_graph = pathing.get_lvl2_edge_list(cg, int(node_id))
    return {'edge_graph': edge_graph}
Example #16
def last_edit(table_id, root_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    cg = app_utils.get_cg(table_id)

    segment_history = cg_history.SegmentHistory(cg, int(root_id))

    return segment_history.last_edit.timestamp
Example #17
def handle_valid_frags(table_id, node_id):
    current_app.table_id = table_id

    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    cg = app_utils.get_cg(table_id)

    seg_ids = meshgen_utils.get_highest_child_nodes_with_meshes(
        cg, np.uint64(node_id), stop_layer=1, verify_existence=True)

    return app_utils.tobinary(seg_ids)
Example #18
def handle_split_preview(table_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    data = json.loads(request.data)
    current_app.logger.debug(data)

    cg = app_utils.get_cg(table_id)

    data_dict = {}
    for k in ["sources", "sinks"]:
        data_dict[k] = collections.defaultdict(list)

        for node in data[k]:
            node_id = node[0]
            x, y, z = node[1:]
            coordinate = np.array([x, y, z]) / cg.segmentation_resolution

            atomic_id = cg.get_atomic_id_from_coord(
                coordinate[0],
                coordinate[1],
                coordinate[2],
                parent_id=np.uint64(node_id))

            if atomic_id is None:
                raise cg_exceptions.BadRequest(
                    f"Could not determine supervoxel ID for coordinates "
                    f"{coordinate}.")

            data_dict[k]["id"].append(atomic_id)
            data_dict[k]["coord"].append(coordinate)

    current_app.logger.debug(data_dict)

    try:
        supervoxel_ccs, illegal_split = cg._run_multicut(
            source_ids=data_dict["sources"]["id"],
            sink_ids=data_dict["sinks"]["id"],
            source_coords=data_dict["sources"]["coord"],
            sink_coords=data_dict["sinks"]["coord"],
            bb_offset=(240, 240, 24),
            split_preview=True)

    except cg_exceptions.PreconditionError as e:
        raise cg_exceptions.BadRequest(str(e))

    resp = {
        "supervoxel_connected_components": supervoxel_ccs,
        "illegal_split": illegal_split
    }
    return resp
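The request body consumed above is a JSON object with "sources" and "sinks" lists, where each entry is [node_id, x, y, z] and the coordinates are divided by cg.segmentation_resolution inside the handler. A sketch of such a payload with made-up IDs and coordinates:

import json

split_payload = {
    "sources": [[720575940621234567, 109336, 46496, 4060]],
    "sinks":   [[720575940621234567, 109500, 46600, 4064]],
}
body = json.dumps(split_payload)  # sent as the request body to the split-preview route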
Example #19
def oldest_timestamp(table_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    cg = app_utils.get_cg(table_id)

    try:
        earliest_timestamp = cg.get_earliest_timestamp()
    except cg_exceptions.PreconditionError:
        raise cg_exceptions.InternalServerError("No timestamp available")

    return earliest_timestamp
Example #20
def handle_find_path(table_id, precision_mode):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    nodes = json.loads(request.data)

    current_app.logger.debug(nodes)
    assert len(nodes) == 2

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)

    def _get_supervoxel_id_from_node(node):
        node_id = node[0]
        x, y, z = node[1:]
        coordinate = np.array([x, y, z]) / cg.segmentation_resolution

        supervoxel_id = cg.get_atomic_id_from_coord(
            coordinate[0],
            coordinate[1],
            coordinate[2],
            parent_id=np.uint64(node_id))
        if supervoxel_id is None:
            raise cg_exceptions.BadRequest(
                f"Could not determine supervoxel ID for coordinates "
                f"{coordinate}.")

        return supervoxel_id

    source_supervoxel_id = _get_supervoxel_id_from_node(nodes[0])
    target_supervoxel_id = _get_supervoxel_id_from_node(nodes[1])
    source_l2_id = cg.get_parent(source_supervoxel_id)
    target_l2_id = cg.get_parent(target_supervoxel_id)

    l2_path = analysis.find_l2_shortest_path(cg, source_l2_id, target_l2_id)
    if precision_mode:
        centroids, failed_l2_ids = analysis.compute_mesh_centroids_of_l2_ids(
            cg, l2_path, flatten=True)
        return {
            "centroids_list": centroids,
            "failed_l2_ids": failed_l2_ids,
            "l2_path": l2_path
        }
    else:
        centroids = analysis.compute_rough_coordinate_path(cg, l2_path)
        return {
            "centroids_list": centroids,
            "failed_l2_ids": [],
            "l2_path": l2_path
        }
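The request body here is a JSON list of exactly two nodes, each [parent_node_id, x, y, z], with coordinates again divided by cg.segmentation_resolution. A sketch of such a payload with made-up values:

import json

nodes = [
    [720575940621234567, 109336, 46496, 4060],  # source
    [720575940621234568, 121212, 48000, 4100],  # target
]
body = json.dumps(nodes)  # request body for the find-path route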
Example #21
def handle_get_manifest(table_id, node_id):
    verify = request.args.get('verify', False)
    verify = verify in ['True', 'true', '1', True]

    # TODO: Read this from config
    MESH_MIP = 2

    cg = app_utils.get_cg(table_id)
    seg_ids = meshgen_utils.get_highest_child_nodes_with_meshes(
        cg, np.uint64(node_id), stop_layer=2, verify_existence=verify)

    filenames = [meshgen_utils.get_mesh_name(cg, s, MESH_MIP) for s in seg_ids]

    return jsonify(fragments=filenames)
Example #22
def handle_children(table_id, parent_id):
    current_app.request_type = "children"

    cg = app_utils.get_cg(table_id)

    parent_id = np.uint64(parent_id)
    layer = cg.get_chunk_layer(parent_id)

    if layer > 1:
        children = cg.get_children(parent_id)
    else:
        children = np.array([])

    # Return binary
    return app_utils.tobinary(children)
Example #23
def remeshing(table_id, lvl2_nodes):
    lvl2_nodes = np.array(lvl2_nodes, dtype=np.uint64)
    cg = app_utils.get_cg(table_id)

    current_app.logger.debug(
        f"remeshing {lvl2_nodes} {cg.get_serialized_info()}")

    # TODO: stop_layer and mip should be configurable by dataset
    meshgen.remeshing(cg,
                      lvl2_nodes,
                      stop_layer=4,
                      cv_path=None,
                      cv_mesh_dir=None,
                      mip=1,
                      max_err=320)
Example #24
def handle_children(table_id, parent_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    cg = app_utils.get_cg(table_id)

    parent_id = np.uint64(parent_id)
    layer = cg.get_chunk_layer(parent_id)

    if layer > 1:
        children = cg.get_children(parent_id)
    else:
        children = np.array([])

    return children
Example #25
def handle_subgraph(root_id):
    if "bounds" in request.args:
        bounds = request.args["bounds"]
        bounding_box = np.array([b.split("-") for b in bounds.split("_")],
                                dtype=int).T
    else:
        bounding_box = None

    # Call ChunkedGraph
    cg = app_utils.get_cg()
    atomic_edges = cg.get_subgraph(int(root_id),
                                   get_edges=True,
                                   bounding_box=bounding_box,
                                   bb_is_coordinate=True)[0]
    # Return binary
    return app_utils.tobinary(atomic_edges)
Example #26
def merge_log(table_id, root_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    try:
        time_stamp_past = float(request.args.get("timestamp", 0))
        time_stamp_past = datetime.fromtimestamp(time_stamp_past, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "Timestamp parameter is not a valid unix timestamp")

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)

    segment_history = cg_history.SegmentHistory(cg, int(root_id))
    return segment_history.merge_log(correct_for_wrong_coord_type=True)
Example #27
def handle_leaves(table_id, root_id):
    current_app.request_type = "leaves"

    if "bounds" in request.args:
        bounds = request.args["bounds"]
        bounding_box = np.array([b.split("-") for b in bounds.split("_")],
                                dtype=int).T
    else:
        bounding_box = None

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    atomic_ids = cg.get_subgraph_nodes(int(root_id),
                                       bounding_box=bounding_box,
                                       bb_is_coordinate=True)

    # Return binary
    return app_utils.tobinary(atomic_ids)
Example #28
def change_log(table_id, root_id):
    current_app.request_type = "change_log"

    try:
        time_stamp_past = float(request.args.get('timestamp', 0))
        time_stamp_past = datetime.fromtimestamp(time_stamp_past, UTC)
    except (TypeError, ValueError):
        raise cg_exceptions.BadRequest(
            "Timestamp parameter is not a valid unix timestamp")

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)

    change_log = cg.get_change_log(root_id=np.uint64(root_id),
                                   correct_for_wrong_coord_type=True,
                                   time_stamp_past=time_stamp_past)

    return jsonify(change_log)
Example #29
def handle_leaves_from_leave(atomic_id):
    if "bounds" in request.args:
        bounds = request.args["bounds"]
        bounding_box = np.array([b.split("-") for b in bounds.split("_")],
                                dtype=int).T
    else:
        bounding_box = None

    # Call ChunkedGraph
    cg = app_utils.get_cg()
    root_id = cg.get_root(int(atomic_id))

    atomic_ids = cg.get_subgraph(root_id,
                                 bounding_box=bounding_box,
                                 bb_is_coordinate=True)
    # Return binary
    return app_utils.tobinary(np.concatenate([np.array([root_id]),
                                              atomic_ids]))
Example #30
def handle_subgraph(table_id, root_id):
    current_app.table_id = table_id
    user_id = str(g.auth_user["id"])
    current_app.user_id = user_id

    if "bounds" in request.args:
        bounds = request.args["bounds"]
        bounding_box = np.array([b.split("-") for b in bounds.split("_")],
                                dtype=int).T
    else:
        bounding_box = None

    # Call ChunkedGraph
    cg = app_utils.get_cg(table_id)
    atomic_edges = cg.get_subgraph_edges(int(root_id),
                                         bounding_box=bounding_box,
                                         bb_is_coordinate=True)[0]

    return atomic_edges