Exemplo n.º 1
0
    def _neuroglancer_link(self):
        """Build a neuroglancer viewer showing all train/validate layers.

        Collects the layers of every train dataset (and validate dataset,
        if any), adds them to a fresh viewer state, arranges them in two
        side-by-side layer groups, and returns a shareable demo-server URL
        that embeds the full viewer state as JSON.

        Returns:
            str: a neuroglancer-demo.appspot.com URL encoding the state.
        """
        # Note: the original also fetched Options and runs_base_dir here,
        # but never used them — removed as dead code.
        viewer = neuroglancer.Viewer()
        with viewer.txn() as s:

            # Merge layers from all train datasets, letting each dataset
            # skip names already contributed by an earlier one.
            train_layers = {}
            for dataset in self.train:
                train_layers.update(
                    dataset._neuroglancer_layers(
                        exclude_layers=set(train_layers.keys())))

            validate_layers = {}
            if self.validate is not None:
                for dataset in self.validate:
                    validate_layers.update(
                        dataset._neuroglancer_layers(
                            exclude_layers=set(validate_layers.keys())))

            # Register every collected layer with the viewer state.
            for layer_name, (layer, kwargs) in itertools.chain(
                    train_layers.items(), validate_layers.items()):
                s.layers.append(
                    name=layer_name,
                    layer=layer,
                    **kwargs,
                )

            # Two panels side by side: train layers left, validate right.
            s.layout = neuroglancer.row_layout([
                neuroglancer.LayerGroupViewer(
                    layers=list(train_layers.keys())),
                neuroglancer.LayerGroupViewer(
                    layers=list(validate_layers.keys())),
            ])
        return f"http://neuroglancer-demo.appspot.com/#!{json.dumps(viewer.state.to_json())}"
Exemplo n.º 2
0
def create_array_store():
    """Create an array store based on the global DaCapo options.

    Returns:
        LocalArrayStore rooted at the configured ``runs_base_dir``
        (currently the only supported backend).
    """
    # Resolve the user-configured base directory (expanding "~").
    runs_dir = Path(Options.instance().runs_base_dir).expanduser()
    return LocalArrayStore(runs_dir)
Exemplo n.º 3
0
Arquivo: db.py Projeto: pattonw/dacapo
def options(request, tmp_path):
    """Pytest fixture yielding a fresh Options singleton per test.

    Parametrized by backend type; for "mongo" it carries over the host
    from the user's config file and points at a throwaway test database,
    which is dropped again on teardown.
    """
    # TODO: Clean up this fixture. Its a bit clunky to use.
    # Maybe just write the dacapo.yaml file instead of assigning to Options._instance
    extra_kwargs = {}
    if request.param == "mongo":
        file_options = Options.instance()
        extra_kwargs["mongo_db_host"] = file_options.mongo_db_host
        extra_kwargs["mongo_db_name"] = "dacapo_tests"
    # Drop the cached singleton so the next instance() call rebuilds it
    # from our test-specific keyword arguments.
    Options._instance = None
    opts = Options.instance(
        type=request.param,
        runs_base_dir=f"{tmp_path}",
        **extra_kwargs,
    )
    yield opts
    # Teardown: remove the test database and reset the singleton.
    if request.param == "mongo":
        pymongo.MongoClient(host=opts.mongo_db_host).drop_database(
            "dacapo_tests")
    Options._instance = None
Exemplo n.º 4
0
Arquivo: db.py Projeto: pattonw/dacapo
def mongo_db_available():
    """Return True when a MongoDB server answers a ping, else False.

    A missing dacapo config file (Options.instance() raising
    RuntimeError) also counts as "not available". The Options
    singleton is reset either way so later tests start clean.
    """
    try:
        opts = Options.instance()
        client = pymongo.MongoClient(host=opts.mongo_db_host,
                                     serverSelectionTimeoutMS=1000)
        Options._instance = None
    except RuntimeError:
        # cannot find a dacapo config file, mongodb is not available
        Options._instance = None
        return False
    try:
        client.admin.command("ping")
    except pymongo.errors.ConnectionFailure:
        return False
    return True
Exemplo n.º 5
0
def create_stats_store():
    """Create a statistics store based on the global DaCapo options.

    Returns:
        MongoStatsStore when options.type is "mongo" (or unset),
        FileStatsStore under runs_base_dir/"stats" when it is "files".

    Raises:
        ValueError: for an unrecognized store type (the original
            silently fell through and returned None here).
    """
    options = Options.instance()

    try:
        store_type = options.type
    except RuntimeError:
        # No "type" configured — default to the mongo backend.
        store_type = "mongo"
    if store_type == "mongo":
        db_host = options.mongo_db_host
        db_name = options.mongo_db_name
        return MongoStatsStore(db_host, db_name)
    elif store_type == "files":
        store_path = Path(options.runs_base_dir).expanduser()
        return FileStatsStore(store_path / "stats")
    raise ValueError(f"Unknown stats store type: {store_type!r}")
Exemplo n.º 6
0
    def _neuroglancer_source(self):
        """Build a neuroglancer source dict for this array's data.

        Ensures the backing n5/zarr container is symlinked into a
        servable location under runs_base_dir, points at the multiscale
        parent group when the container declares "scales", and formats
        the configured file-server URL (with credentials when present).

        Returns:
            dict with "url" and "transform" entries as expected by
            neuroglancer layer sources.
        """
        source_type = "n5" if self.file_name.name.endswith(".n5") else "zarr"
        options = Options.instance()
        base_dir = Path(options.runs_base_dir).expanduser()
        try:
            relpath = self.file_name.relative_to(base_dir)
        except ValueError:
            # Data lives outside base_dir; fall back to its absolute path.
            relpath = str(self.file_name.absolute())
        symlink_path = f"data_symlinks/{relpath}"

        # Check if data is symlinked to a servable location; create the
        # link (and any missing parent dirs) if not.
        link = base_dir / symlink_path
        if not link.exists():
            link.parent.mkdir(parents=True, exist_ok=True)
            link.symlink_to(Path(self.file_name))

        dataset = self.dataset
        parent_attributes_path = (base_dir / symlink_path /
                                  self.dataset).parent / "attributes.json"
        if parent_attributes_path.exists():
            # read_text() closes the file; the original leaked an open
            # handle (bare open().read()) and recomputed this same path.
            dataset_parent_attributes = json.loads(
                parent_attributes_path.read_text())
            if "scales" in dataset_parent_attributes:
                # Multiscale container: serve the parent group instead.
                dataset = "/".join(self.dataset.split("/")[:-1])

        file_server = options.file_server
        try:
            file_server = file_server.format(username=options.file_server_user,
                                             password=options.file_server_pass)
        except RuntimeError:
            # if options doesn't have a file_server user or password simply continue
            # without authentications
            pass
        source = {
            "url": f"{source_type}://{file_server}/{symlink_path}/{dataset}",
            "transform": {
                "matrix": self._transform_matrix(),
                "outputDimensions": self._output_dimensions(),
            },
        }
        logger.warning(source)
        return source