Example No. 1
def get_staging_data(root_dir: Path) -> pd.DataFrame:
    """
    Given a root registration directory, collate all the staging CSVs into one file.
    Write out the combined organ volume CSV into the root registration directory.

    Parameters
    ----------
    root_dir
        The path to the root registration directory

    Returns
    -------
    The combined dataframe of all the organ volumes
    """
    output_dir = root_dir / 'output'

    dataframes = []

    for line_dir, specimen_dir in specimen_iterator(output_dir):

        staging_info = specimen_dir / 'output' / common.STAGING_INFO_FILENAME

        if not staging_info.is_file():
            raise FileNotFoundError(f'Cannot find staging info file {staging_info}')

        df = pd.read_csv(staging_info, index_col=0)
        df['line'] = line_dir.name
        dataframes.append(df)

    # Write the concatenated staging info to the output directory
    all_staging = pd.concat(dataframes)
    outpath = output_dir / common.STAGING_INFO_FILENAME
    all_staging.to_csv(outpath)

    return all_staging
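
A minimal usage sketch for the function above, assuming the snippet's module-level imports (pandas, common, specimen_iterator) are in place; the registration root path is hypothetical:

from pathlib import Path

staging = get_staging_data(Path('/data/registrations/baselines'))  # hypothetical root directory
print(staging.head())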
Example No. 2
    def memorymap_data(self, lama_root_dir: Path) -> Dict[str, np.memmap]:
        """
        Iterate over the output folder, loading each specimen's registered image into a memory-mapped array.

        Parameters
        ----------
        lama_root_dir
            The root LAMA registration directory

        Returns
        -------
        Mapping of image basename to memory-mapped image array
        """

        imgs = OrderedDict()

        for line_dir, spec_dir in specimen_iterator(lama_root_dir):
            # Get the LAMA config (.toml) from the specimen directory
            config_file = common.getfile_endswith(spec_dir, '.toml')
            config = LamaConfig(config_file)
            reg_dir = config['root_reg_dir']
            # Assumed location of the registered image for this specimen
            imgpath = reg_dir / f'{spec_dir.name}.nrrd'
            basename = os.path.basename(imgpath)
            loader = common.LoadImage(imgpath)

            if not loader:
                logging.error("Problem normalising image: {}".format(
                    loader.error_msg))
                sys.exit()
            arr = loader.array
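            # Copy the image into a memory-mapped array backed by a temporary file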
            t = tempfile.TemporaryFile()
            m = np.memmap(t, dtype=arr.dtype, mode='w+', shape=arr.shape)
            m[:] = arr
            imgs[basename] = m
        return imgs
Example No. 3
def move_stuff(root_dir: Path, out_dir: Path, spec_ids: List[str]):
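    """Move the specimen directories whose names appear in spec_ids from root_dir into out_dir."""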
    for line_dir, spec_dir in specimen_iterator(root_dir):
        if spec_dir.name in spec_ids:
            print(f"Moving {spec_dir.name}")

            dest = out_dir / spec_dir.name
            shutil.move(spec_dir, dest)
Example No. 4
def get_staging_data(root: Path, line=None) -> pd.DataFrame:
    """
    Collate all the staging data from a folder. By default, specimens from all lines are included.
    Save a combined csv in the 'output' directory and return it as a DataFrame too.

    Parameters
    ----------
    root
        The root directory to search
    line
        If given, only include staging data for this line

    """

    output_dir = root / 'output'

    dataframes = []

    for line_dir, specimen_dir in specimen_iterator(output_dir):

        if line and line_dir.name != line:
            continue

        staging_file = specimen_dir / 'output' / common.STAGING_INFO_FILENAME

        if not staging_file.is_file():
            raise FileNotFoundError(
                f'Cannot find organ volume file {staging_file}')

        df = pd.read_csv(staging_file, index_col=0)
        df['line'] = line_dir.name
        dataframes.append(df)

    # Write the concatenated staging data to a single csv
    staging = pd.concat(dataframes)

    # Temporary fix for old data:
    # if the staging column is named '1' or 'value', rename it to 'staging'
    staging.rename(columns={'1': 'staging', 'value': 'staging'}, inplace=True)

    outpath = output_dir / common.STAGING_INFO_FILENAME
    staging.to_csv(outpath)

    return staging
Example No. 5
def get_organ_volume_data(root_dir: Path) -> pd.DataFrame:
    """
    Given a root registration directory, collate all the organ volume CSVs into one dataframe.

    Parameters
    ----------
    root_dir
        The path to the root registration directory

    Returns
    -------
    The combined dataframe of all the organ volumes,
    with specimen ids in the index and one column per organ label
    """
    output_dir = root_dir / 'output'

    dataframes = []

    for line_dir, specimen_dir in specimen_iterator(output_dir):

        organ_vol_file = specimen_dir / 'output' / common.ORGAN_VOLUME_CSV_FILE

        if not organ_vol_file.is_file():
            raise FileNotFoundError(f'Cannot find organ volume file {organ_vol_file}')

        df = pd.read_csv(organ_vol_file, index_col=0)

        if len(df) == 0:
            raise ValueError(f'{organ_vol_file} is empty')

        dataframes.append(df)

    # Concatenate the per-specimen organ volume dataframes (writing the combined csv is currently disabled)
    all_organs = pd.concat(dataframes)

    # outpath = output_dir / common.ORGAN_VOLUME_CSV_FILE
    # all_organs.to_csv(outpath)

    return all_organs
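
A minimal usage sketch for get_organ_volume_data, assuming the snippet's module-level imports (pandas, common, specimen_iterator) are in place; the registration root path is hypothetical:

from pathlib import Path

organ_vols = get_organ_volume_data(Path('/data/registrations/baselines'))  # hypothetical root directory
print(organ_vols.head())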
Example No. 6
    def _get_organ_volumes(self, root_dir: Path) -> pd.DataFrame:
        """
        Given a root registration directory, collate all the organ volume CSVs into one dataframe.

        Parameters
        ----------
        root_dir
            The path to the root registration directory

        Returns
        -------
        The combined dataframe of all the organ volumes
        """
        output_dir = root_dir / 'output'

        dataframes = []

        for line_dir, specimen_dir in specimen_iterator(output_dir):

            organ_vol_file = specimen_dir / 'output' / common.ORGAN_VOLUME_CSV_FILE

            if not organ_vol_file.is_file():
                raise FileNotFoundError(
                    f'Cannot find organ volume file {organ_vol_file}')

            df = pd.read_csv(organ_vol_file, index_col=0)
            self._drop_empty_columns(df)
            df['line'] = line_dir.name
            dataframes.append(df)

        # Concatenate the per-specimen organ volume dataframes into a single dataframe
        if not dataframes:
            raise ValueError(
                f'No data found in output directory: {output_dir}')
        all_organs = pd.concat(dataframes)

        return all_organs
Example No. 7
import sys
from pathlib import Path

from PyQt5 import QtWidgets

from vpv.vpv import Vpv
from lama.paths import specimen_iterator

root_dir = Path('/mnt/IMPC_research/neil/E14.5/baselines/output')

app = QtWidgets.QApplication([])
ex = Vpv()

vols = []

t = 0
for line_dir, spec_dir in specimen_iterator(root_dir):
    def_img = spec_dir / 'output' / 'registrations' / 'deformable_50_to_10' / spec_dir.name / (
        spec_dir.name + '.nrrd')

    if not def_img.is_file():
        print(f'{def_img} not found')
        continue
    if 'xy' in spec_dir.name.lower():
        vols.append(def_img)
        # if t > 5:
        #     break
        # t +=1

ex.load_volumes(vols, 'vol')

sys.exit(app.exec_())