Code Example #1
def valid_gene_graph(genes: Dict[str, mt.Gene],
                     input_shape: Sequence[int],
                     min_size: Union[int, Sequence[int]] = 0) -> mt.GeneGraph:
    """Build a GeneGraph from root Genes, shrinking the input shape as
    needed until the resulting output shape is valid.

    Args:
        genes (Dict[str, mt.Gene]): Ordered mapping from names to the root
            Genes used to build the GeneGraph.
        input_shape (Sequence[int]): Desired network input window shape.
            Actual shape may be smaller if the specified shape is
            incompatible with the network architecture.
        min_size (Union[int, Sequence[int]]): If a nonnegative number,
            the minimum acceptable size for the graph's output window along
            each axis. If a sequence of numbers, the sequence should have the
            same length as the graph's output window's shape, and sizes are
            compared elementwise.

    Returns:
        (mt.GeneGraph): The newly-created GeneGraph with a valid shape.

    """
    original_input_shape = input_shape[:]
    graph = None
    valid_shape = False
    while not valid_shape:
        graph = mt.GeneGraph(input_shape, genes)
        too_small, odd_output = shape_check(graph, min_size)
        if too_small:
            raise ValueError('Desired input shape results in invalid output '
                             'shape')
        if odd_output:
            # Keep decreasing shape by 1 until we get a valid output
            if len(input_shape) == 2:
                input_shape = [s - 1 for s in input_shape]
            else:
                # Don't mess with the z dimension
                input_shape = [input_shape[0]] + \
                              [s - 1 for s in input_shape[1:]]
        else:
            # We're good to go
            valid_shape = True

    if original_input_shape != input_shape:
        logger.info(f'Changed input shape from {original_input_shape} '
                    f'to {input_shape} to make a valid network.')

    return graph
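
A minimal usage sketch for the helper above (not part of the original source): it assumes root Genes such as net_gene and predictor_gene have already been built as in the constructor functions below, and the window shape and min_size values are purely illustrative.

from collections import OrderedDict

# Hypothetical root Genes, built as in the constructor functions below
genes = OrderedDict()
genes['encoderdecoder'] = net_gene
genes['predictor'] = predictor_gene

# Shrink a 196x196 input window until the output shape is valid; a
# ValueError is raised if the resulting output window is too small
graph = valid_gene_graph(genes, input_shape=[196, 196], min_size=4)
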
Code Example #2
def residual_gene_graph(input_shape: Sequence[int],
                        n_classes: int,
                        net_settings: Optional[Dict[str, Any]] = None,
                        predictor_settings: Optional[Dict[str, Any]] = None,
                        optim_settings: Optional[Dict[str, Any]] = None,
                        gene_dict: Optional[Dict[str, mt.Gene]] = None) -> \
        mt.GeneGraph:
    """Create a GeneGraph which builds an encoder-decoder
    segmentation network with residual blocks.

    Args:
        input_shape (Sequence[int]): Desired network input window shape.
            Actual shape may be smaller if the specified shape is
            incompatible with the encoder-decoder network architecture.
        n_classes (int): Number of segmentation classes.
        net_settings (Optional[Dict[str, Any]]): A dictionary of
            EncoderDecoderGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.encoder_decoder for
            hyperparameter information.
        predictor_settings (Optional[Dict[str, Any]]): A dictionary of
            PredictorGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.predictor for
            hyperparameter information.
        optim_settings (Optional[Dict[str, Any]]): A dictionary of
            optimizer hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.adam_optimizer for
            hyperparameter information.
        gene_dict (Optional[Dict[str, mt.Gene]]): If supplied, use
            already-created root Genes instead of new ones. Valid keys are
            'encoderdecoder' for an EncoderDecoderGene, or 'predictor' for a
            PredictorGene

    Returns:
        (mt.GeneGraph): The newly-created GeneGraph.

    """
    if gene_dict is None:
        gene_dict = {}

    # Encoder-decoder network setup

    if 'encoderdecoder' in gene_dict:
        # Use a supplied EncoderDecoderGene if present
        net_gene: EncoderDecoderGene = gene_dict['encoderdecoder']
    else:
        # Set up a new EncoderDecoderGene with hyperparameters set by the
        # input `net_settings`

        # Encoder-decoder hyperparam config
        ed_hyperparams = configs.encoder_decoder()
        # Create a Gene to build an encoder-decoder network
        net_gene = EncoderDecoderGene('net', ed_hyperparams)
        # Apply hyperparameter setting changes if supplied
        if net_settings is not None:
            net_gene.set(**net_settings)

    # Turn the convolution blocks into residual blocks
    for blockset in net_gene.children:
        for block in blockset.children:
            if len(block.children) > 1:
                # Add an identity layer that adds the final convolution layer
                # and the first convolution layer
                identity = IdentityGene('identity', block)
                block.children.append(identity)
                # Identity layer will get the last conv as an input by
                # default, now also add in a residual connection from the
                # first conv
                residual_edge = ResidualEdge(block.children[0])
                identity.add_input(residual_edge)

    # Set up Encoder-decoder gene tree edges
    net_gene.setup_edges()

    # Class predictor module setup

    if 'predictor' in gene_dict:
        # Use a supplied PredictorGene if present
        predictor_gene: PredictorGene = gene_dict['predictor']

    else:
        # Class predictor hyperparameters
        pred_hyperparams = configs.predictor()

        predictor_gene = PredictorGene(n_classes, 'predictor',
                                       pred_hyperparams)
        # Set the PredictorGene to use the same n_kernels value as the last
        # descendant of the EncoderDecoderGene
        n_kernels = net_gene.last_descendant().hyperparam('n_kernels')
        predictor_gene.set(n_kernels=n_kernels)
        # By default, take the regularization settings from net_settings
        if predictor_settings is None and net_settings is not None:
            predictor_settings = {}
            for key in net_settings:
                if 'log_' in key:
                    predictor_settings[key] = net_settings[key]
        # Apply hyperparameter setting changes if supplied
        if predictor_settings is not None:
            predictor_gene.set(**predictor_settings)

    # Set up class predictor gene edges
    predictor_gene.setup_edges(ForwardEdge(net_gene.last_descendant()))

    # ADAM optimizer hyperparameters
    adam_hyperparams = configs.adam_optimizer()
    # Convert to a dict to pass to a GeneNet.model_fn()
    optim_dict = adam_hyperparams.values()
    # Directly modify that dict because it's easier
    if optim_settings is not None:
        for key in optim_settings:
            optim_dict[key] = optim_settings[key]

    # Create the GeneGraph
    genes = OrderedDict()
    genes['encoderdecoder'] = net_gene
    genes['predictor'] = predictor_gene
    # Make sure we have a valid input shape
    valid_shape = False
    # Track the original input shape for later reference
    original_input_shape = input_shape[:]
    graph = None
    while not valid_shape:
        graph = mt.GeneGraph(input_shape, genes)
        too_small, odd_output = shape_check(graph)
        if too_small:
            raise ValueError('Desired input shape results in invalid output '
                             'shape')
        if odd_output:
            # Keep decreasing shape by 1 until we get a valid output
            if len(input_shape) == 2:
                input_shape = [s - 1 for s in input_shape]
            else:
                # Don't mess with the z dimension
                input_shape = [input_shape[0]] + \
                              [s - 1 for s in input_shape[1:]]
        else:
            # We're good to go
            valid_shape = True
    # Add the optimizer hyperparameter dict to the GeneGraph
    graph.add_hyperparameter_config('optim', optim_dict)

    if original_input_shape != input_shape:
        logger.info(f'Changed input shape from {original_input_shape} '
                    f'to {input_shape} to make a valid network.')

    return graph
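
A hedged usage sketch for residual_gene_graph, not from the original source; the 2D window shape and class count are illustrative assumptions.

# Build a residual encoder-decoder GeneGraph for 2-class segmentation on
# a 2D input window; the shape may be shrunk to fit the architecture
graph = residual_gene_graph(input_shape=[220, 220], n_classes=2)
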
Code Example #3
def gene_graph(input_shape: Sequence[int],
               n_classes: int,
               net_settings: Optional[Dict[str, Any]] = None,
               predictor_settings: Optional[Dict[str, Any]] = None,
               optim_settings: Optional[Dict[str, Any]] = None,
               gene_dict: Optional[Dict[str, mt.Gene]] = None,
               min_size: Union[int, Sequence[int]] = 0) -> \
        mt.GeneGraph:
    """Create a "default" GeneGraph which builds an encoder-decoder
    segmentation network as a GeneNet.

    Args:
        input_shape (Sequence[int]): Desired network input window shape.
            Actual shape may be smaller if the specified shape is
            incompatible with the encoder-decoder network architecture.
        n_classes (int): Number of segmentation classes.
        net_settings (Optional[Dict[str, Any]]): A dictionary of
            EncoderDecoderGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.encoder_decoder for
            hyperparameter information.
        predictor_settings (Optional[Dict[str, Any]]): A dictionary of
            PredictorGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.predictor for
            hyperparameter information.
        optim_settings (Optional[Dict[str, Any]]): A dictionary of
            optimizer hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.adam_optimizer for
            hyperparameter information.
        gene_dict (Optional[Dict[str, mt.Gene]]): If supplied, use
            already-created root Genes instead of new ones. Valid keys are
            'encoderdecoder' for an EncoderDecoderGene, or 'predictor' for a
            PredictorGene
        min_size (Union[int, Sequence[int]]): If a nonnegative number,
            the minimum acceptable size for the graph's output window along
            each axis. If a sequence of numbers, the sequence should have the
            same length as the graph's output window's shape, and sizes are
            compared elementwise.

    Returns:
        (mt.GeneGraph): The newly-created "default" GeneGraph.

    """
    if gene_dict is None:
        gene_dict = {}

    # Encoder-decoder network setup

    if 'encoderdecoder' in gene_dict:
        # Use a supplied EncoderDecoderGene if present
        net_gene: EncoderDecoderGene = gene_dict['encoderdecoder']
    else:
        # Set up a new EncoderDecoderGene with hyperparameters set by the
        # input `net_settings`

        # Encoder-decoder hyperparam config
        ed_hyperparams = configs.encoder_decoder()
        # Create a Gene to build an encoder-decoder network
        net_gene = EncoderDecoderGene('net',
                                      ed_hyperparams)
        # Apply hyperparameter setting changes if supplied
        if net_settings is not None:
            net_gene.set(**net_settings)

    # Set up Encoder-decoder gene tree edges
    net_gene.setup_edges()

    # Class predictor module setup

    if 'predictor' in gene_dict:
        # Use a supplied PredictorGene if present
        predictor_gene: PredictorGene = gene_dict['predictor']

    else:
        # Class predictor hyperparameters
        pred_hyperparams = configs.predictor()

        predictor_gene = PredictorGene(n_classes,
                                       'predictor',
                                       pred_hyperparams)
        # Set the PredictorGene to use the same n_kernels value as the last
        # descendant of the EncoderDecoderGene
        n_kernels = net_gene.last_descendant().hyperparam('n_kernels')
        predictor_gene.set(n_kernels=n_kernels)
        # By default, take the regularization settings from net_settings
        if predictor_settings is None and net_settings is not None:
            predictor_settings = {}
            for key in net_settings:
                if 'log_' in key:
                    predictor_settings[key] = net_settings[key]
        # Apply hyperparameter setting changes if supplied
        if predictor_settings is not None:
            predictor_gene.set(**predictor_settings)

    # Set up class predictor gene edges
    predictor_gene.setup_edges(ForwardEdge(net_gene.last_descendant()))

    # ADAM optimizer hyperparameters
    adam_hyperparams = configs.adam_optimizer()
    # Convert to a dict to pass to a GeneNet.model_fn()
    optim_dict = adam_hyperparams.values()
    # Directly modify that dict because it's easier
    if optim_settings is not None:
        for key in optim_settings:
            optim_dict[key] = optim_settings[key]

    # Create the GeneGraph
    genes = OrderedDict()
    genes['encoderdecoder'] = net_gene
    genes['predictor'] = predictor_gene
    graph = valid_gene_graph(genes, input_shape, min_size)

    # Add the optimizer hyperparameter dict to the GeneGraph
    graph.add_hyperparameter_config('optim', optim_dict)

    return graph
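
A hedged usage sketch for the default constructor above; the shape, class count, and min_size values are illustrative assumptions.

# Build the "default" encoder-decoder GeneGraph; min_size keeps the
# auto-shrunk output window from becoming smaller than 4 along any axis
graph = gene_graph(input_shape=[196, 196], n_classes=3, min_size=4)
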
Code Example #4
def hybrid_gene_graph(
        input_shape: Sequence[int],
        n_classes: int,
        comps_list: Tuple[int, ...],
        net_3d_combine_mode: str = 'add',
        net_2d_settings: Optional[Dict[str, Any]] = None,
        predictor_2d_settings: Optional[Dict[str, Any]] = None,
        net_3d_settings: Optional[Dict[str, Any]] = None,
        predictor_3d_settings: Optional[Dict[str, Any]] = None,
        optim_settings: Optional[Dict[str, Any]] = None,
        gene_dict: Optional[Dict[str, mt.Gene]] = None) -> \
        mt.GeneGraph:
    """Create a "default" GeneGraph which builds an encoder-decoder
    segmentation network as a GeneNet.

    Args:
        input_shape (Sequence[int]): Desired network input window shape.
            Actual shape may be smaller if the specified shape is
            incompatible with the encoder-decoder network architecture.
        n_classes (int): Number of segmentation classes.
        comps_list (Tuple[int, ...]): Spatial pyramid computation sizes,
            one per SpatialPyramidGene child block.
        net_3d_combine_mode (str): Either 'add' or 'merge'.
        net_2d_settings (Optional[Dict[str, Any]]): A dictionary of
            EncoderDecoderGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.encoder_decoder for
            hyperparameter information.
        predictor_2d_settings (Optional[Dict[str, Any]]): A dictionary of
            PredictorGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.predictor for
            hyperparameter information.
        net_3d_settings (Optional[Dict[str, Any]]): A dictionary of
            SpatialPyramidGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.encoder_decoder for
            hyperparameter information.
        predictor_3d_settings (Optional[Dict[str, Any]]): A dictionary of
            PredictorGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.predictor for
            hyperparameter information.
        optim_settings (Optional[Dict[str, Any]]): A dictionary of
            optimizer hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.adam_optimizer for
            hyperparameter information.
        gene_dict (Optional[Dict[str, mt.Gene]]): If supplied, use
            already-created root Genes instead of new ones. Valid keys are
            'net_2d' for a Mixed3DEncoderDecoderGene, 'predictor_2d' for a
            PredictorGene, 'net_3d' for a SpatialPyramidGene, or
            'predictor_3d' for a PredictorGene.

    Returns:
        (mt.GeneGraph): The newly-created hybrid GeneGraph.

    """
    if gene_dict is None:
        gene_dict = {}

    # 2D Encoder-decoder module setup

    # Encoder-decoder hyperparam config
    ed_hyperparams = gn.hyperparameter_configs.encoder_decoder()
    if 'net_2d' in gene_dict:
        # Use a supplied EncoderDecoderGene if present
        net_2d_gene: mt.Gene = gene_dict['net_2d']
    else:
        # Set up a new EncoderDecoderGene with hyperparameters set by the
        # input `net_2d_settings`
        # Create a Gene to build an encoder-decoder module
        net_2d_gene = Mixed3DEncoderDecoderGene(
            'net', conv3d_k_width=3, hyperparameter_config=ed_hyperparams)
        # Apply hyperparameter setting changes if supplied
        if net_2d_settings is not None:
            net_2d_gene.set(**net_2d_settings)

    # Set up Encoder-decoder gene tree edges
    # noinspection PyArgumentList
    net_2d_gene.setup_edges()

    # 2D predictor setup

    if 'predictor_2d' in gene_dict:
        # Use a supplied PredictorGene if present
        predictor_2d_gene: mt.Gene = gene_dict['predictor_2d']

    else:
        # Class predictor_2d hyperparameters
        pred_hyperparams = gn.hyperparameter_configs.predictor()

        predictor_2d_gene = gn.genes.PredictorGene(n_classes, 'predictor_2d',
                                                   pred_hyperparams)
        # Set the PredictorGene to use the same n_kernels value as the last
        # descendant of the EncoderDecoderGene
        n_kernels = net_2d_gene.last_descendant().hyperparam('n_kernels')
        predictor_2d_gene.set(n_kernels=n_kernels)
        # By default, take the regularization settings from net_2d_settings
        if not predictor_2d_settings and net_2d_settings:
            predictor_2d_settings = {}
            for key in net_2d_settings:
                if 'log_' in key:
                    predictor_2d_settings[key] = net_2d_settings[key]
        # Apply hyperparameter setting changes if supplied
        if predictor_2d_settings:
            predictor_2d_gene.set(**predictor_2d_settings)

    # Set up class predictor_2d gene edges
    predictor_2d_gene.setup_edges(
        gn.genes.ForwardEdge(net_2d_gene.last_descendant()))

    # 3D Spatial pyramid module setup

    if 'net_3d' in gene_dict:
        net_3d_gene: mt.Gene = gene_dict['net_3d']

    else:
        # Spatial pyramid hyperparam config
        sp_hyperparams = gn.hyperparameter_configs.encoder_decoder()
        # Create a Gene to build a spatial pyramid module
        net_3d_gene = SpatialPyramidGene(combine_mode=net_3d_combine_mode,
                                         name='spatialpyramid',
                                         hyperparameter_config=sp_hyperparams)
        # By default, take the regularization settings from net_2d_settings
        if net_3d_settings is None:
            net_3d_settings = {}
            if net_2d_settings is not None:
                for key in net_2d_settings:
                    if 'log_' in key:
                        net_3d_settings[key] = net_2d_settings[key]
        # Additional default settings, applied only if not already given
        net_3d_settings.setdefault('padding_type', 'same')
        net_3d_settings.setdefault('spatial_mode', 1)
        net_3d_settings.setdefault('n_blocks', len(comps_list))
        # Apply hyperparameter setting changes
        net_3d_gene.set(**net_3d_settings)
        # Set number of convs in each child block
        for child, n_comps in zip(net_3d_gene.children, comps_list):
            child.set(n_comps=n_comps)

    net_3d_gene.setup_edges([
        gn.genes.ForwardEdge(predictor_2d_gene.last_descendant()),
        gn.genes.MergeEdge(net_2d_gene.last_descendant())
    ])

    # Class predictor_3d module setup

    if 'predictor_3d' in gene_dict:
        # Use a supplied PredictorGene if present
        predictor_3d_gene = gene_dict['predictor_3d']

    else:
        # Class predictor_3d hyperparameters
        pred_hyperparams = gn.hyperparameter_configs.predictor()

        predictor_3d_gene = gn.genes.PredictorGene(n_classes, 'predictor_3d',
                                                   pred_hyperparams)
        # Set the PredictorGene to use the same n_kernels value as the last
        # descendant of the EncoderDecoderGene
        n_kernels = net_3d_gene.last_descendant().hyperparam('n_kernels')
        predictor_3d_gene.set(n_kernels=n_kernels)
        # By default, take the regularization settings from net_2d_settings
        if not predictor_3d_settings and net_2d_settings:
            predictor_3d_settings = {}
            for key in net_2d_settings:
                if 'log_' in key:
                    predictor_3d_settings[key] = net_2d_settings[key]
        # Apply hyperparameter setting changes if supplied
        if predictor_3d_settings:
            predictor_3d_gene.set(**predictor_3d_settings)

    # Set up class predictor_3d gene edges
    predictor_3d_gene.setup_edges(
        gn.genes.ForwardEdge(net_3d_gene.last_descendant()))

    # ADAM optimizer hyperparameters
    adam_hyperparams = gn.hyperparameter_configs.adam_optimizer()
    # Convert to a dict to pass to a GeneNet.model_fn()
    optim_dict = adam_hyperparams.values()
    # Directly modify that dict because it's easier
    if optim_settings is not None:
        for key in optim_settings:
            optim_dict[key] = optim_settings[key]

    # Create the GeneGraph
    genes = OrderedDict()
    genes['net_2d'] = net_2d_gene
    genes['predictor_2d'] = predictor_2d_gene
    genes['net_3d'] = net_3d_gene
    genes['predictor_3d'] = predictor_3d_gene
    graph = mt.GeneGraph(input_shape, genes)

    # Add the optimizer hyperparameter dict to the GeneGraph
    graph.add_hyperparameter_config('optim', optim_dict)

    return graph
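
A hedged usage sketch for hybrid_gene_graph, not from the original source; the 3D window shape (z-axis first, following the z-dimension handling elsewhere in these examples) and the comps_list values are illustrative assumptions.

# Build a hybrid 2D/3D GeneGraph. comps_list gives one computation count
# per spatial pyramid block, so n_blocks defaults to len(comps_list)
graph = hybrid_gene_graph(input_shape=[16, 196, 196],
                          n_classes=2,
                          comps_list=(1, 2, 4),
                          net_3d_combine_mode='add')
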
Code Example #5
def dilated_gene_graph(input_shape: Sequence[int],
                       n_classes: int,
                       net_settings: Optional[Dict[str, Any]] = None,
                       predictor_settings: Optional[Dict[str, Any]] = None,
                       optim_settings: Optional[Dict[str, Any]] = None,
                       gene_dict: Optional[Dict[str, mt.Gene]] = None) -> \
        mt.GeneGraph:
    """Create a "default" GeneGraph which builds a dilated
    segmentation network as a GeneNet.

    Args:
        input_shape (Sequence[int]): Desired network input window shape.
            Actual shape may be smaller if the specified shape is
            incompatible with the Dilated network architecture.
        n_classes (int): Number of segmentation classes.
        net_settings (Optional[Dict[str, Any]]): A dictionary of
            DilatedGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.encoder_decoder for
            hyperparameter information.
        predictor_settings (Optional[Dict[str, Any]]): A dictionary of
            PredictorGene hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.predictor for
            hyperparameter information.
        optim_settings (Optional[Dict[str, Any]]): A dictionary of
            optimizer hyperparameter settings. Keys are names of
            hyperparameters, values are new values to which those
            hyperparameters are set. See
            genenet.hyperparameter_configs.adam_optimizer for
            hyperparameter information.
        gene_dict (Optional[Dict[str, mt.Gene]]): If supplied, use
            already-created root Genes instead of new ones. Valid keys are
            'dilated' for a DilatedGene, or 'predictor' for a
            PredictorGene

    Returns:
        (mt.GeneGraph): The newly-created "default" GeneGraph.

    """
    if gene_dict is None:
        gene_dict = {}

    # Dilated network setup

    if 'dilated' in gene_dict:
        # Use a supplied DilatedGene if present
        net_gene: DilatedGene = gene_dict['dilated']
    else:
        # Set up a new DilatedGene with hyperparameters set by the
        # input `net_settings`

        # Dilated hyperparam config
        dilated_hyperparams = configs.encoder_decoder()
        # Create a Gene to build a Dilated network
        net_gene = DilatedGene('net',
                               hyperparameter_config=dilated_hyperparams)
        # Apply hyperparameter setting changes if supplied
        if net_settings is not None:
            net_gene.set(**net_settings)

        for conv_block in net_gene.children:

            # Add an identity layer that adds the final convolution layer
            # and the first convolution layer
            identity = IdentityGene('identity', conv_block)
            conv_block.children.append(identity)
            # Identity layer will get the last conv as an input by
            # default, now also add in a residual connection from the
            # first conv
            residual_edge = ResidualEdge(conv_block.children[0])
            identity.add_input(residual_edge)

    # Set up Dilated gene tree edges
    net_gene.setup_edges()

    # Class predictor module setup

    if 'predictor' in gene_dict:
        # Use a supplied PredictorGene if present
        predictor_gene: PredictorGene = gene_dict['predictor']

    else:
        # Class predictor hyperparameters
        pred_hyperparams = configs.predictor()

        predictor_gene = PredictorGene(n_classes, 'predictor',
                                       pred_hyperparams)
        # Set the PredictorGene to use the same n_kernels value as the last
        # descendant of the DilatedGene
        n_kernels = net_gene.last_descendant().hyperparam('n_kernels')
        predictor_gene.set(n_kernels=n_kernels)
        # By default, take the regularization settings from net_settings
        if predictor_settings is None and net_settings is not None:
            predictor_settings = {}
            for key in net_settings:
                if 'log_' in key:
                    predictor_settings[key] = net_settings[key]
        # Apply hyperparameter setting changes if supplied
        if predictor_settings is not None:
            predictor_gene.set(**predictor_settings)

    # Set up class predictor gene edges
    predictor_gene.setup_edges(ForwardEdge(net_gene.last_descendant()))

    # ADAM optimizer hyperparameters
    adam_hyperparams = configs.adam_optimizer()
    # Convert to a dict to pass to a GeneNet.model_fn()
    optim_dict = adam_hyperparams.values()
    # Directly modify that dict because it's easier
    if optim_settings is not None:
        for key in optim_settings:
            optim_dict[key] = optim_settings[key]

    # Create the GeneGraph
    genes = OrderedDict()
    genes['dilated'] = net_gene
    genes['predictor'] = predictor_gene
    graph = mt.GeneGraph(input_shape, genes)

    # Add the optimizer hyperparameter dict to the GeneGraph
    graph.add_hyperparameter_config('optim', optim_dict)

    return graph
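
A hedged usage sketch for dilated_gene_graph; the window shape and class count are illustrative assumptions.

# Build a dilated GeneGraph with residual connections inside each
# convolution block, for 2-class segmentation on a 2D input window
graph = dilated_gene_graph(input_shape=[196, 196], n_classes=2)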