Example #1
from collections import defaultdict

import numpy as np


def make_spike_dict(spkinds, spkts):
    """
    Given arrays of cell indices and spike times, returns a dictionary of per-cell spike times.
    """
    spk_dict = defaultdict(list)
    for spkind, spkt in zip(np.nditer(spkinds), np.nditer(spkts)):
        spk_dict[int(spkind)].append(float(spkt))
    return spk_dict
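A minimal usage sketch; the input arrays are hypothetical:

import numpy as np

# Flat recording: cell indices with matching spike times (ms).
spkinds = np.asarray([2, 5, 2, 7, 5])
spkts = np.asarray([10.0, 12.5, 31.0, 33.2, 48.9])

spk_dict = make_spike_dict(spkinds, spkts)
# spk_dict[2] -> [10.0, 31.0], spk_dict[5] -> [12.5, 48.9], spk_dict[7] -> [33.2]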
Example #2
def spatial_coactive_sets(population,
                          spkdict,
                          time_bins,
                          trajectory,
                          return_tree=False):
    """
    Estimates spatially co-active activity ensembles from the given spike dictionary.
    """

    import numpy as np
    from future.utils import viewitems
    from scipy import interpolate
    from sklearn.neighbors import BallTree

    x, y, d, t = trajectory

    pch_x = interpolate.pchip(t, x)
    pch_y = interpolate.pchip(t, y)

    spatial_bins = np.column_stack(
        [pch_x(time_bins[:-1]), pch_y(time_bins[:-1])])

    acv_dict = {
        gid: np.histogram(np.asarray(lst), bins=time_bins)[0]
        for (gid, lst) in viewitems(spkdict[population]) if len(lst) > 1
    }
    n_features = len(time_bins) - 1
    n_samples = len(acv_dict)

    active_gid = {}
    active_bins = np.zeros((n_samples, n_features), dtype=bool)
    for i, (gid, acv) in enumerate(viewitems(acv_dict)):
        active_bins[i, :] = acv > 0
        active_gid[i] = gid

    tree = BallTree(active_bins, metric='jaccard')
    qbins = np.zeros((n_features, n_features), dtype=bool)
    for ibin in range(n_features):
        qbins[ibin, ibin] = True

    nnrs, nndists = tree.query_radius(qbins, r=1, return_distance=True)

    fnnrs = []
    fnndists = []
    for i, (nns, nndist) in enumerate(zip(nnrs, nndists)):
        inds = [
            inn for inn, nn in enumerate(nns)
            if np.any(np.logical_and(active_bins[nn, :], active_bins[i, :]))
        ]
        fnns = np.asarray([nns[inn] for inn in inds])
        fdist = np.asarray([nndist[inn] for inn in inds])
        fnnrs.append(fnns)
        fnndists.append(fdist)

    if return_tree:
        return n_samples, spatial_bins, fnnrs, fnndists, (tree, active_gid)
    else:
        return n_samples, spatial_bins, fnnrs, fnndists
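A note on the BallTree query above: scikit-learn's 'jaccard' metric treats each boolean row as a set of active time bins, so a one-hot query at radius 1 returns every cell together with its Jaccard distance to that bin, and the overlap filter afterwards discards cells with no shared active bins. A small self-contained check on toy data:

import numpy as np
from sklearn.neighbors import BallTree

# Three cells' activity over four time bins (True = at least one spike).
bins = np.array([[1, 1, 0, 0],
                 [1, 0, 0, 0],
                 [0, 0, 1, 1]], dtype=bool)

tree = BallTree(bins, metric='jaccard')
# One-hot query for time bin 0; with r=1 every cell is returned, and a cell
# with no spikes in that bin sits at Jaccard distance 1.0, which is why
# spatial_coactive_sets filters neighbors by actual bin overlap afterwards.
query = np.zeros((1, 4), dtype=bool)
query[0, 0] = True
ind, dist = tree.query_radius(query, r=1, return_distance=True)
print(ind[0], dist[0])  # e.g. cells [0 1 2] at distances [0.5 0.0 1.0]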
Example #3
def histogram_autocorrelation(spkdata, bin_size=1., lag=1, quantity='count'):
    """Compute autocorrelation coefficients of the spike count or firing rate histogram of each population. """

    spkpoplst = spkdata['spkpoplst']
    spkindlst = spkdata['spkindlst']
    spktlst = spkdata['spktlst']
    num_cell_spks = spkdata['num_cell_spks']
    pop_active_cells = spkdata['pop_active_cells']
    tmin = spkdata['tmin']
    tmax = spkdata['tmax']

    bins = np.arange(tmin, tmax, bin_size)

    corr_dict = {}
    for subset, spkinds, spkts in zip(spkpoplst, spkindlst, spktlst):
        spk_dict = defaultdict(list)
        for spkind, spkt in zip(np.nditer(spkinds), np.nditer(spkts)):
            spk_dict[int(spkind)].append(spkt)
        x_lst = []
        for ind, lst in viewitems(spk_dict):
            spk_ts = np.asarray(lst)
            if quantity == 'rate':
                q = akde(spk_ts / 1000., bins / 1000.)[0]
            else:
                count, bin_edges = np.histogram(spk_ts, bins=bins)
                q = count
            x_lst.append(q)

        x_matrix = np.vstack(x_lst)

        corr_matrix = np.apply_along_axis(lambda y: autocorr(y, lag), 1,
                                          x_matrix)

        corr_dict[subset] = corr_matrix

    return corr_dict
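histogram_autocorrelation assumes an autocorr helper (and, for quantity='rate', an akde density estimator) that are not shown here. A minimal sketch of a lag-k autocorrelation coefficient consistent with how autocorr is called above:

import numpy as np

def autocorr(y, lag):
    # Pearson correlation between the series and a lag-shifted copy of
    # itself; hypothetical stand-in for the helper used above.
    y = np.asarray(y, dtype=np.float64)
    yn = y - np.mean(y)
    denom = np.sqrt(np.sum(yn[:-lag] ** 2) * np.sum(yn[lag:] ** 2))
    if denom == 0.:
        return 0.
    return np.sum(yn[:-lag] * yn[lag:]) / denom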
Example #4
def place_fields(population,
                 bin_size,
                 rate_dict,
                 trajectory,
                 arena_id=None,
                 trajectory_id=None,
                 nstdev=1.5,
                 binsteps=5,
                 baseline_fraction=None,
                 output_file_path=None,
                 progress=False,
                 **kwargs):
    """
    Estimates place fields from the given instantaneous spike rate dictionary.
    :param population: str
    :param bin_size: float
    :param rate_dict: dict
    :param trajectory: tuple of array
    :param arena_id: str
    :param trajectory_id: str
    :param nstdev: float
    :param binsteps: int
    :param baseline_fraction: float
    :param min_pf_width: float
    :param output_file_path: str (path to file)
    :param verbose: bool
    :return: dict
    """

    if progress:
        from tqdm import tqdm

    analysis_options = copy.copy(default_pf_analysis_options)
    analysis_options.update(kwargs)

    min_pf_width = analysis_options['Minimum Width']
    min_pf_rate = analysis_options['Minimum Rate']

    (trj_x, trj_y, trj_d, trj_t) = trajectory

    pf_dict = {}
    pf_total_count = 0
    pf_cell_count = 0
    cell_count = 0
    pf_min = sys.maxsize
    pf_max = 0
    ncells = len(rate_dict)

    if progress:
        it = tqdm(viewitems(rate_dict))
    else:
        it = viewitems(rate_dict)

    for ind, valdict in it:
        t = valdict['time']
        rate = valdict['rate']
        m = np.mean(rate)
        rate1 = np.subtract(rate, m)
        if baseline_fraction is None:
            s = np.std(rate1)
        else:
            k = int(rate1.shape[0] // baseline_fraction)
            s = np.std(rate1[np.argpartition(rate1, k)[:k]])
        tmin = t[0]
        tmax = t[-1]
        bins = np.arange(tmin, tmax, bin_size)
        bin_rates = []
        bin_norm_rates = []
        pf_ibins = []
        for ibin in range(1, len(bins)):
            binx = np.linspace(bins[ibin - 1], bins[ibin], binsteps)
            interp_rate1 = np.interp(binx, t,
                                     np.asarray(rate1, dtype=np.float64))
            interp_rate = np.interp(binx, t, np.asarray(rate,
                                                        dtype=np.float64))
            r_n = np.mean(interp_rate1)
            r = np.mean(interp_rate)
            bin_rates.append(r)
            bin_norm_rates.append(r_n)
            if r_n > nstdev * s:
                pf_ibins.append(ibin - 1)

        bin_rates = np.asarray(bin_rates)
        bin_norm_rates = np.asarray(bin_norm_rates)

        if len(pf_ibins) > 0:
            pf_consecutive_ibins = []
            pf_consecutive_bins = []
            pf_widths = []
            pf_rates = []
            for pf_ibin_array in consecutive(pf_ibins):
                pf_ibin_range = np.asarray(
                    [np.min(pf_ibin_array),
                     np.max(pf_ibin_array)])
                pf_bin_range = np.asarray(
                    [bins[pf_ibin_range[0]], bins[pf_ibin_range[1]]])
                pf_bin_rates = [bin_rates[ibin] for ibin in pf_ibin_array]
                pf_width = np.diff(np.interp(pf_bin_range, trj_t, trj_d))[0]
                pf_consecutive_ibins.append(pf_ibin_range)
                pf_consecutive_bins.append(pf_bin_range)
                pf_widths.append(pf_width)
                pf_rates.append(np.mean(pf_bin_rates))

            if min_pf_rate is None:
                pf_filtered_ibins = [
                    pf_consecutive_ibins[i]
                    for i, pf_width in enumerate(pf_widths)
                    if pf_width >= min_pf_width
                ]
            else:
                pf_filtered_ibins = [
                    pf_consecutive_ibins[i]
                    for i, (pf_width,
                            pf_rate) in enumerate(zip(pf_widths, pf_rates))
                    if (pf_width >= min_pf_width) and (pf_rate >= min_pf_rate)
                ]

            pf_count = len(pf_filtered_ibins)
            pf_ibins = [
                list(range(pf_ibin[0], pf_ibin[1] + 1))
                for pf_ibin in pf_filtered_ibins
            ]
            pf_mean_width = []
            pf_mean_rate = []
            pf_peak_rate = []
            pf_mean_norm_rate = []
            pf_x_locs = []
            pf_y_locs = []
            for pf_ibin_iter in pf_ibins:
                pf_ibin_array = list(pf_ibin_iter)
                pf_ibin_range = np.asarray(
                    [np.min(pf_ibin_array),
                     np.max(pf_ibin_array)])
                pf_bin_range = np.asarray(
                    [bins[pf_ibin_range[0]], bins[pf_ibin_range[1]]])
                pf_mean_width.append(
                    np.mean(
                        np.asarray([
                            pf_width for pf_width in pf_widths
                            if pf_width >= min_pf_width
                        ])))
                pf_mean_rate.append(
                    np.mean(np.asarray(bin_rates[pf_ibin_array])))
                pf_peak_rate.append(
                    np.max(np.asarray(bin_rates[pf_ibin_array])))
                pf_mean_norm_rate.append(
                    np.mean(np.asarray(bin_norm_rates[pf_ibin_array])))
                pf_x_range = np.interp(pf_bin_range, trj_t, trj_x)
                pf_y_range = np.interp(pf_bin_range, trj_t, trj_y)
                pf_x_locs.append(np.mean(pf_x_range))
                pf_y_locs.append(np.mean(pf_y_range))

            pf_min = min(pf_count, pf_min)
            pf_max = max(pf_count, pf_max)
            pf_cell_count += 1
            pf_total_count += pf_count
        else:
            pf_count = 0
            pf_mean_width = []
            pf_mean_rate = []
            pf_peak_rate = []
            pf_mean_norm_rate = []
            pf_x_locs = []
            pf_y_locs = []

        cell_count += 1
        pf_dict[ind] = {
            'pf_count': np.asarray([pf_count], dtype=np.uint32),
            'pf_mean_width': np.asarray(pf_mean_width, dtype=np.float32),
            'pf_mean_rate': np.asarray(pf_mean_rate, dtype=np.float32),
            'pf_peak_rate': np.asarray(pf_peak_rate, dtype=np.float32),
            'pf_mean_norm_rate': np.asarray(pf_mean_norm_rate,
                                            dtype=np.float32),
            'pf_x_locs': np.asarray(pf_x_locs),
            'pf_y_locs': np.asarray(pf_y_locs)
        }

    logger.info('%s place fields: %i cells min %i max %i mean %f\n' %
                (population, cell_count, pf_min, pf_max,
                 float(pf_total_count) / float(cell_count)))
    if output_file_path is not None:
        if arena_id is None or trajectory_id is None:
            raise RuntimeError(
                'spikedata.place_fields: arena_id and trajectory_id required to write %s namespace'
                % 'Place Fields')
        namespace = 'Place Fields %s %s' % (arena_id, trajectory_id)
        write_cell_attributes(output_file_path,
                              population,
                              pf_dict,
                              namespace=namespace)

    return pf_dict
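place_fields relies on a consecutive helper (not shown) to group the above-threshold bin indices into contiguous runs; a sketch of the usual NumPy idiom, assuming that behavior:

import numpy as np

def consecutive(data):
    # Split a sorted index array into runs of consecutive integers,
    # e.g. [1, 2, 3, 7, 8] -> [array([1, 2, 3]), array([7, 8])].
    data = np.asarray(data)
    return np.split(data, np.where(np.diff(data) != 1)[0] + 1)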
Example #5
    def parse_connection_config(self):
        """

        :return:
        """
        connection_config = self.model_config['Connection Generator']

        self.connection_velocity = connection_config['Connection Velocity']

        syn_mech_names = connection_config['Synapse Mechanisms']
        syn_param_rules = connection_config['Synapse Parameter Rules']

        self.synapse_attributes = SynapseAttributes(self, syn_mech_names,
                                                    syn_param_rules)

        extent_config = connection_config['Axon Extent']
        self.connection_extents = {}

        for population in extent_config:

            pop_connection_extents = {}
            for layer_name in extent_config[population]:
                layer_config = extent_config[population][layer_name]
                if layer_name == 'default':
                    extent_key = layer_name
                else:
                    extent_key = self.layers[layer_name]
                pop_connection_extents[extent_key] = \
                    {'width': layer_config['width'],
                     'offset': layer_config['offset']}

            self.connection_extents[population] = pop_connection_extents

        synapse_config = connection_config['Synapses']
        connection_dict = {}

        for (key_postsyn, val_syntypes) in viewitems(synapse_config):
            connection_dict[key_postsyn] = {}

            for (key_presyn, syn_dict) in viewitems(val_syntypes):
                val_type = syn_dict['type']
                val_synsections = syn_dict['sections']
                val_synlayers = syn_dict['layers']
                val_proportions = syn_dict['proportions']
                if 'contacts' in syn_dict:
                    val_contacts = syn_dict['contacts']
                else:
                    val_contacts = 1
                mechparams_dict = None
                swctype_mechparams_dict = None
                if 'mechanisms' in syn_dict:
                    mechparams_dict = syn_dict['mechanisms']
                else:
                    swctype_mechparams_dict = syn_dict['swctype mechanisms']

                res_type = self.Synapse_Types[val_type]
                res_synsections = []
                res_synlayers = []
                res_mechparams = {}

                for name in val_synsections:
                    res_synsections.append(self.SWC_Types[name])
                for name in val_synlayers:
                    res_synlayers.append(self.layers[name])
                if swctype_mechparams_dict is not None:
                    for swc_type in swctype_mechparams_dict:
                        swc_type_index = self.SWC_Types[swc_type]
                        res_mechparams[
                            swc_type_index] = self.parse_syn_mechparams(
                                swctype_mechparams_dict[swc_type])
                else:
                    res_mechparams['default'] = self.parse_syn_mechparams(
                        mechparams_dict)

                connection_dict[key_postsyn][key_presyn] = \
                    SynapseConfig(res_type, res_synsections, res_synlayers, val_proportions, val_contacts, \
                                  res_mechparams)

            config_dict = defaultdict(lambda: 0.0)
            for (key_presyn,
                 conn_config) in viewitems(connection_dict[key_postsyn]):
                for (s, l, p) in zip(conn_config.sections, conn_config.layers,
                                     conn_config.proportions):
                    config_dict[(conn_config.type, s, l)] += p

            for (k, v) in viewitems(config_dict):
                try:
                    assert (np.isclose(v, 1.0))
                except Exception as e:
                    self.logger.error(
                        'Connection configuration: probabilities for %s do not sum to 1: %s = %f'
                        % (key_postsyn, str(k), v))
                    raise e

        self.connection_config = connection_dict
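A minimal illustration of the configuration shape this method expects, with keys inferred from the lookups above; the population, layer, and mechanism names and all values are hypothetical:

model_config = {
    'Connection Generator': {
        'Connection Velocity': {'GC': 250.0},
        'Synapse Mechanisms': {'AMPA': 'LinExp2Syn'},
        'Synapse Parameter Rules': {},
        'Axon Extent': {
            'MPP': {'default': {'width': 2000.0, 'offset': 0.0}}},
        'Synapses': {
            'GC': {                 # postsynaptic population
                'MPP': {            # presynaptic population
                    'type': 'excitatory',
                    'sections': ['apical'],
                    'layers': ['MML'],
                    'proportions': [1.0],
                    'contacts': 1,
                    'mechanisms': {'AMPA': {'weight': 1.0}}}}}}}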
Example #6
def main(connections_path, destination, sources, io_size, verbose):
    """

    :param connections_path: str
    :param destination: str
    :param sources: list of str
    :param io_size: int
    :param verbose: bool
    """

    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    pop_ranges, pop_size = read_population_ranges(connections_path, comm=comm)

    count = 0
    gid_count = 0
    start_time = time.time()

    connection_gen_list = [NeuroH5ProjectionGen(connections_path, source, destination, \
                                                    namespaces=['Connections'], \
                                                    comm=comm) for source in sources]

    distance_stats_dict = {source: RunningStats() for source in sources}
    for attr_gen_package in zip(*connection_gen_list):
        local_time = time.time()
        conn_attr_dict = None
        destination_gid = attr_gen_package[0][0]
        if not all([
                attr_gen_items[0] == destination_gid
                for attr_gen_items in attr_gen_package
        ]):
            raise Exception(
                'Rank: %i; destination: %s; destination_gid %i not matched across multiple attribute generators: %s'
                % (rank, destination, destination_gid,
                   str([
                       attr_gen_items[0] for attr_gen_items in attr_gen_package
                   ])))
        if destination_gid is not None:
            for (this_destination_gid,
                 (source_gid_array,
                  conn_attr_dict)), source in zip(attr_gen_package, sources):
                for this_distance in conn_attr_dict['Connections']['distance']:
                    distance_stats_dict[source].update(this_distance)
            count += 1
        else:
            logger.info('Rank: %i received destination_gid as None' % rank)
        gid_count += 1

    for source in sorted(distance_stats_dict):
        distance_stats = distance_stats_dict[source]
        all_stats = comm.reduce(distance_stats,
                                root=0,
                                op=mpi_op_combine_rstats)
        if rank == 0:
            logger.info('Projection %s -> %s: distance: n=%d min=%.2f max=%.2f mean=%.2f variance=%.3f' % \
                        (source, destination, all_stats.n, all_stats.min, all_stats.max, \
                        all_stats.mean(), all_stats.variance()))

    global_count = comm.gather(count, root=0)
    if rank == 0:
        logger.info('destination: %s; %i ranks obtained distances for %i cells in %.2f s' % \
                    (destination, comm.size, np.sum(global_count), time.time() - start_time))
    MPI.Finalize()
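The script assumes a RunningStats accumulator and an MPI reduction op mpi_op_combine_rstats, neither of which is shown. A minimal sketch of such an accumulator based on Welford's online algorithm, with hypothetical names chosen to match the calls above:

import numpy as np

class RunningStats(object):
    # Online mean/variance (Welford), combinable across MPI ranks.
    def __init__(self):
        self.n, self.m, self.m2 = 0, 0.0, 0.0
        self.min, self.max = np.inf, -np.inf

    def update(self, x):
        self.n += 1
        d = x - self.m
        self.m += d / self.n
        self.m2 += d * (x - self.m)
        self.min, self.max = min(self.min, x), max(self.max, x)

    def mean(self):
        return self.m

    def variance(self):
        return self.m2 / (self.n - 1) if self.n > 1 else 0.0

def combine_rstats(a, b, datatype=None):
    # Chan et al. parallel combination of two accumulators; the signature
    # is suitable for MPI.Op.Create.
    c = RunningStats()
    c.n = a.n + b.n
    if c.n > 0:
        delta = b.m - a.m
        c.m = a.m + delta * b.n / c.n
        c.m2 = a.m2 + b.m2 + delta * delta * a.n * b.n / c.n
    c.min, c.max = min(a.min, b.min), max(a.max, b.max)
    return c

Under this assumption, mpi_op_combine_rstats could be created with MPI.Op.Create(combine_rstats, commute=True).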
Example #7
def generate_synaptic_connections(rank,
                                  gid,
                                  ranstream_syn,
                                  ranstream_con,
                                  cluster_seed,
                                  destination_gid,
                                  synapse_dict,
                                  population_dict,
                                  projection_synapse_dict,
                                  projection_prob_dict,
                                  connection_dict,
                                  random_choice=random_choice_w_replacement):
    """
    Given a set of synapses for a particular gid, projection
    configuration, projection and connection probability dictionaries,
    generates a set of possible connections for each synapse. The
    procedure first assigns each synapse to a projection, using the
    given proportions of each synapse type, and then chooses source
    gids for each synapse using the given projection probability
    dictionary.

    :param ranstream_syn: random stream for the synapse partitioning step
    :param ranstream_con: random stream for the choosing source gids step
    :param destination_gid: destination gid
    :param synapse_dict: synapse configurations, a dictionary with fields: 1) syn_ids (synapse ids), 2) syn_types (excitatory, inhibitory, etc.),
                        3) swc_types (SWC type(s) of the synapse location in the neuronal morphology), 4) syn_layers (synapse layer placement)
    :param population_dict: mapping of population names to population indices
    :param projection_synapse_dict: mapping of projection names to a tuple of the form: <syn_layer, swc_type, syn_type, syn_proportion>
    :param projection_prob_dict: mapping of presynaptic population names to sets of source probabilities and source gids
    :param connection_dict: output connection dictionary
    :param random_choice: random choice procedure (the default draws per-source counts from a multinomial distribution, i.e. choice with replacement)

    """
    num_projections = len(projection_synapse_dict)
    prj_pop_index = {
        population: i
        for (i, population) in enumerate(projection_synapse_dict)
    }
    synapse_prj_counts = np.zeros((num_projections, ))
    synapse_prj_partition = defaultdict(lambda: defaultdict(list))
    maxit = 10
    it = 0
    ## assign each synapse to a projection
    while (np.count_nonzero(synapse_prj_counts) < num_projections) and (it < maxit):
        log_flag = it > 1
        if log_flag:
            logger.info("generate_synaptic_connections: gid %i: iteration %i" %
                        (gid, it))
        synapse_prj_counts.fill(0)
        synapse_prj_partition.clear()
        for (syn_id, syn_type, swc_type, syn_layer) in zip(
                synapse_dict['syn_ids'], synapse_dict['syn_types'],
                synapse_dict['swc_types'], synapse_dict['syn_layers']):
            projection = choose_synapse_projection(ranstream_syn, syn_layer, swc_type, syn_type, \
                                                   population_dict, projection_synapse_dict, log=log_flag)
            if log_flag:
                logger.info('generate_synaptic_connections: gid %i: ' \
                            'syn_id = %i syn_type = %i swc_type = %i syn_layer = %i projection = %s' % \
                            (gid, syn_id, syn_type, swc_type, syn_layer, projection))
            assert (projection is not None)
            synapse_prj_counts[prj_pop_index[projection]] += 1
            synapse_prj_partition[projection][syn_layer].append(syn_id)
        it += 1

    empty_projections = []

    for projection in projection_synapse_dict:
        logger.debug('Rank %i: gid %i: projection %s has %i synapses' %
                     (rank, destination_gid, projection,
                      len(synapse_prj_partition[projection])))
        if not (len(synapse_prj_partition[projection]) > 0):
            empty_projections.append(projection)

    if len(empty_projections) > 0:
        logger.warning('Rank %i: gid %i: projections %s have an empty synapse list; ' \
                       'swc types are %s layers are %s' % \
                       (rank, destination_gid, str(empty_projections), str(set(synapse_dict['swc_types'].flat)), \
                        str(set(synapse_dict['syn_layers'].flat))))
    assert (len(empty_projections) == 0)

    ## Choose source connections based on distance-weighted probability
    count = 0
    for projection, prj_layer_dict in viewitems(synapse_prj_partition):
        (syn_config_type, syn_config_layers, syn_config_sections, syn_config_proportions, syn_config_contacts) = \
            projection_synapse_dict[projection]
        gid_dict = connection_dict[projection]
        prj_source_vertices = []
        prj_syn_ids = []
        prj_distances = []
        for prj_layer, syn_ids in viewitems(prj_layer_dict):
            source_probs, source_gids, distances_u, distances_v = \
                projection_prob_dict[projection][prj_layer]
            distance_dict = {source_gid: distance_u + distance_v \
                             for (source_gid, distance_u, distance_v) in \
                             zip(source_gids, distances_u, distances_v)}
            if len(source_gids) > 0:
                n_syn_groups = int(
                    math.ceil(
                        float(len(syn_ids)) / float(syn_config_contacts)))
                source_gid_counts = random_choice(ranstream_con, n_syn_groups,
                                                  source_probs)
                total_count = 0
                if syn_config_contacts > 1:
                    ncontacts = int(math.ceil(syn_config_contacts))
                    for i in range(0, len(source_gid_counts)):
                        if source_gid_counts[i] > 0:
                            source_gid_counts[i] *= ncontacts
                if len(source_gid_counts) == 0:
                    logger.warning('Rank %i: source vertices list is empty for gid: %i projection: %s layer: %s ' \
                                   'source probs: %s distances_u: %s distances_v: %s' % \
                                   (rank, destination_gid, projection, str(prj_layer), \
                                    str(source_probs), str(distances_u), str(distances_v)))

                uv_distance_sums = np.add(distances_u,
                                          distances_v,
                                          dtype=np.float32)
                source_vertices = np.asarray(random_clustered_shuffle(len(source_gids), \
                                                                      source_gid_counts, \
                                                                      center_ids=source_gids, \
                                                                      cluster_std=2.0, \
                                                                      random_seed=cluster_seed), \
                                             dtype=np.uint32)[0:len(syn_ids)]
                assert (len(source_vertices) == len(syn_ids))
                distances = np.asarray([distance_dict[gid] for gid in source_vertices], \
                                       dtype=np.float32).reshape(-1, )
                prj_source_vertices.append(source_vertices)
                prj_syn_ids.append(syn_ids)
                prj_distances.append(distances)
                gid_dict[destination_gid] = (np.asarray([], dtype=np.uint32), {
                    'Synapses': {
                        'syn_id': np.asarray([], dtype=np.uint32)
                    },
                    'Connections': {
                        'distance': np.asarray([], dtype=np.float32)
                    }
                })
                cluster_seed += 1
        if len(prj_source_vertices) > 0:
            prj_source_vertices_array = np.concatenate(prj_source_vertices)
        else:
            prj_source_vertices_array = np.asarray([], dtype=np.uint32)
        del (prj_source_vertices)
        if len(prj_syn_ids) > 0:
            prj_syn_ids_array = np.concatenate(prj_syn_ids)
        else:
            prj_syn_ids_array = np.asarray([], dtype=np.uint32)
        del (prj_syn_ids)
        if len(prj_distances) > 0:
            prj_distances_array = np.concatenate(prj_distances)
        else:
            prj_distances_array = np.asarray([], dtype=np.float32)
        del (prj_distances)
        if len(prj_source_vertices_array) == 0:
            logger.warning(
                'Rank %i: source gid list is empty for gid: %i projection: %s'
                % (rank, destination_gid, projection))
        count += len(prj_source_vertices_array)
        gid_dict[destination_gid] = (prj_source_vertices_array,
                                     {'Synapses': {'syn_id': np.asarray(prj_syn_ids_array, \
                                                                        dtype=np.uint32)},
                                      'Connections': {'distance': prj_distances_array}
                                      })

    return count
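The default random_choice_w_replacement procedure is not shown; per the docstring it draws counts from a multinomial distribution, so a sketch consistent with that description (names assumed):

import numpy as np

def random_choice_w_replacement(ranstream, n, p):
    # Draw n samples with replacement from len(p) categories and return a
    # per-category count vector, as consumed by generate_synaptic_connections
    # (source_gid_counts). `ranstream` is assumed to expose the numpy
    # RandomState API.
    p = np.asarray(p, dtype=np.float64)
    return ranstream.multinomial(n, p / np.sum(p))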
Example #8
def icp_transform(comm,
                  env,
                  soma_coords,
                  projection_ls,
                  population_extents,
                  rotate=None,
                  populations=None,
                  icp_iter=1000,
                  opt_iter=100):
    """
    Uses the iterative closest point (ICP) algorithm of the PCL library to transform soma coordinates onto a surface for a particular L value.
    http://pointclouds.org/documentation/tutorials/iterative_closest_point.php#iterative-closest-point

    """

    import dlib
    import pcl

    rank = comm.rank
    size = comm.size

    if populations is None:
        populations = list(soma_coords.keys())

    srf_resample = 25

    layer_extents = env.geometry['Parametric Surface']['Layer Extents']

    (extent_u, extent_v, extent_l) = get_total_extents(layer_extents)

    min_u, max_u = extent_u
    min_v, max_v = extent_v
    min_l, max_l = extent_l

    ## This parameter is used to expand the range of L and avoid
    ## situations where the endpoints of L end up outside of the range
    ## of the distance interpolant
    safety = 0.01

    extent_u = (min_u - safety, max_u + safety)
    extent_v = (min_v - safety, max_v + safety)

    projection_ptclouds = []
    for obs_l in projection_ls:
        srf = make_surface(extent_u, extent_v, obs_l, rotate=rotate)
        U, V = srf._resample_uv(srf_resample, srf_resample)
        meshpts = srf.ev(U, V)
        projection_ptcloud = pcl.PointCloud()
        projection_ptcloud.from_array(meshpts)
        projection_ptclouds.append(projection_ptcloud)

    soma_coords_dict = {}
    for pop in populations:
        coords_dict = soma_coords[pop]
        if rank == 0:
            logger.info('Computing point transformation for population %s...' %
                        pop)
        count = 0
        xyz_coords = []
        gids = []
        for gid, coords in viewitems(coords_dict):
            if gid % size == rank:
                soma_u, soma_v, soma_l = coords
                xyz_coords.append(
                    DG_volume(soma_u, soma_v, soma_l, rotate=rotate))
                gids.append(gid)
        xyz_pts = np.vstack(xyz_coords)

        cloud_in = pcl.PointCloud()
        cloud_in.from_array(xyz_pts)

        icp = cloud_in.make_IterativeClosestPoint()

        all_est_xyz_coords = []
        all_est_uvl_coords = []
        all_interp_err = []

        for (k, cloud_prj) in enumerate(projection_ptclouds):
            k_est_xyz_coords = np.zeros((len(gids), 3))
            k_est_uvl_coords = np.zeros((len(gids), 3))
            interp_err = np.zeros((len(gids), ))
            converged, transf, estimate, fitness = icp.icp(cloud_in,
                                                           cloud_prj,
                                                           max_iter=icp_iter)
            logger.info('Transformation of population %s has converged: %s score: %f' %
                        (pop, str(converged), fitness))
            for i, gid in enumerate(gids):
                est_xyz_coords = estimate[i]
                k_est_xyz_coords[i, :] = est_xyz_coords
                f_uvl_distance = make_uvl_distance(est_xyz_coords,
                                                   rotate=rotate)
                # Bounds on (u, v, l) for the global optimizer, taken from
                # the layer extents computed above (the original snippet
                # references an undefined `limits` variable here).
                lb = [extent_u[0], extent_v[0], min_l - safety]
                ub = [extent_u[1], extent_v[1], max_l + safety]
                uvl_coords, err = dlib.find_min_global(f_uvl_distance,
                                                       lb, ub, opt_iter)
                k_est_uvl_coords[i, :] = uvl_coords
                interp_err[i] = err
                if rank == 0:
                    logger.info(
                        'gid %i: u: %f v: %f l: %f' %
                        (gid, uvl_coords[0], uvl_coords[1], uvl_coords[2]))
            all_est_xyz_coords.append(k_est_xyz_coords)
            all_est_uvl_coords.append(k_est_uvl_coords)
            all_interp_err.append(interp_err)

        coords_dict = {}
        for (i, gid) in enumerate(gids):
            coords_dict[gid] = {
                'X Coordinate':
                np.asarray([col[i, 0] for col in all_est_xyz_coords],
                           dtype='float32'),
                'Y Coordinate':
                np.asarray([col[i, 1] for col in all_est_xyz_coords],
                           dtype='float32'),
                'Z Coordinate':
                np.asarray([col[i, 2] for col in all_est_xyz_coords],
                           dtype='float32'),
                'U Coordinate':
                np.asarray([col[i, 0] for col in all_est_uvl_coords],
                           dtype='float32'),
                'V Coordinate':
                np.asarray([col[i, 1] for col in all_est_uvl_coords],
                           dtype='float32'),
                'L Coordinate':
                np.asarray([col[i, 2] for col in all_est_uvl_coords],
                           dtype='float32'),
                'Interpolation Error':
                np.asarray([err[i] for err in all_interp_err], dtype='float32')
            }

        soma_coords_dict[pop] = coords_dict

    return soma_coords_dict
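make_uvl_distance is assumed to build an objective mapping (u, v, l) to the distance between the parametric volume point and a target XYZ coordinate; a sketch under that assumption, reusing the DG_volume function called above:

import numpy as np

def make_uvl_distance(xyz_target, rotate=None):
    # Hypothetical reconstruction consistent with the dlib.find_min_global
    # call above: returns f(u, v, l) = Euclidean distance between the
    # volume point at (u, v, l) and the target coordinate.
    def f(u, v, l):
        pt = np.asarray(DG_volume(u, v, l, rotate=rotate)).ravel()
        return float(np.linalg.norm(pt - np.asarray(xyz_target).ravel()))
    return f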