def test():
    """Smoke/regression test for the spatial-relations utilities.

    Exercises, in order: relation formatting helpers, element metrics,
    massive combinatorial instantiation of ``RegionDistances`` mappers,
    extreme/halting cases, ``DummyRegDistance``, the auxiliary parsing
    creation function, region-distance computers and relative positioners.
    Expected-failure sites use the ``boolean`` flag pattern: the guarded
    call must raise; if it completes, the flag is set and the sentinel
    exception escapes the handler.
    """
    ### Parameters or externals
    info_ret = {'order': 2}
    locs = np.random.random((1000, 2))
    # Integer types accepted for 'indices' output checking below.
    inttypes = [int, np.int32, np.int64]
    griddisc1 = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    griddisc2 = GridSpatialDisc((10, 10), xlim=(0, 1), ylim=(0, 1))
    griddisc3 = GridSpatialDisc((50, 100), xlim=(0, 1), ylim=(0, 1))

    ### Testing utilities
    ## util_spatial_relations
    f = lambda x, y: x + y
    sp_elements = np.random.random(10)
    general_spatial_relation(sp_elements[0], sp_elements[0], f)
    general_spatial_relations(sp_elements, f, simmetry=False)
    general_spatial_relations(sp_elements, f, simmetry=True)

    ## format_out_relations
    mainmapper1 = randint_sparse_matrix(0.8, (25, 25))
    format_out_relations(mainmapper1, 'sparse')
    format_out_relations(mainmapper1, 'network')
    format_out_relations(mainmapper1, 'sp_relations')
    lista = format_out_relations(mainmapper1, 'list')
    u_regs = mainmapper1.data

    ## Element metrics
    element_i, element_j = 54, 2
    pars1 = {'periodic': 60}
    pars2 = {}
    unidimensional_periodic(element_i, element_j, pars=pars1)
    unidimensional_periodic(element_i, element_j, pars=pars2)
    unidimensional_periodic(element_j, element_i, pars=pars1)
    unidimensional_periodic(element_j, element_i, pars=pars2)
    measure_difference(element_i, element_j, pars=pars1)
    measure_difference(element_i, element_j, pars=pars2)
    measure_difference(element_j, element_i, pars=pars1)
    measure_difference(element_j, element_i, pars=pars2)

    def ensure_output(neighs, dists, mainmapper):
        """Check retrieved neighbours/distances are well-formed.

        Distances must be 2-d arrays; when the mapper outputs 'indices'
        every neighbour must be of an integer type.
        """
        assert(all([len(e.shape) == 2 for e in dists]))
        assert(all([len(e) == 0 for e in dists if np.prod(e.shape) == 0]))
        if mainmapper._out == 'indices':
            correctness = []
            for nei in neighs:
                if len(nei):
                    correctness.append(all([type(e) in inttypes
                                            for e in nei]))
                else:
                    correctness.append(nei.dtype in inttypes)
            # BUGFIX: previously `assert(correctness)` — a non-empty list
            # is always truthy, so failures were never detected. Check
            # every element instead.
            assert(all(correctness))

    ###########################################################################
    ### Massive combinatorial testing
    # Possible parameters
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='sparse')
    pos_relations = [relations, relations.A,
                     format_out_relations(relations, 'network')]
    pos_distanceorweighs, pos_sym = [True, False], [True, False]
    pos_inputstypes, pos_outputtypes = [[None, 'indices', 'elements_id']]*2
    pos_input_type = [None, 'general', 'integer', 'array', 'array1', 'array2',
                      'list', 'list_int', 'list_array']
    pos_inputs = [[0], 0, 0, np.array([0]), np.array([0]), np.array([0]),
                  [0], [0], [np.array([0])]]
    pos_data_in, pos_data = [[]]*2
    possibles = [pos_relations, pos_distanceorweighs, pos_sym, pos_outputtypes,
                 pos_inputstypes, pos_input_type]
    # Combinations
    for p in product(*possibles):
        mainmapper1 = RegionDistances(relations=p[0], distanceorweighs=p[1],
                                      symmetric=p[2], output=p[3], input_=p[4],
                                      input_type=p[5])
        mainmapper1[slice(0, 1)]
        # Define input
        if p[5] is None:
            if p[4] != 'indices':
                neighs, dists = mainmapper1[mainmapper1.data[0]]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[0]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[np.array([-1])]
                ensure_output(neighs, dists, mainmapper1)
            if p[4] != 'elements_id':
                neighs, dists = mainmapper1[0]
                ensure_output(neighs, dists, mainmapper1)
                # Negative plain-integer indexing must fail.
                try:
                    boolean = False
                    mainmapper1[-1]
                    boolean = True
                    raise Exception("It has to halt here.")
                except Exception:
                    if boolean:
                        raise Exception("It has to halt here.")
        else:
            if p[5] == 'list':
                # Get item
                neighs, dists = mainmapper1[[0]]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[[np.array([0])]]
                ensure_output(neighs, dists, mainmapper1)
                # A list containing None is not a valid key.
                try:
                    boolean = False
                    mainmapper1[[None]]
                    boolean = True
                    raise Exception("It has to halt here.")
                except Exception:
                    if boolean:
                        raise Exception("It has to halt here.")
            idxs = pos_inputs[pos_input_type.index(p[5])]
            neighs, dists = mainmapper1[idxs]
            ensure_output(neighs, dists, mainmapper1)
        # Functions
        mainmapper1.set_inout(p[5], p[4], p[3])
        mainmapper1.transform(lambda x: x)
        mainmapper1.data
        mainmapper1.data_input
        mainmapper1.data_output
        mainmapper1.shape
        ## Extreme cases

    ## Individual extreme cases
    ## Instantiation
    pars_rel = {'symmetric': False}
    relations = relations.A
    data_in = list(np.arange(len(relations)))
    wrong_data = np.random.random((100))
    mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                  data_in=data_in, **pars_rel)
    data_in = np.arange(len(relations)).reshape((len(relations), 1))
    mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                  data_in=data_in, **pars_rel)
    # 3-d _data is invalid and has to be rejected.
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        sparse_rels = randint_sparse_matrix(0.8, (25, 25))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    relations = np.random.random((20, 20))
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")

    ## Other cases
    # Dummymap instantiation
    regs0 = np.unique(np.random.randint(0, 1000, 200))
    regs1 = regs0.reshape((len(regs0), 1))
    regs2 = regs0.reshape((len(regs0), 1, 1))
    pos_regs = [regs0, list(regs0), regs1]
    possibles = [pos_regs, pos_input_type]
    for p in product(*possibles):
        dummymapper = DummyRegDistance(p[0], p[1])
        # Get item
        idxs = pos_inputs[pos_input_type.index(p[1])]
        neighs, dists = dummymapper[idxs]
        ensure_output(neighs, dists, dummymapper)
        neighs, dists = dummymapper[slice(0, 1)]
        ensure_output(neighs, dists, dummymapper)
        ## Functions
        dummymapper.transform(lambda x: x)
        dummymapper.data
        dummymapper.data_input
        dummymapper.data_output
        dummymapper.shape
    # Halting cases
    try:
        boolean = False
        dummymapper = DummyRegDistance(regs2)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper = DummyRegDistance(None)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper[None]
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper[-1]
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("It has to halt here.")

    ###########################################################################
    ### Auxiliar parsing creation functions test
    ############################################
    # Standarts
    #    * relations object
    #    * (main_relations_info, pars_rel)
    #    * (main_relations_info, pars_rel, _data)
    #    * (main_relations_info, pars_rel, _data, data_in)
    #
    ## Main relations information
    relations = np.random.random((100, 20))
    _data = np.arange(20)
    _data_input = np.arange(100)

    relations_info = (relations, {})
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_info = (relations, {}, _data)
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_info = (relations, {}, _data, _data_input)
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_object = _relations_parsing_creation(relations_object)
    assert(isinstance(relations_object, RegionDistances))

    relations_object = _relations_parsing_creation(relations)
    assert(isinstance(relations_object, RegionDistances))

    ###########################################################################
    ### Computers testing
    #####################
    ## aux_regionmetrics
    # Get regions activated
    elements = griddisc1.get_regions_id()
    get_regions4distances(griddisc1, elements=None, activated=None)
    get_regions4distances(griddisc1, elements, activated=elements)

    # Filter possible neighs
    only_possible = np.unique(np.random.randint(0, 100, 50))
    neighs = [np.unique(np.random.randint(0, 100, 6)) for i in range(4)]
    dists = [np.random.random(len(neighs[i])) for i in range(4)]
    filter_possible_neighs(only_possible, neighs, dists)
    filter_possible_neighs(only_possible, neighs, None)

    # TODO: Sync with other classes as sp_desc_models
    lista = [[0, 1, 2, 3], [0, 2, 3, 5], [1, 1, 1, 1]]
    u_regs = np.arange(25)
    regions_id = np.arange(25)
    elements_i = np.arange(25)
    element_labels = np.arange(25)
    discretizor = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))

    locs = np.random.random((100, 2))
    retriever = KRetriever
    info_ret = np.ones(100)*4
    descriptormodel = DummyDescriptor()

    sp_descriptor = discretizor, locs, retriever, info_ret, descriptormodel

    sparse_from_listaregneighs(lista, u_regs, symmetric=True)
    sparse_from_listaregneighs(lista, u_regs, symmetric=False)
    ret_selfdists = KRetriever(locs, 4, ifdistance=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='network',
                          symmetric=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='sparse',
                          symmetric=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='matrix',
                          symmetric=True)

    ## Compute Avg distance
    locs = np.random.random((100, 2))
    sp_descriptor = (griddisc1, locs), (KRetriever, {'info_ret': 5}), None
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='network')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='matrix')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='sparse')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)

    # Region spatial relations
    # For future (TODO)

    ### RegionDistances Computers
    griddisc1 = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    ## Compute Contiguity
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='matrix')
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='network')
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='sparse')
    mainmapper1 = RegionDistances(relations=relations, _data=None,
                                  **pars_rel)
    neighs, dists = mainmapper1.retrieve_neighs([0])
    assert(len(neighs) == len(dists))
    assert(len(neighs) == 1)
    neighs, dists = mainmapper1.retrieve_neighs([0, 1])
    assert(len(neighs) == len(dists))
    assert(len(neighs) == 2)

    ## Compute CenterLocs
    sp_descriptor = griddisc1, None, None
    ## TODO: pdist problem
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='network',
                                          elements=None, symmetric=True,
                                          activated=None)
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='matrix',
                                          elements=None, symmetric=True,
                                          activated=None)
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='sparse',
                                          elements=None, symmetric=True,
                                          activated=None)
    sp_descriptor = griddisc1, (KRetriever, {'info_ret': 2}), None

### TODO: Descriptormodel
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='sparse',
                                          elements=None, symmetric=True,
                                          activated=None)

    ## Retriever tuple
    ## Retriever object

    ## Spdesc

    ## Compute PointsNeighsIntersection

    ## Aux_regionmetrics
    #sparse_from_listaregneighs(lista, u_regs, symmetric)

    ###########################################################################
    ### Relative positioner testing
    ###############################
    n_el, n_dim = 5, 2
    elements_i = np.random.random((n_el, n_dim))
    elements_neighs = []
    for i in range(n_el):
        aux_neigh = np.random.random((np.random.randint(1, 4), n_dim))
        elements_neighs.append(aux_neigh)
    rel_pos = BaseRelativePositioner(metric_distances)
    rel_pos.compute(elements_i, elements_neighs)

    rel_pos = BaseRelativePositioner(diff_vectors)
    rel_pos.compute(elements_i, elements_neighs)
# ========================= Example 2 =========================
def test():
    ###########################################################################
    ############################# Artificial data #############################
    ###########################################################################
    ## Random relations
    n, density, shape = 100, 0.1, (10, 10)
    randint_sparse_matrix(density, shape, maxvalue=10)
    generate_randint_relations(density, shape, p0=0., maxvalue=1)
    generate_random_relations_cutoffs(n, 0.5, 0.9, True, 'network')
    generate_random_relations_cutoffs(n, 0.5, 0.9, False, 'network')
    generate_random_relations_cutoffs(n, 0.5, 0.9, True, 'sparse')

    n_elements, n_collections = 100, 10
    random_membership(n_elements, n_collections, multiple=True)
    random_membership(n_elements, n_collections, multiple=False)

    ## Random points
    n_points, n_dim, funct = 100, 2, np.cos
    random_transformed_space_points(n_points, n_dim, funct)
    random_transformed_space_points(n_points, n_dim, None)
    random_space_points(n_points, n_dim)

    ## Artificial grid data
    create_random_image(shape, n_modes=1)
    create_random_image(shape, n_modes=3)

    ## Artificial regions
    n_poly = 10
    random_shapely_polygon(bounding=(None, None), n_edges=0)
    random_shapely_polygon(bounding=((0., 1.), None), n_edges=0)
    random_shapely_polygon(bounding=(None, None), n_edges=4)
    random_shapely_polygons(n_poly, bounding=(None, None), n_edges=0)

    ## Artificial random features
    n, n_feats = np.random.randint(10, 1000), np.random.randint(2, 20)
    n_feats2 = [np.random.randint(2, 20) for i in range(n_feats)]
    ks = np.random.randint(1, 20)

    feats = continuous_array_features(n, n_feats)
    assert(len(feats.shape) == 2)
    feats = categorical_array_features(n, n_feats)
    assert(len(feats.shape) == 2)
    feats = categorical_array_features(n, n_feats2)
    assert(len(feats.shape) == 2)
    feats = continuous_dict_features(n, n_feats)
    assert(type(feats[0]) == dict)
    feats = categorical_dict_features(n, n_feats)
    assert(type(feats[0]) == dict)

    feats = continuous_agg_array_features(n, n_feats, ks)
    assert(len(feats.shape) == 3)
    feats = categorical_agg_array_features(n, n_feats, ks)
    assert(len(feats.shape) == 3)
    feats = categorical_agg_array_features(n, n_feats2, ks)
    assert(len(feats.shape) == 3)
    feats = continuous_agg_dict_features(n, n_feats, ks)
    assert(type(feats[0][0]) == dict)
    feats = categorical_agg_dict_features(n, n_feats, ks)
    assert(type(feats[0][0]) == dict)

    ## Artificial measures
    n_vals_i, n_iss = np.random.randint(2, 30), np.random.randint(1, 30)

    create_empty_features_array(n_feats, n_iss, ks)
    create_empty_features_dict(n_feats, n_iss, ks)
    create_features_i_array(n_feats, n_iss, ks)
    create_features_i_dict(n_feats, n_iss, ks)

    create_vals_i(n_iss, n_vals_i, ks)

    create_empty_array(ks, n_vals_i, n_feats)
    create_empty_append(ks, n_iss, n_feats)
    create_empty_replacelist(ks, n_iss, n_feats)

    create_artificial_measure_array(ks, n_vals_i, n_feats)
    create_artificial_measure_append(ks, n_vals_i, n_feats)
    create_artificial_measure_replacelist(ks, n_vals_i, n_feats)
    create_artificial_measure_replacelist(ks, n_vals_i, n_feats, True)

    ###########################################################################
    ############################ Spatial Elements #############################
    ###########################################################################
    ## Parameters
    words = m.replace('\n', ' ').replace('.', ' ').strip().split(" ")
    ids = [hash(e) for e in words]
    functs = [lambda x: str(x)+words[i] for i in range(len(words))]
    regs = random_shapely_polygons(10, bounding=(None, None), n_edges=0)

    ## Testing Elemets
    words_id = np.arange(len(words))
    words_elements = SpatialElementsCollection(words, words_id)
    words_elements2 = SpatialElementsCollection(words, list(words_id))
    words_elements = SpatialElementsCollection(words)
    ids_elements = SpatialElementsCollection(ids)
    functs_elements = SpatialElementsCollection(functs)
    polys_elements = SpatialElementsCollection(regs, np.arange(len(regs)))

    # Testing error instantiation
    try:
        flag_error = False
        SpatialElementsCollection(0)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        SpatialElementsCollection(words, np.arange(len(words)+1))
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        tags = range(len(words)) + [len(words)-1]
        SpatialElementsCollection(words, tags)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        SpatialElementsCollection(words, 5)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    # Class functions
    words_elements[0]
    try:
        flag_error = False
        words_elements[len(words_elements)]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        words_elements2[words[0]]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements.elements_id = None
    try:
        flag_error = False
        words_elements[words[0]]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements[0]

    for e in words_elements:
        pass

    for e in words_elements2:
        pass

    words_elements == words[0]
    relabel_map = np.arange(len(words))
    try:
        flag_error = False
        words_elements.relabel_elements(range(len(words)))
        flag_error = True
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements.relabel_elements(relabel_map)
    relabel_map = dict(zip(relabel_map, relabel_map))
    words_elements.relabel_elements(relabel_map)

    ids_elements[0]
    for e in ids_elements:
        pass
    ids_elements == words[0]

    functs_elements[0]
    for e in functs_elements:
        pass
    functs_elements == words[0]

    # Polygon collections
    polys_elements == polys_elements[0]

    ############################ Locations Object #############################
    ###########################################################################
    ## Locations
    locs1 = np.random.random((100, 5))
    locs2 = np.random.random((100, 1))
    locs3 = np.random.random(100)
    locs4 = np.random.random((100, 2))
    sptrans = lambda x, p: np.sin(x)

    class Translocs:
        def __init__(self):
            pass

        def apply_transformation(self, x, p={}):
            return sptrans(x, p)
    sptrans2 = Translocs()

    lspcol = SpatialElementsCollection(locs1, np.arange(len(locs1)))
    lspcol == lspcol[0]

    try:
        flag_error = False
        locs = Locations(locs1, 5)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        locs = Locations(locs1, list(range(len(locs1)+1)))
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        tags = list(range(len(locs1)))
        tags[0] = 1
        locs = Locations(locs1, tags)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    locs = Locations(locs1)
    locsbis = Locations(locs1, list(range(len(locs1))))
    for l in locs:
        pass
    try:
        flag_error = False
        locs[-1]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        locsbis[slice(0, 9)]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    locsbis[0]
    locs[0]
    assert((locs == locs1[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})
    locs._check_coord(0)
    locs._check_coord(locs[0])
    locs._check_coord([0, 3])
    locs._check_coord(np.random.random(locs.locations.shape[1]))
    locs._check_coord([locs1[0], locs1[3]])
    locs._check_coord(None)
    locs.in_radio(locs[0], 0.2)
    locs.data

    locs = Locations(locs2)
    assert((locs == locs2[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})
    locs.in_manhattan_d(locs[0], 0.2)

    locs = Locations(locs3)
    assert((locs == locs3[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})

    locs = Locations(locs4)
    locs.in_block_distance_d(np.random.random((1, 2)), 0.2)

    ###########################################################################
    ############################### Membership ################################
    ###########################################################################
    # artificial data
    random_membership(10, 20, True)
    random_membership(10, 20, False)

    n_in, n_out = 100, 20
    relations = [np.unique(np.random.randint(0, n_out,
                                             np.random.randint(n_out)))
                 for i in range(n_in)]
    relations = [list(e) for e in relations]
    memb1 = Membership(relations)

    memb1.to_network()
    memb1.to_dict()
    memb1.to_sparse()
    memb1.reverse_mapping()
    memb1.getcollection(0)
    memb1.getcollection(memb1.max_collection_id-1)
    memb1.collections_id
    memb1.n_collections
    memb1.n_elements
    memb1.membership
    str(memb1)
    memb1[0]
    memb1 == 0
    for e in memb1:
        pass

#    op2 = np.all([t == dict for t in types])
    relations = [dict(zip(e, len(e)*[{'membership': 1}])) for e in relations]
    memb1_dict = Membership(relations)
    memb1_dict.to_network()
    memb1_dict.to_dict()
    memb1_dict.to_sparse()
    memb1_dict.reverse_mapping()
    memb1_dict.getcollection(0)
    memb1.getcollection(memb1.max_collection_id-1)
    memb1_dict.collections_id
    memb1_dict.n_collections
    memb1_dict.n_elements
    memb1_dict.membership
    memb1.shape
    memb1.max_collection_id

    memb2 = Membership(np.random.randint(0, 20, 100))
    memb2.to_network()
    memb2.to_dict()
    memb2.to_sparse()
    memb2.reverse_mapping()
    memb2.getcollection(0)
    memb2.getcollection(memb2.max_collection_id-1)
    memb2.collections_id
    memb2.n_collections
    memb2.n_elements
    memb2.membership
    str(memb2)
    memb2[0]
    memb2 == 0
    for e in memb2:
        pass
    memb2.shape
    memb2.max_collection_id

    sparse = randint_sparse_matrix(0.2, (200, 100), 1)
    memb3 = Membership(sparse)
    memb3.to_dict()
    memb3.to_network()
    memb3.to_sparse()
    memb3.reverse_mapping()
    memb3.getcollection(0)
    memb3.getcollection(memb3.max_collection_id-1)
    memb3.collections_id
    memb3.n_collections
    memb3.n_elements
    memb3.membership
    str(memb3)
    memb3[0]
    memb3 == 0
    for e in memb3:
        pass
    memb3.shape
    memb3.max_collection_id

    relations = [[np.random.randint(10)] for i in range(50)]
    memb4 = Membership(relations)
    memb4.to_network()
    memb4.to_dict()
    memb4.to_sparse()
    memb4.reverse_mapping()
    memb4.getcollection(0)
    memb4.getcollection(memb4.max_collection_id-1)
    memb4.collections_id
    memb4.n_collections
    memb4.n_elements
    memb4.membership
    str(memb4)
    memb4[0]
    memb4 == 0
    for e in memb4:
        pass
    memb4.shape
    memb4.max_collection_id

    relations[0].append(0)
    memb5 = Membership(relations)
    memb5.to_network()
    memb5.to_dict()
    memb5.to_sparse()
    memb5.reverse_mapping()
    memb5.getcollection(0)
    memb5.getcollection(memb5.max_collection_id-1)
    memb5.collections_id
    memb5.n_collections
    memb5.n_elements
    memb5.membership
    str(memb5)
    memb5[0]
    memb5 == 0
    for e in memb5:
        pass
    memb5.shape
    memb5.max_collection_id

    relations[0].append(0)
    memb6 = Membership((sparse, np.arange(100)))
    memb6.to_network()
    memb6.to_dict()
    memb6.to_sparse()
    memb6.reverse_mapping()
    memb6.getcollection(0)
    memb6.getcollection(memb6.max_collection_id-1)
    memb6.collections_id
    memb6.n_collections
    memb6.n_elements
    memb6.membership
    str(memb6)
    memb6[0]
    memb6 == 0
    for e in memb6:
        pass
    memb6.shape
    memb6.max_collection_id

    ###########################################################################
    ############################### Mapper vals ###############################
    ###########################################################################
    feat_arr0 = np.random.randint(0, 20, 100)

    def map_vals_i_t(s, i, k):
        k_p, k_i = s.features[0]._map_perturb(k)
        i_n = s.features[0]._perturbators[k_p].apply2indice(i, k_i)
        return feat_arr0[i_n].ravel()[0]
    map_vals_i = create_mapper_vals_i(map_vals_i_t, feat_arr0)

    # correlation
    map_vals_i = create_mapper_vals_i('correlation', feat_arr0)
    map_vals_i = create_mapper_vals_i(('correlation', 100, 20), feat_arr0)
    map_vals_i = create_mapper_vals_i('matrix')
    map_vals_i = create_mapper_vals_i('matrix', feat_arr0)
    map_vals_i = create_mapper_vals_i('matrix', feat_arr0.reshape((100, 1)))
    map_vals_i = create_mapper_vals_i(('matrix', 20), list(feat_arr0))
    map_vals_i = create_mapper_vals_i(('matrix', 100, 20), len(feat_arr0))
    map_vals_i = create_mapper_vals_i('matrix', slice(0, 100, 1))
    map_vals_i.set_prefilter(slice(0, 100, 1))
    map_vals_i.set_prefilter(10)
    map_vals_i.set_prefilter([0, 2])
    map_vals_i.set_sptype('correlation')
    map_vals_i[(None, [0], 0)]
    map_vals_i.apply(None, [0], 0)

    map_vals_i = create_mapper_vals_i(map_vals_i)
    map_vals_i = create_mapper_vals_i(feat_arr0.reshape(100, 1))
    map_vals_i = create_mapper_vals_i(None)

    map_vals_i = Map_Vals_i(100)
    map_vals_i = Map_Vals_i((1000, 20))
    map_vals_i = Map_Vals_i(map_vals_i)
    map_vals_i = Map_Vals_i(memb1)

    ## Stress testing
    try:
        boolean = False
        map_vals_i = create_mapper_vals_i('correlation')
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")

    ###########################################################################
    ############################## Spdesc_mapper ##############################
    ###########################################################################
    #selector1 = Sp_DescriptorMapper()
    # Core mappers reused across all selector tests below: an integer array,
    # a scalar-returning function and a 2-tuple-returning function.
    mapper_array = np.random.randint(0, 5, 100)
    mapper_function = lambda idx: mapper_array[idx]
    mapper_function1 = lambda idx: tuple([mapper_array[idx]]*2)

    # Keyword combinations accepted by the selector constructors
    # (mapper alone, with precomputation, with explicit n_in/n_out sizes).
    pos_mappers = [{'mapper': mapper_array}, {'mapper': mapper_function},
                   {'mapper': mapper_function, 'compute': True},
                   {'mapper': mapper_function, 'n_in': 5, 'n_out': 6},
                   {'mapper': mapper_function1, 'n_in': 5, 'n_out': [3, 4]},
                   {'mapper': mapper_function1, 'n_in': 5, 'compute': True}]

    for p in pos_mappers:
        comb_selector = DummySelector(**p)
#        comb_selector = GeneralSelector(**p)
        comb_selector[0]

        # Impossible cases
        try:
            ## Non-integer key getitem
            boolean = False
            map_vals_i = comb_selector[.2]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("The test has to halt here.")

        ## Functions
        DummySelector(comb_selector)
        comb_selector[0]
        comb_selector.set_pars(2, lambda x: (0, 0), n_out=[1, 1])
        comb_selector[0]

    # Three differently-configured selectors wrapped into one collection.
    selector1 = DummySelector(mapper_array)
    selector2 = DummySelector(lambda idx: mapper_array[idx], n_in=100, n_out=3)
    selector3 = DummySelector(lambda idx: [mapper_array[idx]]*3, n_in=100)
    sl = BaseCollectionSelectors([selector1, selector2, selector3])

    # Spatial retriever selector
    # Valid constructions: 2-column array, two arrays, tuple-returning
    # function, two scalar functions, another selector, static tuple and
    # two static integers. Each is indexed three ways afterwards.
    sel = Spatial_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    # FIX: the two stress blocks below used bare `except:`; narrowed to
    # `except Exception:` so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        ## Different types of core mappers
        boolean = False
        Spatial_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        boolean = False
        Spatial_RetrieverSelector(mapper_array)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")

    # FeatureInd retriever selector
    # Same battery of valid constructions as for the spatial selector.
    sel = FeatInd_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    # FIX: narrowed bare `except:` to `except Exception:` in both blocks.
    try:
        ## Different types of core mappers
        boolean = False
        FeatInd_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        # (comment fixed: this case feeds a 10-column array, analogous to
        # the wrong-shape case above, not a mixed-mapper-type case)
        boolean = False
        FeatInd_RetrieverSelector(np.array([mapper_array]*10).T)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")

    # Descriptor retriever selector
    # (header fixed: this section tests Desc_RetrieverSelector, the old
    # "FeatureInd" comment was a copy-paste leftover)
    sel = Desc_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    # FIX: narrowed bare `except:` to `except Exception:` in both blocks.
    try:
        ## Different types of core mappers
        boolean = False
        Desc_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        boolean = False
        Desc_RetrieverSelector(np.array([mapper_array]*10).T)
        boolean = True
        raise Exception("It has to halt here.")
    except Exception:
        if boolean:
            raise Exception("The test has to halt here.")

    # Argument combinations shared by the combined-selector tests below
    # (comment fixed: the old "FeatureInd retriever selector" header was a
    # copy-paste leftover from the sections above).
    pos_selt = [(np.array([mapper_array]*2).T, ), (mapper_array, mapper_array),
                (mapper_function1, ), (mapper_function, mapper_function)]

    def test_getitem(selector):
        selector[0]
        selector[0, 1]
        selector[[0, 1]]
        try:
            boolean = False
            selector[0.7]
            boolean = True
        except:
            if boolean:
                raise Exception("It has to halt here.")

    for i in range(len(pos_selt)):
        ## Instantiation
        # Build one spatial selector, two feature-index selectors and one
        # descriptor selector from the same argument tuple, then combine.
        sel0 = Spatial_RetrieverSelector(*pos_selt[i])
        sel1 = FeatInd_RetrieverSelector(*pos_selt[i])
        sel2 = FeatInd_RetrieverSelector(*pos_selt[i])
        sel3 = Desc_RetrieverSelector(*pos_selt[i])
        selfeat = Feat_RetrieverSelector(sel1, sel2, sel3)
        test_getitem(selfeat)
        ## Partial information instantiation
        # Re-wrapping an existing combined selector with None placeholders.
        selfeat = Feat_RetrieverSelector(selfeat, None, None)
        test_getitem(selfeat)
        ### Testing Sp_DescriptorSelector
        sel = Sp_DescriptorSelector(sel0, selfeat)
        test_getitem(sel)
        ### Testing Sp_DescriptorSelector with partial instantiation
        # Re-wrap the descriptor selector itself.
        sel = Sp_DescriptorSelector(sel)
        test_getitem(sel)

    #### Individual tests
    ## Partial information instantiation
    # Static 6-tuple with None placeholders for the other two components.
    selfeat = Feat_RetrieverSelector((0, 0, 0, 0, 0, 0), None, None)
    # Getitem
    test_getitem(selfeat)
#    ## Partial information instantiation
    ## Instantiation
    # NOTE(review): sel1/sel2/sel3 leak out of the loop above — these lines
    # deliberately re-wrap the selectors built in its last iteration.
    sel1 = FeatInd_RetrieverSelector(sel1)
    sel2 = FeatInd_RetrieverSelector(sel2)
    sel3 = Desc_RetrieverSelector(sel3)
    selfeat = Feat_RetrieverSelector(sel1, sel2, sel3)
    test_getitem(selfeat)
    # Instantiation from a plain (100, 6) array of zeros.
    selfeat = Feat_RetrieverSelector(np.zeros((100, 6)))
    test_getitem(selfeat)
    # Instantiation from a (function, parameters-dict) pair.
    selfeat = Feat_RetrieverSelector((lambda idx: (0, 0, 0, 0, 0, 0),
                                     {'n_in': 200}))
    test_getitem(selfeat)