# The package-specific helpers exercised below (randint_sparse_matrix,
# SpatialElementsCollection, Membership, the selector and descriptor
# classes, ...) are assumed to be importable from the library under test;
# their imports are not part of this fragment.
import numpy as np
from itertools import product


def test():
    ###########################################################################
    ############################# Artificial data #############################
    ###########################################################################
    ## Random relations
    n, density, shape = 100, 0.1, (10, 10)
    randint_sparse_matrix(density, shape, maxvalue=10)
    generate_randint_relations(density, shape, p0=0., maxvalue=1)
    generate_random_relations_cutoffs(n, 0.5, 0.9, True, 'network')
    generate_random_relations_cutoffs(n, 0.5, 0.9, False, 'network')
    generate_random_relations_cutoffs(n, 0.5, 0.9, True, 'sparse')

    n_elements, n_collections = 100, 10
    random_membership(n_elements, n_collections, multiple=True)
    random_membership(n_elements, n_collections, multiple=False)

    ## Random points
    n_points, n_dim, funct = 100, 2, np.cos
    random_transformed_space_points(n_points, n_dim, funct)
    random_transformed_space_points(n_points, n_dim, None)
    random_space_points(n_points, n_dim)

    ## Artificial grid data
    create_random_image(shape, n_modes=1)
    create_random_image(shape, n_modes=3)

    ## Artificial regions
    n_poly = 10
    random_shapely_polygon(bounding=(None, None), n_edges=0)
    random_shapely_polygon(bounding=((0., 1.), None), n_edges=0)
    random_shapely_polygon(bounding=(None, None), n_edges=4)
    random_shapely_polygons(n_poly, bounding=(None, None), n_edges=0)

    ## Artificial random features
    n, n_feats = np.random.randint(10, 1000), np.random.randint(2, 20)
    n_feats2 = [np.random.randint(2, 20) for i in range(n_feats)]
    ks = np.random.randint(1, 20)

    feats = continuous_array_features(n, n_feats)
    assert(len(feats.shape) == 2)
    feats = categorical_array_features(n, n_feats)
    assert(len(feats.shape) == 2)
    feats = categorical_array_features(n, n_feats2)
    assert(len(feats.shape) == 2)
    feats = continuous_dict_features(n, n_feats)
    assert(type(feats[0]) == dict)
    feats = categorical_dict_features(n, n_feats)
    assert(type(feats[0]) == dict)

    feats = continuous_agg_array_features(n, n_feats, ks)
    assert(len(feats.shape) == 3)
    feats = categorical_agg_array_features(n, n_feats, ks)
    assert(len(feats.shape) == 3)
    feats = categorical_agg_array_features(n, n_feats2, ks)
    assert(len(feats.shape) == 3)
    feats = continuous_agg_dict_features(n, n_feats, ks)
    assert(type(feats[0][0]) == dict)
    feats = categorical_agg_dict_features(n, n_feats, ks)
    assert(type(feats[0][0]) == dict)

    ## Artificial measures
    n_vals_i, n_iss = np.random.randint(2, 30), np.random.randint(1, 30)

    create_empty_features_array(n_feats, n_iss, ks)
    create_empty_features_dict(n_feats, n_iss, ks)
    create_features_i_array(n_feats, n_iss, ks)
    create_features_i_dict(n_feats, n_iss, ks)

    create_vals_i(n_iss, n_vals_i, ks)

    create_empty_array(ks, n_vals_i, n_feats)
    create_empty_append(ks, n_iss, n_feats)
    create_empty_replacelist(ks, n_iss, n_feats)

    create_artificial_measure_array(ks, n_vals_i, n_feats)
    create_artificial_measure_append(ks, n_vals_i, n_feats)
    create_artificial_measure_replacelist(ks, n_vals_i, n_feats)
    create_artificial_measure_replacelist(ks, n_vals_i, n_feats, True)

    ###########################################################################
    ############################ Spatial Elements #############################
    ###########################################################################
    ## Parameters
    # ``m`` (a free-text string) is not defined in this fragment; a small
    # placeholder with the same role is used here.
    m = "spatial elements can be built from words\nwords come from any plain text"
    words = m.replace('\n', ' ').replace('.', ' ').strip().split(" ")
    ids = [hash(e) for e in words]
    # Bind ``i`` at definition time so each lambda appends its own word.
    functs = [lambda x, i=i: str(x) + words[i] for i in range(len(words))]
    regs = random_shapely_polygons(10, bounding=(None, None), n_edges=0)

    ## Testing Elements
    words_id = np.arange(len(words))
    words_elements = SpatialElementsCollection(words, words_id)
    words_elements2 = SpatialElementsCollection(words, list(words_id))
    words_elements = SpatialElementsCollection(words)
    ids_elements = SpatialElementsCollection(ids)
    functs_elements = SpatialElementsCollection(functs)
    polys_elements = SpatialElementsCollection(regs, np.arange(len(regs)))

    # Testing error instantiation
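    # The ``flag_error`` blocks below are a hand-rolled "assert raises": the
    # call under test is expected to raise before ``flag_error`` is set back
    # to True; if it returns normally, the explicit raise is reached and then
    # re-raised inside the except clause, failing the test. Equivalent sketch
    # (illustration only, not used below):
    #
    #   try:
    #       SpatialElementsCollection(0)   # must raise
    #   except Exception:
    #       pass
    #   else:
    #       raise Exception("It has to halt here.")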
    try:
        flag_error = False
        SpatialElementsCollection(0)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        SpatialElementsCollection(words, np.arange(len(words)+1))
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        tags = list(range(len(words))) + [len(words)-1]
        SpatialElementsCollection(words, tags)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        SpatialElementsCollection(words, 5)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    # Class functions
    words_elements[0]
    try:
        flag_error = False
        words_elements[len(words_elements)]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        words_elements2[words[0]]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements.elements_id = None
    try:
        flag_error = False
        words_elements[words[0]]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements[0]

    for e in words_elements:
        pass

    for e in words_elements2:
        pass

    words_elements == words[0]
    relabel_map = np.arange(len(words))
    try:
        flag_error = False
        words_elements.relabel_elements(range(len(words)))
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    words_elements.relabel_elements(relabel_map)
    relabel_map = dict(zip(relabel_map, relabel_map))
    words_elements.relabel_elements(relabel_map)

    ids_elements[0]
    for e in ids_elements:
        pass
    ids_elements == words[0]

    functs_elements[0]
    for e in functs_elements:
        pass
    functs_elements == words[0]

    # Polygon collections
    polys_elements == polys_elements[0]

    ############################ Locations Object #############################
    ###########################################################################
    ## Locations
    locs1 = np.random.random((100, 5))
    locs2 = np.random.random((100, 1))
    locs3 = np.random.random(100)
    locs4 = np.random.random((100, 2))
    sptrans = lambda x, p: np.sin(x)

    class Translocs:
        def __init__(self):
            pass

        def apply_transformation(self, x, p={}):
            return sptrans(x, p)
    sptrans2 = Translocs()
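    # Both forms are passed to space_transformation below; the bare callable
    # and the wrapper object compute the same transformation.
    assert(np.allclose(sptrans(locs4, {}), sptrans2.apply_transformation(locs4)))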

    lspcol = SpatialElementsCollection(locs1, np.arange(len(locs1)))
    lspcol == lspcol[0]

    try:
        flag_error = False
        locs = Locations(locs1, 5)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        locs = Locations(locs1, list(range(len(locs1)+1)))
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        tags = list(range(len(locs1)))
        tags[0] = 1
        locs = Locations(locs1, tags)
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    locs = Locations(locs1)
    locsbis = Locations(locs1, list(range(len(locs1))))
    for l in locs:
        pass
    try:
        flag_error = False
        locs[-1]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")
    try:
        flag_error = False
        locsbis[slice(0, 9)]
        flag_error = True
        raise Exception("It has to halt here.")
    except:
        if flag_error:
            raise Exception("It has to halt here.")

    locsbis[0]
    locs[0]
    assert((locs == locs1[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})
    locs._check_coord(0)
    locs._check_coord(locs[0])
    locs._check_coord([0, 3])
    locs._check_coord(np.random.random(locs.locations.shape[1]))
    locs._check_coord([locs1[0], locs1[3]])
    locs._check_coord(None)
    locs.in_radio(locs[0], 0.2)
    locs.data

    locs = Locations(locs2)
    assert((locs == locs2[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})
    locs.in_manhattan_d(locs[0], 0.2)

    locs = Locations(locs3)
    assert((locs == locs3[0])[0])
    locs.compute_distance(locs[1])
    locs.space_transformation(sptrans, {})
    locs.space_transformation(sptrans2, {})

    locs = Locations(locs4)
    locs.in_block_distance_d(np.random.random((1, 2)), 0.2)

    ###########################################################################
    ############################### Membership ################################
    ###########################################################################
    # artificial data
    random_membership(10, 20, True)
    random_membership(10, 20, False)

    n_in, n_out = 100, 20
    relations = [np.unique(np.random.randint(0, n_out,
                                             np.random.randint(n_out)))
                 for i in range(n_in)]
    relations = [list(e) for e in relations]
    memb1 = Membership(relations)

    memb1.to_network()
    memb1.to_dict()
    memb1.to_sparse()
    memb1.reverse_mapping()
    memb1.getcollection(0)
    memb1.getcollection(memb1.max_collection_id-1)
    memb1.collections_id
    memb1.n_collections
    memb1.n_elements
    memb1.membership
    str(memb1)
    memb1[0]
    memb1 == 0
    for e in memb1:
        pass

#    op2 = np.all([t == dict for t in types])
    # Same membership information, now as one dict per element:
    # {collection_id: {'membership': 1}, ...}
    relations = [dict(zip(e, len(e)*[{'membership': 1}])) for e in relations]
    memb1_dict = Membership(relations)
    memb1_dict.to_network()
    memb1_dict.to_dict()
    memb1_dict.to_sparse()
    memb1_dict.reverse_mapping()
    memb1_dict.getcollection(0)
    memb1_dict.getcollection(memb1_dict.max_collection_id-1)
    memb1_dict.collections_id
    memb1_dict.n_collections
    memb1_dict.n_elements
    memb1_dict.membership
    memb1.shape
    memb1.max_collection_id

    memb2 = Membership(np.random.randint(0, 20, 100))
    memb2.to_network()
    memb2.to_dict()
    memb2.to_sparse()
    memb2.reverse_mapping()
    memb2.getcollection(0)
    memb2.getcollection(memb2.max_collection_id-1)
    memb2.collections_id
    memb2.n_collections
    memb2.n_elements
    memb2.membership
    str(memb2)
    memb2[0]
    memb2 == 0
    for e in memb2:
        pass
    memb2.shape
    memb2.max_collection_id

    sparse = randint_sparse_matrix(0.2, (200, 100), 1)
    memb3 = Membership(sparse)
    memb3.to_dict()
    memb3.to_network()
    memb3.to_sparse()
    memb3.reverse_mapping()
    memb3.getcollection(0)
    memb3.getcollection(memb3.max_collection_id-1)
    memb3.collections_id
    memb3.n_collections
    memb3.n_elements
    memb3.membership
    str(memb3)
    memb3[0]
    memb3 == 0
    for e in memb3:
        pass
    memb3.shape
    memb3.max_collection_id

    relations = [[np.random.randint(10)] for i in range(50)]
    memb4 = Membership(relations)
    memb4.to_network()
    memb4.to_dict()
    memb4.to_sparse()
    memb4.reverse_mapping()
    memb4.getcollection(0)
    memb4.getcollection(memb4.max_collection_id-1)
    memb4.collections_id
    memb4.n_collections
    memb4.n_elements
    memb4.membership
    str(memb4)
    memb4[0]
    memb4 == 0
    for e in memb4:
        pass
    memb4.shape
    memb4.max_collection_id

    relations[0].append(0)
    memb5 = Membership(relations)
    memb5.to_network()
    memb5.to_dict()
    memb5.to_sparse()
    memb5.reverse_mapping()
    memb5.getcollection(0)
    memb5.getcollection(memb5.max_collection_id-1)
    memb5.collections_id
    memb5.n_collections
    memb5.n_elements
    memb5.membership
    str(memb5)
    memb5[0]
    memb5 == 0
    for e in memb5:
        pass
    memb5.shape
    memb5.max_collection_id

    relations[0].append(0)
    memb6 = Membership((sparse, np.arange(100)))
    memb6.to_network()
    memb6.to_dict()
    memb6.to_sparse()
    memb6.reverse_mapping()
    memb6.getcollection(0)
    memb6.getcollection(memb6.max_collection_id-1)
    memb6.collections_id
    memb6.n_collections
    memb6.n_elements
    memb6.membership
    str(memb6)
    memb6[0]
    memb6 == 0
    for e in memb6:
        pass
    memb6.shape
    memb6.max_collection_id

    ###########################################################################
    ############################### Mapper vals ###############################
    ###########################################################################
    feat_arr0 = np.random.randint(0, 20, 100)
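    # Custom mapper: map the element index ``i`` through the k-th
    # perturbation of the features manager ``s`` and return the corresponding
    # category from ``feat_arr0``.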

    def map_vals_i_t(s, i, k):
        k_p, k_i = s.features[0]._map_perturb(k)
        i_n = s.features[0]._perturbators[k_p].apply2indice(i, k_i)
        return feat_arr0[i_n].ravel()[0]
    map_vals_i = create_mapper_vals_i(map_vals_i_t, feat_arr0)

    # correlation
    map_vals_i = create_mapper_vals_i('correlation', feat_arr0)
    map_vals_i = create_mapper_vals_i(('correlation', 100, 20), feat_arr0)
    map_vals_i = create_mapper_vals_i('matrix')
    map_vals_i = create_mapper_vals_i('matrix', feat_arr0)
    map_vals_i = create_mapper_vals_i('matrix', feat_arr0.reshape((100, 1)))
    map_vals_i = create_mapper_vals_i(('matrix', 20), list(feat_arr0))
    map_vals_i = create_mapper_vals_i(('matrix', 100, 20), len(feat_arr0))
    map_vals_i = create_mapper_vals_i('matrix', slice(0, 100, 1))
    map_vals_i.set_prefilter(slice(0, 100, 1))
    map_vals_i.set_prefilter(10)
    map_vals_i.set_prefilter([0, 2])
    map_vals_i.set_sptype('correlation')
    map_vals_i[(None, [0], 0)]
    map_vals_i.apply(None, [0], 0)

    map_vals_i = create_mapper_vals_i(map_vals_i)
    map_vals_i = create_mapper_vals_i(feat_arr0.reshape(100, 1))
    map_vals_i = create_mapper_vals_i(None)

    map_vals_i = Map_Vals_i(100)
    map_vals_i = Map_Vals_i((1000, 20))
    map_vals_i = Map_Vals_i(map_vals_i)
    map_vals_i = Map_Vals_i(memb1)

    ## Stress testing
    try:
        boolean = False
        map_vals_i = create_mapper_vals_i('correlation')
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")

    ###########################################################################
    ############################## Spdesc_mapper ##############################
    ###########################################################################
    #selector1 = Sp_DescriptorMapper()
    mapper_array = np.random.randint(0, 5, 100)
    mapper_function = lambda idx: mapper_array[idx]
    mapper_function1 = lambda idx: tuple([mapper_array[idx]]*2)

    pos_mappers = [{'mapper': mapper_array}, {'mapper': mapper_function},
                   {'mapper': mapper_function, 'compute': True},
                   {'mapper': mapper_function, 'n_in': 5, 'n_out': 6},
                   {'mapper': mapper_function1, 'n_in': 5, 'n_out': [3, 4]},
                   {'mapper': mapper_function1, 'n_in': 5, 'compute': True}]

    for p in pos_mappers:
        comb_selector = DummySelector(**p)
#        comb_selector = GeneralSelector(**p)
        comb_selector[0]

        # Impossible cases
        try:
            ## Non-integer key getitem
            boolean = False
            map_vals_i = comb_selector[.2]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("The test has to halt here.")

        ## Functions
        DummySelector(comb_selector)
        comb_selector[0]
        comb_selector.set_pars(2, lambda x: (0, 0), n_out=[1, 1])
        comb_selector[0]

    selector1 = DummySelector(mapper_array)
    selector2 = DummySelector(lambda idx: mapper_array[idx], n_in=100, n_out=3)
    selector3 = DummySelector(lambda idx: [mapper_array[idx]]*3, n_in=100)
    sl = BaseCollectionSelectors([selector1, selector2, selector3])

    # Spatial retriever selector
    sel = Spatial_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Spatial_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    try:
        ## Different types of core mappers
        boolean = False
        Spatial_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        boolean = False
        Spatial_RetrieverSelector(mapper_array)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")

    # FeatureInd retriever selector
    sel = FeatInd_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = FeatInd_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    try:
        ## Different types of core mappers
        boolean = False
        FeatInd_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        boolean = False
        FeatInd_RetrieverSelector(np.array([mapper_array]*10).T)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")

    # Desc retriever selector
    sel = Desc_RetrieverSelector(np.array([mapper_array]*2).T)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_array, mapper_array)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_function1)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(mapper_function, mapper_function)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(sel)
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector((0, 0))
    sel[0], sel[0, 1], sel[[0, 1]]
    sel = Desc_RetrieverSelector(0, 0)
    sel[0], sel[0, 1], sel[[0, 1]]
    try:
        ## Different types of core mappers
        boolean = False
        Desc_RetrieverSelector(mapper_array, mapper_function)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")
    try:
        ## Not correct shape
        boolean = False
        Desc_RetrieverSelector(np.array([mapper_array]*10).T)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("The test has to halt here.")

    # Combined Feat_RetrieverSelector / Sp_DescriptorSelector
    pos_selt = [(np.array([mapper_array]*2).T, ), (mapper_array, mapper_array),
                (mapper_function1, ), (mapper_function, mapper_function)]

    def test_getitem(selector):
        selector[0]
        selector[0, 1]
        selector[[0, 1]]
        try:
            boolean = False
            selector[0.7]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")

    for i in range(len(pos_selt)):
        ## Instantiation
        sel0 = Spatial_RetrieverSelector(*pos_selt[i])
        sel1 = FeatInd_RetrieverSelector(*pos_selt[i])
        sel2 = FeatInd_RetrieverSelector(*pos_selt[i])
        sel3 = Desc_RetrieverSelector(*pos_selt[i])
        selfeat = Feat_RetrieverSelector(sel1, sel2, sel3)
        test_getitem(selfeat)
        ## Partial information instantiation
        selfeat = Feat_RetrieverSelector(selfeat, None, None)
        test_getitem(selfeat)
        ### Testing Sp_DescriptorSelector
        sel = Sp_DescriptorSelector(sel0, selfeat)
        test_getitem(sel)
        ### Testing Sp_DescriptorSelector with partial instantiation
        sel = Sp_DescriptorSelector(sel)
        test_getitem(sel)

    #### Individual tests
    ## Partial information instantiation
    selfeat = Feat_RetrieverSelector((0, 0, 0, 0, 0, 0), None, None)
    # Getitem
    test_getitem(selfeat)
#    ## Partial information instantiation
    ## Instantiation
    sel1 = FeatInd_RetrieverSelector(sel1)
    sel2 = FeatInd_RetrieverSelector(sel2)
    sel3 = Desc_RetrieverSelector(sel3)
    selfeat = Feat_RetrieverSelector(sel1, sel2, sel3)
    test_getitem(selfeat)
    selfeat = Feat_RetrieverSelector(np.zeros((100, 6)))
    test_getitem(selfeat)
    selfeat = Feat_RetrieverSelector((lambda idx: (0, 0, 0, 0, 0, 0),
                                     {'n_in': 200}))
    test_getitem(selfeat)


def test_descriptormodels():
    n = 100
    locs = np.random.random((n, 2))*100
    feat_arr0 = np.random.randint(0, 20, (n, 1))
    feat_arr1 = np.random.random((n, 10))

    ########################### Auxdescriptormodels ###########################
    ###########################################################################
    #################################
    #### Reducer testing
    def creation_agg(listfeats):
        n_iss = np.random.randint(1, 10)
        if listfeats:
            aggdesc = []
            for i in range(n_iss):
                keys = np.unique(np.random.randint(0, 20, 10))
                values = np.random.random(len(keys))
                aggdesc.append(dict(zip(keys, values)))
        else:
            n_feats = 20
            aggdesc = np.random.random((n_iss, n_feats))
        p_aggpos = None
        return aggdesc, p_aggpos
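    # ``creation_agg`` builds per-iss aggregated descriptors in either of the
    # two supported formats (list of dicts, or an (n_iss, n_feats) array); it
    # is currently only referenced from the commented-out reducer calls below.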

    nnei, n_feats = np.random.randint(1, 1000), np.random.randint(1, 20)
    n_feats2 = [np.random.randint(1, 20) for i in range(n_feats)]
    n_iss = np.random.randint(1, 20)
    point_pos = [None]*n_iss

    ### Tests
    # Example objects

    pointfeats_arrayarray0 = [continuous_array_features(nnei, n_feats)]*n_iss
    pointfeats_listarray0 = np.array(pointfeats_arrayarray0)
    pointfeats_arrayarray1 = [categorical_array_features(nnei, n_feats)]*n_iss
    pointfeats_listarray1 = np.array(pointfeats_arrayarray1)
    pointfeats_arrayarray2 = [categorical_array_features(nnei, n_feats2)]*n_iss
    pointfeats_listarray2 = np.array(pointfeats_arrayarray2)
    pointfeats_listdict0 = [continuous_dict_features(nnei, n_feats)]*n_iss
    pointfeats_listdict1 = [categorical_dict_features(nnei, n_feats)]*n_iss
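    # Layout note (assumed from the constructors above): the *arrayarray*
    # objects are length-n_iss lists of per-neighbourhood feature arrays, the
    # *listarray* variants stack the same data into a single ndarray, and the
    # *listdict* variants hold one dict of features per neighbour.
    assert(len(pointfeats_arrayarray0) == n_iss)
    assert(pointfeats_listarray0.shape[0] == n_iss)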

#    pointfeats_arrayarray0 = continuous_agg_array_features(n, n_feats, ks)
#    pointfeats_listarray0 = list(pointfeats_arrayarray0)
#    pointfeats_arrayarray1 = categorical_agg_array_features(n, n_feats, ks)
#    pointfeats_listarray1 = list(pointfeats_arrayarray1)
#    pointfeats_arrayarray2 = categorical_agg_array_features(n, n_feats2, ks)
#    pointfeats_listarray2 = list(pointfeats_arrayarray2)
#    pointfeats_listdict0 = continuous_agg_dict_features(n, n_feats, ks)
#    pointfeats_listdict1 = categorical_agg_dict_features(n, n_feats, ks)

    #################################
    #### Reducer
    ###############

    desc = sum_reducer(pointfeats_arrayarray0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_listarray0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_arrayarray1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_listarray1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_arrayarray2, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_listarray2, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = sum_reducer(pointfeats_listdict0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == dict)
    desc = sum_reducer(pointfeats_listdict1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == dict)

    desc = avg_reducer(pointfeats_arrayarray0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_listarray0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_arrayarray1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_listarray1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_arrayarray2, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_listarray2, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == np.ndarray)
    desc = avg_reducer(pointfeats_listdict0, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == dict)
    desc = avg_reducer(pointfeats_listdict1, point_pos)
    assert(type(desc) == list)
    assert(type(desc[0]) == dict)

#    aggdesc, p_aggpos = creation_agg(True)
#    sum_reducer(aggdesc, p_aggpos)
#    avg_reducer(aggdesc, p_aggpos)
#    aggdesc, p_aggpos = creation_agg(False)
#    sum_reducer(aggdesc, p_aggpos)
#    avg_reducer(aggdesc, p_aggpos)
#    aggdesc, p_aggpos = creation_agg(False)
#    sum_reducer(list(aggdesc), p_aggpos)
#    avg_reducer(list(aggdesc), p_aggpos)

    #################################
    #### Outformatters
    ###################
    def creation_outformatter():
        n_iss = np.random.randint(1, 10)
        outfeats = [str(e) for e in np.arange(20)]
        feats = []
        for i in range(n_iss):
            keys = np.unique(np.random.randint(0, 20, 10))
            values = np.random.random(len(keys))
            feats.append(dict(zip(keys, values)))
        return feats, outfeats
    _out = ['ndarray', 'dict']
    feats, outfeats = creation_outformatter()
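    # ``feats`` is a list of {feature_index: value} dicts (one per iss) and
    # ``outfeats`` the 20 feature names the formatter can emit.
    assert(all(type(f) == dict for f in feats))
    assert(len(outfeats) == 20)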
    count_out_formatter_general(feats, outfeats, _out[0], 0)
    count_out_formatter_general(feats, outfeats, _out[1], 0)
    null_out_formatter(feats, outfeats, _out[0], 0)
    try:
        boolean = False
        count_out_formatter_general(feats, outfeats, '', 0)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
#    try:
#        # In the future probably has to halt
#        boolean = False
#        array_feats = np.random.random((10, 1))
#        count_out_formatter_general(array_feats, outfeats, _out[1], 0)
#        boolean = True
#        raise Exception("It has to halt here.")
#    except:
#        if boolean:
#            raise Exception("It has to halt here.")

    #################################
    #### Featurenames
    #################
    def creation_features(listfeats):
        n_iss = np.random.randint(1, 10)
        if listfeats:
            feats = []
            for i in range(n_iss):
                keys = np.unique(np.random.randint(0, 20, 10))
                values = np.random.random(len(keys))
                feats.append(dict(zip(keys, values)))
        else:
            feats = np.random.randint(0, 20, n_iss).reshape((n_iss, 1))
        return feats
    # List feats
    features_o = creation_features(True)
    counter_featurenames(features_o)
    list_featurenames(features_o)
    try:
        boolean = False
        array_featurenames(features_o)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    # Array feats
    features_o = creation_features(False)
    counter_featurenames(features_o)
    array_featurenames(features_o)

    try:
        boolean = False
        list_featurenames(features_o)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        counter_featurenames(None)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        array_featurenames(None)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")

    #################################
    #### Characterizers
    ###################
    # We need 2nd level features so we use aggregation ones
    # [iss][nei]{feats} or [iss](nei, feats) or (iss, nei, feats)
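    # Tiny hand-built instance of the three layouts named above (assumed
    # shapes, illustration only; names are local to this sketch):
    # two iss, one neighbour, two features.
    _ex_listdict = [[{'f0': 1., 'f1': 2.}], [{'f0': .5, 'f1': 0.}]]
    _ex_listarray = [np.array([[1., 2.]]), np.array([[.5, 0.]])]
    _ex_arrayarray = np.array([[[1., 2.]], [[.5, 0.]]])
    assert(_ex_arrayarray.shape == (2, 1, 2))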

    point_pos = None
    n, n_feats = np.random.randint(10, 1000), np.random.randint(1, 20)
    n_feats2 = [np.random.randint(1, 20) for i in range(n_feats)]
    ks = np.random.randint(1, 20)

    ### Tests
    # Example objects
    pointfeats_arrayarray0 = continuous_agg_array_features(n, n_feats, ks)
    pointfeats_listarray0 = list(pointfeats_arrayarray0)
    pointfeats_arrayarray1 = categorical_agg_array_features(n, n_feats, ks)
    pointfeats_listarray1 = list(pointfeats_arrayarray1)
    pointfeats_arrayarray2 = categorical_agg_array_features(n, n_feats2, ks)
    pointfeats_listarray2 = list(pointfeats_arrayarray2)
    pointfeats_listdict0 = continuous_agg_dict_features(n, n_feats, ks)
    pointfeats_listdict1 = categorical_agg_dict_features(n, n_feats, ks)

    # Counter
    characterizer_1sh_counter(pointfeats_arrayarray0, point_pos)
    characterizer_1sh_counter(pointfeats_arrayarray1, point_pos)
    characterizer_1sh_counter(pointfeats_listarray0, point_pos)
    characterizer_1sh_counter(pointfeats_listarray1, point_pos)

    # Summer
    characterizer_summer(pointfeats_arrayarray0, point_pos)
    characterizer_summer(pointfeats_listarray0, point_pos)
    characterizer_summer(pointfeats_arrayarray1, point_pos)
    characterizer_summer(pointfeats_listarray1, point_pos)
    characterizer_summer(pointfeats_arrayarray2, point_pos)
    characterizer_summer(pointfeats_listarray2, point_pos)
    characterizer_summer(pointfeats_listdict0, point_pos)
    characterizer_summer(pointfeats_listdict1, point_pos)

    characterizer_summer_array(pointfeats_arrayarray0, point_pos)
    characterizer_summer_array(pointfeats_listarray0, point_pos)
    characterizer_summer_array(pointfeats_arrayarray1, point_pos)
    characterizer_summer_array(pointfeats_listarray1, point_pos)
    characterizer_summer_array(pointfeats_arrayarray2, point_pos)
    characterizer_summer_array(pointfeats_listarray2, point_pos)

    characterizer_summer_listdict(pointfeats_listdict0, point_pos)
    characterizer_summer_listdict(pointfeats_listdict1, point_pos)

    characterizer_summer_listarray(pointfeats_listarray0, point_pos)
    characterizer_summer_listarray(pointfeats_listarray1, point_pos)
    characterizer_summer_listarray(pointfeats_listarray2, point_pos)
    characterizer_summer_arrayarray(pointfeats_arrayarray0, point_pos)
    characterizer_summer_arrayarray(pointfeats_arrayarray1, point_pos)
    characterizer_summer_arrayarray(pointfeats_arrayarray2, point_pos)

    # Average
    characterizer_average(pointfeats_arrayarray0, point_pos)
    characterizer_average(pointfeats_listarray0, point_pos)
    characterizer_average(pointfeats_arrayarray1, point_pos)
    characterizer_average(pointfeats_listarray1, point_pos)
    characterizer_average(pointfeats_arrayarray2, point_pos)
    characterizer_average(pointfeats_listarray2, point_pos)
    characterizer_average(pointfeats_listdict0, point_pos)
    characterizer_average(pointfeats_listdict1, point_pos)

    characterizer_average_array(pointfeats_arrayarray0, point_pos)
    characterizer_average_array(pointfeats_listarray0, point_pos)
    characterizer_average_array(pointfeats_arrayarray1, point_pos)
    characterizer_average_array(pointfeats_listarray1, point_pos)
    characterizer_average_array(pointfeats_arrayarray2, point_pos)
    characterizer_average_array(pointfeats_listarray2, point_pos)

    characterizer_average_listdict(pointfeats_listdict0, point_pos)
    characterizer_average_listdict(pointfeats_listdict1, point_pos)

    characterizer_average_listarray(pointfeats_listarray0, point_pos)
    characterizer_average_listarray(pointfeats_listarray1, point_pos)
    characterizer_average_listarray(pointfeats_listarray2, point_pos)
    characterizer_average_arrayarray(pointfeats_arrayarray0, point_pos)
    characterizer_average_arrayarray(pointfeats_arrayarray1, point_pos)
    characterizer_average_arrayarray(pointfeats_arrayarray2, point_pos)

    ## Testing utils
    f = characterizer_from_unitcharacterizer(lambda x, y: x[0])
    f(pointfeats_arrayarray0, [point_pos]*n)
    f(pointfeats_listarray0, [point_pos]*n)
    f(pointfeats_arrayarray1, [point_pos]*n)
    f(pointfeats_listarray1, [point_pos]*n)
    f(pointfeats_arrayarray2, [point_pos]*n)
    f(pointfeats_listarray2, [point_pos]*n)
    f(pointfeats_listdict0, [point_pos]*n)
    f(pointfeats_listdict1, [point_pos]*n)

    #################################
    #### Characterizers
    ###################

    # TODO: listdicts feats based characterizers

#    aggregator_1sh_counter(pointfeats, point_pos)
#    aggregator_summer(pointfeats, point_pos)
#    aggregator_average(pointfeats, point_pos)

    #################################
    #### add2results
#    def creation_x_i(listfeats, n_k, n_iss, n_feats):
#        if listfeats:
#            x_i = []
#            for k in range(n_k):
#                x_i_k = []
#                for i in range(n_iss):
#                    keys = np.unique(np.random.randint(0, n_feats, n_feats))
#                    keys = [str(e) for e in keys]
#                    values = np.random.random(len(keys))
#                    x_i_k.append(dict(zip(keys, values)))
#                x_i.append(x_i_k)
#        else:
#            x_i = np.random.random((n_k, n_iss, n_feats))
#        return x_i
#
#    def creation_add2res(type_):
#        ## Preparations
#        n_feats = np.random.randint(1, 20)
#        n_k = np.random.randint(1, 20)
#        n_iss = np.random.randint(1, 20)
#        max_vals_i = np.random.randint(1, 20)
#        vals_i = []
#        for i in range(n_k):
#            vals_i.append(np.random.randint(0, max_vals_i, n_iss))
#        if type_ == 'replacelist':
#            x = [[[], []]]*n_k
#            x_i = creation_x_i(True, n_k, n_iss, n_feats)
#        elif type_ == 'append':
#            x = [[[]]*n_iss]*n_k
#            x_i = creation_x_i(True, n_k, n_iss, n_feats)
#        elif type_ == 'sum':
#            x_i = creation_x_i(False, n_k, n_iss, n_feats)
#            x = np.random.random((max_vals_i, n_feats, n_k))
#        return x, x_i, vals_i
#
#    types = ['replacelist', 'append', 'sum']
#    x, x_i, vals_i = creation_add2res(types[0])
#    x, x_i, vals_i = creation_add2res(types[2])
#    x, x_i, vals_i = creation_add2res(types[1])

    n_feats = np.random.randint(2, 20)
    ks = np.random.randint(1, 20)
    n_iss = np.random.randint(1, 20)
    n_vals_i = np.random.randint(2, 20)

    vals_i = create_vals_i(n_iss, n_vals_i, ks)

    x = create_artificial_measure_replacelist(ks, n_vals_i, n_feats)
    x_i = create_empty_features_dict(n_feats, n_iss, ks)
    measure_spdict_unknown = replacelist_addresult_function(x, x_i, vals_i)
    x = create_artificial_measure_replacelist(ks, n_vals_i, n_feats, True)
    measure_spdict_unknown = replacelist_addresult_function(x, x_i, vals_i)

    x = create_artificial_measure_append(ks, n_vals_i, n_feats)
    append_addresult_function(x, x_i, vals_i)
    x[0][0] = x[0][0][0]
    append_addresult_function(x, x_i, vals_i)

    x = create_artificial_measure_array(ks, n_vals_i, n_feats)
    x_i = create_empty_features_array(n_feats, n_iss, ks)

    measure_array = sum_addresult_function(x, x_i, vals_i)

    #################################
    #### Completers
    x = create_artificial_measure_append(ks, n_vals_i, n_feats)
    sparse_dict_completer(x)
    sparse_dict_completer_unknown(measure_spdict_unknown)
    null_completer(measure_array)
    global_info = np.random.random(len(measure_array))
    weighted_completer(measure_array, global_info)
    global_info = np.random.random(measure_array.shape)
    weighted_completer(measure_array, global_info)
    weighted_completer(measure_array, None)

    ############################# Descriptormodels ############################
    ###########################################################################
    #################################
    ###### DistanceDescriptor
    #########################
    pos_n = [100]
    pos_map_idx = [None, lambda idx: idx]
    pos_funct = [None, lambda d: d]

    pos = [pos_n, pos_map_idx, pos_funct]
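    # product(*pos) sweeps every combination of the parameter lists above
    # (1 x 2 x 2 = 4 DistancesDescriptor configurations).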

    for p in product(*pos):
        distdesc = DistancesDescriptor(nfeats=p[0], map_idx=p[1], funct=p[2])

        distdesc.set_functions(None, 'dict')
        distdesc.compute([[0]], [[0.3]])
        distdesc.compute([[0]], [None])
        distdesc.compute([[0]], None)

        distdesc.set_functions(None, 'ndarray')
        distdesc.compute([[0]], [[0.3]])
        distdesc.compute([[0]], [None])
        distdesc.compute([[0]], None)

    ###### NormalizedDistanceDescriptor
    ###################################
    regs = np.random.randint(0, 6, 100)
    pos_regs = [regs, regs, regs.reshape((100, 1)), regs.reshape((100, 1)),
                np.array([regs, regs, regs]).T]
    pos_kperturb = [0, 2, 0, 2, 2]
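    # pos_regs[i] / pos_kperturb[i] are paired configurations: the same region
    # labels given as a flat array, as a column vector and as three stacked
    # columns, with either 0 or 2 perturbations.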

    pos_ipos = range(len(pos_kperturb))
    pos_n = [6]
    pos_map_idx = [None, lambda idx: idx]
    pos_funct = [None, lambda d: d]

    pos = [pos_ipos, pos_n, pos_map_idx, pos_funct]
    for p in product(*pos):
        ks = pos_kperturb[p[0]] if pos_kperturb[p[0]] is not None else 0
        distdesc = NormalizedDistanceDescriptor(pos_regs[p[0]], nfeats=p[1],
                                                map_idx=p[2], funct=p[3],
                                                k_perturb=pos_kperturb[p[0]])

        distdesc.set_functions(None, 'dict')
        distdesc.compute([[0]], [[0.3]])
        distdesc.compute([[0]], [None])
        distdesc.compute([[0]], None)

        distdesc.set_functions(None, 'ndarray')
        desc0 = distdesc.compute([[0]], [[0.3]])
        desc1 = distdesc.compute([[0]], [None])
        desc2 = distdesc.compute([[0]], None)

        vals_i = np.random.randint(0, 6, ks+1)

        desc0 = np.array([desc0])
        desc1 = np.array([desc1])
        desc2 = np.array([desc2])

        distdesc.complete_desc_i(None, None, None, desc0, vals_i)
        distdesc.complete_desc_i(None, None, None, desc1, vals_i)
        distdesc.complete_desc_i(None, None, None, desc2, vals_i)

        measure = np.random.random((6, 6, ks+1))
        distdesc.to_complete_measure(measure)

    #### SumDescriptor
    ##################
    point_pos = None
    measure = np.random.random((100, 10, 2))
#    characs = np.random.random((10, 5))
#    feats = continuous_array_features(100, 10)
#    feats_dict = continuous_dict_features(100, 10)

    sumdesc = SumDescriptor()
    sumdesc.compute(pointfeats_arrayarray0, point_pos)
    sumdesc.compute(pointfeats_listarray0, point_pos)
    sumdesc.compute(pointfeats_arrayarray1, point_pos)
    sumdesc.compute(pointfeats_listarray1, point_pos)
    sumdesc.compute(pointfeats_arrayarray2, point_pos)
    sumdesc.compute(pointfeats_listarray2, point_pos)
    sumdesc.compute(pointfeats_listdict0, point_pos)
    sumdesc.compute(pointfeats_listdict1, point_pos)

    sumdesc = SumDescriptor('array')
    sumdesc.compute(pointfeats_arrayarray0, point_pos)
    sumdesc.compute(pointfeats_listarray0, point_pos)
    sumdesc.compute(pointfeats_arrayarray1, point_pos)
    sumdesc.compute(pointfeats_listarray1, point_pos)
    sumdesc.compute(pointfeats_arrayarray2, point_pos)
    sumdesc.compute(pointfeats_listarray2, point_pos)

    sumdesc = SumDescriptor('listdict')
    sumdesc.compute(pointfeats_listdict0, point_pos)
    sumdesc.compute(pointfeats_listdict1, point_pos)

    sumdesc = SumDescriptor('listarray')
    sumdesc.compute(pointfeats_listarray0, point_pos)
    sumdesc.compute(pointfeats_listarray1, point_pos)
    sumdesc.compute(pointfeats_listarray2, point_pos)

    sumdesc = SumDescriptor('arrayarray')
    sumdesc.compute(pointfeats_arrayarray0, point_pos)
    sumdesc.compute(pointfeats_arrayarray1, point_pos)
    sumdesc.compute(pointfeats_arrayarray2, point_pos)

#    sumdesc.compute_characs(feats_dict, point_pos)
#    sumdesc.compute_characs(feats_dict, None)
#    sumdesc.reducer(feats_dict, point_pos)
#    sumdesc.reducer(feats_dict, None)
#    sumdesc.aggdescriptor(feats_dict, point_pos)
#    sumdesc.aggdescriptor(feats_dict, None)

    # Not specific
    sumdesc.to_complete_measure(measure)
    #sumdesc.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
    sumdesc.set_global_info(None)
    sumdesc.set_functions(None, None)

    #################################
    #### AvgDescriptor
    point_pos = np.random.random((10, 5))
    measure = np.random.random((100, 10, 2))
#    avgdesc = AvgDescriptor()
#    characs = np.random.random((10, 5))
#    feats = continuous_array_features(100, 10)
#    feats_dict = continuous_dict_features(100, 10)

    avgdesc = AvgDescriptor()
    avgdesc.compute(pointfeats_arrayarray0, point_pos)
    avgdesc.compute(pointfeats_listarray0, point_pos)
    avgdesc.compute(pointfeats_arrayarray1, point_pos)
    avgdesc.compute(pointfeats_listarray1, point_pos)
    avgdesc.compute(pointfeats_arrayarray2, point_pos)
    avgdesc.compute(pointfeats_listarray2, point_pos)
    avgdesc.compute(pointfeats_listdict0, point_pos)
    avgdesc.compute(pointfeats_listdict1, point_pos)

    avgdesc = AvgDescriptor('array')
    avgdesc.compute(pointfeats_arrayarray0, point_pos)
    avgdesc.compute(pointfeats_listarray0, point_pos)
    avgdesc.compute(pointfeats_arrayarray1, point_pos)
    avgdesc.compute(pointfeats_listarray1, point_pos)
    avgdesc.compute(pointfeats_arrayarray2, point_pos)
    avgdesc.compute(pointfeats_listarray2, point_pos)

    avgdesc = AvgDescriptor('listdict')
    avgdesc.compute(pointfeats_listdict0, point_pos)
    avgdesc.compute(pointfeats_listdict1, point_pos)

    avgdesc = AvgDescriptor('listarray')
    avgdesc.compute(pointfeats_listarray0, point_pos)
    avgdesc.compute(pointfeats_listarray1, point_pos)
    avgdesc.compute(pointfeats_listarray2, point_pos)

    avgdesc = AvgDescriptor('arrayarray')
    avgdesc.compute(pointfeats_arrayarray0, point_pos)
    avgdesc.compute(pointfeats_arrayarray1, point_pos)
    avgdesc.compute(pointfeats_arrayarray2, point_pos)

#    avgdesc.reducer(characs, point_pos)
#    avgdesc.reducer(characs, None)
#    avgdesc.reducer(feats, point_pos)
#    avgdesc.reducer(feats, None)
#    avgdesc.reducer(feats_dict, point_pos)
#    avgdesc.reducer(feats_dict, None)
#
#    avgdesc.aggdescriptor(characs, point_pos)
#    avgdesc.aggdescriptor(characs, None)
#    avgdesc.aggdescriptor(feats, point_pos)
#    avgdesc.aggdescriptor(feats, None)
#    avgdesc.aggdescriptor(feats_dict, point_pos)
#    avgdesc.aggdescriptor(feats_dict, None)

    # Not specific
    avgdesc.to_complete_measure(measure)
    #avgdesc.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
    avgdesc.set_global_info(None)
    avgdesc.set_functions(None, None)

    #################################
    #### CountDescriptor
#    point_pos = np.random.random((10, 5))
#    measure = np.random.random((100, 10, 2))
#    countdesc = CountDescriptor()
#    characs = np.random.randint(0, 10, 50).reshape((10, 5))
#    feats = categorical_array_features(100, 10)
#    feats_dict = categorical_dict_features(100, 10)

    countdesc = CountDescriptor()
    countdesc.compute(pointfeats_arrayarray0, point_pos)
    countdesc.compute(pointfeats_arrayarray1, point_pos)
    countdesc.compute(pointfeats_listarray0, point_pos)
    countdesc.compute(pointfeats_listarray1, point_pos)
#
#    countdesc.reducer(characs, point_pos)
#    countdesc.reducer(characs, None)
#    countdesc.reducer(feats, point_pos)
#    countdesc.reducer(feats, None)
#    countdesc.reducer(feats_dict, point_pos)
#    countdesc.reducer(feats_dict, None)
#
#    countdesc.aggdescriptor(characs, point_pos)
#    countdesc.aggdescriptor(characs, None)
#    countdesc.aggdescriptor(feats, point_pos)
#    countdesc.aggdescriptor(feats, None)
#    countdesc.aggdescriptor(feats_dict, point_pos)
#    countdesc.aggdescriptor(feats_dict, None)
#

    # Not specific
    countdesc.to_complete_measure(measure)
    #countdesc.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
    countdesc.set_global_info(None)
    countdesc._format_default_functions()
    countdesc.set_functions(None, None)
    countdesc.set_functions(None, 'dict')

    #################################
    #### Pjensen
    pjensen = PjensenDescriptor()
    # Specific
    features = list(np.arange(20)) + list(np.random.randint(0, 20, 80))
    features = np.array(features).reshape((100, 1))
    pjensen.set_global_info(features)
    feats = categorical_array_features(100, 20)
    feats_dict = categorical_dict_features(100, 10)
    characs = np.random.randint(0, 10, 50).reshape((10, 5))
    point_pos = np.random.random((10, 5))
    measure = np.random.randint(0, 50, 20*20).reshape((20, 20, 1))

    pjensen = PjensenDescriptor(features)
    pjensen.compute(features, None)
    pjensen = PjensenDescriptor(features)
    pjensen.compute(features, point_pos)

    # Not specific
    pjensen.to_complete_measure(measure)
    #pjensen.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
    pjensen._format_default_functions()
    pjensen.set_functions(None, None)
    pjensen.set_functions(None, 'dict')

#    pjensen = PjensenDescriptor(features)
#    pjensen.compute_characs(features, point_pos)
#    pjensen = PjensenDescriptor(features)
#    pjensen.compute_characs(features, point_pos)

#
#    # Functions
#    pjensen.compute_characs(characs, point_pos)
#    pjensen.compute_characs(characs, None)
#    pjensen.compute_characs(feats, point_pos)
#    pjensen.compute_characs(feats, None)
#    pjensen.compute_characs(feats_dict, point_pos)
#    pjensen.compute_characs(feats_dict, None)
#
#    pjensen.reducer(characs, point_pos)
#    pjensen.reducer(characs, None)
#    pjensen.reducer(feats, point_pos)
#    pjensen.reducer(feats, None)
#    pjensen.compute_characs(feats_dict, point_pos)
#    pjensen.compute_characs(feats_dict, None)
#
#    pjensen.aggdescriptor(characs, point_pos)
#    pjensen.aggdescriptor(characs, None)
#    pjensen.aggdescriptor(feats, point_pos)
#    pjensen.aggdescriptor(feats, None)
#    pjensen.compute_characs(feats_dict, point_pos)
#    pjensen.compute_characs(feats_dict, None)
#

#    # Not specific
#    pjensen.to_complete_measure(measure)
#    #pjensen.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
#    pjensen._format_default_functions()
#    pjensen.set_functions(None, None)
#    pjensen.set_functions(None, 'dict')

#    #################################
#    #### SparseCounter
    # Only testing the specific functions. The others are tested in counter
    spcountdesc = SparseCounter()
#    spcountdesc.to_complete_measure(pointfeats_listdict0)
#    spcountdesc.to_complete_measure(pointfeats_listdict1)
##
##    spcountdesc.compute_characs(characs, point_pos)
##    spcountdesc.compute_characs(characs, None)
##
##    spcountdesc.reducer(characs, point_pos)
##    spcountdesc.reducer(characs, None)
##
##    spcountdesc.aggdescriptor(characs, point_pos)
##    spcountdesc.aggdescriptor(characs, None)
##
##    #spcountdesc.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
##
    # Not specific
    spcountdesc.set_global_info(None)
    spcountdesc.set_functions(None, None)
#
#    #################################
#    #### NBinsHistogramDesc
    nbinsdesc = NBinsHistogramDesc(5)
    characs = np.random.randint(0, 10, 50).reshape((10, 5))
    point_pos = np.random.random((10, 5))
    measure = np.random.random((100, 10, 2))
    feats = categorical_array_features(100, 20)
#    feats_dict = categorical_dict_features(100, 10)

    nbinsdesc.compute(characs, point_pos)
    nbinsdesc.compute([characs], None)
    nbinsdesc.compute(feats, point_pos)
    nbinsdesc.compute(feats, None)
#    nbinsdesc.compute_characs(feats_dict, point_pos)
#    nbinsdesc.compute_characs(feats_dict, None)

#    nbinsdesc.reducer(characs, point_pos)
#    nbinsdesc.reducer(characs, None)
#    nbinsdesc.reducer(feats, point_pos)
#    nbinsdesc.reducer(feats, None)
#    nbinsdesc.reducer(feats_dict, point_pos)
#    nbinsdesc.reducer(feats_dict, None)
#
#    nbinsdesc.aggdescriptor(characs, point_pos)
#    nbinsdesc.aggdescriptor(characs, None)
#    nbinsdesc.aggdescriptor(feats, point_pos)
#    nbinsdesc.aggdescriptor(feats, None)
#    nbinsdesc.aggdescriptor(feats_dict, point_pos)
#    nbinsdesc.aggdescriptor(feats_dict, None)
#
#    #nbinsdesc.complete_desc_i(i, neighs_info, desc_i, desc_neighs, vals_i)
#
    # Specific
    nbinsdesc.to_complete_measure(measure)
    nbinsdesc._format_default_functions()
    nbinsdesc.set_functions(None, None)
    nbinsdesc.set_functions(None, 'dict')
    features = np.random.random((100, 5))
    nbinsdesc.set_global_info(features, True)
    nbinsdesc.set_global_info(features, False)
#    # Not specific

###############################################################################
###############################################################################
################################## TO TRASH ###################################
###############################################################################
#    ret0 = KRetriever(locs, 3, ifdistance=True)
#    ret1 = CircRetriever(locs, 3, ifdistance=True)
#    gret0 = RetrieverManager([ret0])
#    gret1 = RetrieverManager([ret1])
#
#    ## Create MAP VALS (indices)
#    corr_arr = -1*np.ones(n).astype(int)
#    for i in range(len(np.unique(feat_arr0))):
#        corr_arr[(feat_arr0 == np.unique(feat_arr0)[i]).ravel()] = i
#    assert(np.sum(corr_arr == (-1)) == 0)
#
#    def map_vals_i_t(s, i, k):
#        k_p, k_i = s.features[0]._map_perturb(k)
#        i_n = s.features[0]._perturbators[k_p].apply2indice(i, k_i)
#        return corr_arr[i_n]
#    map_vals_i = create_mapper_vals_i(map_vals_i_t, feat_arr0)
#
#    feats0 = ImplicitFeatures(feat_arr0)
#    feats1 = ImplicitFeatures(feat_arr1)
#
#    avgdesc = AvgDescriptor()
#    countdesc = CountDescriptor()
#    pjensendesc = PjensenDescriptor()
#
#    feats_ret0 = FeaturesManager(feats0, countdesc, maps_vals_i=map_vals_i)
#    feats_ret1 = FeaturesManager([feats1], avgdesc, maps_vals_i=map_vals_i)
#    feats_ret2 = FeaturesManager(feats0, pjensendesc, maps_vals_i=map_vals_i)
#
#    sp_model0 = SpatialDescriptorModel(gret0, feats_ret1)
#    sp_model1 = SpatialDescriptorModel(gret1, feats_ret1)
#    sp_model2 = SpatialDescriptorModel(gret0, feats_ret0)
#    sp_model3 = SpatialDescriptorModel(gret1, feats_ret0)
#    sp_model4 = SpatialDescriptorModel(gret0, feats_ret2)
#    sp_model5 = SpatialDescriptorModel(gret1, feats_ret2)
#
#    corr = sp_model0.compute()
#    corr = sp_model1.compute()
#    corr = sp_model2.compute()
#    corr = sp_model3.compute()
#    corr = sp_model4.compute()
#    corr = sp_model5.compute()
#
#    ### Testing auxiliar descriptormodels functions
#    # Artificial data
#    contfeats, point_pos = np.random.random(5), np.random.random(5)
#    catfeats = np.random.randint(0, 10, 5)
#    aggdescriptors_idxs = np.random.random((10, 5))
#    x, x_i, vals_i = np.zeros((1, 1, 1)), np.zeros((1, 1)), [[0]]
#    # Characterizers
#    characterizer_1sh_counter(catfeats, point_pos)
#    characterizer_summer(contfeats, point_pos)
#    characterizer_average(contfeats, point_pos)
#    # Reducers
#    sum_reducer([aggdescriptors_idxs], point_pos)
#    sum_reducer([{9: 0, 8: 1, 4: 7, 3: 0, 1: 0}], point_pos)
#    avg_reducer(aggdescriptors_idxs, point_pos)
#    avg_reducer([{9: 0, 8: 1, 4: 7, 3: 0, 1: 0}], point_pos)
#
#    # Add2result
#    sum_addresult_function(x, x_i, vals_i)
#    append_addresult_function([[[]]], x_i, vals_i)
#    replacelist_addresult_function([[[], []]], x_i, vals_i)
#    # Completers
#    null_completer(np.array([1]))
#    weighted_completer(np.array([1]), np.array([1]))
#    weighted_completer(np.array([1]), None)
#    sparse_dict_completer([[[{0: 2}]]])
#    sparse_dict_completer([[[{0: 2}, {1: 3}]]])
#    # Aggregators
#    aggregator_1sh_counter(catfeats, point_pos)
#    aggregator_summer(catfeats, point_pos)
#    aggregator_average(catfeats, point_pos)
#    # Featurenames
#    counter_featurenames(np.random.randint(0, 10, 10).reshape((10, 1)))
#    try:
#        counter_featurenames([np.random.randint(0, 10, 10).reshape((10, 1))])
#        raise Exception
#    except:
#        pass
#    array_featurenames([np.random.random((10, 5))])
#    try:
#        array_featurenames(None)
#        raise Exception
#    except:
#        pass
#    # Out formatter
#    count_out_formatter_general(catfeats, catfeats, 'dict', 0)
#    try:
#        count_out_formatter_general(catfeats, catfeats[:3], 'dict', 0)
#        raise Exception
#    except:
#        pass
#    null_out_formatter(catfeats, catfeats, 'dict', 0)
#
#    ### Testing descriptors
#    # Artificial data
#    contfeats, point_pos = np.random.random(5), np.random.random(5)
#    catfeats = np.random.randint(0, 10, 5)
#    aggdescriptors_idxs = np.random.random((10, 5))
#
#    # Descriptors
#    avgdesc = AvgDescriptor()
#    countdesc = CountDescriptor()
#    pjensendesc = PjensenDescriptor()
#    sumdesc = SumDescriptor()
#    nbinsdesc = NBinsHistogramDesc(5)
#    sparsedesc = SparseCounter()
#
#    avgdesc.compute_characs(contfeats, point_pos)
#    avgdesc.reducer(aggdescriptors_idxs, point_pos)
#    avgdesc.aggdescriptor(contfeats, point_pos)
#    countdesc.compute_characs(catfeats, point_pos)
#    countdesc.reducer(aggdescriptors_idxs, point_pos)
#    countdesc.aggdescriptor(catfeats, point_pos)
#    pjensendesc.compute_characs(catfeats, point_pos)
#    pjensendesc.reducer(aggdescriptors_idxs, point_pos)
#    pjensendesc.aggdescriptor(catfeats, point_pos)
#    sumdesc.compute_characs(contfeats, point_pos)
#    sumdesc.reducer(aggdescriptors_idxs, point_pos)
#    sumdesc.aggdescriptor(contfeats, point_pos)
#    nbinsdesc.compute_characs(contfeats, point_pos)
#    nbinsdesc.reducer(aggdescriptors_idxs, point_pos)
#    nbinsdesc.aggdescriptor(contfeats, point_pos)
#    sparsedesc.compute_characs(catfeats, point_pos)
#    sparsedesc.reducer(aggdescriptors_idxs, point_pos)
#    sparsedesc.aggdescriptor(catfeats, point_pos)
#
#    ## GeneralDescriptor
#    gendesc = GeneralDescriptor(characterizer_summer, null_completer)
#    gendesc = GeneralDescriptor(characterizer_summer, null_completer,
#                                null_out_formatter)
def test():
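    """Test the features objects (Explicit, Implicit and Phantom), their
    __getitem__ interface and the auxiliary features parsing creation."""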
    ## Definition parameters
    n = 1000
    m = 5
    rei = 10

    n, n_feats = np.random.randint(10, 1000), np.random.randint(1, 20)
    n_feats2 = [np.random.randint(1, 20) for i in range(n_feats)]
    ks = np.random.randint(1, 20)

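    ## Auxiliary helpers to create ids, feature names and default names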
    def create_ids(n1):
        aux = np.random.randint(1, 4, n1)
        return np.cumsum(aux)

    def create_featurenames(n1):
        aux = create_ids(n1)
        return [str(e) for e in aux]

    def extract_featurenames_agg(aggdictfeats):
        names = []
        for k in range(len(aggdictfeats)):
            names += extract_featurenames(aggdictfeats[k])
        names = list(set(names))
        return names

    def extract_featurenames(aggdictfeats):
        names = []
        for i in range(len(aggdictfeats)):
            names += aggdictfeats[i].keys()
        names = list(set(names))
        return names

    def compute_featurenames(features):
        names = []
        if type(features) == np.ndarray:
            names = [str(e) for e in range(len(features[0]))]
        return names

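    ## Minimal dummy descriptormodels used to exercise set_descriptormodel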
    class DummyDesc:
        def set_functions(self, typefeatures, outformat):
            pass

    class Dummy1Desc(DummyDesc):
        def __init__(self):
            self.compute = lambda x, d: [e[0] for e in x]
            self._out_formatter = lambda x, y1, y2, y3: x
            self._f_default_names = compute_featurenames

    class Dummy2Desc_exp(DummyDesc):
        def __init__(self):
            self.compute = lambda x, d: [e[0] for e in x]
            self._out_formatter = lambda x, y1, y2, y3: x
            self._f_default_names = compute_featurenames

    class Dummy2Desc_imp(DummyDesc):
        def __init__(self):
            self.compute = lambda x, d: np.array([e[0] for e in x])
            self._out_formatter = lambda x, y1, y2, y3: x
            self._f_default_names = compute_featurenames

    ## Possible descriptormodels to test
    avgdesc = AvgDescriptor()
    dum1desc = Dummy1Desc()
    dum2desc = Dummy2Desc_imp()
    dum2desc_agg = Dummy2Desc_exp()

    ### Test functions definitions
    def test_getfeatsk(Feat):
        nei = Neighs_Info()
        nei.set((([0], [0]), [0]))
        i, d, _, k = 0, 0, None, 0  # placeholder values; this helper is a stub
        pass

    def test_getitem(Feat):
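        ## Check __getitem__ with ints, tuples, slices, lists and Neighs_Info
        ## inputs, the error cases that have to raise, descriptormodel setting
        ## and the export_features interface.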
        #k = 0
        #idxs = np.random.randint(0, 5, 20).reshape((1, 4, 5))
        #Feat._get_feats_k(idxs, k)
        #Feat._get_feats_k(list(idxs), k)
        #Feat[[]]
        feat = Feat[0]
        assert(len(feat[0]) == 1)
        assert(len(feat) == Feat.k_perturb+1)
        feat = Feat[(0, 0)]
        assert(len(feat[0]) == 1)
        assert(len(feat) == 1)
        feat = Feat[([0], [0])]
        assert(len(feat[0]) == 1)
        assert(len(feat) == 1)
#        INVALID
#        feat = Feat[([0], [0.])]
#        assert(len(feat[0]) == 1)
#        assert(len(feat) == Feat.k_perturb+1)
        feat = Feat[range(5), [0]]
        assert(len(feat[0]) == 5)
        assert(len(feat) == 1)
        feat = Feat[0:3]
        assert(len(feat[0]) == 3)
        assert(len(feat) == Feat.k_perturb+1)
        feat = Feat[:]
#        print len(feat[0]),  len(Feat)
        assert(len(feat[0]) == len(Feat))
        assert(len(feat) == Feat.k_perturb+1)
        feat = Feat[((0, 0), 0)]
        assert(len(feat[0]) == 1)
        assert(len(feat) == 1)
        feat = Feat[(([0], [0]), [0])]
        assert(len(feat[0]) == 1)
        assert(len(feat) == 1)
        Feat[[[[]]], [0]]
        if Feat.k_perturb:
#            print 'x'*100, Feat.k_perturb, Feat.shape
            Feat[(([[0], [0]], [[0], [0]]), [0, 1])]
        feat = Feat[[0, 4, 5]]
        assert(len(feat[0]) == 3)
        assert(len(feat) == Feat.k_perturb+1)
        try:
            boolean = False
            Feat[-1]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        try:
            boolean = False
            Feat._retrieve_feats([[[0]]], -1, None)
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        try:
            boolean = False
            Feat._retrieve_feats([[[0]]], 10000, None)
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        try:
            boolean = False
            Feat[len(Feat)]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        try:
            boolean = False
            Feat[range(4), range(Feat.k_perturb+3)]
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        nei = Neighs_Info()
        nei.set((([0], [0]), [0]))
        Feat[nei]
        nei = Neighs_Info()
        nei.set([[[0, 4], [0, 3]]])
        Feat[nei]
        nei = Neighs_Info(staticneighs=True)
        nei.set([[0, 4], [0, 3]])
        Feat[nei, 0]
        # shape
        Feat.shape
        ## Empty call
        Feat[(([[]], [[]]), [0])]
        # Descriptormodels setting
        # null formatters
        #Feat._format_characterizer(None, None)
        if Feat.typefeat != 'phantom':
            Feat.set_descriptormodel(dum1desc)
            if Feat.typefeat == 'implicit':
                Feat.set_descriptormodel(dum2desc)
            else:
                Feat.set_descriptormodel(dum2desc_agg)

            avgdesc = AvgDescriptor()
            Feat.set_descriptormodel(avgdesc)

        ## Export features
        feat_o, feat_info, feat_pars = Feat.export_features()
        new_feats = feat_o(feat_info, **feat_pars)
        assert(isinstance(new_feats, feat_o))

    ## Definition arrays
    aggfeatures = np.random.random((n // 2, m, rei))
    features0 = np.random.random((n, m))
    features1 = np.random.random((n, m))
    features2 = np.vstack([np.random.randint(0, 10, n) for i in range(m)]).T
    reindices0 = np.arange(n)
    reindices = np.vstack([reindices0]+[np.random.permutation(n)
                                        for i in range(rei-1)]).T
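    # Permutation-based perturbation built from the rei reindexing columns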
    perturbation = PermutationPerturbation(reindices)

    ###########################################################################
    ##########################
    #### Explicit Features testing
    ### Definition classes
    # Instantiation
    Feat = ExplicitFeatures(np.random.randint(0, 20, 100))
    test_getitem(Feat)
    Feat = ExplicitFeatures(np.random.random((100, 2)))
    test_getitem(Feat)
    Feat = ExplicitFeatures(aggfeatures)
    test_getitem(Feat)
    try:
        boolean = False
        ExplicitFeatures(np.random.random((10, 1, 1, 1)))
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It should not accept that inputs.")

    ## Exhaustive instantiation testing
    aggcontfeats_ar0 = continuous_agg_array_features(n, n_feats, ks)
    aggcatfeats_ar0 = categorical_agg_array_features(n, n_feats, ks)
    aggcatfeats_ar1 = categorical_agg_array_features(n, n_feats2, ks)
    aggcontfeats_dict = continuous_agg_dict_features(n, n_feats, ks)
    aggcatfeats_dict = categorical_agg_dict_features(n, n_feats, ks)

    pos_feats = [aggcontfeats_ar0, aggcatfeats_ar0, aggcatfeats_ar1,
                 aggcontfeats_dict, aggcatfeats_dict]
    pos_names = [create_featurenames(n_feats), create_featurenames(1),
                 create_featurenames(len(n_feats2)),
                 create_featurenames(n_feats),
                 extract_featurenames_agg(aggcontfeats_dict),
                 extract_featurenames_agg(aggcatfeats_dict)]
    pos_nss = [0, 1, 2, 3, 4]
    pos_null = [None, 0., np.inf]
    pos_characterizer = [None]
#    pos_outformatter = [None]
    pos_indices = [None]

    possibilities = [pos_nss, pos_null, pos_characterizer, pos_indices]

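    ## Combination of inputs testing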
    for p in product(*possibilities):
#        print p
        ## Names definition
        names = []
        if np.random.randint(0, 2):
            names = pos_names[p[0]]
        ## Instantiation
        Feat = ExplicitFeatures(pos_feats[p[0]], descriptormodel=p[2],
                                names=names, indices=p[3], nullvalue=p[1])
        ## Testing main functions
        test_getitem(Feat)

    ## Particular cases
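    # Note: p still holds the last combination produced by the loop above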
    try:
        boolean = False
        names = [str(i) for i in range(len(aggcontfeats_ar0[0])+1)]
        ExplicitFeatures(aggcontfeats_ar0, names=names, indices=p[3],
                         descriptormodel=p[2], nullvalue=p[1])
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    ## List features (one list of dict features per k)
    listfeatures = []
    for k in range(5):
        listfeatures_k = []
        for i in range(100):
            aux = np.unique(np.random.randint(0, 100, np.random.randint(5)))
            d = dict(zip(aux, np.random.random(len(aux))))
            listfeatures_k.append(d)
        listfeatures.append(listfeatures_k)
    Feat = ExplicitFeatures(listfeatures)
    len(Feat)
    nei = Neighs_Info()
    nei.set((([0], [0]), [0]))
    Feat[nei]
    nei = Neighs_Info()
    nei.set([[[0, 4], [0, 3]]])
    Feat[nei]

    ###########################################################################
    ##########################
    #### Implicit Features testing
    ### Definition classes
    # Instantiation
    contfeats_ar0 = continuous_array_features(n, n_feats)
    catfeats_ar0 = categorical_array_features(n, n_feats)
    catfeats_ar1 = categorical_array_features(n, n_feats2)
    contfeats_dict = continuous_dict_features(n, n_feats)
    catfeats_dict = categorical_dict_features(n, n_feats)

    pos_feats = [contfeats_ar0, catfeats_ar0, catfeats_ar1,
                 contfeats_dict, catfeats_dict]
    pos_names = [create_featurenames(n_feats), create_featurenames(1),
                 create_featurenames(len(n_feats2)),
                 create_featurenames(n_feats),
                 extract_featurenames(contfeats_dict),
                 extract_featurenames(catfeats_dict)]
    pos_nss = [0, 1, 2, 3, 4]
    pos_null = [None]  # TODO: [None, 0., np.inf]
    pos_characterizer = [None]
#    pos_outformatter = [None]
    pos_indices = [None]  # TODO
    pos_perturbations = [None, perturbation]

    possibilities = [pos_nss, pos_null, pos_characterizer, pos_indices,
                     pos_perturbations]
    ## Combination of inputs testing
    for p in product(*possibilities):
#        print p
        ## Names definition
        names = []
        if np.random.randint(0, 2):
            names = pos_names[p[0]]
        ## Instantiation
        Feat = ImplicitFeatures(pos_feats[p[0]], names=names,
                                descriptormodel=p[2], perturbations=p[4])
        ## Testing main functions
#        if p[0] < 3:
#            test_getitem(Feat)
        test_getitem(Feat)

    Feat_imp = ImplicitFeatures(contfeats_ar0, perturbations=perturbation)
    test_getitem(Feat_imp)
    Feat_imp = ImplicitFeatures(catfeats_ar0, perturbations=perturbation)
    test_getitem(Feat_imp)
    Feat_imp = ImplicitFeatures(catfeats_ar0.ravel(),
                                perturbations=perturbation, names=[0])
    test_getitem(Feat_imp)
    Feat_imp = ImplicitFeatures(catfeats_ar1, perturbations=perturbation)
    test_getitem(Feat_imp)
    Feat_imp = ImplicitFeatures(contfeats_dict, perturbations=perturbation)
#    test_getitem(Feat_imp)
    Feat_imp = ImplicitFeatures(catfeats_dict, perturbations=perturbation)
#    test_getitem(Feat_imp)

    try:
        boolean = False
        Feat = ImplicitFeatures(contfeats_ar0, perturbations=None)
        Feat._map_perturb(-1)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        Feat = ImplicitFeatures(contfeats_ar0, perturbations=perturbation)
        Feat._map_perturb(-1)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        Feat = ImplicitFeatures(contfeats_ar0, perturbations=perturbation)
        Feat._map_perturb(1000)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")

    ###########################################################################
    ##########################
    #### Phantom Features testing
    ### Definition classes
    # Instantiation
    pos_fea_info = [(100, 25)]
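    # PhantomFeatures are instantiated from shape-like features_info (here (100, 25))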
    pos_perturbations = [None, perturbation]
    pos_names = [[]]
    pos_outfeats = [[]]
    pos_characterizer = [None, ]
#    pos_outformatter = [None]

    possibilities = [pos_fea_info, pos_perturbations, pos_names, pos_outfeats,
                     pos_characterizer]
    ## Combination of inputs testing
    for p in product(*possibilities):
#        print p
        fe = PhantomFeatures(features_info=p[0], perturbations=p[1],
                             names=p[2], out_features=p[3],
                             descriptormodel=p[4])
        test_getitem(fe)

    ###########################################################################
    #### Testing auxiliary parsing
    feats0 = np.random.randint(0, 10, 100)
    feats1 = feats0.reshape((100, 1))
    feats2 = np.random.random((100, 2, 3))
    desc = DummyDescriptor()
    pars_feats = {}

    # Testing combinations of possible inputs: raw arrays, (feats, pars) tuples,
    # (feats, pars, descriptormodel) tuples and already-built features objects
    feats_info = feats0
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = feats1
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = feats2
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats0, pars_feats)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats1, pars_feats)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats2, pars_feats)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats0, pars_feats, desc)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats1, pars_feats, desc)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    feats_info = (feats2, pars_feats, desc)
    features_obj = _featuresobject_parsing_creation(feats_info)
    assert(isinstance(features_obj, BaseFeatures))
    features_obj = _featuresobject_parsing_creation(features_obj)
    assert(isinstance(features_obj, BaseFeatures))