def test():
    ### Parameters or externals
    info_ret = {'order': 2}
    locs = np.random.random((1000, 2))
    inttypes = [int, np.int32, np.int64]
#    mainmapper1 = generate_random_relations(25, store='sparse')
#    mainmapper2 = generate_random_relations(100, store='sparse')
#    mainmapper3 = generate_random_relations(5000, store='sparse')
    griddisc1 = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    griddisc2 = GridSpatialDisc((10, 10), xlim=(0, 1), ylim=(0, 1))
    griddisc3 = GridSpatialDisc((50, 100), xlim=(0, 1), ylim=(0, 1))
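    # The grid discretizers partition the unit square into n x m rectangular
    # cells and map 2d locations to region labels. A minimal sanity sketch,
    # assuming the same `discretize` call used later in this test returns one
    # region label per location:
    example_regs = griddisc1.discretize(locs)
    assert(len(example_regs) == len(locs))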

    ### Testing utilities
    ## util_spatial_relations
    f = lambda x, y: x + y
    sp_elements = np.random.random(10)
    general_spatial_relation(sp_elements[0], sp_elements[0], f)
    general_spatial_relations(sp_elements, f, simmetry=False)
    general_spatial_relations(sp_elements, f, simmetry=True)

    ## format_out_relations
    mainmapper1 = randint_sparse_matrix(0.8, (25, 25))
    format_out_relations(mainmapper1, 'sparse')
    format_out_relations(mainmapper1, 'network')
    format_out_relations(mainmapper1, 'sp_relations')
    lista = format_out_relations(mainmapper1, 'list')
    u_regs = mainmapper1.data
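    # format_out_relations re-expresses the same relations in the requested
    # container: 'sparse' keeps the scipy sparse matrix, 'network' presumably
    # builds a graph object, 'sp_relations' wraps it into a relations object
    # and 'list' returns per-row neighbour lists. These readings are inferred
    # from how the outputs are used in this test, not from the library docs.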

    ## Element metrics
    element_i, element_j = 54, 2
    pars1 = {'periodic': 60}
    pars2 = {}
    unidimensional_periodic(element_i, element_j, pars=pars1)
    unidimensional_periodic(element_i, element_j, pars=pars2)
    unidimensional_periodic(element_j, element_i, pars=pars1)
    unidimensional_periodic(element_j, element_i, pars=pars2)
    measure_difference(element_i, element_j, pars=pars1)
    measure_difference(element_i, element_j, pars=pars2)
    measure_difference(element_j, element_i, pars=pars1)
    measure_difference(element_j, element_i, pars=pars2)
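    # unidimensional_periodic measures 1d differences on a ring when a
    # 'periodic' parameter is given (with period 60, elements 54 and 2 are
    # presumably min(|54 - 2|, 60 - |54 - 2|) = 8 apart), while an empty
    # parameter dict presumably falls back to the plain difference. The
    # captures below only re-illustrate the call signature already used above.
    d_periodic = unidimensional_periodic(element_i, element_j, pars=pars1)
    d_plain = unidimensional_periodic(element_i, element_j, pars=pars2)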

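    # Shared output checker for the __getitem__ calls below: every distances
    # entry has to be a 2d array, and when the mapper outputs 'indices' the
    # returned neighbours have to be integer-typed.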
    def ensure_output(neighs, dists, mainmapper):
#        print dists
        assert(all([len(e.shape) == 2 for e in dists]))
        assert(all([len(e) == 0 for e in dists if np.prod(e.shape) == 0]))
        if mainmapper._out == 'indices':
#            print neighs
            correctness = []
            for nei in neighs:
                if len(nei):
                    correctness.append(all([type(e) in inttypes for e in nei]))
                else:
                    correctness.append(nei.dtype in inttypes)
            assert(all(correctness))

    ###########################################################################
    ### Massive combinatorial testing
    # Possible parameters
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='sparse')
    pos_relations = [relations, relations.A,
                     format_out_relations(relations, 'network')]
    pos_distanceorweighs, pos_sym = [True, False], [True, False]
    pos_inputstypes, pos_outputtypes = [[None, 'indices', 'elements_id']]*2
    pos_input_type = [None, 'general', 'integer', 'array', 'array1', 'array2',
                      'list', 'list_int', 'list_array']
    pos_inputs = [[0], 0, 0, np.array([0]), np.array([0]), np.array([0]),
                  [0], [0], [np.array([0])]]
    pos_data_in, pos_data = [[]]*2
    possibles = [pos_relations, pos_distanceorweighs, pos_sym, pos_outputtypes,
                 pos_inputstypes, pos_input_type]
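    # Every combination of relations container, distance/weight flag, symmetry
    # flag, output mode, input mode and input type is instantiated below, so a
    # regression in any single RegionDistances code path surfaces inside this
    # loop. pos_inputs is kept parallel to pos_input_type so that the matching
    # example input can be looked up by index inside the loop.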
    # Combinations
    for p in product(*possibles):
        mainmapper1 = RegionDistances(relations=p[0], distanceorweighs=p[1],
                                      symmetric=p[2], output=p[3], input_=p[4],
                                      input_type=p[5])
        mainmapper1[slice(0, 1)]
        # Define input
        if p[5] is None:
            if p[4] != 'indices':
                neighs, dists = mainmapper1[mainmapper1.data[0]]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[0]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[np.array([-1])]
                ensure_output(neighs, dists, mainmapper1)
            if p[4] != 'elements_id':
                neighs, dists = mainmapper1[0]
                ensure_output(neighs, dists, mainmapper1)
                try:
                    boolean = False
                    mainmapper1[-1]
                    boolean = True
                    raise Exception("It has to halt here.")
                except:
                    if boolean:
                        raise Exception("It has to halt here.")
        else:
            if p[5] == 'list':
                # Get item
                neighs, dists = mainmapper1[[0]]
                ensure_output(neighs, dists, mainmapper1)
                neighs, dists = mainmapper1[[np.array([0])]]
                ensure_output(neighs, dists, mainmapper1)
                try:
                    boolean = False
                    mainmapper1[[None]]
                    boolean = True
                    raise Exception("It has to halt here.")
                except:
                    if boolean:
                        raise Exception("It has to halt here.")
            idxs = pos_inputs[pos_input_type.index(p[5])]
            neighs, dists = mainmapper1[idxs]
            ensure_output(neighs, dists, mainmapper1)
        # Functions
        mainmapper1.set_inout(p[5], p[4], p[3])
        mainmapper1.transform(lambda x: x)
        mainmapper1.data
        mainmapper1.data_input
        mainmapper1.data_output
        mainmapper1.shape
        ## Extreme cases

    ## Individual extreme cases
    ## Instantiation
    pars_rel = {'symmetric': False}
    relations = relations.A
    data_in = list(np.arange(len(relations)))
    wrong_data = np.random.random((100))
    mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                  data_in=data_in, **pars_rel)
    data_in = np.arange(len(relations)).reshape((len(relations), 1))
    mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                  data_in=data_in, **pars_rel)
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        sparse_rels = randint_sparse_matrix(0.8, (25, 25))
        mainmapper3 = RegionDistances(relations=sparse_rels, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    relations = np.random.random((20, 20))
    try:
        boolean = False
        wrong_data = np.random.random((100, 3, 4))
        mainmapper3 = RegionDistances(relations=relations, _data=wrong_data,
                                      data_in=data_in, **pars_rel)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")

    ## Other cases
    # Dummymap instantiation
    regs0 = np.unique(np.random.randint(0, 1000, 200))
    regs1 = regs0.reshape((len(regs0), 1))
    regs2 = regs0.reshape((len(regs0), 1, 1))
    pos_regs = [regs0, list(regs0), regs1]
    possibles = [pos_regs, pos_input_type]
    for p in product(*possibles):
        dummymapper = DummyRegDistance(p[0], p[1])
        # Get item
        idxs = pos_inputs[pos_input_type.index(p[1])]
        neighs, dists = dummymapper[idxs]
        ensure_output(neighs, dists, dummymapper)
        neighs, dists = dummymapper[slice(0, 1)]
        ensure_output(neighs, dists, dummymapper)
        ## Functions
        dummymapper.transform(lambda x: x)
        dummymapper.data
        dummymapper.data_input
        dummymapper.data_output
        dummymapper.shape
    # Halting cases
    try:
        boolean = False
        dummymapper = DummyRegDistance(regs2)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper = DummyRegDistance(None)
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper[None]
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")
    try:
        boolean = False
        dummymapper[-1]
        boolean = True
        raise Exception("It has to halt here.")
    except:
        if boolean:
            raise Exception("It has to halt here.")

    ###########################################################################
    ### Auxiliary parsing creation functions test
    ############################################
    # Standard accepted input formats:
    #    * relations object
    #    * (main_relations_info, pars_rel)
    #    * (main_relations_info, pars_rel, _data)
    #    * (main_relations_info, pars_rel, _data, data_in)
    #
    ## Main relations information
    relations = np.random.random((100, 20))
    _data = np.arange(20)
    _data_input = np.arange(100)

    relations_info = (relations, {})
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_info = (relations, {}, _data)
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_info = (relations, {}, _data, _data_input)
    relations_object = _relations_parsing_creation(relations_info)
    assert(isinstance(relations_object, RegionDistances))

    relations_object = _relations_parsing_creation(relations_object)
    assert(isinstance(relations_object, RegionDistances))

    relations_object = _relations_parsing_creation(relations)
    assert(isinstance(relations_object, RegionDistances))

    ###########################################################################
    ### Computers testing
    #####################
    ## aux_regionmetrics
    # Get regions activated
    elements = griddisc1.get_regions_id()
    get_regions4distances(griddisc1, elements=None, activated=None)
    get_regions4distances(griddisc1, elements, activated=elements)

    # Filter possible neighs
    only_possible = np.unique(np.random.randint(0, 100, 50))
    neighs = [np.unique(np.random.randint(0, 100, 6)) for i in range(4)]
    dists = [np.random.random(len(neighs[i])) for i in range(4)]
    filter_possible_neighs(only_possible, neighs, dists)
    filter_possible_neighs(only_possible, neighs, None)

    # TODO: Sync with other classes such as sp_desc_models
    lista = [[0, 1, 2, 3], [0, 2, 3, 5], [1, 1, 1, 1]]
    u_regs = np.arange(25)
    regions_id = np.arange(25)
    elements_i = np.arange(25)
    element_labels = np.arange(25)
    discretizor = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))

    locs = np.random.random((100, 2))
    retriever = KRetriever
    info_ret = np.ones(100)*4
    descriptormodel = DummyDescriptor()

    sp_descriptor = discretizor, locs, retriever, info_ret, descriptormodel

    sparse_from_listaregneighs(lista, u_regs, symmetric=True)
    sparse_from_listaregneighs(lista, u_regs, symmetric=False)
    ret_selfdists = KRetriever(locs, 4, ifdistance=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='network',
                          symmetric=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='sparse',
                          symmetric=True)
    compute_selfdistances(ret_selfdists, np.arange(100), typeoutput='matrix',
                          symmetric=True)
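    # compute_selfdistances presumably retrieves the neighbourhood of every
    # requested element with the given retriever and stores the resulting
    # self-distance structure in the requested container ('network', 'sparse'
    # or 'matrix'); only the call signatures are exercised here.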

#    create_sp_descriptor_points_regs(sp_descriptor, regions_id, elements_i)
#    create_sp_descriptor_regionlocs(sp_descriptor, regions_id, elements_i)

    ## Compute Avg distance
    locs = np.random.random((100, 2))
    sp_descriptor = (griddisc1, locs), (KRetriever, {'info_ret': 5}), None
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='network')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='matrix')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)
    relations, pars_rel, _data =\
        compute_AvgDistanceRegions(sp_descriptor, store='sparse')
    regdists = RegionDistances(relations=relations, _data=_data, **pars_rel)
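    # Each store mode ('network', 'matrix', 'sparse') has to yield a
    # (relations, pars_rel, _data) triple from which a RegionDistances object
    # can be rebuilt; the rebuild itself is the check.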

    # Region spatial relations
    # For future (TODO)

    ### RegionDistances Computers
    griddisc1 = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    ## Compute Contiguity
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='matrix')
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='network')
    relations, pars_rel, _data =\
        compute_ContiguityRegionDistances(griddisc1, store='sparse')
    mainmapper1 = RegionDistances(relations=relations, _data=None,
                                  **pars_rel)
    neighs, dists = mainmapper1.retrieve_neighs([0])
    assert(len(neighs) == len(dists))
    assert(len(neighs) == 1)
    neighs, dists = mainmapper1.retrieve_neighs([0, 1])
    assert(len(neighs) == len(dists))
    assert(len(neighs) == 2)
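    # retrieve_neighs returns one (neighs, dists) pair per queried region, so
    # the lengths track the query size; the extra query below only
    # re-illustrates the same contract with three regions.
    neighs, dists = mainmapper1.retrieve_neighs([0, 1, 2])
    assert(len(neighs) == len(dists))
    assert(len(neighs) == 3)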

    ## Compute CenterLocs
    sp_descriptor = griddisc1, None, None
    ## TODO: pdist problem
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='network',
                                          elements=None, symmetric=True,
                                          activated=None)
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='matrix',
                                          elements=None, symmetric=True,
                                          activated=None)
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='sparse',
                                          elements=None, symmetric=True,
                                          activated=None)
    sp_descriptor = griddisc1, (KRetriever, {'info_ret': 2}), None

    ### TODO: Descriptormodel
    relations, pars_rel, _data =\
        compute_CenterLocsRegionDistances(sp_descriptor, store='sparse',
                                          elements=None, symmetric=True,
                                          activated=None)

    ## Retriever tuple
    ## Retriever object

    ## Spdesc

    ## Compute PointsNeighsIntersection

    ## Aux_regionmetrics
    #sparse_from_listaregneighs(lista, u_regs, symmetric)

    ###########################################################################
    ### Relative positioner testing
    ###############################
    n_el, n_dim = 5, 2
    elements_i = np.random.random((n_el, n_dim))
    elements_neighs = []
    for i in range(n_el):
        aux_neigh = np.random.random((np.random.randint(1, 4), n_dim))
        elements_neighs.append(aux_neigh)
    rel_pos = BaseRelativePositioner(metric_distances)
    rel_pos.compute(elements_i, elements_neighs)

    rel_pos = BaseRelativePositioner(diff_vectors)
    rel_pos.compute(elements_i, elements_neighs)


def test_spdescriptormodels():
    n, nx, ny = 100, 100, 100
    m, rei = 3, 5
    locs = np.random.random((n, 2))*10
    ## Retrievers management
    ret0 = KRetriever(locs, 3, ifdistance=True)
    ret1 = CircRetriever(locs, .3, ifdistance=True)
    #countdesc = CountDescriptor()

    ## Other functions
    def map_indices(s, i):
        if s._pos_inputs is not None:
            return s._pos_inputs.start + s._pos_inputs.step*i
        else:
            return i

    def halting_f(signum, frame):
        raise Exception("Not error time.")

    ## Random exploration functions
    def random_pos_space_exploration(pos_possibles):
        selected, indices = [], []
        for i in range(len(pos_possibles)):
            sel, ind = random_pos_exploration(pos_possibles[i])
            selected.append(sel)
            indices.append(ind)
        return selected, indices

    def random_pos_exploration(possibles):
        ## Selection
        i_pos = np.random.randint(0, len(possibles))
        return possibles[i_pos], i_pos

    ## Impossibles
    def impossible_instantiation(selected, p, ret, feat):
        i_ret, sel, agg, pert = p
        p_ind, m_ind, n_desc, i_feat = selected
        checker = False

        ## Not implemented perturbation over explicit features
        if pert is not None:
            if type(feat) == np.ndarray:
                if len(feat.shape) == 3:
                    checker = True
            elif isinstance(feat, ExplicitFeatures):
                checker = True
            elif isinstance(feat, FeaturesManager):
                check_aux = []
                for i in range(len(feat.features)):
                    check_aux.append(isinstance(feat.features[i],
                                                ExplicitFeatures))
                checker = any(check_aux)
        return checker

    def compulsary_instantiation_errors(selected, p, ret, feat):
        i_ret, sel, agg, pert = p
        p_ind, m_ind, n_desc, i_feat = selected
        checker = False

        ## Cases
        if p_ind == []:
            checker = True

        ## Compulsory failing instantiation
        if not checker:
            return
        try:
            boolean = False
            SpatialDescriptorModel(retrievers=ret, featurers=feat,
                                   mapselector_spdescriptor=sel,
                                   pos_inputs=p_ind, map_indices=m_ind,
                                   perturbations=pert, aggregations=agg,
                                   name_desc=n_desc)
            boolean = True
            raise Exception("It has to halt here.")
        except:
            if boolean:
                raise Exception("It has to halt here.")
        return checker

    def test_methods(methods, input_):
        """Test proper methods output for selectors indications."""
#        print methods, input_
        assert(len(methods) == 3)
        assert(methods[0] in [True, False])

        if methods[1] is None:
            assert(methods[2] is None)
        elif type(input_) == int:
            assert(type(methods[1]) == tuple)
            assert(type(methods[2]) == tuple)
            assert(len(methods[1]) == 2)
            assert(len(methods[2]) == 3)
            assert(all([len(e) == 2 for e in methods[2]]))
            assert(all([type(e) == tuple for e in methods[2]]))
        else:
            assert(type(input_) == list)
            assert(type(methods[1]) == list)
            assert(type(methods[2]) == list)
            assert(len(methods[1]) == len(input_))
            assert(len(methods[2]) == len(input_))
            for i in range(len(methods[1])):
                assert(type(methods[1][i]) == tuple)
                assert(type(methods[2][i]) == tuple)
                assert(len(methods[1][i]) == 2)
                assert(len(methods[2][i]) == 3)
                assert(all([len(e) == 2 for e in methods[2][i]]))
                assert(all([type(e) == tuple for e in methods[2][i]]))

    ###########################################################################
    ###########################################################################
    ######## Testing aggregations preparation
    ## Testing all possible aggregation_in
    agg_f_ret = None
    desc_in, desc_out = AvgDescriptor(), AvgDescriptor()
    feats = ImplicitFeatures(np.random.random((100, 10)),
                             descriptormodel=AvgDescriptor())

    agg_in = agg_f_ret, desc_in, {}, {}, desc_out
    res = _parse_aggregation_feat(agg_in, feats)
    assert(type(res) == tuple)
    assert(len(res) == 5)
    agg_in = agg_f_ret, desc_in, {}, {}
    res = _parse_aggregation_feat(agg_in, feats)
    assert(type(res) == tuple)
    assert(len(res) == 5)
    agg_in = agg_f_ret, {}, {}
    res = _parse_aggregation_feat(agg_in, feats)
    assert(type(res) == tuple)
    assert(len(res) == 5)
    agg_in = agg_f_ret, desc_in, desc_out
    res = _parse_aggregation_feat(agg_in, feats)
    assert(type(res) == tuple)
    assert(len(res) == 5)
    agg_in = (agg_f_ret, )
    res = _parse_aggregation_feat(agg_in, feats)
    assert(type(res) == tuple)
    assert(len(res) == 5)
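    # Whatever subset of the full (agg_f_ret, desc_in, pars_in, pars_out,
    # desc_out) specification is supplied, _parse_aggregation_feat normalizes
    # the aggregation information into a 5-tuple; that is what the assertions
    # above check for every accepted input length.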

    # Creation of a standard aggregation_info tuple
    disc = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    locs = np.random.random((100, 2))
    regs = disc.discretize(locs)
    disc_info = locs, regs, disc

    retriever_in = (KRetriever, {'info_ret': 4})
    retriever_out = (KRetriever, {'info_ret': 4})
    aggregating = avgregionlocs_outretriever, (avgregionlocs_outretriever, )

    aggregation_info = disc_info, retriever_in, retriever_out, aggregating
    # Creation of aggregation objects
    aggretriever = create_aggretriever(aggregation_info)
    assert(isinstance(aggretriever, BaseRetriever))
    aggfeatures = create_aggfeatures(aggregation_info, feats)
    assert(isinstance(aggfeatures, BaseFeatures))
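    # From a single aggregation_info tuple the two helpers build both sides of
    # an aggregated level: create_aggretriever returns a retriever (checked
    # against BaseRetriever) and create_aggfeatures the aggregated features
    # (checked against BaseFeatures).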

    ###########################################################################
    ###########################################################################
    ######## Testing instantiation spdesc
    ## TODO: bool_input_idx=False

    # Aggregation
    disc = GridSpatialDisc((5, 5), xlim=(0, 1), ylim=(0, 1))
    retriever_in = (KRetriever, {'info_ret': 4})
    retriever_out = (KRetriever, {'info_ret': 4})
    aggregating = avgregionlocs_outretriever, (avgregionlocs_outretriever, )
    aggregation_info = disc, retriever_in, retriever_out, aggregating

    # Locs and retrievers
    n_in, n_out = 50, 50  # TODO: Different sizes and easier management
    locs_input = np.random.random((n_in, 2))
    locs1 = np.random.random((n_out, 2))
    locs2 = np.random.random((n_out, 2))

    # Features
    aggfeats = np.random.random((n_out, m, rei))
    featsarr0 = np.random.random((n_out, m))
    featsarr1 = np.random.random((n_out, m))
    featsarr2 = np.vstack([np.random.randint(0, 10, n_out)
                           for i in range(m)]).T

    def new_retrievers_creation():
        ret0 = KRetriever(locs1, autolocs=locs_input, info_ret=3,
                          bool_input_idx=True)
        ret1 = [ret0, CircRetriever(locs2, info_ret=0.1, autolocs=locs_input,
                                    bool_input_idx=True)]
        ret2 = RetrieverManager(ret0)
        pos_rets = [ret0, ret1, ret2]
        return pos_rets

    pos_rets = range(3)

    ## Possible feats
    def new_features_creation():
        feats0 = ExplicitFeatures(aggfeats)
        feats1 = ImplicitFeatures(featsarr0)
        feats2 = FeaturesManager(ExplicitFeatures(aggfeats))

        pos_feats = [feats0, feats1, aggfeats, featsarr0, feats2]
        return pos_feats

    pos_feats = range(5)

    # Selectors
    arrayselector0 = np.zeros((n_in, 8))
    arrayselector1 = np.zeros((n_in, 2)), np.zeros((n_in, 6))
    arrayselector2 = np.zeros((n_in, 2)), tuple([np.zeros((n_in, 2))]*3)
    functselector0 = lambda idx: ((0, 0), ((0, 0), (0, 0), (0, 0)))
    functselector1 = lambda idx: (0, 0), lambda idx: ((0, 0), (0, 0), (0, 0))
    tupleselector0 = (0, 0), (0, 0, 0, 0, 0, 0)
    tupleselector1 = (0, 0, 0, 0, 0, 0, 0, 0)
    tupleselector2 = (0, 0), ((0, 0), (0, 0), (0, 0))
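    # The selector definitions above are equivalent spellings of the same
    # information: one pair selecting the retriever plus three pairs selecting
    # the feature/descriptor path, flattened into 8 columns (arrayselector0),
    # split as 2 + 6 columns (arrayselector1), nested as (2, (2, 2, 2))
    # (arrayselector2), or given per-index as functions or plain tuples. This
    # reading is inferred from the shapes checked in test_methods above.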

    listselector = None
    selobj = Sp_DescriptorSelector(*arrayselector1)
    pos_selectors = [None, arrayselector0, arrayselector1, arrayselector2,
                     functselector0, functselector1,
                     tupleselector0, tupleselector1, tupleselector2,
                     Sp_DescriptorSelector(arrayselector0)]
    pos_agg = [None]

    ## Perturbations
    reindices0 = np.arange(n_out)
    reindices = np.vstack([reindices0]+[np.random.permutation(n_out)
                                        for i in range(rei-1)]).T
    perturbation = PermutationPerturbation(reindices)
    pos_pert = [None, perturbation]
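    # The permutation perturbation keeps the identity permutation in its first
    # column and stacks rei - 1 random permutations next to it, giving one
    # column of reindices per perturbation level.
    assert(reindices.shape == (n_out, rei))
    assert(np.all(reindices[:, 0] == reindices0))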

    ## Random exploration
    pos_loop_ind = [None, 20, (0, n_in, 1), slice(0, n_in, 1), []]
    pos_loop_mapin = [None, map_indices]
    pos_name_desc = [None, '', 'random_desc']
    # Possible feats
    # Random exploration possibilities
    pos_random = [pos_loop_ind, pos_loop_mapin, pos_name_desc, pos_feats]

    possibilities = [pos_rets, pos_selectors, pos_agg, pos_pert]

    s = 0
    for p in product(*possibilities):
        i_ret, sel, agg, pert = p
        ## Random exploration of parameters
        selected, indices = random_pos_space_exploration(pos_random)
        p_ind, m_ind, n_desc, i_feat = selected
        ## Classes renewal
        rets_cand = new_retrievers_creation()
        feats_cand = new_features_creation()
        # Retrievers
        ret = rets_cand[i_ret]
        feat = feats_cand[i_feat]

#        print indices
#        print p, selected
        ## Impossible cases
        checker1 = impossible_instantiation(selected, p, ret, feat)
        checker2 = compulsary_instantiation_errors(selected, p, ret, feat)
        if checker1 or checker2:
            continue
        ## Testing instantiation
        spdesc = SpatialDescriptorModel(retrievers=ret, featurers=feat,
                                        mapselector_spdescriptor=sel,
                                        pos_inputs=p_ind, map_indices=m_ind,
                                        perturbations=pert, aggregations=agg,
                                        name_desc=n_desc)
#        print s
        #### Function testing
        ## Auxiliary functions
        spdesc.add_perturbations(pert)
        spdesc.set_loop(p_ind, m_ind)
        spdesc._map_indices(spdesc, 0)
        for i in spdesc.iter_indices():
            methods = spdesc._get_methods(i)
            test_methods(methods, i)

        methods = spdesc._get_methods(0)
        test_methods(methods, 0)
        methods = spdesc._get_methods(10)
        test_methods(methods, 10)
        methods = spdesc._get_methods([0])
        test_methods(methods, [0])
        methods = spdesc._get_methods([0, 1, 2])
        test_methods(methods, [0, 1, 2])

        desc = spdesc._compute_descriptors(10)
        desc = spdesc._compute_descriptors([10])
        desc = spdesc._compute_descriptors([0, 1, 2])

        desc = spdesc.compute(10)
        desc = spdesc.compute([10])
        desc = spdesc.compute([0, 1, 2])
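        # compute accepts a single index, a one-element list or a list of
        # indices; only the call paths are exercised here, the returned
        # descriptors are not checked numerically.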

        # Retriever-driven computation
        aux_i = 0
        for desc_i, vals_i in spdesc.compute_nets_i():
            assert(len(desc_i) == len(vals_i))
            assert(len(desc_i) == spdesc.featurers.k_perturb+1)
            aux_i += 1
            if aux_i == 100:
                break
        aux_i = 0
        for desc_ik, vals_ik in spdesc.compute_net_ik():
            aux_i += 1
            if aux_i == 100:
                break

        ## Loops
#        for idx in spdesc.iter_indices():
#            break
#        for vals_ik, desc_ik in spdesc.compute_net_ik():
#            #assert(vals_ik)
#            #assert(desc_ik)
#            break
#        for desc_i, vals_i in spdesc.compute_net_i():
#            #assert(vals_ik)
#            #assert(desc_ik)
#            break

        ## Global computations
#        try:
#            signal.signal(signal.SIGALRM, halting_f)
#            signal.alarm(0.01)   # 0.01 seconds
#            spdesc.compute()
#        except Exception as e:
#            logi = e == "Not error time."
#            if not logi:
#                spdesc.compute()
#        try:
#            signal.signal(signal.SIGALRM, halting_f)
#            signal.alarm(0.01)   # 0.01 seconds
#            spdesc._compute_nets()
#        except Exception as e:
#            logi = e == "Not error time."
#            if not logi:
#                spdesc._compute_nets()
#        try:
#            ## Testing compute_retdriven
#            signal.signal(signal.SIGALRM, halting_f)
#            signal.alarm(0.01)   # 0.01 seconds
#            spdesc._compute_retdriven()
#        except Exception as e:
#            logi = e == "Not error time."
#            if not logi:
#                spdesc._compute_retdriven()
#        try:
#            logfile = Logger('logfile.log')
#            signal.signal(signal.SIGALRM, halting_f)
#            signal.alarm(0.01)   # 0.01 seconds
#            spdesc.compute_process(logfile, lim_rows=100000, n_procs=0)
#            os.remove('logfile.log')
#        except Exception as e:
#            os.remove('logfile.log')
#            logi = e == "Not error time."
#            if not logi:
#                spdesc.compute_process(logfile, lim_rows=100000, n_procs=0)

        ## Testing aggregations
        if len(spdesc.retrievers) == len(spdesc.featurers):
            spdesc.add_aggregations(aggregation_info)
        else:
            spdesc.add_aggregations(aggregation_info, ([0], [0]))
        s += 1

    feats1 = ImplicitFeatures(featsarr0)

    m_vals_i = np.random.randint(0, 5, 50)
    ret = CircRetriever(locs1, autolocs=locs_input, info_ret=3,
                        bool_input_idx=True)
    feat = FeaturesManager(feats1, maps_vals_i=m_vals_i, mode='sequential',
                           descriptormodels=None)
    spdesc = SpatialDescriptorModel(retrievers=ret, featurers=feat,
                                    mapselector_spdescriptor=None,
                                    perturbations=perturbation,
                                    aggregations=None, name_desc=n_desc)
    ## Complete processes
    spdesc.compute()
    logfile = Logger('logfile.log')
    spdesc.compute_process(logfile, lim_rows=100000, n_procs=0)
    os.remove('logfile.log')
    spdesc._compute_nets()
    spdesc._compute_retdriven()
    ## Model functions
    spdesc.fit(np.arange(20), np.random.random(20))
    spdesc.predict(np.arange(20))
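    # The fit/predict pair is exercised only for its call chain; the model
    # output itself is not checked.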

    ############
    ### Auxiliary functions
    ####
    spdesc = _spdesc_parsing_creation(ret, feat)
    assert(isinstance(spdesc, SpatialDescriptorModel))
    res = create_aggfeatures(spdesc, None)
    assert(isinstance(res, ExplicitFeatures))

    ###########################################################################
    ###########################################################################
    spdesc_temp = SpatioTemporalDescriptorModel(spdesc)
    indices = np.arange(10)
    y = np.random.random(10)
    spdesc_temp = spdesc_temp.fit(indices, y)
    spdesc_temp.predict(indices)
    spdesc_temp = SpatioTemporalDescriptorModel([spdesc, spdesc])
    indices = np.arange(20)
    y = np.random.random(20)
    spdesc_temp = spdesc_temp.fit(indices, y)
    spdesc_temp.predict(indices)