Example #1
    def start(self, x, y, **kwargs):
        '''Start the scan. Note that you can pass any `Scan()` argument here,
        just as you would when using `Scan()` directly.

        `x` | array or list of arrays | prediction features
        `y` | array or list of arrays | prediction outcome variable
        `kwargs` | arguments | any `Scan()` argument can be passed here

        '''

        import talos

        m = talos.autom8.AutoModel(self.task, self.experiment_name).model

        if 'params' in kwargs:
            scan_object = talos.Scan(x,
                                     y,
                                     model=m,
                                     experiment_name=self.experiment_name,
                                     **kwargs)
        else:
            p = talos.autom8.AutoParams(task=self.task)

            if self.max_param_values is not None:
                p.resample_params(self.max_param_values)
            params = p.params
            scan_object = talos.Scan(x=x,
                                     y=y,
                                     params=params,
                                     model=m,
                                     experiment_name=self.experiment_name,
                                     **kwargs)

        return scan_object
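A usage sketch for the `start()` method above (not part of the original example): it assumes the enclosing class is `talos.autom8.AutoScan` and that its constructor takes the `task` and `experiment_name` attributes referenced in the method; the constructor signature and the task string are assumptions, not verified against the library.

# Hypothetical usage of start(); constructor arguments are inferred from the
# attributes the method uses and are not verified against talos.
import talos

x, y = talos.templates.datasets.iris()

auto = talos.autom8.AutoScan(task='multi_label',  # task string is an assumption
                             experiment_name='demo')

# no 'params' keyword, so start() builds the space with AutoParams;
# any other keyword argument (here fraction_limit) is forwarded to Scan()
scan_object = auto.start(x, y, fraction_limit=0.1)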
Example #2
def minimal():
    x, y = ta.templates.datasets.iris()

    p = {
        'activation': ['relu', 'elu'],
        'optimizer': ['Nadam', 'Adam'],
        'losses': ['logcosh'],
        'hidden_layers': [0, 1, 2],
        'batch_size': [20, 30, 40],
        'epochs': [10, 20]
    }

    def iris_model(x_train, y_train, x_val, y_val, params):

        model = Sequential()
        model.add(Dense(32, input_dim=4, activation=params['activation']))
        model.add(Dense(3, activation='softmax'))
        model.compile(optimizer=params['optimizer'], loss=params['losses'])

        out = model.fit(x_train,
                        y_train,
                        verbose=0,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        validation_data=[x_val, y_val])

        return out, model

    scan_object = ta.Scan(x, y, model=iris_model, params=p)

    return scan_object
Example #3
def NeuralNetScan(DataSet, Y):
    seed = 7
    test_size = 0.33
    X_train, X_test, y_train, y_test = train_test_split(DataSet,
                                                        Y,
                                                        test_size=test_size,
                                                        random_state=seed)
    sc = StandardScaler()
    sc.fit(X_train)
    X_train = sc.transform(X_train)
    X_test = sc.transform(X_test)
    p = {
        'lr': (0.5, 5, 10),
        'first_neuron': [4, 8, 16, 32, 64],
        'hidden_layers': [0, 1, 2],
        'epochs': [5],
        'weight_regulizer': [None],
        'emb_output_dims': [None],
        'shape': ['brick'],
        'optimizer': ['Adam'],
        'losses': ['binary_crossentropy'],
        'activation': ['relu', 'tanh'],
        'last_activation': ['sigmoid']
    }
    t = ta.Scan(x=X_train,
                y=y_train,
                x_val=X_test,
                y_val=y_test,
                model=NeuralNetwork,
                params=p,
                experiment_name='Particle_DataSet')
    BestParameters = t.data.sort_values(by='val_acc', ascending=False).iloc[0]
    return t, BestParameters
Example #4
def do_nn2_talos(x_train, x_test, y_train, y_test):
    p = {
        'learning_rate': [0.2, 0.15, 0.1, 0.05],
        'n_nodes': [90, 100, 110],
        'batch_size': [80, 90, 100, 110, 120]
    }

    def create_nn2_hyperas(x_train, x_test, y_train, y_test, params):
        seq = Sequential()
        seq.add(Dense(units=params['n_nodes'], kernel_initializer="uniform", activation="relu", input_dim=11))
        seq.add(Dense(units=1, kernel_initializer="uniform", activation="sigmoid"))
        seq.compile(optimizer=Adam(lr=params['learning_rate']), loss="binary_crossentropy", metrics=['accuracy'])  # vs categorical_crossentropy

        epochs = 200
        history = seq.fit(x_train, y_train,
                          verbose=False,
                          batch_size=params["batch_size"],
                          epochs=epochs,
                          validation_data=[x_test, y_test],
                          callbacks=[early_stopper(epochs, mode=[0,50])]
                          )

        return history, seq

    result = talos.Scan(x_train.values, y_train.values, x_val=x_test.values, y_val=y_test.values, model=create_nn2_hyperas, params=p)
    return result
Example #5
def run_custom_model(my_training_batch_generator, my_val_batch_generator,
                     input_shape):
    params = set_params(input_shape)
    verbose = True
    round_limit = 2  # NOTE Set this to however many rounds you want to test with

    model = custom_model.create_model(my_training_batch_generator,
                                      my_val_batch_generator, verbose)

    dummyX, dummyY = my_training_batch_generator.__getitem__(0)
    testX, testY = my_val_batch_generator.__getitem__(0)
    my_val_batch_generator.on_epoch_end()
    tt = talos.Scan(x=dummyX,
                    y=dummyY,
                    params=params,
                    model=model,
                    x_val=testX,
                    y_val=testY,
                    experiment_name='example.csv',
                    print_params=True,
                    round_limit=round_limit)

    # print(vars(tt), dir(tt))
    # print(tt)
    t = project_object(tt, 'params', 'saved_models', 'saved_weights', 'data',
                       'details', 'round_history')
    save_object(t, 'example.pickle')
Example #6
def Optimization():
    scan_object = ta.Scan(x=x_train,
                          y=y_train,
                          params=parameters,
                          model=pet_finder_model,
                          val_split=0,
                          experiment_name='pet_finder')
    # Evaluate
    analyze_object = ta.Analyze(scan_object)
    scan_data = analyze_object.data

    # heatmap correlation
    analyze_object.plot_corr('val_accuracy', ['accuracy', 'loss', 'val_loss'])

    # a four dimensional bar grid
    ast.bargrid(scan_data,
                x='lr',
                y='val_accuracy',
                hue='num_Nodes',
                row='loss_function',
                col='dropout')
    list_of_parameters = analyze_object.table('val_loss',
                                              ['accuracy', 'loss', 'val_loss'],
                                              'val_accuracy')
    return list_of_parameters
Example #7
    def _train_auto_model(self):
        # Param space for Talos model to search through
        # Tuples values are a range, Lists are choice options
        p = {
            'lr': (0.5, 5, 10),
            'first_neuron': [24, 48, 96],
            'e_size': [128, 300],
            'h_size': [32, 64],
            'hidden_layers': [2, 3],
            'activation': [relu],
            'batch_size': [64],
            'epochs': [2, 5],
            'dropout': (0, 0.20, 10),
            'optimizer': [Adam],
            'seq_len': [self.seq_len],
            'vocab_size': [self.vocab_size],
            'last_activation': [sigmoid]
        }

        # Talos scan that will find the best model with the parameters above
        h = ta.Scan(self.X_train, self.y_train,
            params=p,
            model=self.model,
            grid_downsample=self.grid_downsample_amount,
            val_split=0, # Zerofy val_split in Talos, because we pass in our own val data
            x_val=self.X_val,
            y_val=self.y_val
        )

        # Get only the model from the Talos experiment
        best_model_index = best_model(h, 'val_acc', False)
        self.model = activate_model(h, best_model_index)
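As an aside on the convention noted in the comment above (tuple values are a range, lists are choice options): a Talos tuple such as (0.5, 5, 10) is expanded into roughly evenly spaced candidate values between the first two numbers, while a list is used exactly as given. A rough, illustrative sketch of that expansion:

import numpy as np

# roughly what a tuple range such as 'lr': (0.5, 5, 10) amounts to:
# ten evenly spaced candidate values between 0.5 and 5 (exact spacing
# depends on Talos internals, so treat this as an approximation)
lr_candidates = np.linspace(0.5, 5, 10)

# a list such as 'first_neuron': [24, 48, 96] is used exactly as listed
first_neuron_candidates = [24, 48, 96]

print(lr_candidates, first_neuron_candidates)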
Example #8
def minimal():
    x, y = talos.templates.datasets.iris()

    p = {
        'activation': ['relu', 'elu'],
        'optimizer': ['Nadam', 'Adam'],
        'losses': ['binary_crossentropy'],
        'batch_size': [20, 30, 40],
        'epochs': [10, 20]
    }

    def iris_model(x_train, y_train, x_val, y_val, params):
        model = Sequential()
        model.add(Dense(32, input_dim=4, activation=params['activation']))
        model.add(Dense(3, activation='softmax'))
        model.compile(optimizer=params['optimizer'], loss=params['losses'])

        out = model.fit(x_train,
                        y_train,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        validation_data=[x_val, y_val],
                        verbose=0)

        return out, model

    scan_object = talos.Scan(x,
                             y,
                             model=iris_model,
                             params=p,
                             experiment_name='iris',
                             fraction_limit=0.1)

    return scan_object
Example #9
def test_latest():

    print('\n >>> start Latest Features... \n')

    import talos
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense

    x, y = talos.templates.datasets.iris()

    p = {
        'activation': ['relu', 'elu'],
        'optimizer': ['Nadam', 'Adam'],
        'losses': ['logcosh'],
        'shapes': ['brick'],
        'first_neuron': [16, 32, 64, 128],
        'hidden_layers': [0, 1, 2, 3],
        'dropout': [.2, .3, .4],
        'batch_size': [20, 30, 40, 50],
        'epochs': [10]
    }

    def iris_model(x_train, y_train, x_val, y_val, params):

        model = Sequential()
        model.add(
            Dense(params['first_neuron'],
                  input_dim=4,
                  activation=params['activation']))

        talos.utils.hidden_layers(model, params, 3)

        model.add(Dense(3, activation='softmax'))
        model.compile(optimizer=params['optimizer'],
                      loss=params['losses'],
                      metrics=['acc'])

        out = model.fit(x_train,
                        y_train,
                        callbacks=[
                            talos.utils.ExperimentLogCallback(
                                'testing_latest', params)
                        ],
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        validation_data=[x_val, y_val],
                        verbose=0)

        return out, model

    scan_object = talos.Scan(x,
                             y,
                             model=iris_model,
                             params=p,
                             experiment_name='testing_latest',
                             round_limit=5,
                             reduction_method='gamify',
                             save_weights=False)

    print('finished Latest Features \n')
Example #10
    def optimize(self):
        x, y = self.ImageData.images, self.ImageData.labels

        def model(x_train, y_train, x_val, y_val, params):
            # use the per-round permutation Talos passes in as `params`,
            # not the full parameter space stored on self
            rcnn = RCNN(ImageData=self.ImageData,
                        loss=params['loss'],
                        opt=params['opt'],
                        lr=lr_normalizer(params['lr'], params['opt']),
                        seed=self.seed,
                        verbose=0)

            return rcnn.train(epochs=params['epochs'],
                              batch_size=params['batch_size'],
                              split_size=params['split_size'],
                              checkpoint_path=None,
                              early_stopping=False,
                              verbose=0)

        self.scan = talos.Scan(x,
                               y,
                               params=self.params,
                               model=model,
                               experiment_name='rcnn',
                               fraction_limit=.001)
        return self.scan
Example #11
def test_predict():

    print("\n >>> start Predict()...")

    import sys
    sys.path.insert(0, '/Users/mikko/Documents/GitHub/talos')
    import talos

    x, y = talos.templates.datasets.iris()
    p = talos.templates.params.iris()
    model = talos.templates.models.iris

    x = x[:50]
    y = y[:50]

    scan_object = talos.Scan(x=x,
                             y=y,
                             params=p,
                             model=model,
                             experiment_name='test_iris',
                             round_limit=2)

    predict = talos.Predict(scan_object)

    _preds = predict.predict(x, 'val_acc', False)
    _preds = predict.predict_classes(x, 'val_acc', False)

    print('finished Predict() \n')
Example #12
    def run(self):
        dataset = keras.datasets.mnist
        (train_images, train_labels), (test_images,
                                       test_labels) = dataset.load_data()

        # standardize the pixel values from 0-255 to 0-1
        train_images = util.reshapeSet(train_images)
        test_images = util.reshapeSet(test_images)

        train_labels = tfku.to_categorical(train_labels, 10)
        test_labels = tfku.to_categorical(test_labels, 10)

        train_labels = np.asarray(train_labels)
        test_labels = np.asarray(test_labels)

        if (self.verbose):
            print("Train set size: %s" % str(train_images.shape))
            print("Train set labels size: %s" % str(train_labels.shape))
            print("Test set size: %s" % str(test_images.shape))
            print("Test set labels size: %s" % str(test_labels.shape))
            print("")
            print("First train object <%s>" % train_labels[0])
            print("First test object <%s>" % test_labels[0])
            print("")

        return talos.Scan(train_images,
                          train_labels,
                          model=mnist_model,
                          params=self.params,
                          x_val=test_images,
                          y_val=test_labels,
                          experiment_name=self.name)
Example #13
def talos_version():

    def mnist_model(x_train, y_train, x_val, y_val, params):

        model = Sequential()
        model.add(Conv2D(32, kernel_size=(3, 3), activation=params['activation'], input_shape=(28, 28, 1)))
        model.add(Flatten())
        model.add(Dense(128, activation=params['activation']))
        model.add(Dropout(params['dropout']))
        model.add(Dense(10, activation='softmax'))

        model.compile(optimizer=params['optimizer'],
                      loss=params['losses'],
                      metrics=['acc', talos.utils.metrics.F1Score()])

        out = model.fit_generator(SequenceGenerator(x_train,
                                                    y_train,
                                                    batch_size=params['batch_size']),
                                  epochs=params['epochs'],
                                  validation_data=[x_val, y_val],
                                  callbacks=[],
                                  workers=4,
                                  verbose=0)

        return out, model

    scan_object = talos.Scan(x=x_train,
                             y=y_train,
                             x_val=x_val,
                             y_val=y_val,
                             params=p,
                             model=mnist_model,
                             experiment_name='mnist',
                             save_weights=False)
Example #14
def get_best_model(x_train, y_train, **kwargs):
    np.random.seed(7)
    y_pred = kwargs['primal_data']['y_pred']
    params = kwargs['params']

    params['model_name'] = [kwargs['primal_data']['model_name']]
    kwargs.setdefault('dataset_name', 'talos_readings')
    kwargs.setdefault('experiment_no', '1')
    dataset_name = kwargs['dataset_name']
    experiment_no = kwargs['experiment_no']
    val_metric = kwargs['val_metric']
    metric = kwargs['metric']

    for name, value in params.items():
        if type(value) != list:
            params[name] = [value]

    h = ta.Scan(x_train,
                y_pred,
                params=params,
                dataset_name=dataset_name,
                experiment_no=experiment_no,
                model=talos_model,
                grid_downsample=0.5)

    report = h.data
    best_model = report.sort_values(val_metric, ascending=True).iloc[0]
    best_model_id = best_model.name - 1
    dnn_model = activate_model(h, best_model_id)
    loss = best_model.losses
    epochs = int(best_model.epochs)
    batch_size = int(best_model.batch_size)
    optimizer = best_model.optimizer
    dnn_model.compile(optimizer=optimizer, loss=loss, metrics=[metric])
    return dnn_model, epochs, batch_size
Example #15
def hyperparameter_exploration(data, name, num):
    """Hyperparameter exploration with TALOS.

    This function explores different hyperparameter combinations with the framework TALOS.

    Args:
        data -- a dictionary with the training, testing and validation data
        name -- name of the experiment
        num -- number of the experiment
    """
    logger = logging.getLogger('RNN-SA.main.hyperparameter_exploration')
    logger.info("Doing hyperparameter exploration...")
    start_time = time.time()

    talos.Scan(
        x=data['train_X'],  # prediction features
        y=data['train_y'],  # prediction outcome variable
        params=params.hparams_talos,  # the parameter dictionary
        model=ml_models.LSTM_model,  # the Keras model as a function
        dataset_name=name,  # used for experiment log
        experiment_no=num,  # used for experiment log
        x_val=data['val_X'],  # validation data for x
        y_val=data['val_y'],  # validation data for y
        # grid_downsample=0.1,  # a float to indicate fraction for random sampling
        print_params=True,  # print each permutation hyperparameters
    )

    end_time = time.time()
    logger.info("Finished hyperparameter exploration!")
    logger.info("Time elapsed: %f s \n", end_time - start_time)
Example #16
    def explore(self, params):
        '''
        Explores hyperparameter space and outputs diagnostic plots to file
        :param params: A dictionary of hyperparameters to explore
        :return: The talos scan history
        '''

        dir = os.path.join(os.path.dirname(script_dir), hyprOutputDir)
        if not os.path.isdir(dir):
            os.makedirs(dir)

        dir = os.path.join(dir, self.name)
        if not os.path.isdir(dir):
            os.makedirs(dir)

        name = os.path.join(dir, self.name)
        x = self.reshape(self.data.getFullX())
        y = self.data.getFullY()
        h = ta.Scan(x,
                    y,
                    params=params,
                    model=exploration_model,
                    dataset_name=name,
                    experiment_no='1',
                    grid_downsample=.1)

        return h
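A usage sketch for explore(): `wrapper` below is a hypothetical instance of the enclosing class providing the `name`, `data` and `reshape` members used above, and the parameter dictionary keys are illustrative; they must line up with whatever exploration_model reads.

# illustrative parameter space; keys must match what exploration_model expects
params = {
    'first_neuron': [32, 64],
    'hidden_layers': [1, 2],
    'batch_size': [16, 32],
    'epochs': [10],
    'dropout': [0.0, 0.25],
}

# `wrapper` is a hypothetical instance of the class this method belongs to
h = wrapper.explore(params)
print(h.data.head())  # the returned Scan object exposes results as a DataFrame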
Example #17
def run_search_cnn(model=search_cnn):
    x_train, y_train, x_test, y_test = load_data()
    x_train = np.reshape(x_train, (3823, 64, 1))
    x_test = np.reshape(x_test, (1797, 64, 1))
    p = {
        'lr': [0.5, 5],
        'kernel': [3, 5],
        'batch_size': [32, 16],
        'epochs': [100],
        'dropout': [0, 0.5],
        'activation': ['relu', 'tanh'],
        'optimizer': [Adam, Adagrad, RMSprop],
        'losses': ['mse', 'categorical_crossentropy'],
        'last_activation': ['softmax']
    }
    # p = {'lr': [0.5, 5],
    #  'kernel':[3, 5],
    #  'batch_size': [32, 16],
    #  'epochs': [100],
    #  'dropout': [0, 0.5],
    #  'weight_regulizer':[None],
    #  'emb_output_dims': [None],
    #  'shape':['brick','long_funnel'],
    #  'optimizer': [Adam, Adagrad, RMSprop],
    #  'losses': ['mse', 'categorical_crossentropy'],
    #  'activation':['relu'],
    #  'last_activation': ['softmax']}
    t = ta.Scan(x=x_train,
                y=y_train,
                model=model,
                experiment_name="cnn",
                params=p)
    return t
Example #18
def test_reducers():

    print("\n >>> start reducers...")

    import talos

    x, y = talos.templates.datasets.iris()
    p = talos.templates.params.iris()
    model = talos.templates.models.iris

    x = x[:50]
    y = y[:50]

    for strategy in [
            'trees', 'forrest', 'correlation', 'gamify', 'local_strategy'
    ]:

        talos.Scan(x=x,
                   y=y,
                   params=p,
                   model=model,
                   experiment_name='test_iris',
                   round_limit=2,
                   reduction_method=strategy,
                   reduction_interval=1)

    print('finished reducers \n')
Example #19
def minimal():
    x, y = ta.datasets.iris()

    p = {
        'activation': ['relu', 'elu'],
        'optimizer': ['Nadam', 'Adam'],
        'losses': ['logcosh'],
        'hidden_layers': [0, 1, 2],
        'batch_size': [20, 30, 40],
        'epochs': [10, 20]
    }

    def iris_model(x_train, y_train, x_val, y_val, params):
        model = Sequential()
        model.add(Dense(32, input_dim=4, activation=params['activation']))
        model.add(Dense(3, activation='softmax'))
        model.compile(optimizer=params['optimizer'], loss=params['losses'])

        out = model.fit(x_train,
                        y_train,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        validation_data=[x_val, y_val])

        return out, model

    scan_object = ta.Scan(x,
                          y,
                          model=iris_model,
                          params=p,
                          grid_downsample=0.1)

    return scan_object
Example #20
def test_templates():

    print("\n >>> start templates ...")

    import talos

    x, y = talos.templates.datasets.titanic()
    x = x[:50]
    y = y[:50]
    model = talos.templates.models.titanic
    p = talos.templates.params.titanic()
    talos.Scan(x, y, p, model, 'test', round_limit=2)

    x, y = talos.templates.datasets.iris()
    x = x[:50]
    y = y[:50]
    model = talos.templates.models.iris
    p = talos.templates.params.iris()
    talos.Scan(x, y, p, model, 'test', round_limit=2)

    x, y = talos.templates.datasets.cervical_cancer()
    x = x[:50]
    y = y[:50]
    model = talos.templates.models.cervical_cancer
    p = talos.templates.params.cervical_cancer()
    talos.Scan(x, y, p, model, 'test', round_limit=2)

    x, y = talos.templates.datasets.breast_cancer()
    x = x[:50]
    y = y[:50]
    model = talos.templates.models.breast_cancer
    p = talos.templates.params.breast_cancer()
    talos.Scan(x, y, p, model, 'test', round_limit=2)

    x, y = talos.templates.datasets.icu_mortality(50)
    x, y = talos.templates.datasets.telco_churn(.3)
    x, y, x1, y1 = talos.templates.datasets.mnist()
    x, y = talos.templates.datasets.breast_cancer()
    x, y = talos.templates.datasets.cervical_cancer()
    x, y = talos.templates.datasets.titanic()

    talos.templates.pipelines.breast_cancer(random_method='quantum')
    talos.templates.pipelines.cervical_cancer(random_method='sobol')
    talos.templates.pipelines.iris(random_method='uniform_crypto')
    talos.templates.pipelines.titanic(random_method='korobov_matrix')

    print("finish templates \n")
Example #21
def run_optimize(x, y, mode='auto', params=None, grid_downsample=1, epochs=10):

    if mode == 'auto':
        params = default_params()
        params['epochs'] = [epochs]
        scan_object = ta.Scan(x,
                              y,
                              model=simple_neural_net,
                              params=params,
                              grid_downsample=grid_downsample)
    else:
        scan_object = ta.Scan(x, y, model=simple_neural_net, params=params)

    cols = ['val_acc', 'val_loss', 'acc', 'loss']
    scan_object.data[cols] = scan_object.data[cols].astype(float).round(3)

    return scan_object.data
Example #22
 def search(self, params):
     t = ta.Scan(
         x=np.zeros((16, 5, 20)),  # Add dummy X or talos crashes
         y=np.zeros((16, 4)),  # Same for y
         model=self.trainTalos,
         params=params,
         experiment_no="1",
     )
     return t
Example #23
def iris_pipeline(round_limit=5, random_method='uniform_mersenne'):
    '''Performs a Scan with Iris dataset and simple dense net'''

    scan_object = ta.Scan(iris()[0],
                          iris()[1],
                          iris_params(),
                          iris_model,
                          round_limit=round_limit)

    return scan_object
Example #24
def titanic_pipeline(round_limit=2, random_method='uniform_mersenne'):
    '''Performs a Scan with Titanic dataset and simple dense net'''

    scan_object = ta.Scan(titanic()[0][:50],
                          titanic()[1][:50],
                          titanic_params(),
                          titanic_model,
                          round_limit=round_limit)

    return scan_object
Example #25
 def grid_search(self, trainX, trainy, testX, testy, params):
     t = ta.Scan(x=trainX,
                 y=trainy,
                 x_val=testX,
                 y_val=testy,
                 model=self.evaluate_params(),
                 grid_downsample=0.01,
                 params=params,
                 dataset_name='svmONOFF',
                 experiment_no='1')
Example #26
def titanic(round_limit=2, random_method='uniform_mersenne'):
    '''Performs a Scan with Titanic dataset and simple dense net'''
    import talos as ta
    scan_object = ta.Scan(ta.templates.datasets.titanic()[0][:50],
                          ta.templates.datasets.titanic()[1][:50],
                          ta.templates.params.titanic(),
                          ta.templates.models.titanic,
                          round_limit=round_limit)

    return scan_object
Example #27
def cervical_cancer(round_limit=2, random_method='uniform_mersenne'):
    '''Performs a Scan with Cervical Cancer dataset and simple dense net'''
    import talos as ta
    scan_object = ta.Scan(ta.templates.datasets.cervical_cancer()[0],
                          ta.templates.datasets.cervical_cancer()[1],
                          ta.templates.params.cervical_cancer(),
                          ta.templates.models.cervical_cancer,
                          'test',
                          round_limit=round_limit)

    return scan_object
Example #28
def _do_scan(data, params, name):
    x_train, x_test, y_train, y_test = data
    print(params)
    talos.Scan(model=mnist_model,
               x=x_train,
               y=y_train,
               x_val=x_test,
               y_val=y_test,
               params=params,
               dataset_name=name,
               print_params=True,
               clear_tf_session=False)
Example #29
def run_experiment(time_sec, params, outfile):
    X, y = preprocessing('grade_model_training/data/deep_learn_data.csv')

    # Set scan parameters
    scan_object = ta.Scan(X,
                          y,
                          model=import_model,
                          params=params,
                          dataset_name="grade_model_training/output/results",
                          experiment_no=outfile,
                          grid_downsample=.5)

    return scan_object
Example #30
 def run_talosScan(self, fold, last_epoch_value=True, experiment_no="1"):
     # and run the experiment
     self.boolean = True
     t = ta.Scan(x=self.x_train[fold],
                 y=self.y_train[fold],
                 model=self.model,
                 params=self.params,
                 x_val=self.x_val[fold],
                 y_val=self.y_val[fold],
                 last_epoch_value=last_epoch_value,
                 experiment_no=experiment_no,
                 search_method='linear')
     return t