Code example #1 (score: 0)
File: generator.py — Project: Smily1984/PSC-RTE
def problem_generator_with_steady_modification_of_unique_constraint(
        problems, N, dev, non_fixed_var):
    """Build a dataset by sweeping one constraint's RHS linearly.

    The single non-fixed constraint (the only entry of ``non_fixed_var``)
    has its right-hand side moved through N evenly spaced values spanning
    the interval (rhs*(1-dev), rhs*(1+dev)], solving the problem at each
    step.

    problems: problem description(s) forwarded to ``lin_opt_pbs``.
    N: number of RHS samples to generate.
    dev: relative deviation defining the sweep interval.
    non_fixed_var: list containing exactly one constraint index.
    Returns a ``dataset`` pairing each RHS value with its solution(s).
    """
    assert (len(non_fixed_var) == 1)

    root = lin_opt_pbs(problems, non_fixed_var)
    base_rhs = root.prob_list[0].linear_constraints.get_rhs()
    root.set_deviation(dev)
    root.set_non_fixed_vars(non_fixed_var)

    # Work on a fresh CPLEX copy read back from the root problem's file so
    # the root problem itself is never modified during the sweep.
    work = lin_opt_pbs([cplex.Cplex()])
    work.prob_list[0].read(root.name_list[0])
    work.set_deviation(dev)
    work.set_non_fixed_vars(non_fixed_var)

    idx = non_fixed_var[0]
    rhs_values = []
    solutions = []
    for step in range(N):
        # Linear sweep: step 0 lands just above rhs*(1-dev); the final
        # step reaches rhs*(1+dev).
        value = base_rhs[idx] * (1 - dev) + (step + 1) * 2 * base_rhs[idx] * dev / N
        work.prob_list[0].linear_constraints.set_rhs([(idx, value)])
        rhs_values.append(value)
        solutions.extend(work.calculate_solutions())

    # Column vector of RHS values, as expected by the dataset constructor.
    return dataset(np.array(rhs_values).reshape(-1, 1), solutions)
Code example #2 (score: 0)
File: generator.py — Project: Smily1984/PSC-RTE
def problem_generator_y(problems, N, dev, non_fixed_vars=None, path=None):
    """Infinite generator yielding (RHS, solutions) batches of size N.

    Each batch draws problems uniformly at random from the pool, perturbs
    their RHS by a relative deviation ``dev`` and solves them.  The shape
    (an endless ``while True`` with ``yield``) suggests it is meant to feed
    a Keras-style ``fit_generator`` loop.

    problems: problem description(s) forwarded to ``lin_opt_pbs``.
    N: number of (RHS, solution) samples per yielded batch.
    dev: relative deviation applied when perturbing the RHS.
    non_fixed_vars: indices of the constraints allowed to vary (optional).
    path: directory containing the problem files (optional).
    """
    prob_root = lin_opt_pbs(problems, non_fixed_vars, path)
    prob_root.set_deviation(dev)
    K = len(prob_root.prob_list)  # size of the problem pool
    first = True  # scratch problem is created lazily on the first draw
    while True:
        rhs_list = []
        sol_list = []
        for i in range(N):
            if i > 0 and i % 100 == 0:
                print(i)  # progress trace every 100 solved problems
            ind = np.random.randint(K)
            if first:
                # One-time setup: read a scratch CPLEX copy of the first
                # randomly chosen problem and silence every CPLEX stream.
                # NOTE: deliberately inside the loop — it uses the first
                # randomly drawn ``ind``, so hoisting it would change the
                # RNG consumption order.
                prob_temp = lin_opt_pbs([cplex.Cplex()])
                prob_temp.prob_list[0].read(prob_root.name_list[ind])
                prob_temp.set_deviation(dev)
                prob_temp.set_non_fixed_vars(prob_root.get_non_fixed_vars())
                prob_temp.prob_list[0].set_log_stream(None)
                prob_temp.prob_list[0].set_error_stream(None)
                prob_temp.prob_list[0].set_warning_stream(None)
                prob_temp.prob_list[0].set_results_stream(None)
                first = False
            prob_root.modify_random_prob(ind, prob_temp)
            rhs_list.extend(prob_temp.extract_RHS())
            sol_list.extend(prob_temp.calculate_solutions())
        data = dataset(
            rhs_list, sol_list
        )  # write either dataset or dataset.dataset to create a new instance
        yield (data.get_RHS(), data.get_solutions())
Code example #3 (score: 0)
def problem_generator(problems, N, dev, non_fixed_vars=None, path=None):
    """Generate a dataset of N randomly perturbed problem instances.

    For each sample a problem is drawn uniformly at random from the pool,
    its RHS is perturbed (relative deviation ``dev``) and it is solved.

    problems: problem description(s) forwarded to ``lin_opt_pbs``.
    N: number of (RHS, solution) samples to produce.
    dev: relative deviation applied when perturbing the RHS.
    non_fixed_vars: indices of the constraints allowed to vary (optional).
    path: directory containing the problem files (optional).
    Returns a ``dataset`` with the collected RHS vectors and solutions.
    """
    root = lin_opt_pbs(problems, non_fixed_vars, path=path)
    root.set_deviation(dev)
    pool_size = len(root.prob_list)

    # Scratch CPLEX copy, re-read from the first problem's file, used as
    # the mutable target for each random perturbation.
    work = lin_opt_pbs([cplex.Cplex()])
    work.prob_list[0].read(root.name_list[0])
    work.set_deviation(dev)
    work.set_non_fixed_vars(root.get_non_fixed_vars())

    rhs_values = []
    solutions = []
    for _ in range(N):
        chosen = np.random.randint(pool_size)
        root.modify_random_prob(chosen, work)
        rhs_values.extend(work.extract_RHS())
        solutions.extend(work.calculate_solutions())

    return dataset(rhs_values, solutions)
Code example #4 (score: 0)
File: test_Adam.py — Project: tianzhou2011/PSC-RTE
# Script: train and evaluate a small neural network on a pre-generated
# dataset of linear-programming problems, using the default Adam optimizer.
from package_name.dataset import dataset
from package_name.NeuralNetwork import nn
import matplotlib.pyplot as plt
import tensorflow as tf

### Creating the data
# data = problem_generator(['petit_probleme.lp'], 100000, 0.1, [23, 24, 25])
problem_set_for_training = dataset("petits_problemes_N100-000_dev0.1")
# Split off 20% of the training problems for evaluation.
problem_set_for_evaluation = problem_set_for_training.cut(0.2)

### Creating the Neural Network
# One hidden layer of 4 units, no final activation, 20 training epochs.
layers_list, last_activation, epochs, neural_network = [4], None, 20, nn()
neural_network.basic_nn(layers_list, last_activation)
neural_network.set_loss("mean_absolute_percentage_error")
# NOTE(review): if nn.set_metrics wraps its argument in a list, passing a
# list here double-nests the metric name — verify against the nn class.
neural_network.set_metrics(["mean_absolute_percentage_error"])
# opt = tf.keras.optimizers.Nadam(learning_rate=0.001, beta_1=0.999, beta_2=0.9999, epsilon=1e-10, name='Nadam')
# neural_network.set_optimizer(opt)  # to adjust the optimizer's parameters
neural_network.set_optimizer("Adam")  # default Adam optimizer

neural_network.add_processing_linear_mean()

### Training the neural network
# Presumably: (dataset, epochs, validation split, verbosity) — confirm
# against nn.train_with's signature.
training_data = neural_network.train_with(problem_set_for_training, epochs,
                                          0.1, 1)

### Evaluating the neural network
evaluation_data = neural_network.predict(problem_set_for_evaluation)
training_data.hoped_precision = 0.001
evaluation_data.hoped_precision = 0.001

#print(training_data.history.history)
#training_histogram = training_data.precision_histogram("For training dataset : ")
Code example #5 (score: 0)
File: generator.py — Project: tianzhou2011/PSC-RTE
    # --- Fragment: tail of a generator function whose `def` line is not
    # shown in this excerpt (matches the loop body of
    # problem_generator_with_steady_modification_of_unique_constraint). ---
    j = non_fixed_var[0]  # index of the single non-fixed constraint

    for i in range(N):
        # Linear sweep of the RHS across (rhs*(1-dev), rhs*(1+dev)].
        new_value = rhs[j] * (1 - dev) + (i + 1) * 2 * rhs[j] * dev / N
        prob_temp.prob_list[0].linear_constraints.set_rhs([(j, new_value)])
        rhs_list.append(new_value)
        sol_list.extend(prob_temp.calculate_solutions())

    # Column vector of RHS values, as expected by the dataset constructor.
    rhs_list = np.array(rhs_list).reshape(-1, 1)
    data = dataset(
        rhs_list, sol_list
    )  # write either dataset or dataset.dataset to create a new instance
    return data


# Testing the methods defined above
# data = problem_generator_with_steady_modification_of_unique_constraint(['petit_probleme.lp'], 5000, 30, [25])
# print(data.get_RHS())
# print(data.get_solutions())
# data.sol_fct_of_RHS()

# Smoke test on the RTE data: generate one nearly-unperturbed sample from
# the problems in `path`, dump the resulting dataset to disk, reload it
# and inspect the stored RHS shape.
path = r"G://SSD-PSC//Data_RTE//hello//"
sample_set = problem_generator(listdir(path), 1, 0.000000000000001, path=path)
print(sample_set.get_RHS())
print(sample_set.get_solutions())
sample_set.dump_in_file("RTE")

reloaded = dataset("RTE")
print("resultat")
print(reloaded.get_RHS().shape)
Code example #6 (score: 0)
    def set_loss(self, loss_name):
        """Record the loss identifier (e.g. a Keras loss name) for compilation."""
        self.loss = loss_name

    def set_metrics(self, metrics_name):
        """Store the metric identifier, wrapped in a single-element list.

        NOTE(review): callers passing an already-listed metric would get a
        nested list — confirm callers pass a bare name.
        """
        self.metrics = [metrics_name]

    def get_validation_acc(self):
        """Return the best validation accuracy observed during training.

        NOTE(review): assumes self.fit() returns an object with a
        Keras-style ``.history`` dict containing a 'val_acc' series —
        confirm against the training code (newer Keras uses
        'val_accuracy').
        """
        return np.amax(self.fit().history['val_acc'])

    def get_model(self):
        """Return a hyperopt-style trial result for this model.

        'loss' is the negated best validation accuracy (hyperopt minimizes
        the loss), 'status' marks the trial successful and 'model' exposes
        the trained model object.
        """
        return {
            # Bug fix: the method must be CALLED — the original negated the
            # bound method object itself (`-self.get_validation_acc`), which
            # raises TypeError; hyperopt expects a numeric 'loss'.
            'loss': -self.get_validation_acc(),
            'status': STATUS_OK,
            'model': self.model
        }


# Script: run a hyperas/hyperopt search over a basic neural network
# trained on a stored dataset of small problems.
dataSet = dataset.dataset("petits_problemes_1-000")
# Presumably splits the dataset into train/test arrays with a 0.3 test
# fraction — confirm against get_data's definition.
trainAndTestData = get_data(dataSet, 0.3)
myOptimiser = optimiser(dataset=dataSet,
                        proportion=0.3,
                        model=nn_hyperas(trainAndTestData[0],
                                         trainAndTestData[1],
                                         trainAndTestData[2],
                                         trainAndTestData[3],
                                         batch_choice=[64, 128]).basic_nn(
                                             [10, 10]).get_model())

myOptimiser.optimise_nn()