def run_no_neptune(head, tail):
    """Train and evaluate the convolutional network on an Ising dataset
    without any Neptune experiment tracking.

    Args:
        head: Directory part of the data path (unused here; kept so the
            signature matches the other run_* entry points).
        tail: Filename of the JSON dump loaded via ``IsingData.load_json``.

    Returns:
        Test-set accuracy as reported by ``model.evaluate``.
    """
    ttf = IsingData(train_ratio=1, test_ratio=0.5, validation_ratio=0.5)
    ttf.load_json(tail)
    (train_image, train_label), (test_image,
                                 test_label), (val_image,
                                               val_label) = ttf.get_data()

    # Reshape flat lattices into (N, size, size, 1) tensors for Conv2D.
    # (The unused `logdir` local from the original has been removed — no
    # TensorBoard callback is wired up in this variant.)
    train_image = train_image.reshape(
        (len(train_image), ttf.size, ttf.size, 1))
    test_image = test_image.reshape((len(test_image), ttf.size, ttf.size, 1))
    val_image = val_image.reshape((len(val_image), ttf.size, ttf.size, 1))

    model = get_convolutional_network(ttf.size, PARAMS['periodic_padding'])
    model.compile(optimizer=PARAMS['optimizer'],
                  loss=PARAMS['loss'],
                  metrics=PARAMS['metrics'])

    model.fit(train_image,
              train_label,
              epochs=PARAMS['epochs'],
              validation_data=(val_image, val_label),
              batch_size=PARAMS['batch_size'])
    # model.summary() prints the table itself and returns None, so the
    # original print(model.summary()) emitted a stray "None" line.
    model.summary()
    loss, acc = model.evaluate(test_image, test_label)
    print(f"Model accuracy: {acc}")
    return acc
# Exemplo n.º 2
# 0
def execute_feed_forward(head, tail, plotspectrum=True, runneptune=True, use_max=False):
    """Train the dense feed-forward network on an Ising dataset.

    Args:
        head: Directory part of the data path (unused; kept for signature
            consistency with the other entry points).
        tail: Filename of the JSON dump loaded via ``IsingData.load_json``.
        plotspectrum: If True, plot the energy spectrum and a sample of
            test predictions.
        runneptune: If True, log the run (images, metrics, weights) to a
            Neptune experiment.
        use_max: If True, return the best validation accuracy seen during
            training instead of the final test accuracy.

    Returns:
        Tuple ``(loss, accuracy)`` — test loss plus either the max
        validation accuracy (``use_max=True``) or the test accuracy.
    """
    neptune.init("OneOneFour/Ising-Model")
    neptune_tb.integrate_with_tensorflow()
    ttsg = IsingData(train_ratio=5)
    ttsg.load_json(tail)
    if runneptune:
        exp = neptune.create_experiment(name=f"DFFN on {ttsg.size}x{ttsg.size} on file {tail}", params=PARAMS)
    if plotspectrum:
        e_overlap = ttsg.plot_energy_spectrum(20, "energy_spectrum.png")
        #m_overlap = ttsg.plot_magnetization_spectrum(20, "magnetization_spectrum.png")
        if runneptune:
            # BUG FIX: the magnetization-spectrum plot call above is commented
            # out, yet the original still opened "magnetization_spectrum.png"
            # and sent the undefined name `m_overlap`, raising NameError (or
            # uploading a stale file). Only the energy spectrum is sent now.
            energy_spectrum_img = Image.open("energy_spectrum.png")
            exp.send_image("energy-spectrum", energy_spectrum_img)
            exp.send_metric("energy-overlap", e_overlap)

    (train_images, train_labels), (test_images, test_labels), (val_image, val_data) = ttsg.get_data()

    if PARAMS["randomize_spins"]:
        # Flip the sign of roughly half the lattices so the network cannot
        # key off the global spin orientation.
        train_images = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in train_images])
        test_images = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in test_images])
        val_image = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in val_image])

    # Map spins from {-1, +1} to {0, 1} for the activation functions.
    train_images = (train_images + 1) / 2
    test_images = (test_images + 1) / 2
    val_image = (val_image + 1) / 2

    callback = callbacks.TensorBoard(log_dir=f"logs\\ffn\\{datetime.now().strftime('%Y%m%d-%H%M%S')}")
    model, hist_dict = feed_forward(train_images, train_labels, val_image, val_data, callback, ttsg.size)

    if plotspectrum:
        pred_label = model.predict(test_images[:3])
        plot_row_with_prediction(test_images[:3], test_labels[:3], pred_label)
    max_acc = max(hist_dict["val_acc"])

    loss, acc = model.evaluate(test_images, test_labels)

    print(f"Model Accuracy on test set:{acc}")
    if runneptune:
        exp.send_artifact(tail)
        exp.send_text("test-accuracy", str(acc))
        exp.send_metric("max_acc", max_acc)
        exp.send_text("test-loss", str(loss))
        exp.send_text("file-name", tail)
        name = f"FFN_weights {datetime.now().strftime('%Y_%m_%d %H_%M')}.h5"
        model.save_weights(name)
        exp.send_artifact(name)
        exp.stop()
    if use_max:
        return loss, max_acc
    else:
        return loss, acc
def run_neptune(head, tail):
    """Train and evaluate the convolutional network, logging the run
    (TensorBoard events, metrics, weights) to a Neptune experiment.

    Args:
        head: Directory part of the data path (unused; kept for signature
            consistency with the other run_* entry points).
        tail: Filename of the JSON dump loaded via ``IsingData.load_json``.

    Returns:
        Test-set accuracy as reported by ``model.evaluate``.
    """
    neptune.init(project_qualified_name="OneOneFour/Ising-Model")
    neptune_tb.integrate_with_tensorflow()
    ttf = IsingData(train_ratio=1, test_ratio=0.5, validation_ratio=0.20)
    ttf.load_json(tail)
    (train_image, train_label), (test_image,
                                 test_label), (val_image,
                                               val_label) = ttf.get_data()

    # Reshape flat lattices into (N, size, size, 1) tensors for Conv2D.
    train_image = train_image.reshape(
        (len(train_image), ttf.size, ttf.size, 1))
    test_image = test_image.reshape((len(test_image), ttf.size, ttf.size, 1))
    val_image = val_image.reshape((len(val_image), ttf.size, ttf.size, 1))

    exp_name = f"Convolutional {tail} {datetime.now().strftime('%Y_%m_%d')}"
    with neptune.create_experiment(name=exp_name, params=PARAMS) as exp:
        logdir = "..\\logs\\fit\\" + datetime.now().strftime("%Y%m%d-%H%M%S")
        callback = TensorBoard(
            log_dir=logdir)  # Make sure to save callback as a regular variable
        # Hyper-parameters are read back from the experiment so Neptune's
        # record is the single source of truth; `metrics` round-trips as a
        # string and must be parsed back into a Python list.
        model = get_convolutional_network(
            ttf.size,
            exp.get_parameters()['periodic_padding'])
        model.compile(optimizer=exp.get_parameters()['optimizer'],
                      loss=exp.get_parameters()['loss'],
                      metrics=ast.literal_eval(
                          exp.get_parameters()['metrics']))

        model.fit(train_image,
                  train_label,
                  epochs=PARAMS['epochs'],
                  validation_data=(val_image, val_label),
                  callbacks=[callback],
                  batch_size=PARAMS['batch_size'])
        # model.summary() prints the table itself and returns None, so the
        # original print(model.summary()) emitted a stray "None" line.
        model.summary()
        loss, acc = model.evaluate(test_image, test_label)
        print(f"Model accuracy: {acc}")
        exp.send_text("test-accuracy", str(acc))
        exp.send_text("test-loss", str(loss))
        weights_name = f"convolutional_weights {datetime.now().strftime('%Y_%m_%d %H_%M')}.h5"
        model.save_weights(weights_name)
        exp.send_artifact(weights_name)
    return acc
# Exemplo n.º 4
# 0
def feed_forward_residual(head, tail):
    """Train a small feed-forward network with residual (skip) connections
    on an Ising dataset, logging the run to Neptune/TensorBoard.

    Args:
        head: Directory part of the data path (unused; kept for signature
            consistency with the other entry points).
        tail: Filename of the JSON dump loaded via ``IsingData.load_json``.

    Returns:
        Tuple ``(loss, accuracy)`` on the test set.
    """
    neptune.init("OneOneFour/Ising-Model")
    neptune_tb.integrate_with_tensorflow()

    ising_data = IsingData(train_ratio=5)
    ising_data.load_json(tail)

    (train_data, train_labels), (test_data, test_labels), (val_data, val_labels) = ising_data.get_data()

    if PARAMS["randomize_spins"]:
        # Flip the sign of roughly half the lattices so the network cannot
        # key off the global spin orientation.
        train_data = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in train_data])
        test_data = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in test_data])
        val_data = np.array([t * -1 if np.random.uniform(0, 1) > 0.5 else t for t in val_data])

    with neptune.create_experiment(name="Residual feed forward") as exp:
        tb_callback = callbacks.TensorBoard(log_dir=f"logs\\ffn\\{datetime.now().strftime('%Y%m%d-%H%M%S')}")
        # Renamed `input` -> `input_layer`: the original shadowed the builtin.
        input_layer = Input(shape=(ising_data.size, ising_data.size,))
        flatten = layers.Flatten()(input_layer)
        first = layers.Dense(20, activation="relu")(flatten)
        second = layers.Dense(20, activation="relu")(first)
        # Linear projection of `first` added to `second`: first skip connection.
        transformation = layers.Dense(20)(first)
        first_add = layers.add([transformation, second])
        third = layers.Dense(20, activation="relu")(first_add)
        # Second skip connection around the third dense layer.
        second_transformation = layers.Dense(20)(first_add)
        second_add = layers.add([third, second_transformation])
        dropout = layers.Dropout(0.3)(second_add)
        fourth = layers.Dense(1, activation="sigmoid")(dropout)
        model = models.Model(inputs=input_layer, outputs=fourth)

        model.compile(optimizer="sgd", loss="binary_crossentropy", metrics=["accuracy"])
        model.fit(train_data, train_labels, validation_data=(val_data, val_labels), epochs=50,
                  callbacks=[tb_callback])

        loss, acc = model.evaluate(test_data, test_labels)

        return loss, acc
# Exemplo n.º 5
# 0

def plot_image_with_nn(image, label, prediction):
    """Show a lattice image beside a bar chart of the network's prediction."""
    # Left panel: the raw lattice with its true label in the title.
    plt.subplot(1, 2, 1)
    plt.imshow(image)
    plt.title(f"Label Supercritical?:{bool(label)}")
    # Right panel: per-class scores, with the argmax verdict in the title.
    plt.subplot(1, 2, 2)
    best = np.argmax(prediction)
    verdict = 'Supercritical' if best else 'Subcritical'
    confidence = round(prediction[best] * 100)
    plt.title(f"Prediction:{verdict} ({confidence}%)")
    plt.bar(range(2), prediction)
    plt.show()


if __name__ == '__main__':
    # Load a previously generated Ising dump.
    # NOTE(review): the other entry points call load_json() and construct
    # IsingData with keyword ratios; this block calls load() with positional
    # args (0.9, 100) — confirm both APIs exist on IsingData.
    ttfing = IsingData(0.9, 100)
    ttfing.load(f"dumps/18-07-2019 15-29-58dump.json")

    # SECURITY: hard-coded Neptune API token committed to source.  This
    # credential should be revoked and read from the NEPTUNE_API_TOKEN
    # environment variable instead of being embedded here.
    neptune.init(
        api_token=
        'eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vdWkubmVwdHVuZS5tbCIsImFwaV9rZXkiOiJhNzRhMWY2Ni03YThiLTRmMWUtODlhNC0wMTFhZTYxNzY4YjYifQ==',
        project_qualified_name='oneonefour/Ising-Model')

    neptune.create_experiment()

    # NOTE(review): get_data() is unpacked into two splits here, while the
    # functions above unpack three (train/test/validation) — verify which
    # IsingData version this block was written against.
    (train_images, train_labels), (test_images,
                                   test_labels) = ttfing.get_data()

    # Normalise the spins which is important for activation functions
    # (maps spins from {-1, +1} to {0, 1}).
    train_images = (train_images + 1) / 2
    test_images = (test_images + 1) / 2
# Exemplo n.º 6
# 0
 def get_overlap(head, tail):
     data = IsingData()
     data.load_json(tail)
     return data.plot_energy_spectrum(), data.plot_magnetization_spectrum()
# Exemplo n.º 7
# 0
#         self.data = data
#         # Minimise langrangian L =  sum(lambda_i) - 1/2 sum_i sum_j lamda_i y_i x_i DOT lambda_j y_j x_j
#
#         opt_dict = {}  # Opt dict is of the form {||w|| : [w,b]} once optimised select w,b with minimal key val
#         # transforms = [
#         #     [1, 1], [-1, 1], [-1, -1], [1, -1]
#         # ] We can do better than this
#
#     # Features in this case will be 1/-1 values of the lattice sites
#     def predict(self, features):
#         # Get the sign of (x.w +b)
#         classification = np.sign(np.dot(np.array(features), self.w) + self.b)
#         return classification

if __name__ == '__main__':
    ttf = IsingData(train_ratio=5)
    file = input("Enter JSON file to load")
    head, tail = os.path.split(file)
    os.chdir(os.path.join(os.getcwd(),head))
    ttf.load_json(tail)
    (train_data, train_labels), (test_data, test_labels),(validation_data,validation_labels) = ttf.get_data()
    # svm = SVC(kernel="linear")

    # svm.fit(train_data, train_labels)
    energy_train_0 = [IsingLattice.energy_periodic(t, ttf.size) for i, t in enumerate(train_data) if
                      train_labels[i] == 0]
    magnetization_train_0 = [IsingLattice.cur_magnetization(t, ttf.size) for i, t in enumerate(train_data) if
                             train_labels[i] == 0]
    energy_train_1 = [IsingLattice.energy_periodic(t, ttf.size) for i, t in enumerate(train_data) if
                      train_labels[i] == 1]
    magnetization_train_1 = [IsingLattice.cur_magnetization(t, ttf.size) for i, t in enumerate(train_data) if