Example #1
0
def main():
    """Run the triangular-network analysis for each cc2ai molecule.

    Appends a timestamped header and per-molecule results to a daily log
    file under cc2ai/results/.
    """

    # Basis dimension for each molecule (used to size the network layers).
    basis_sizes = {"ethan": 58, "ethen": 48, "ethin": 38}

    log_file = "cc2ai/results/" + str(date.today()) + ".log"
    with open(log_file, "a+") as log:
        log.write("##### Analysis of " + str(datetime.now()) + " #####\n")

    for molecule, n_basis in basis_sizes.items():
        # Number of independent entries of a symmetric n x n matrix.
        triu_dim = int(n_basis * (n_basis + 1) / 2)

        msg.info("Starting " + molecule, 2)

        msg.info("Fetch data ...", 2)
        dataset = fetch_dataset(
            "cc2ai/" + molecule + "/dataset_" + molecule + "_6-31g**.npy",
            n_basis
        )
        # Entries from index 150 on are used here — presumably the
        # earlier ones form the training split; TODO confirm.
        molecules = np.load(
            "cc2ai/" + molecule + "/molecules_" + molecule + "_6-31g**.npy"
        )[150:]

        msg.info("Setup trainer ...", 2)
        trainer = Trainer(
            EluTrNNN([triu_dim, triu_dim, triu_dim]),
            cost_function=MSE()
        )
        trainer.setup()

        msg.info("Setup analyzer ...", 2)
        analyzer = TriuNetworkAnalyzer(trainer, n_basis, 30)

        msg.info("Do measurements ...", 2)
        results = analyzer.measure(dataset, molecules)

        msg.info("Plot results ...", 2)

        with open(log_file, "a+") as log:
            log.write(analyzer.make_results_str(results))
def main():
    """Train an idempotency-penalized ELU network on the butadien dataset."""

    S, P = np.load("butadien/data/dataset.npy")
    dataset = Dataset(S, P, split_test=0.25)

    # NOTE(review): `dim` is not defined in this function — presumably a
    # module-level basis-dimension constant; verify before reuse.
    structure = [dim**2, 200, 100, dim**2]
    trainer = Trainer(
        EluTrNNN(structure, log_histograms=True),
        cost_function=IdempotencyPenalty(coupling=1e-6),
        optimizer=tf.train.AdamOptimizer(learning_rate=5e-3)
    )
    trainer.setup()

    network_idem, sess_idem = trainer.train(
        dataset,
        convergence_threshold=1e-5,
        #summary_save_path="butadien/log/idem"
    )
    graph_idem = trainer.graph

    # Evaluate the idempotency error on the test inputs.
    with trainer.graph.as_default():
        error_node = trainer.cost_function.idempotency_error(network_idem)
        error_val = sess_idem.run(
            error_node, {network_idem.input_tensor: dataset.testing[0]}
        )

    msg.info("Achieved idempotency error: " + str(error_val), 2)
def main():
    """Train a SELU network with an idempotency penalty on butadien data.

    Loads the overlap/density dataset, wipes any previous TensorBoard
    summary directory, trains the network, and reports the absolute and
    idempotency errors on the test set.
    """

    S, P = np.load("butadien/data/dataset.npy")

    dataset = Dataset(S, P, split_test=0.25)

    save_path = "butadien/scripts/log/idem"

    # Start from a clean summary directory. ignore_errors=True also covers
    # the directory not existing yet — replaces a bare `except: pass`,
    # which silently swallowed *every* exception.
    rmtree(save_path, ignore_errors=True)

    # NOTE(review): `dim` is not defined locally — presumably a
    # module-level basis-dimension constant.
    trainer = Trainer(
        SeluTrNNN(
            [dim**2, 700, 700, dim**2],
            log_histograms=True
        ),
        #error_function=AbsoluteError(),
        #cost_function=RegularizedMSE(alpha=1e-7),
        cost_function=IdempotencyPenalty(
            dataset.inverse_input_transform,
            coupling=1e-5
        ),
        #optimizer=tf.train.AdamOptimizer(learning_rate=1e-3)
    )

    trainer.setup()
    network, sess = trainer.train(
        dataset,
        convergence_threshold=1e-6,
        summary_save_path=save_path,
        mini_batch_size=15
    )

    with trainer.graph.as_default():
        # Placeholder for the reference outputs of the test set.
        y = tf.placeholder(
                dtype="float32",
                shape=[None, network.structure[-1]],
                name="y"
            )
        error_val = sess.run(
            AbsoluteError().function(network, y),
            {
                network.input_tensor: dataset.testing[0],
                y: dataset.testing[1]
            }
        )

        error_idem = sess.run(
            trainer.cost_function.idempotency_error(network),
            {network.input_tensor: dataset.testing[0]}
        )

    msg.info("Achieved absolute error:    {:0.3E}".format(error_val), 2)
    msg.info("Achieved idempotency error: {:0.3E}".format(error_idem), 2)
Example #4
0
def main():
    """Measure an ELU network on the held-out butadien molecules."""

    dataset = fetch_dataset()
    # Entries from index 150 on — presumably the non-training split;
    # TODO confirm against the dataset layout.
    molecules = np.load("butadien/data/molecules.npy")[150:]

    # NOTE(review): `dim` is not defined locally — presumably a
    # module-level basis-dimension constant.
    layer_size = dim**2
    trainer = Trainer(EluTrNNN([layer_size, layer_size, layer_size]))
    trainer.setup()

    analyzer = NetworkAnalyzer(trainer)
    analyzer.setup(dim, 30)
    results = analyzer.measure(dataset, molecules)

    print(analyzer.make_results_str(results))
def train_network(dataset):
    """Training the network"""

    # Four equally sized layers of DIM**2 units.
    layers = [DIM**2] * 4
    trainer = Trainer(
        EluTrNNN(layers),
        cost_function=RegularizedMSE(alpha=1e-7),
        optimizer=tf.train.AdamOptimizer(learning_rate=1e-3)
    )
    trainer.setup()

    network, sess = trainer.train(dataset, convergence_threshold=5e-7)

    return trainer, network, sess
Example #6
0
def train_network(molecule_type, dataset):
    """Training the network"""

    n = DIM[molecule_type]
    # Independent entries of a symmetric n x n matrix.
    dim_triu = int(n * (n + 1) / 2)

    trainer = Trainer(
        EluTrNNN([dim_triu] * 3),
        cost_function=RegularizedMSE(alpha=1e-7),
        optimizer=tf.train.AdamOptimizer(learning_rate=1e-3)
    )
    trainer.setup()

    network, sess = trainer.train(dataset, convergence_threshold=5e-7)

    return trainer, network, sess
Example #7
0
    def test_training_default_options(self):
        """Smoke test: a Trainer with default options builds, sets up, trains.

        Uses `except Exception` rather than a bare `except:` so that
        KeyboardInterrupt/SystemExit still abort the test run instead of
        being reported as a test failure.
        """

        try:
            trainer = Trainer(EluTrNNN(self.structure))
        except Exception:
            self.fail("Instantiation of trainer failed")

        try:
            trainer.setup()
        except Exception:
            self.fail("Trainer setup failed")

        try:
            trainer.train(self.dataset)
        except Exception:
            self.fail("Training with trainer failed.")
Example #8
0
    def test_training_w_logging(self):
        """Smoke test: training with summary_save_path writes logs cleanly.

        Uses `except Exception` rather than a bare `except:` so that
        KeyboardInterrupt/SystemExit still abort the test run instead of
        being reported as a test failure.
        """

        save_dir = "tests/tmp_log/"

        if not isdir(save_dir):
            mkdir(save_dir)

        try:
            try:
                trainer = Trainer(EluTrNNN(self.structure))
            except Exception:
                self.fail("Instantiation of trainer failed")

            try:
                trainer.setup()
            except Exception:
                self.fail("Trainer setup failed")

            try:
                trainer.train(self.dataset, summary_save_path=save_dir)
            except Exception:
                self.fail("Training with trainer failed.")
        finally:
            # Always remove the temporary log directory, pass or fail.
            rmtree(save_dir)
def analysis(dataset, network_instance, costs, n_runs=10, target_occupancy=30):
    """Train the network repeatedly and report error statistics.

    For every run the absolute, symmetry, idempotency and occupancy errors
    are evaluated on the test set; per-run and overall mean/std are printed.

    Args:
        dataset: dataset providing `testing` data and
            `inverse_input_transform`.
        network_instance: the (untrained) network handed to the Trainer.
        costs: cost function for the Trainer.
        n_runs: number of independent training runs (default 10, the
            previously hard-coded value).
        target_occupancy: expected trace of P*S — presumably the electron
            count (default 30, the previously hard-coded value).
    """

    # Hoisted out of the loop: previously this helper was (re)defined on
    # every iteration and referenced after the loop, which would raise
    # NameError if the loop never ran.
    def stats(values):
        """Mean and standard deviation of an array of errors."""
        return np.mean(values), np.std(values)

    #--- the nn stuff ---
    trainer = Trainer(
        network=network_instance,
        cost_function=costs
    )

    trainer.setup()

    graph = trainer.graph
    #---

    #--- error functions ---
    with graph.as_default():

        x = trainer.network.input_tensor
        f = trainer.network.output_tensor
        y = trainer.target_placeholder

        # NOTE(review): `dim` is not defined locally — presumably a
        # module-level basis-dimension constant.
        p_batch = tf.reshape(f, [-1, dim, dim])

        # Deviation of the predicted matrices from symmetry.
        transposed = tf.matrix_transpose(p_batch)
        symmetry_error = tf.reduce_mean(tf.abs(p_batch - transposed), axis=0)

        s_raw = dataset.inverse_input_transform(x)
        s_batch = tf.reshape(s_raw, [-1, dim, dim])

        # Idempotency condition: P S P should equal 2 P.
        idempotency_error = \
            tf.reduce_mean(
                tf.abs(
                    reduce(tf.matmul, (p_batch, s_batch, p_batch)) - 2 * p_batch
                ),
            axis=[1,2])

        occupancy = tf.trace(tf.matmul(p_batch, s_batch))

        absolute_error = tf.reduce_mean(tf.abs(y - f), axis=1)
        #absolute_error = tf.losses.absolute_difference(y, f)

    #---

    total_err_abs = []
    total_err_sym = []
    total_err_idem = []
    total_err_occ = []
    for i in range(n_runs):

        network, sess = trainer.train(
            dataset,
            convergence_threshold=1e-6
        )

        with graph.as_default():

            err_abs = \
                sess.run(absolute_error, {x: dataset.testing[0], y: dataset.testing[1]})
            err_sym = sess.run(symmetry_error, {x: dataset.testing[0]})
            err_idem = sess.run(idempotency_error, {x: dataset.testing[0]})
            err_occ = sess.run(occupancy, {x: dataset.testing[0]}) - target_occupancy

            total_err_abs.append(np.mean(err_abs))
            total_err_sym.append(np.mean(err_sym))
            total_err_idem.append(np.mean(err_idem))
            total_err_occ.append(np.mean(err_occ))

            print("----------------------------------------")
            print("Network " + str(i+1))

            print("Abs: {:0.5E} +- {:0.5E}".format(*stats(err_abs)))
            print("Sym: {:0.5E} +- {:0.5E}".format(*stats(err_sym)))
            print("Ide: {:0.5E} +- {:0.5E}".format(*stats(err_idem)))
            print("Occ: {:0.5E} +- {:0.5E}".format(*stats(err_occ)))

    print("=========================================")
    print("Abs: {:0.5E} +- {:0.5E}".format(*stats(total_err_abs)))
    print("Sym: {:0.5E} +- {:0.5E}".format(*stats(total_err_sym)))
    print("Ide: {:0.5E} +- {:0.5E}".format(*stats(total_err_idem)))
    print("Occ: {:0.5E} +- {:0.5E}".format(*stats(total_err_occ)))
Example #10
0
def investigate_structure(dataset, structure, nsamples=10):
    """Train `nsamples` networks of the given layer structure.

    After each run the absolute, idempotency and symmetry errors on the
    test set are recorded and finally written out via `log`.
    """

    msg.info("Investigate " + str(structure), 2)

    abs_errors, idem_errors, sym_errors = [], [], []

    for sample in range(nsamples):

        msg.info("Starting run {0}/{1}".format(sample + 1, nsamples), 2)

        trainer = Trainer(
            SeluTrNNN(
                structure,
                log_histograms=True
            ),
            #error_function=AbsoluteError(),
            #cost_function=RegularizedMSE(alpha=1e-7),
            cost_function=IdempotencyPenalty(
                dataset.inverse_input_transform,
                coupling=1e-5
            ),
            optimizer=tf.train.AdamOptimizer(learning_rate=1e-3)
        )
        trainer.setup()

        network, sess = trainer.train(
            dataset,
            convergence_threshold=1e-6,
            #summary_save_path="butadien/log/idem",
            mini_batch_size=15
        )

        with trainer.graph.as_default():
            # Placeholder for the reference outputs of the test set.
            target = tf.placeholder(
                    dtype="float32",
                    shape=[None, network.structure[-1]],
                    name="y"
                )

            abs_errors.append(sess.run(
                AbsoluteError().function(network, target),
                {
                    network.input_tensor: dataset.testing[0],
                    target: dataset.testing[1]
                }
            ))

            idem_errors.append(sess.run(
                trainer.cost_function.idempotency_error(network),
                {network.input_tensor: dataset.testing[0]}
            ))

            sym_errors.append(sess.run(
                symmetry_error(network.output_tensor),
                {network.input_tensor: dataset.testing[0]}
            ))

        msg.info("Achieved absolute error: {:0.3E}".format(abs_errors[-1]), 2)
        msg.info("Achieved idemp. error:   {:0.3E}".format(idem_errors[-1]), 2)
        msg.info("Achieved sym. error:     {:0.3E}".format(sym_errors[-1]), 2)

    log(
        structure,
        np.array(abs_errors), np.array(idem_errors), np.array(sym_errors)
    )