Example #1
    def sample(self):
        # Preprocess the climbs and locate the saved model weights
        climbs, _ = self.preprocess()
        discriminator_path = local_file_path(__file__, "discriminator.h5")
        generator_path = local_file_path(__file__, "generator.h5")
        gan = GAN(climbs,
                  discriminator_path,
                  generator_path,
                  node_scale_factor=8)

        # Load the trained weights and print a sample from the generator
        gan.load_models()
        images = gan.sample_image()
        print(images)
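
Both this example and the next resolve their model files with local_file_path(__file__, name), a helper that is not shown on this page. A minimal sketch of what such a helper could look like, assuming it simply joins a filename onto the directory of the calling module:

import os

def local_file_path(module_file, filename):
    # Hypothetical helper (assumed, not shown in the examples): return the path
    # of `filename` located next to the module given by `module_file`.
    return os.path.join(os.path.dirname(os.path.abspath(module_file)), filename)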
Example #2
    def train(self):
        # Preprocess the climbs used as training data
        climbs, _ = self.preprocess()

        # Paths where the discriminator and generator weights are stored
        discriminator_path = local_file_path(__file__, "discriminator.h5")
        generator_path = local_file_path(__file__, "generator.h5")

        gan = GAN(climbs,
                  discriminator_path,
                  generator_path,
                  node_scale_factor=8)

        # Train the GAN, sampling generated output every 200 epochs
        gan.train(epochs=5000, batch_size=32, sample_interval=200)
Example #3
def main(year):
    # Load generated climbsets
    file_data = pickle.load(
        open(local_file_path(__file__, year + ".pickle"), "rb"))

    # Split training and test data
    original_climbs = file_data["original"].climbs
    original_grades = np.asarray(
        [i.grade.grade_number for i in original_climbs])
    x_train, x_test, y_train, _ = Configuration.split_function(
        original_climbs, original_grades)

    # Only keep a sample of the test set for original climbs
    file_data["original"].climbs = x_test[:500]

    # Train the models
    trained_models = map(train_model, model_setups((x_train, y_train)))

    # Format the data for the website
    data = website_json_structure(file_data)

    # Add grades to the website data
    for config in trained_models:
        add_grades(data["original"], config, file_data["original"].climbs)
        add_grades(data["lstm"], config, file_data["lstm"].climbs)

    # Save to file
    repo_base_directory = Path(__file__).resolve().parent.parent.parent
    website_climb_directory = repo_base_directory.joinpath(
        "website-moon", "moon", "climbs")

    with open(website_climb_directory.joinpath(f"{year}.js"), "w") as handle:
        handle.write(f"var climbs_{year} = " + json.dumps(data))
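
Here main takes the year as a string: it is concatenated with ".pickle" when loading and embedded in the output filename when saving. A possible command-line entry point, assuming the year is supplied as the first argument:

import sys

if __name__ == "__main__":
    # Assumption: the year (e.g. "2017") is passed on the command line
    main(sys.argv[1])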
Example #4
def main(year):
    num_climbs = 500

    # Load climbset
    climbset = load_climbset(year)

    # Sample new climbs from the LSTM generator
    lstm = keras_lstm_gen.Model()
    sampling_parameters = params.Parameters()
    sample = lstm.sample(climbset, num_climbs, sampling_parameters)

    # Save to file
    file_data = {"original": climbset, "lstm": sample}

    print(f"Saving {len(file_data)} climbsets")
    pickle.dump(file_data,
                open(local_file_path(__file__, f"{year}.pickle"), "wb"))
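
The pickle written here has the same structure that Example #3 reads back. A short usage sketch of the round trip, reusing local_file_path and year from the examples above:

import pickle

# Read the climbsets written above, mirroring the load in Example #3.
with open(local_file_path(__file__, f"{year}.pickle"), "rb") as handle:
    file_data = pickle.load(handle)

# Each loaded climbset exposes a .climbs attribute (see Example #3)
original_climbs = file_data["original"].climbs
lstm_climbs = file_data["lstm"].climbs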
Example #5
    def load_sample(self):
        # Load a previously generated sample pickled next to this module
        return pickle.load(
            open(local_file_path(__file__, "sample.pickle"), "rb"))
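
load_sample expects a sample.pickle written beside the module. A hypothetical counterpart (not shown above) that would produce that file:

import pickle

def save_sample(self, sample):
    # Hypothetical writer (assumed): pickle the sample next to this module so
    # that load_sample can read it back.
    with open(local_file_path(__file__, "sample.pickle"), "wb") as handle:
        pickle.dump(sample, handle)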