Esempio n. 1
0
def initialiseWAV(my_json):  # -> 1716 secs, no probs
    """Spawn one WavThread per sorted-MP3 sub-dataset (>= 30 tracks) to build WAV files."""
    print("Creating the WAV files.")
    navigate.set_path_to_dataset_MP3_sorted(my_json)
    # Every sub folder of the sorted-MP3 dataset directory.
    sub_datasets = utils.get_list_subs()
    wav_threads = []
    for idx, sub_name in enumerate(sub_datasets):
        sub_path = my_json['dataset_mp3_sorted_path'] + '\\' + sub_name
        if len(os.listdir(sub_path)) < 30:
            # Too few tracks to be worth exporting.
            print("There is less than 30 songs in the ", sub_name,
                  " dataset. I will not export it.")
            continue
        # Enter the sub dataset directory that contains the tracks.
        navigate.set_path(my_json["dataset_mp3_sorted_path"] + "\\" + sub_name)
        current_path = navigate.get_actual_path()
        track_names = navigate.get_list_subs()  # All tracks of this folder.
        wav_threads.append(
            WavThread("Wav Thread " + str(idx), my_json, current_path,
                      sub_name, track_names))
        navigate.set_path_to_dataset_MP3_sorted(my_json)
    navigate.set_path_to_project_path(my_json)
    for worker in wav_threads:
        worker.start()
    for worker in wav_threads:
        worker.join()
    navigate.set_path_to_project_path(my_json)
Esempio n. 2
0
def initialiseFigs(my_json, dpi, height, width, legend,
                   colormesh):  # 1444 secs
    """Create images for every WAV sub-dataset that has at least 50 tracks.

    Runs one ImageProcess per qualifying dataset through multiprocessing —
    threads cannot be used because matplotlib is not thread safe.
    Each dataset is paired with its category array from model.shapeOutputs.
    """
    print("Creating the Images.")
    navigate.set_path_to_dataset_WAV(my_json)
    all_datasets = utils.get_list_subs(
    )  # List all the sub folders of the dataset folder.
    # Mirror the WAV dataset layout under the image output directory.
    navigate.set_path_to_dataset_image(my_json)
    for dataset in all_datasets:
        if not os.path.exists(dataset):
            os.makedirs(dataset)
    navigate.set_path_to_dataset_WAV(my_json)
    processes = []
    outputs_shaped = model.shapeOutputs(my_json)
    for index_d, dataset in enumerate(all_datasets):
        # BUG FIX: right_array/flag were previously initialized once before
        # this loop and never reset, so a dataset absent from outputs_shaped
        # silently reused the PREVIOUS dataset's category array. Reset both
        # for every dataset.
        right_array = 0
        flag = False
        for item in outputs_shaped:
            if item['category_name'] == dataset:
                right_array = item["category_array"]
                flag = True
                break  # first match wins; no need to keep scanning
        navigate.set_path(
            dataset
        )  # Set the path to the first sub dataset directory, which contains tracks.
        actual_dataset_path = navigate.get_actual_path()
        items_dataset = utils.get_list_subs(
        )  # Return all the track of the actual folder.
        if len(items_dataset) >= 50 and flag:
            my_process = ImageProcess("Image process " + str(index_d),
                                      my_json,
                                      actual_dataset_path,
                                      dataset,
                                      items_dataset,
                                      right_array,
                                      my_json["dataset_image_path"] + "\\" +
                                      dataset,
                                      dpi,
                                      height,
                                      width,
                                      legend=legend,
                                      colormesh=colormesh)
            processes.append(my_process)
        navigate.set_path_to_dataset_WAV(my_json)
    # Wrap each worker in a real OS process, then fan out and wait.
    processes = [mp.Process(target=p.run, args=()) for p in processes]
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    navigate.set_path_to_project_path(my_json)
Esempio n. 3
0
def check(my_json):
    """Report any project or dataset item that is not yet registered in my_json."""
    print("Checking the project.")
    # Bookkeeping entries that are never expected in the item lists.
    skipped = ("__pycache__", ".ipynb_checkpoints", "train_model.ipynb")
    navigate.set_path_to_project_path(my_json)
    for sub in get_list_subs():
        if sub in skipped:
            continue
        if sub not in my_json["list_of_items_path"]:
            print(sub, " isn't existing yet, there is a problem.\n")
    navigate.set_path_to_datasets(my_json)
    for sub in get_list_subs():
        if sub not in my_json["list_of_items_dataset"]:
            print(sub, " isn't existing yet, there is a problem.\n")
Esempio n. 4
0
def saveResults(my_json, model, X_train, X_test, Y_train, scores, epochs,
                batch_size, my_time, nfft, overlap, Fs, optimizer, loss_func,
                frequency, categories, encoder, id_model):
    """Append a human-readable report of a training run to the scores file.

    my_json supplies 'scores_file'; scores is (loss, accuracy) with values
    in [0, 1]; my_time is the training duration in seconds; id_model == 0
    selects the spectrogram model, whose frequency/Fs/nfft/overlap settings
    are also logged.
    """
    navigate.set_path_to_results(my_json)
    # BUG FIX: previously the report was only written in the `else` branch,
    # so on the very first run the file was created but the results were
    # silently dropped. Create the file when missing, then ALWAYS append.
    if not os.path.exists(my_json['scores_file']):
        open(my_json['scores_file'], 'a').close()
        print("File : ", my_json['scores_file'], " created.")
    print(
        "Writing the results (information) of the training dataset in : ",
        my_json['scores_file'], ".")
    # `with` guarantees the file is closed even if a write raises.
    with open(my_json['scores_file'], "a") as f:
        f.write("---- Results of " + str(datetime.now()) + " ----\n")
        f.write("Categories : " + str(categories) + " Hz.\n")
        if id_model == 0:
            # Spectrogram parameters only apply to the image-based model.
            f.write("Original frequency of a song : " + str(frequency) +
                    " Hz.\n")
            f.write("Frequency of a song : " + str(Fs) + " Hz.\n")
            f.write("NFFT of a song : " + str(nfft) + ".\n")
            f.write("overlap of a song : " + str(overlap) + ".\n")
        f.write("Optimizer of the model : " + str(optimizer) + " .\n")
        f.write("Loss function of the model : " + str(loss_func) + " .\n")
        # Typo fix: "Totat" -> "Total".
        f.write("Total dataset : " + str(len(X_train) + len(X_test)) +
                " items.\n")
        f.write("Length train set : " + str(len(X_train)) + ".\n")
        f.write("Length test set : " + str(len(X_test)) + ".\n")
        f.write("Epochs : " + str(epochs) + ".\n")
        f.write("Batch Size : " + str(batch_size) + ".\n")
        f.write("Loss : " + str(int(scores[0] * 100)) + "%.\n")
        f.write("Accuracy : " + str(int(scores[1] * 100)) + "%.\n")
        f.write("Using encoder to encode outputs : " + str(encoder) + ".\n")
        f.write("It takes : " + str(int(my_time / 60)) +
                " minutes to train my model.\n")
        # Redirect Keras' model.summary() output into the report file.
        model.summary(print_fn=lambda x: f.write(x + '\n'))
        f.write(
            "------------------------------------------------------------------\n\n"
        )
    navigate.set_path_to_project_path(my_json)
Esempio n. 5
0
def initialiseMP3Sorted(my_json):  # -> 1748 secs, no probs
    """Spawn one SortMP3Thread per FMA MP3 sub-folder to sort tracks by genre.

    NOTE(review): `tracks` is not defined in this view — presumably a
    module-level path to the FMA metadata CSV; confirm against the module top.
    """
    print("Sorting the MP3 files.")
    # The metadata CSV exposes the useful columns under positional names
    # ("album", "track.19", ...); they are renamed to meaningful ones below.
    # NOTE(review): the mapping album->track_id etc. suggests a multi-row
    # header read with a plain single-row header — verify against the CSV.
    datas = pd.read_csv(tracks,
                        encoding="utf-8",
                        usecols=["album", 'track.19', "track.7", "track.16"],
                        dtype={
                            "album": object,
                            'track.19': object,
                            "track.7": object,
                            "track.16": object
                        })
    datas = datas.rename(
        columns={
            "album": "track_id",
            'track.19': "title",
            "track.7": "genre_top",
            "track.16": "number"
        })
    threads = []
    # Make sure the output directory for sorted MP3s exists.
    navigate.set_path_to_datasets(my_json)
    if not (os.path.exists("Mp3Sorted")):
        os.makedirs("Mp3Sorted")
    navigate.set_path_to_dataset_MP3(my_json)
    for index_dir, x_dir in enumerate(utils.get_list_subs()):
        # Skip the FMA archive's non-audio entries.
        if (x_dir != 'checksums' and x_dir != "fma_metadata"
                and x_dir != "README.txt"):
            navigate.set_path(x_dir)
            actual_dataset_path = navigate.get_actual_path()
            items_dataset = navigate.get_list_subs(
            )  # Return all the track of the actual folder.
            my_thread = SortMP3Thread("MP3 Sorted Thread " + str(index_dir),
                                      my_json, actual_dataset_path, x_dir,
                                      items_dataset, datas)
            threads.append(my_thread)
        else:
            print("This is not a directory of mp3 tracks.")
        # Step back to the MP3 root before visiting the next folder.
        navigate.set_path_to_dataset_MP3(my_json)
    navigate.set_path_to_project_path(my_json)
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    navigate.set_path_to_project_path(my_json)
Esempio n. 6
0
def initialiseFigsGTZAN(my_json, dpi, height, width, legend):  # 1444 secs
    """Create images for each GTZAN WAV sub-dataset holding at least 50 tracks.

    One ImageProcessGTZAN per qualifying dataset, fanned out with
    multiprocessing — matplotlib is not thread safe, so threads are out.
    """
    print("Creating the Images.")
    navigate.set_path_to_dataset_WAV_GTZAN(my_json)
    genre_dirs = utils.get_list_subs()  # Sub folders of the GTZAN WAV dataset.
    # Mirror the WAV layout under the GTZAN image output directory.
    navigate.set_path_to_dataset_image_GTZAN(my_json)
    for genre in genre_dirs:
        if not os.path.exists(genre):
            os.makedirs(genre)
    navigate.set_path_to_dataset_WAV_GTZAN(my_json)
    workers = []
    for idx, genre in enumerate(genre_dirs):
        navigate.set_path(genre)  # Enter the folder that contains the tracks.
        genre_path = navigate.get_actual_path()
        track_names = utils.get_list_subs()  # All tracks of this folder.
        if len(track_names) >= 50:
            workers.append(
                ImageProcessGTZAN(
                    "Image GTZAN process " + str(idx),
                    my_json,
                    genre_path,
                    genre,
                    track_names,
                    my_json["dataset_images_gtzan_path"] + "\\" + genre,
                    dpi,
                    height,
                    width,
                    legend=legend))
        navigate.set_path_to_dataset_WAV_GTZAN(my_json)
    procs = [mp.Process(target=w.run, args=()) for w in workers]
    for proc in procs:
        proc.start()
    for proc in procs:
        proc.join()
    navigate.set_path_to_project_path(my_json)
Esempio n. 7
0
def razImages(my_json):
    """Delete every sub-directory of the image dataset folder."""
    print("Erasing all the images.")
    navigate.set_path_to_dataset_image(my_json)
    for sub_dir in get_list_subs():
        shutil.rmtree(sub_dir)  # Remove the folder and everything inside it.
    navigate.set_path_to_project_path(my_json)
Esempio n. 8
0
def razImagesGTZAN(my_json):
    """Delete every sub-directory of the GTZAN image dataset folder."""
    # BUG FIX: the message was copy-pasted from the MP3Sorted eraser and
    # claimed to delete mp3 songs; this function removes GTZAN image folders.
    print("Erasing all the GTZAN images.")
    navigate.set_path_to_dataset_image_GTZAN(my_json)
    for x_dir in get_list_subs():
        shutil.rmtree(x_dir)  # Remove the folder and everything inside it.
    navigate.set_path_to_project_path(my_json)
Esempio n. 9
0
def razWAV(my_json):
    """Delete every sub-directory of the WAV dataset folder."""
    print("Erasing all the wav files.")
    navigate.set_path_to_dataset_WAV(my_json)
    for sub_dir in get_list_subs():
        shutil.rmtree(sub_dir)  # Remove the folder and everything inside it.
    navigate.set_path_to_project_path(my_json)