Example #1
def extract_features_from_all_levels():
    downloaded_songs_full, downloaded_songs = get_list_of_downloaded_songs()

    features = np.array([])
    targets = np.array([])
    # Each song directory gets its own worker process; results are cached per directory
    features_needed = list(downloaded_songs_full)
    from multiprocessing import Pool
    with Pool(8) as p:
        p.map(extract_features_targets_from_dir, features_needed)

    print('Reading features_and_targets from song dirs')
    for song_dir in downloaded_songs_full:
        feature_target = read_features_targets_from_song_dir(song_dir)
        if feature_target is not None and len(feature_target) == 2:
            if len(feature_target[0]) != 0 and len(feature_target[1]) != 0:
                features = np.append(features, feature_target[0])
                targets = np.append(targets, feature_target[1])
    features_and_targets = np.stack([features, targets], axis=1)
    io_functions.saveFile(
        features_and_targets,
        os.path.join(
            EXTRACT_DIR, 'features_and_targets_' + getpass.getuser() + '_' +
            time.strftime('%Y-%m-%d_%H-%M-%S') + '.pkl'))

    return features_and_targets
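The returned array stacks features in column 0 and targets in column 1 (per the np.stack call above); a minimal caller-side sketch, with illustrative variable names, for splitting them back apart:

features_and_targets = extract_features_from_all_levels()
feature_column = features_and_targets[:, 0]  # column 0: extracted level features
target_column = features_and_targets[:, 1]   # column 1: target metric values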
Example #2
def extract_features_targets_from_dir(song_dir):
    print('Extracting Features from ' + str(song_dir))
    meta_data_filename = os.path.join(EXTRACT_DIR,
                                      os.path.join(song_dir, 'meta_data.txt'))
    features = np.array([])
    targets = np.array([])
    if os.path.exists(meta_data_filename):
        meta_data = read_meta_data_file(meta_data_filename)
        difficulty_rating = meta_data['scoresaberDifficulty'].replace(
            ' ', '').split(',')
        try:
            if difficulty_rating != ['']:
                json_files = get_all_json_level_files_from_data_directory(
                    os.path.join(EXTRACT_DIR, song_dir))
                if len(json_files) == len(difficulty_rating):
                    for i in range(len(json_files)):
                        bs_level = io_functions.parse_json(json_files[i])
                        features = np.append(
                            features,
                            np.array(
                                extract_features_from_beatsaber_level(
                                    bs_level)))
                        try:
                            targets = np.append(targets, np.array([
                                float(difficulty_rating[i]),
                                int(meta_data['thumbsUp']), int(meta_data['thumbsDown']),
                                float(meta_data['rating']), float(meta_data['funFactor']),
                                float(meta_data['rhythm']), float(meta_data['flow']),
                                float(meta_data['patternQuality']), float(meta_data['readability']),
                                float(meta_data['levelQuality'])]))
                        except IndexError:
                            print(difficulty_rating)
                            print(i)
                            print(meta_data)
            if len(features) != 0 and len(targets) != 0:
                features_and_targets = np.stack([features, targets], axis=1)
                io_functions.saveFile(
                    features_and_targets,
                    os.path.join(
                        EXTRACT_DIR,
                        os.path.join(song_dir, 'features_targets.pkl')))
        except Exception as e:
            print(e)
    return [features, targets]
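Each level contributes a ten-value target vector in the order built above; a reference list of those metric names, taken from the meta_data keys the function reads (the constant name itself is just illustrative):

TARGET_NAMES = ['scoresaberDifficulty', 'thumbsUp', 'thumbsDown', 'rating',
                'funFactor', 'rhythm', 'flow', 'patternQuality',
                'readability', 'levelQuality']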
Example #3
def generate_beatsaber_obstacles_from_ogg(ogg_file, difficulty=0):
    meta_dir = os.path.dirname(ogg_file)
    meta_filename = 'meta_info.pkl'
    meta_file = os.path.join(meta_dir, meta_filename)
    if os.path.isfile(meta_file):
        content = loadFile(meta_filename, meta_dir)
        tempo = content[0]
        beat_times = content[1]
        beat_chroma = content[2]
    else:
        tempo, beat_times, beat_chroma = extract_beat_times_chroma_tempo_from_ogg(
            ogg_file)
        saveFile([tempo, beat_times, beat_chroma],
                 meta_filename,
                 meta_dir,
                 append=False)
    obstacles = generate_beatsaber_obstacles_from_beat_times(
        beat_times, tempo, difficulty)
    return obstacles
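A quick usage sketch for the function above; the file path and difficulty value are illustrative assumptions, and meta_info.pkl is written next to the audio file on the first call so repeated runs skip the beat analysis:

obstacles = generate_beatsaber_obstacles_from_ogg('songs/example_song.ogg', difficulty=1)
print('Generated', len(obstacles), 'obstacles')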
Example #4
def measure_regression_prediction_error(model, x_test, y_test):
    errors = []
    for i in range(len(x_test)):
        # Assumes row 0 of the model holds the intercepts and row 1 the slopes (one per target)
        y_pred = np.multiply(x_test[i][:], model[1, :]) + model[0]
        errors.append(y_pred - y_test[i])
    error_mean = np.mean(errors)
    error_std_dev = np.sqrt(
        np.mean(np.power(np.subtract(errors, error_mean), 2)))
    return errors, error_mean, error_std_dev, y_test, y_pred
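measure_regression_prediction_error indexes the model as model[0] and model[1, :], so it is assumed to be a two-row array with intercepts in row 0 and slopes in row 1, one column per target metric; a minimal sketch with made-up numbers:

model = np.array([[0.5, 1.0, -0.2],    # intercepts, one per target metric
                  [2.0, 0.3, 1.5]])    # slopes, one per target metric
x_test = np.array([[0.1, 0.4, 0.9],
                   [0.7, 0.2, 0.3]])
y_test = np.array([[0.7, 1.1, 1.1],
                   [1.9, 1.1, 0.3]])
errors, error_mean, error_std_dev, _, _ = measure_regression_prediction_error(model, x_test, y_test)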


if __name__ == '__main__':
    # feature_targets = extract_features_targets_from_dir('994)Made In Love - Chart by Mystikmol')
    features_and_targets = extract_features_from_all_levels()
    io_functions.saveFile(features_and_targets,
                          'dataset_features_and_target_metrics.pkl')
    features_and_targets = io_functions.loadFile(
        'dataset_features_and_target_metrics.pkl')
    # models = get_linear_regression_model_for_all_targets(features_and_targets[0], features_and_targets[1])
    # io_functions.saveFile(models, 'dataset_targets_linear_model.pkl')
    # models = io_functions.loadFile('dataset_targets_linear_model.pkl')

    errors = []
    error_mean = []
    error_std_dev = []
    y_test = []
    y_pred = []
    ax = None
    new_features_and_targets = features_and_targets.copy()
    for i in range(20):
        np.random.shuffle(new_features_and_targets)
Example #5
    if one_hot:
        adv_indexing_col = np.arange(len(states))  # Column used for advanced indexing to produce one-hot matrix
        one_hot_states = np.zeros((top_k + NUM_SPECIAL_STATES, states.shape[0]))
        one_hot_states[states.astype(int), adv_indexing_col.astype(int)] = 1  # Advanced Indexing to fill one hot
    time_diffs = np.diff(times_real_extended)  # Compute differences between times
    delta_backward = np.expand_dims(np.insert(time_diffs, 0, times_real_extended[0]), axis=0)
    delta_forward = np.expand_dims(np.append(time_diffs, song_length - times_real_extended[-1]), axis=0)
    if one_hot:
        if return_state_times: # Return state beat times if requested
            return one_hot_states, states, times_beats, delta_forward, delta_backward, feature_indices
        else:
            return one_hot_states, states, delta_forward, delta_backward, feature_indices
    else:
        return states, delta_forward, delta_backward, feature_indices
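This fragment leans on two numpy idioms that are easy to miss: advanced indexing to set one entry per column of the one-hot matrix, and np.diff combined with insert/append to pad the backward and forward time gaps with the song boundaries. A standalone sketch with made-up state indices and times:

states = np.array([2, 0, 3, 1])           # state index of each event
times = np.array([0.5, 1.25, 2.0, 3.4])   # event times in seconds
song_length = 4.0
num_states = 5

one_hot = np.zeros((num_states, len(states)))
one_hot[states, np.arange(len(states))] = 1   # one 1 per column, in the row of that event's state

gaps = np.diff(times)
delta_backward = np.insert(gaps, 0, times[0])              # gap to previous event (song start for the first)
delta_forward = np.append(gaps, song_length - times[-1])   # gap to next event (song end for the last)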


if __name__ == "__main__":
    sorted_states, states_counts = produce_distinct_state_space_representations(EXTRACT_DIR, k=1000)
    sorted_states_prior_probability = np.divide(states_counts, sum(states_counts))
    output_path = DATA_DIR+"/statespace/"
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    io_functions.saveFile(sorted_states, 'sorted_states.pkl', output_path, append=False)
    io_functions.saveFile(sorted_states_prior_probability, 'sorted_states_prior_probability.pkl', output_path,
                          append=False)
    sorted_states_transition_probabilities = produce_transition_probability_matrix_from_distinct_state_spaces(
        sorted_states, EXTRACT_DIR)
    io_functions.saveFile(sorted_states_transition_probabilities, 'sorted_states_transition_probabilities.pkl',
                          output_path, append=False)
    # compute_shortest_inter_event_beat_gap(EXTRACT_DIR)