# These examples assume numpy, re and the project helpers (Read_midi,
# get_event_ind_dict, warp_pr_aux, align_pianorolls, clean_event, ...) are
# importable in the surrounding module.
import numpy as np
import re


def process_folder(folder_path, quantization, binary_piano, binary_orch, temporal_granularity, gapopen=3, gapextend=1):
    # Get piano-rolls and instrument dictionaries from the folder path
    pr0, instru0, T0, name0, pr1, instru1, T1, name1 = get_instru_and_pr_from_folder_path(folder_path, quantization)

    pr_piano, instru_piano, T_piano, name_piano, pr_orch, instru_orch, T_orch, name_orch = \
            discriminate_between_piano_and_orchestra(pr0, instru0, T0, name0, pr1, instru1, T1, name1)

    pr_piano = process_data_piano(pr_piano, binary_piano)
    pr_orch = process_data_orch(pr_orch, binary_orch)

    # Temporal granularity
    if temporal_granularity == 'event_level':
        event_piano = get_event_ind_dict(pr_piano)
        event_orch = get_event_ind_dict(pr_orch)
        def get_duration(event, last_time):
            start_ind = event[:]
            end_ind = np.zeros(event.shape, dtype=int)
            end_ind[:-1] = event[1:]
            end_ind[-1] = last_time
            duration_list = end_ind - start_ind
            return duration_list
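        # get_duration computes, for each event start index, the gap to the next
        # event (the last event ends at last_time). Equivalent one-liner, assuming
        # event is a 1-D numpy array of start indices:
        #     np.diff(np.append(event, last_time))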
        duration_piano = get_duration(event_piano, T_piano)
        duration_orch = get_duration(event_orch, T_orch)
        # Warp the piano-rolls onto the event indices
        pr_piano = warp_pr_aux(pr_piano, event_piano)
        pr_orch = warp_pr_aux(pr_orch, event_orch)
    else:
        event_piano = None
        event_orch = None
        duration_piano = None
        duration_orch = None

    # Align tracks
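    # The gapopen/gapextend parameters suggest align_pianorolls performs an
    # affine-gap (Needleman-Wunsch-style) global alignment of the two sequences;
    # trace_piano/trace_orch/trace_prod are presumably the alignment traces used
    # below to keep only the aligned events.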
    piano_aligned, trace_piano, orch_aligned, trace_orch, trace_prod, total_time = align_pianorolls(pr_piano, pr_orch, gapopen, gapextend)
    
    # Clean events
    if (temporal_granularity == 'event_level'):
        if (trace_piano is None) or (trace_orch is None):
            event_piano_aligned = None
            event_orch_aligned = None
            duration_piano_aligned = None
            duration_orch_aligned = None
        else:
            event_piano_aligned = clean_event(event_piano, trace_piano, trace_prod)
            event_orch_aligned = clean_event(event_orch, trace_orch, trace_prod)
            duration_piano_aligned = clean_event(duration_piano, trace_piano, trace_prod)
            duration_orch_aligned = clean_event(duration_orch, trace_orch, trace_prod)
    else:
        event_piano_aligned = []
        event_orch_aligned = []
        duration_piano_aligned = []
        duration_orch_aligned = []

    return piano_aligned, event_piano_aligned, duration_piano_aligned, instru_piano, name_piano, orch_aligned, event_orch_aligned, duration_orch_aligned, instru_orch, name_orch, total_time
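
# A minimal usage sketch for the function above (hypothetical folder path and
# settings; the helper functions are assumed to come from the LOP database
# utilities):
#
#     piano, event_piano, duration_piano, instru_piano, name_piano, \
#         orch, event_orch, duration_orch, instru_orch, name_orch, total_time = \
#         process_folder('path/to/score_folder', quantization=8,
#                        binary_piano=True, binary_orch=True,
#                        temporal_granularity='event_level')
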
def process_folder(folder_path,
                   quantization,
                   unit_type,
                   temporal_granularity,
                   gapopen=3,
                   gapextend=1):
    # Get piano-rolls and instrument dictionaries from the folder path
    pr0, instru0, _, name0, pr1, instru1, _, name1 = get_instru_and_pr_from_folder_path(
        folder_path, quantization)

    # Unit type
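    # Unit_type.from_rawpr_to_type presumably maps the raw piano-roll values
    # (MIDI velocities) to the representation requested by unit_type
    # (e.g. binary or continuous intensities).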
    pr0 = Unit_type.from_rawpr_to_type(pr0, unit_type)
    pr1 = Unit_type.from_rawpr_to_type(pr1, unit_type)

    # Temporal granularity
    if temporal_granularity == 'event_level':
        event_0 = get_event_ind_dict(pr0)
        event_1 = get_event_ind_dict(pr1)
        pr0 = warp_pr_aux(pr0, event_0)
        pr1 = warp_pr_aux(pr1, event_1)
    else:
        event_0 = None
        event_1 = None

    # Align tracks
    pr0_aligned, trace_0, pr1_aligned, trace_1, trace_prod, duration = align_tracks(
        pr0, pr1, unit_type, gapopen, gapextend)

    # Clean events (only defined at the event level)
    if temporal_granularity == 'event_level':
        event0_aligned = clean_event(event_0, trace_0, trace_prod)
        event1_aligned = clean_event(event_1, trace_1, trace_prod)
    else:
        event0_aligned = None
        event1_aligned = None

    # Find which pr is orchestra, which one is piano
    pr_piano, event_piano, instru_piano, name_piano, \
        pr_orch, event_orch, instru_orch, name_orch, \
        duration = \
        discriminate_between_piano_and_orchestra(pr0_aligned, event0_aligned, instru0, name0,
                                                 pr1_aligned, event1_aligned, instru1, name1,
                                                 duration)

    return pr_piano, event_piano, instru_piano, name_piano, pr_orch, event_orch, instru_orch, name_orch, duration
def load_solo(piano_midi, quantization, binarize_piano, temporal_granularity):
    # Read piano pr
    pr_piano = Read_midi(piano_midi, quantization).read_file()
    # Process pr_piano
    pr_piano = process_data_piano(pr_piano, binarize_piano)
    # Take event level representation
    if temporal_granularity == 'event_level':
        event_piano = get_event_ind_dict(pr_piano)
        pr_piano = warp_pr_aux(pr_piano, event_piano)
    else:
        event_piano = None

    name_piano = re.sub(r'/.*\.mid', '', piano_midi)

    duration = get_pianoroll_time(pr_piano)
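    # No orchestral part in solo mode: the two None values in the return below
    # presumably stand in for the orchestra-side outputs so the signature stays
    # parallel with the folder-processing functions.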

    return pr_piano, event_piano, name_piano, None, None, duration
def process_folder_NP(folder_path, quantization, binary_piano, binary_orch,
                      temporal_granularity):
    """Get the pianoroll from a folder path with containing only an orchestral score. 
    Piano score is created by simply crushing all the instruments on 88 pitches
    """
    # Get piano-rolls and instrument dictionaries from the folder path
    pr_orch, instru_orch, T, name = get_instru_and_pr_from_folder_path_NP(
        folder_path, quantization)
    # Create the piano score
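    # sum_along_instru_dim presumably sums the per-instrument piano-rolls into a
    # single 88-pitch matrix, i.e. the whole orchestra "crushed" onto a piano keyboard.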
    pr_piano = {'Piano': sum_along_instru_dim(pr_orch)}
    # Process the two files (remember that even in this context, piano can be real-valued and orchestra discrete)
    pr_piano = process_data_piano(pr_piano, binary_piano)
    pr_orch = process_data_orch(pr_orch, binary_orch)

    # Temporal granularity (use only orchestra to compute events)
    if temporal_granularity == 'event_level':
        event_orch = get_event_ind_dict(pr_orch)
        T = len(event_orch)

        def get_duration(event, last_time):
            start_ind = event[:]
            end_ind = np.zeros(event.shape, dtype=int)
            end_ind[:-1] = event[1:]
            end_ind[-1] = last_time
            duration_list = end_ind - start_ind
            return duration_list

        duration_orch = get_duration(event_orch, T)
        # Warp the piano-rolls onto the event indices
        pr_orch = warp_pr_aux(pr_orch, event_orch)
        pr_piano = warp_pr_aux(pr_piano, event_orch)
    else:
        event_orch = None
        duration_orch = None

    # Fill identical values
    event_piano = event_orch
    duration_piano = duration_orch
    instru_piano = {'Piano': 'Piano'}
    name_piano = name

    return pr_piano, event_piano, duration_piano, instru_piano, name_piano, pr_orch, event_orch, duration_orch, instru_orch, name, T
def process_folder(folder_path, quantization, binary_piano, binary_orch, temporal_granularity, gapopen=3, gapextend=1, align_bool=True):
    ##############################
    # Get piano-rolls and instrument dictionaries from the folder path
    pr0, articulation_0, staccato_0, T0, name0, pr1, articulation_1, staccato_1, T1, name1 = get_instru_and_pr_from_folder_path(folder_path, quantization)
    data_0 = (pr0, articulation_0, staccato_0, T0, name0)
    data_1 = (pr1, articulation_1, staccato_1, T1, name1)

    (pr_piano_X, articulation_piano, staccato_piano, T_piano, name_piano), \
    (pr_orch, articulation_orch, staccato_orch, T_orch, name_orch) = \
            discriminate_between_piano_and_orchestra(data_0, data_1)

    # If corrupted files, pr_piano (and pr_orch) will be None
    if pr_piano_X is None:
        return [None] * 9

    # Remove from orch
    if "Remove" in pr_orch.keys():
        pr_orch.pop("Remove")
    # Group in piano
    pr_piano = {'Piano': sum_along_instru_dim(pr_piano_X)}

    # (Debug helper, commented out: write the separated piano and orchestra parts
    # to MIDI files for inspection.)

    ##############################
    # Process pr (mostly binarized)
    pr_piano = process_data_piano(pr_piano, binary_piano)
    pr_orch = process_data_orch(pr_orch, binary_orch)

    # Temporal granularity
    if temporal_granularity == 'event_level':
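        # In this variant get_event_ind_dict also takes the articulation matrices,
        # presumably so that re-articulated (repeated) notes start new events.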
        event_piano = get_event_ind_dict(articulation_piano, pr_piano)
        event_orch = get_event_ind_dict(articulation_orch, pr_orch)
        def get_duration(event, last_time):
            start_ind = event[:]
            end_ind = np.zeros(event.shape, dtype=int)
            end_ind[:-1] = event[1:]
            end_ind[-1] = last_time
            duration_list = end_ind - start_ind
            return duration_list
        duration_piano = get_duration(event_piano, T_piano)
        duration_orch = get_duration(event_orch, T_orch)
        # Warp the piano-rolls onto the event indices
        pr_piano_event = warp_pr_aux(pr_piano, event_piano)
        pr_orch_event = warp_pr_aux(pr_orch, event_orch)
    else:
        event_piano = None
        event_orch = None
        duration_piano = None
        duration_orch = None
        pr_piano_event = pr_piano
        pr_orch_event = pr_orch

    ##############################
    # (Debug helper, commented out: reconstruct beat-level piano-rolls from the
    # event-level ones via the instrument mapping and write them to MIDI, to check
    # the event-level -> beat reconstruction.)
    ##############################

    ##############################
    # Align tracks
    if align_bool:
        piano_aligned, trace_piano, orch_aligned, trace_orch, trace_prod, total_time = align_pianorolls(pr_piano_event, pr_orch_event, gapopen, gapextend)
        # Clean events
        if (temporal_granularity == 'event_level'):
            if (trace_piano is None) or (trace_orch is None):
                event_piano_aligned = None
                event_orch_aligned = None
                duration_piano_aligned = None
                duration_orch_aligned = None
            else:
                event_piano_aligned = clean_event(event_piano, trace_piano, trace_prod)
                event_orch_aligned = clean_event(event_orch, trace_orch, trace_prod)
                duration_piano_aligned = clean_event(duration_piano, trace_piano, trace_prod)
                duration_orch_aligned = clean_event(duration_orch, trace_orch, trace_prod)
        else:
            event_piano_aligned = []
            event_orch_aligned = []
            duration_piano_aligned = []
            duration_orch_aligned = []
    else:
        piano_aligned = pr_piano_event
        event_piano_aligned = event_piano
        duration_piano_aligned = duration_piano
        orch_aligned = pr_orch_event
        event_orch_aligned = event_orch
        duration_orch_aligned = duration_orch
        total_time = T_piano
    ##############################

    ##############################
    # (Debug helper, commented out: reconstruct and write the aligned piano and
    # orchestra parts to MIDI, both at the event level and re-expanded to beat
    # level, to check the alignment.)
    ##############################

    return piano_aligned, event_piano_aligned, duration_piano_aligned, name_piano, orch_aligned, event_orch_aligned, duration_orch_aligned, name_orch, total_time