def testExtractChordsForMelodiesCoincidentChords(self):
    """Melodies spanning coincident chords get no chord progression."""
    # Two melody tracks plus a chord track where 'E13' and 'Cmaj7' both
    # begin on step 8 (the coincident pair).
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 11)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8),
         (50, 100, 33, 37), (52, 100, 34, 37)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', 2), ('G7', 6), ('E13', 8), ('Cmaj7', 8)])
    quantized = sequences_lib.quantize_note_sequence(
        self.note_sequence, self.steps_per_quarter)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, stats = chord_pipelines.extract_chords_for_melodies(
        quantized, melodies)
    stats_by_name = {stat.name: stat for stat in stats}
    # First melody overlaps the coincident chords, so its progression is
    # None; the remaining progressions are extracted normally.
    expected = [
        [NO_CHORD] * 2 + ['C'] * 4 + ['G7'] * 2,
        ['Cmaj7'] * 5,
    ]
    self.assertIsNone(chord_progressions[0])
    self.assertEqual(expected,
                     [list(chords) for chords in chord_progressions[1:]])
    self.assertEqual(stats_by_name['coincident_chords'].count, 1)
def testExtractChordsForMelodies(self):
    """Each extracted melody is paired with the chords over its span."""
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 11)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8),
         (50, 100, 33, 37), (52, 100, 34, 37)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', 2), ('G7', 6), ('Cmaj7', 33)])
    quantized = sequences_lib.quantize_note_sequence(
        self.note_sequence, self.steps_per_quarter)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized, melodies)
    # One progression per melody, aligned step-for-step.
    expected = [
        [NO_CHORD] * 2 + ['C'] * 4 + ['G7'] * 5,
        [NO_CHORD] * 2 + ['C'] * 4 + ['G7'] * 2,
        ['G7'] + ['Cmaj7'] * 4,
    ]
    self.assertEqual(expected,
                     [list(chords) for chords in chord_progressions])
def testExtractLeadSheetFragmentsCoincidentChords(self):
    """Lead sheets whose melody spans coincident chords are rejected."""
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, 2, 4), (11, 1, 6, 11)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, 2, 4), (14, 50, 6, 8),
         (50, 100, 33, 37), (52, 100, 34, 37)])
    # 'Cmaj7' and 'F' both begin on step 33 — the coincident pair.
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', 2), ('G7', 6), ('Cmaj7', 33), ('F', 33)])
    quantized = sequences_lib.quantize_note_sequence(
        self.note_sequence, steps_per_quarter=1)
    lead_sheets, _ = lead_sheet_pipelines.extract_lead_sheet_fragments(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True, require_chords=True)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized, melodies)
    # Last lead sheet should be rejected for coincident chords; only the
    # first two melody/chord pairs survive.
    self.assertEqual(list(melodies[:2]),
                     [lead_sheet.melody for lead_sheet in lead_sheets])
    self.assertEqual(list(chord_progressions[:2]),
                     [lead_sheet.chords for lead_sheet in lead_sheets])
def testExtractLeadSheetFragments(self):
    """Every melody/chord pair becomes a lead sheet when chords exist."""
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 0,
        [(12, 100, .5, 1), (11, 1, 1.5, 2.75)])
    music_testing_lib.add_track_to_sequence(
        self.note_sequence, 1,
        [(12, 127, .5, 1), (14, 50, 1.5, 2),
         (50, 100, 8.25, 9.25), (52, 100, 8.5, 9.25)])
    music_testing_lib.add_chords_to_sequence(
        self.note_sequence,
        [('C', .5), ('G7', 1.5), ('Cmaj7', 8.25)])
    quantized = sequences_lib.quantize_note_sequence(
        self.note_sequence, self.steps_per_quarter)
    lead_sheets, _ = lead_sheet_pipelines.extract_lead_sheet_fragments(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True, require_chords=True)
    melodies, _ = melody_pipelines.extract_melodies(
        quantized, min_bars=1, gap_bars=2, min_unique_pitches=2,
        ignore_polyphonic_notes=True)
    chord_progressions, _ = chord_pipelines.extract_chords_for_melodies(
        quantized, melodies)
    # The lead sheets should pair up 1:1 with the separately extracted
    # melodies and chord progressions.
    self.assertEqual(list(melodies),
                     [lead_sheet.melody for lead_sheet in lead_sheets])
    self.assertEqual(list(chord_progressions),
                     [lead_sheet.chords for lead_sheet in lead_sheets])
def emissionModelTrainer(self):
    """Train the chord->pitch emission matrix from the Wikifonia corpus.

    For every parseable MusicXML file, walks the first extracted melody and
    its aligned chord progression, folds each melody pitch into a single
    octave (MIDI 60..71), normalizes the chord name onto a 25-symbol
    vocabulary, and accumulates counts in ``self.mo_matrix`` (rows = chords,
    cols = 12 pitch classes). Finally L1-normalizes each row so it holds
    per-chord emission probabilities.

    Side effects: reads files from disk; overwrites ``self.mo_matrix``.
    """
    ALL_CHORD_LIST = ['N.C', 'C', 'Cm', 'C#', 'C#m', 'D', 'Dm', 'Eb', 'Ebm',
                      'E', 'Em', 'F', 'Fm', 'F#', 'F#m', 'G', 'Gm', 'G#',
                      'G#m', 'A', 'Am', 'A#', 'A#m', 'B', 'Bm']
    # Enharmonic spellings folded onto the canonical names above.
    SAME_CHORD = {'Db': 'C#', 'Dbm': 'C#m', 'D#': 'Eb', 'D#m': 'Ebm',
                  'Gb': 'F#', 'Gbm': 'F#m', 'Ab': 'G#', 'Abm': 'G#m',
                  'Bb': 'A#', 'Bbm': 'A#m'}
    # Raw string: the old literal "D:\FAI\Wikifonia" relied on the invalid
    # escape sequences \F and \W staying literal.
    path = r"D:\FAI\Wikifonia"
    # NOTE(review): glob.glob on a bare directory path matches only the
    # directory itself, not the files in it — a wildcard such as
    # path + r"\*" is probably intended. Left unchanged; confirm.
    for file in glob.glob(path):
        mxl_doc = musicxml_parser.MusicXMLDocument(file)
        sequence = musicxml_reader.musicxml_to_sequence_proto(mxl_doc)
        quantized = sequences_lib.quantize_note_sequence(sequence, 1)
        melodies, _ = melody_pipelines.extract_melodies(quantized)
        chord_prog, _ = chord_pipelines.extract_chords_for_melodies(
            quantized, melodies)
        if not chord_prog:
            continue
        # Materialize once instead of rebuilding list(...) every iteration.
        chords = list(chord_prog[0])
        pitches = list(melodies[0])
        for chord, pitch in zip(chords, pitches):
            # Fold the melody pitch into the octave MIDI 60..71.
            # NOTE(review): melody event lists may also contain special
            # negative values (no-event / note-off), which this folds into
            # 60..71 as well — confirm that is intended.
            while pitch > 71:
                pitch -= 12
            while pitch < 60:
                pitch += 12
            # Drop figures (e.g. 'G7' -> 'G') and cap at 3 chars; NO_CHORD
            # 'N.C.' becomes 'N.C', matching ALL_CHORD_LIST[0].
            chord = re.sub(r'\d+', '', chord)[:3]
            # Fixed: the old test was `chord not in 'N.C.'`, a substring
            # check that wrongly skipped normalization for plain 'C'.
            if chord != 'N.C':
                # Trim trailing quality letters (keep a trailing 'm').
                if len(chord) == 3 and chord[2] != 'm':
                    chord = chord[:2]
                    if chord[1] not in ('#', 'b'):
                        chord = chord[:1]
                if chord in SAME_CHORD:
                    chord = SAME_CHORD[chord]
                # Fixed: these were substring tests ('F' in 'Fb' is True),
                # which silently remapped every plain F chord to E.
                if chord == 'Cb':
                    chord = 'B'
                elif chord == 'Fb':
                    chord = 'E'
                elif chord == 'Cbm':
                    # NOTE(review): Cb minor is enharmonically Bm, not D —
                    # original mapping kept as-is; confirm intent.
                    chord = 'D'
                elif chord == 'Fbm':
                    chord = 'Em'
            row = ALL_CHORD_LIST.index(re.sub(r'\d+', '', chord))
            self.mo_matrix[row][pitch - 60] += 1
    # Convert raw counts into per-chord emission probabilities.
    self.mo_matrix = normalize(self.mo_matrix, axis=1, norm='l1')
def cleanDataset(self):
    """Delete every Wikifonia file the extraction pipeline cannot process.

    Destructive: any file that raises while being parsed, quantized, or run
    through chord/melody extraction is removed from disk and its name is
    printed. ``count`` tracks how many files were deleted.
    """
    # Raw string: the old literal "D:\FAI\Wikifonia" relied on the invalid
    # escape sequences \F and \W staying literal.
    path = r"D:\FAI\Wikifonia"
    # NOTE(review): glob.glob on a bare directory path matches only the
    # directory itself — a wildcard such as path + r"\*" is probably
    # intended. Left unchanged; confirm against emissionModelTrainer.
    count = 0
    for file in glob.glob(path):
        try:
            # Run the full pipeline; any failure marks the file as bad.
            mxl_doc = musicxml_parser.MusicXMLDocument(file)
            sequence = musicxml_reader.musicxml_to_sequence_proto(mxl_doc)
            quantized = sequences_lib.quantize_note_sequence(sequence, 1)
            chord_pipelines.extract_chords(quantized)
            melodies, _ = melody_pipelines.extract_melodies(quantized)
            chord_pipelines.extract_chords_for_melodies(quantized, melodies)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # no longer trigger a file deletion. Best-effort cleanup is the
            # intended behavior, so the exception itself is not re-raised.
            os.remove(file)
            print(file)
            count += 1
def extract_lead_sheet_fragments(quantized_sequence,
                                 search_start_step=0,
                                 min_bars=7,
                                 max_steps_truncate=None,
                                 max_steps_discard=None,
                                 gap_bars=1.0,
                                 min_unique_pitches=5,
                                 ignore_polyphonic_notes=True,
                                 pad_end=False,
                                 filter_drums=True,
                                 require_chords=False,
                                 all_transpositions=False):
    """Extracts lead sheet fragments (melody + chords) from a NoteSequence.

    Melodies are pulled out with `melody_pipelines.extract_melodies`, the
    chords sounding under each melody are read with
    `chord_pipelines.extract_chords_for_melodies`, and each surviving
    melody/chord pair becomes a LeadSheet.

    Args:
      quantized_sequence: A quantized NoteSequence object.
      search_start_step: Step at which to begin searching for melodies;
        assumed to fall on the first step of a bar.
      min_bars: Melodies shorter than this many bars are discarded.
      max_steps_truncate: If set, melodies longer than this many steps are
        truncated to this threshold (to the end of the last bar below it
        when `pad_end` is also True).
      max_steps_discard: If set, melodies longer than this many steps are
        discarded outright.
      gap_bars: A melody ends once this many bars of silence are seen.
      min_unique_pitches: Melodies with fewer octave-equivalent unique
        pitches than this are discarded.
      ignore_polyphonic_notes: If True, melodies may be extracted from
        tracks containing polyphony; if False, polyphonic tracks are
        skipped entirely.
      pad_end: If True, each melody is padded with NO_EVENTs so it ends on
        a bar boundary.
      filter_drums: If True, notes with `is_drum` set are ignored.
      require_chords: If True, drop lead sheets whose chord progression is
        entirely NO_CHORD; if False, melody-only lead sheets are kept.
      all_transpositions: If True, emit each lead sheet transposed into all
        12 keys.

    Returns:
      A (lead_sheets, stats) tuple, where lead_sheets is a list of
      LeadSheet instances and stats is a list of statistics gathered from
      every stage of extraction.

    Raises:
      NonIntegerStepsPerBarError: If the bar length of `quantized_sequence`
        (derived from its time signature) is not a whole number of steps.
    """
    sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
    stats = {
        'empty_chord_progressions':
            statistics.Counter('empty_chord_progressions')
    }
    melodies, melody_stats = melody_pipelines.extract_melodies(
        quantized_sequence,
        search_start_step=search_start_step,
        min_bars=min_bars,
        max_steps_truncate=max_steps_truncate,
        max_steps_discard=max_steps_discard,
        gap_bars=gap_bars,
        min_unique_pitches=min_unique_pitches,
        ignore_polyphonic_notes=ignore_polyphonic_notes,
        pad_end=pad_end,
        filter_drums=filter_drums)
    chord_progressions, chord_stats = (
        chord_pipelines.extract_chords_for_melodies(
            quantized_sequence, melodies))
    lead_sheets = []
    for melody, chords in zip(melodies, chord_progressions):
        if chords is None:
            # Chord extraction failed for this melody; skip it.
            continue
        if require_chords and all(
                chord == chords_lib.NO_CHORD for chord in chords):
            # Progression is all NO_CHORD and chords are required.
            stats['empty_chord_progressions'].increment()
            continue
        lead_sheet = LeadSheet(melody, chords)
        if all_transpositions:
            for amount in range(-6, 6):
                transposed = copy.deepcopy(lead_sheet)
                transposed.transpose(amount)
                lead_sheets.append(transposed)
        else:
            lead_sheets.append(lead_sheet)
    return lead_sheets, list(stats.values()) + melody_stats + chord_stats