def open_midi(midi_path, remove_drums=True):
    """Load a MIDI file into a music21 stream, optionally dropping drums.

    midi_path -- path to the .mid file
    remove_drums -- when True, strip every event on channel 10
    """
    midi_file = midi.MidiFile()
    midi_file.open(midi_path)
    midi_file.read()
    midi_file.close()
    if remove_drums:
        # General MIDI reserves channel 10 for percussion.
        for track in midi_file.tracks:
            track.events = [e for e in track.events if e.channel != 10]
    return midi.translate.midiFileToStream(midi_file)
def open_midi(midi_path, remove_drums):
    """Read a MIDI file into a music21 stream.

    There is a one-line helper for reading MIDIs, but stripping the drums
    requires manipulating low-level MIDI events first, so we read manually.
    """
    raw = midi.MidiFile()
    raw.open(midi_path)
    raw.read()
    raw.close()
    if remove_drums:
        # Channel 10 is the General MIDI percussion channel.
        for trk in raw.tracks:
            trk.events = [e for e in trk.events if e.channel != 10]
    return midi.translate.midiFileToStream(raw)
def preProcessStream(self, music):
    """Convert the MIDI file at *music* into a music21 stream and return it."""
    midi_file = midi.MidiFile()
    midi_file.open(music)
    print("Reading MIDI data for {}...".format(music))
    midi_file.read()
    midi_file.close()
    print("Converting MIDI to stream for {}...".format(music))
    return midi.translate.midiFileToStream(midi_file)
def open_midi(midi_path, remove_drums):
    """Load *midi_path* as a music21 stream; optionally strip percussion."""
    loaded = midi.MidiFile()
    loaded.open(midi_path)
    loaded.read()
    loaded.close()
    if remove_drums:
        # Drop every event on channel 10, the General MIDI drum channel.
        for trk in loaded.tracks:
            trk.events = [e for e in trk.events if e.channel != 10]
    return midi.translate.midiFileToStream(loaded)
def get_midi(fl):
    """Read *fl* as a music21 MidiFile instance.

    Returns None when the file cannot be parsed (malformed MIDI data).
    """
    try:
        mfile = midi.MidiFile()
        mfile.open(fl, 'rb')
        mfile.read()
        mfile.close()
    except (midi.MidiException, IndexError):
        # Unparseable or truncated file — caller treats None as "skip".
        return None
    return mfile
def count_data_tokens(self, path_to_midi_files, all_data=True):
    """Tally NOTE_ON/NOTE_OFF byte tokens across a directory of .mid files.

    path_to_midi_files -- directory containing .mid files
    all_data -- if False, only the first five files are scanned; otherwise
        files are consumed five at a time, advancing self.file_index.

    NOTE(review): despite the original docstring ("returns a list of note
    vectors, and a token dictionary"), this routine only prints the sorted
    token counts and returns None (or (None, None, None) when the file
    index is exhausted) -- confirm callers expect that.
    """
    # Bug fix: the original used `fname not in 'sankey bwv988 bwv232 leipzig'`,
    # a SUBSTRING test that would also drop any file whose whole name happens
    # to be a substring of that string (e.g. 'key'). Use exact-name exclusion.
    excluded = {'sankey', 'bwv988', 'bwv232', 'leipzig'}
    files = [fname for fname in os.listdir(path_to_midi_files)
             if fname not in excluded]
    token_dict = {}
    if not all_data:
        files = files[:5]
    elif self.file_index >= len(files):
        return None, None, None
    else:
        files = files[self.file_index:min(self.file_index + 5, len(files))]
        self.file_index += 5
    print()
    for i, fname in enumerate(files):
        print('\r\r Reading file %d out of %d...' % (i + 1, len(files)))
        midi_file = midi.MidiFile()
        midi_file.open(os.path.join(path_to_midi_files, fname), 'rb')
        midi_file.read()
        midi_file.close()
        # TODO: normalize ticks per beat
        # TODO: represent each event as a one-hot, remove metadata events
        for channel in midi_file.tracks:
            for event in channel.events:
                # Bug fix: `event.type in 'NOTE_ON NOTE_OFF'` was a substring
                # test that also matched types like 'NOTE'; use exact matches.
                if event.type in ('NOTE_ON', 'NOTE_OFF'):
                    token = event.getBytes()  # renamed: was shadowing builtin `bytes`
                    token_dict[token] = token_dict.get(token, 0) + 1
    # Print the occurrence counts in ascending order (values, not keys).
    for count in sorted(token_dict.values()):
        print(count)
def open_midi_return_stream(path):
    """Read the MIDI file at *path* and return it as a music21 stream.

    Sanity-checks that the stream has exactly one part per non-first MIDI
    track (assumes the first track carries only meta/conductor data --
    TODO confirm this holds for all inputs).
    """
    raw = midi.MidiFile()
    raw.open(path)
    raw.read()
    raw.close()
    track_count = len(raw.tracks)
    converted = midi.translate.midiFileToStream(raw)
    part_count = len(converted.parts)
    assert track_count - 1 == part_count
    return converted
def loadMidi(data_source):
    """Load a MIDI file and slice its tracks into fixed-size 'minisongs'.

    data_source -- path to the MIDI file to load.
    Returns (minisongs, data_shape) where data_shape is
    (BEATS_PER_MINISONG, NOTE_RANGE, number_of_tracks).

    Side effects: stores the song tempo in the module-global `song_tempo`
    and appends each track's instrument name to the module-global
    `instrument_list`.
    """
    mf = midi.MidiFile()
    mf.open(filename=data_source)
    mf.read()
    mf.close()
    # read to stream
    s = midi.translate.midiFileToStream(mf)
    # Take the tempo from the first metronome-mark boundary.
    metronome = s.metronomeMarkBoundaries()[0]
    temp = metronome[2].getQuarterBPM()
    global song_tempo
    # preserve the tempo
    song_tempo = temp
    # number of parts/instruments
    all_tracks = s.parts
    tracks = []
    print("number of tracks is", len(all_tracks))
    for track in all_tracks:
        inst = track.getInstrument().instrumentName
        print("track is: ", inst)
        # Music21 does not support many MIDI instruments - we need to handle
        # these cases by falling back to Piano.
        if inst is None:  # fix: identity comparison instead of `== None`
            print("NO RECOGNIZED INSTRUMENT")
            inst = 'Piano'
        instrument_list.append(inst)
        tracks.append(track)
    print("CHANNELS : ", len(tracks))
    channels = len(tracks)
    data_shape = (BEATS_PER_MINISONG, NOTE_RANGE, channels)
    # number of possible songs in the longest track
    longest_length = 0
    for track in tracks:
        print("track length: ", track.duration.quarterLength)
        longest_length = max(longest_length, track.duration.quarterLength)
    mybeats = longest_length / LENGTH_PER_BEAT
    num_songs = math.ceil(mybeats / BEATS_PER_MINISONG)
    minisongs = getStandardizedNoteTracks(num_songs, BEATS_PER_MINISONG, tracks)
    return minisongs, data_shape
def transformMidi(path, firstNote, outputPath):
    """Rewrite the notes of the MIDI file at *path* via a Markov chain.

    The chain is trained on the note names of the input, seeded with
    *firstNote*, and the transformed stream is written to *outputPath*.
    """
    source = midi.MidiFile()
    source.open(path)
    source.read()
    source.close()
    original = midi.translate.midiFileToStream(source)
    transformed = stream.Stream()
    chain = MarcovChainGenerator()
    chain.setInput(lambda n: n.nameWithOctave)
    # First pass: feed every note into the chain to learn transitions.
    for element in original.flat:
        if isinstance(element, note.Note):
            chain.addNext(element)
    chain.setState(firstNote)
    # Second pass: replace each note's pitch with the chain's next state.
    for element in original.flat:
        if isinstance(element, note.Note):
            element.nameWithOctave = chain.getNextState()
            transformed.append(element)
    transformed.flat.write('midi', fp=outputPath)
def midi2keystrikes(filename, tracknum=0):
    """Read a MIDI file; return a flat [pitch, velocity, ...] list of key hits
    for track *tracknum*."""
    mfile = midi.MidiFile()
    mfile.open(filename)
    mfile.read()
    mfile.close()
    elapsed = 0  # running tick count (accumulated but not returned)
    hits = []
    for ev in mfile.tracks[tracknum].events:
        if ev.isDeltaTime() and ev.time is not None:
            elapsed += ev.time
        if ev.isNoteOn() or ev.isNoteOff():
            hits.extend((ev.pitch, ev.velocity))
    return hits
def midi2mat(filename, upper=88, lower=1):
    """Decode a MIDI file into one numpy matrix per note-bearing track.

    Each matrix has one row per NOTE_ON event and upper+2 columns: columns
    [0, upper) one-hot the rescaled pitch with the note's velocity, column
    `upper` holds the following DeltaTime (used as duration), and column
    `upper+1` holds the MIDI channel.

    filename -- path to the MIDI file
    upper/lower -- bounds of the pitch coding range used for rescaling
    """
    import numpy as np
    from music21 import midi  # just in case
    mfile = midi.MidiFile()  # renamed from `file` to avoid shadowing the builtin
    mfile.open(filename, attrib='rb')
    mfile.read()
    mfile.close()
    # Keep only tracks that actually contain notes.
    note_tracks = [trk for trk in mfile.tracks if trk.hasNotes()]
    INFO = []
    for i, trk in enumerate(note_tracks):
        events = trk.events
        # One dict of parallel lists per track, in case there are multiple.
        info = {"pitch": [], "duration": [], "velocity": [], "channel": []}
        for j, ev in enumerate(events):
            if ev.type == "NOTE_ON":
                info["pitch"].append(ev.pitch)
                info["velocity"].append(ev.velocity)
                info["channel"].append(ev.channel)
                # Regular MIDI files often lack NOTE_OFF events, so the
                # following DeltaTime is taken as the note duration.
                # Bug fix: the original indexed events[j+1] unconditionally,
                # raising IndexError when NOTE_ON was the final event.
                if j + 1 < len(events) and events[j + 1].type == "DeltaTime":
                    info["duration"].append(events[j + 1].time)
                else:
                    print("WARNING: track "+str(i)+", NoteOn event "+str(j))
                    # Bug fix: keep the parallel lists aligned so the output
                    # rows below don't index past the duration list.
                    info["duration"].append(0)
        # Rescale pitches linearly onto the coding range.
        # NOTE(review): the original adds +1 rather than +lower; that
        # behavior is preserved here -- confirm it is intended.
        lo, hi = min(info["pitch"]), max(info["pitch"])
        if hi == lo:
            # Bug fix: avoid ZeroDivisionError when all pitches are equal.
            scaled = [1] * len(info["pitch"])
        else:
            x = np.subtract(info["pitch"], lo) / (hi - lo)
            scaled = list(map(round, np.multiply(x, upper - lower) + 1))
        info["pitch"] = scaled
        INFO.append(info)
    OUTPUT = []
    for info in INFO:
        layer = np.zeros((len(info["pitch"]), upper + 2))
        for j in range(len(info["pitch"])):
            layer[j][int(info["pitch"][j]) - 1] = info["velocity"][j]
            layer[j][upper] = info["duration"][j]
            layer[j][upper + 1] = info["channel"][j]
        OUTPUT.append(layer)
    return OUTPUT
def from_file(self, name, file=False):
    """Load a melody and its metadata into this object.

    name -- directory containing melody.mid and song_metadata.json, or
        (when file=True) a direct path to the MIDI file itself.
    file -- if True, *name* is the MIDI file rather than a directory.

    NOTE(review): song_metadata.json is always read from
    name + '/song_metadata.json', even when file=True -- confirm intended.

    Side effects: sets self._key, self._time_signature, self._score (the
    score transposed to C, with self._key then set to 'C') and
    self._melody (one flattened, measure-split part per voice).
    """
    mid = midi.MidiFile()
    if file:
        mid.open(filename=name)
    else:
        mid.open(filename=name + '/melody.mid')
    with open(name + '/song_metadata.json') as f:
        metadata = json.load(f)
    # metadata['Key'] holds a tonic and a mode separated by whitespace.
    this_key = metadata['Key'].split()
    # print(this_key)
    # music21 spells flats with '-' rather than 'b' (e.g. 'B-' for B flat).
    if len(this_key[0]) == 2 and this_key[0][-1] == 'b':
        this_key[0] = this_key[0][0] + '-'
    self._key = key.Key(this_key[0], this_key[1].lower())
    self._time_signature = meter.TimeSignature(metadata['Time'])
    mid.read()
    mid.close()
    # eventList = midi.translate.keySignatureToMidiEvents(self._key) + \
    #     midi.translate.timeSignatureToMidiEvents(self._time_signature)
    # mid.tracks[0].events = [eventList] + mid.tracks[0].events
    # print(mid.tracks[0].events)
    # print(eventList)
    self._score = midi.translate.midiFileToStream(mid)
    self._melody = []
    # Transpose the whole score to C so downstream use is key-independent.
    i = interval.Interval(self._key.tonic, pitch.Pitch('C'))
    self._score.transpose(i, inPlace=True)
    self._key = 'C'
    for i, voice in enumerate(self._score.parts):
        try:
            # Prefer the repeat-expanded, sorted form of each part.
            self._melody.append(voice.flat.measures(1, None,
                collect=['TimeSignature', 'Instrument'],
                gatherSpanners=False).expandRepeats().sorted)
        except repeat.ExpanderException:
            # Fall back to unexpanded measures when repeats are malformed.
            self._melody.append(voice.flat.measures(1, None,
                collect=['TimeSignature', 'Instrument'],
                gatherSpanners=False))
def open_midi(midi_path):
    """Read the MIDI file at *midi_path* and convert it to a music21 stream."""
    handle = midi.MidiFile()
    handle.open(midi_path)
    handle.read()
    handle.close()
    return midi.translate.midiFileToStream(handle)
def open_midi(midi_path):
    """Read *midi_path* in binary mode and return the raw MidiFile object."""
    parsed = midi.MidiFile()
    parsed.open(midi_path, attrib='rb')
    parsed.read()
    parsed.close()
    return parsed
def np_seq2mid(np_seq):
    """
    Converts a numpy array to a midi file.
    :param np_seq: numpy beat sequence
    :return: music21.midi.MidiFile

    Each step of np_seq carries 15 trigger values (step[:15]) followed by
    per-group velocities (step[15 + group]); PERC_GROUPS maps trigger
    indices to their velocity group and PERC_MAP maps them to drum pitches.
    All notes are emitted on channel 10 with a fixed 6-tick duration.
    """
    mt = midi.MidiTrack(1)
    t = 0      # absolute tick position of the current step
    tlast = 0  # tick position just after the last emitted onset
    for step in np_seq:
        # onset will be true if at least one trig is > 0.0
        # the remaining trigs are added at the same delta time
        onset = False  # we encountered an onset at this step
        for idx, trig in enumerate(step[:15]):
            # find the group this trigger index belongs to
            group = None
            for index, grp in enumerate(PERC_GROUPS):
                if idx in grp:
                    group = index
            if trig > 0.0:
                # scale the group velocity onto the MIDI 0-127 range
                vel = int(step[15 + group] * 127)
                pitch = PERC_MAP[idx]
                dt = midi.DeltaTime(mt)
                if onset is False:
                    # first onset of the step: advance from the previous one
                    dt.time = t - tlast
                else:
                    # simultaneous onsets share the same delta time
                    dt.time = 0
                mt.events.append(dt)
                me = midi.MidiEvent(mt)
                me.type = "NOTE_ON"
                me.channel = 10
                me.time = None  # d
                me.pitch = pitch
                me.velocity = vel
                mt.events.append(me)
                if onset is False:
                    # note-offs below fire 6 ticks after the onset
                    tlast = t + 6
                    onset = True
        if onset is True:
            # reset onset for the noteoff
            onset = False
            # makes the note off now
            for idx, trig in enumerate(step[:15]):
                if trig > 0.0:
                    pitch = PERC_MAP[idx]
                    dt = midi.DeltaTime(mt)
                    if onset is False:
                        dt.time = 6  # fixed note duration
                    else:
                        dt.time = 0  # simultaneous note-offs
                    mt.events.append(dt)
                    me = midi.MidiEvent(mt)
                    me.type = "NOTE_OFF"
                    me.channel = 10
                    me.time = None  # d
                    me.pitch = pitch
                    me.velocity = 0
                    mt.events.append(me)
                    if onset is False:
                        onset = True
        t += TICKS_PER_Q / BEAT_DIV
    # add end of track
    dt = midi.DeltaTime(mt)
    dt.time = 0
    mt.events.append(dt)
    me = midi.MidiEvent(mt)
    me.type = "END_OF_TRACK"
    me.channel = 1
    me.data = ''  # must set data to empty string
    mt.events.append(me)
    # make midi file
    mf = midi.MidiFile()
    mf.ticksPerQuarterNote = TICKS_PER_Q  # cannot use: 10080
    mf.tracks.append(mt)
    return mf
def readStream(path):
    """Read the MIDI file at *path* and return its flattened music21 stream."""
    infile = midi.MidiFile()
    infile.open(path)
    infile.read()
    infile.close()
    return midi.translate.midiFileToStream(infile).flat
def analysis_from_midi(midi_fname, h5_fname=None) -> SongAnalysis:
    # NOTE(review): stub -- the MidiFile result is discarded and nothing is
    # returned despite the SongAnalysis annotation. Also, music21's MidiFile
    # is normally constructed without arguments and then .open()ed; confirm
    # that passing midi_fname to the constructor is valid here.
    midi.MidiFile(midi_fname)
    if h5_fname:
        pass


if __name__ == "__main__":
    # Ad-hoc experiment scaffolding; branches are toggled by editing the
    # literal `if False` / `if True` guards below.
    if False:
        sa = analyse_audio(
            "/Users/bgeelen/Music/iTunes/iTunes Media/Music/Compilations/Life on Mars/02 Life on Mars_.mp3",
            include_timbres=True,
        )
    # sb = analysis_from_h5py('/Users/bgeelen/Data/msd/data/a/a/a/TRAAAAW128F429D538.h5', True)
    f = h5py.File(
        "/Users/bgeelen/Data/msd/data/a/a/a/TRAAAAW128F429D538.h5")
    if True:
        fname = '/Users/bgeelen/Data/lakh/lmd_matched/A/A/A/TRAAAGR128F425B14B/1d9d16a9da90c090809c153754823c2b.mid'
        midifile = midi.MidiFile()
        midifile.open(fname)
        midifile.read()
        midifile.close()
        stream = music21.midi.translate.midiFileToStream(midifile)
        # NOTE(review): `work` is not defined in this scope -- presumably it
        # should be `stream`; this branch raises NameError as written.
        notes = [x for x in work.recurse() if isinstance(x, music21.note.Note)]
        first_measure = work.measures(0, 1)[0]
        measure_length = first_measure.quarterLength
def __init__(self):
    # Start with an empty music21 MidiFile; populated later by other methods.
    self.mf = midi.MidiFile()
def open_midi_file(self, filename):
    """Open and parse *filename*, returning the raw music21 MidiFile.

    Fix: the original never closed the underlying file handle after
    reading, leaking the descriptor. The parsed data stays in `mf` after
    closing, so callers are unaffected.
    """
    print("Opening file...")
    mf = midi.MidiFile()
    mf.open(filename)
    mf.read()
    mf.close()  # release the OS file handle
    return mf
def open_midi(self, file):
    """Read *file* as MIDI and return the converted music21 stream."""
    reader = midi.MidiFile()
    reader.open(file)
    reader.read()
    reader.close()
    return midi.translate.midiFileToStream(reader)
def idxsToMidi(idxs, verbose=False):
    """Decode a sequence of event indices into a music21 MidiFile.

    Index vocabulary (performance-RNN style):
      [0, 128)    note-on, pitch = idx
      [128, 256)  note-off, pitch = idx - 128
      [256, 356)  time-shift of (1 + idx - 256) * 10 ticks
      356+        set current velocity to (idx - 356) * 4

    Returns (mf, errors) where errors counts ignored note-ons for
    already-sounding pitches ('is_on') and note-offs for silent pitches
    ('is_not_on').
    """
    mf = midi.MidiFile()
    mf.ticksPerQuarterNote = 1024
    # The maestro dataset uses the first track to store tempo data, and the second
    # track to store the actual performance. So follow that convention.
    tempo_track = midi.MidiTrack(0)
    track = midi.MidiTrack(1)
    mf.tracks = [tempo_track, track]
    tempo = midi.MidiEvent(tempo_track, type=midi.MetaEvents.SET_TEMPO)
    # tempo.data is the number of microseconds per beat (per quarter note)
    # So to set ticks per millis = 1 (easy translation from time-shift values to ticks),
    # tempo.data must be 1e3 * 1024, since ticksPerQuarterNote is 1024 (see above)
    tempo.data = int(1e3 * 1024).to_bytes(3, 'big')
    end_of_track = midi.MidiEvent(tempo_track, type=midi.MetaEvents.END_OF_TRACK)
    end_of_track.data = ''
    tempo_track.events = [
        # there must always be a delta time before each event
        midi.DeltaTime(tempo_track, time=0),
        tempo,
        midi.DeltaTime(tempo_track, time=0),
        end_of_track
    ]
    track.events = [midi.DeltaTime(track, time=0)]
    current_velocity = 0
    notes_on = set()  # pitches currently sounding
    errors = {'is_on': 0, 'is_not_on': 0}
    for idx in idxs:
        if 0 <= idx < 128:
            # note-on
            pitch = idx
            if pitch in notes_on:
                if verbose:
                    print(pitch, 'is already on')
                errors['is_on'] += 1
                continue
            if track.events[-1].type != 'DeltaTime':
                track.events.append(midi.DeltaTime(track, time=0))
            track.events.append(makeNote(track, pitch, current_velocity))
            notes_on.add(pitch)
        elif 128 <= idx < (128 + 128):
            # note-off
            pitch = idx - 128
            if pitch not in notes_on:
                if verbose:
                    print(pitch, 'is not on')
                errors['is_not_on'] += 1
                continue
            if track.events[-1].type != 'DeltaTime':
                track.events.append(midi.DeltaTime(track, time=0))
            # presumably makeNote with velocity 0 acts as a note-off --
            # makeNote is defined elsewhere in this file; confirm.
            track.events.append(makeNote(track, pitch, 0))
            notes_on.remove(pitch)
        elif (128 + 128) <= idx < (128 + 128 + 100):
            # time-shift
            t = (1 + idx - (128 + 128)) * 10
            if track.events[-1].type == 'DeltaTime':
                # combine repeated delta times
                track.events[-1].time += t
            else:
                track.events.append(midi.DeltaTime(track, time=t))
        else:
            # velocity
            current_velocity = (idx - (128 + 128 + 100)) * 4
    if verbose:
        print('remaining notes left on:', notes_on)
    if track.events[-1].type != 'DeltaTime':
        track.events.append(midi.DeltaTime(track, time=0))
    # NOTE(review): this appends the END_OF_TRACK event that was created for
    # (and already appended to) the tempo track -- confirm sharing the same
    # event object across both tracks is intended.
    track.events.append(end_of_track)
    return mf, errors
def read_midi(file_name):
    """Read *file_name* and return the raw music21 MidiFile object."""
    parsed = midi.MidiFile()
    parsed.open(file_name)
    parsed.read()
    parsed.close()
    return parsed
def readMidi(filepath):
    """Parse the MIDI file at *filepath* into a music21 MidiFile and return it."""
    result = midi.MidiFile()
    result.open(filepath)
    result.read()
    result.close()
    return result
# NOTE(review): fragment -- this `elif` belongs to a conditional whose earlier
# branches (and enclosing loop) are outside this view; indentation below is
# reconstructed and should be checked against the full file.
elif nonzero_index == 2:
    # note off case
    pitchOff_store = index_data

# Flush a pending event once we have a pitch and a velocity.
# NOTE(review): `not not x` is just bool(x); left as written.
if not not ((pitchOn_store or pitchOff_store) and velocity_store):
    if pitchOn_store != 0:
        # midi track must have DeltaTime event between any other event
        deltaWrite(mt2, time_store)
        noteWrite(mt2, 1, pitchOn_store, velocity_store)  # write note on event
    elif pitchOff_store != 0:
        # midi track must have DeltaTime event between any other event
        deltaWrite(mt2, time_store)
        noteWrite(mt2, 0, pitchOff_store, velocity_store)  # write note off event
    # re-initialise pitch, velocity and time
    pitchOn_store, pitchOff_store, velocity_store, time_store = init_pitch_vol(
        pitchOn_store, pitchOff_store, velocity_store, time_store)

requiredFooter(mt2)
mf = midi.MidiFile()
# Used empirical results to make 30 seconds long...
# int(defaults.ticksPerQuarter / 2) came close
mf.ticksPerQuarterNote = 492
mf.tracks.append(mt2)
mf.open('testmidi5.MID', 'wb')
mf.write()
mf.close()

with open('./picklesave/filepathlist.data', 'rb') as filehandle:
    filepathlist = pickle.load(filehandle)
def analysis_from_midi(midi_fname, h5_fname=None) -> SongAnalysis:
    """Stub: analyse a song from a MIDI file (and optional HDF5 features).

    NOTE(review): unimplemented -- the MidiFile instance is discarded and the
    function returns None despite the SongAnalysis annotation. Also, music21's
    MidiFile is normally constructed with no arguments and then .open()ed;
    confirm that passing midi_fname to the constructor is valid.
    """
    midi.MidiFile(midi_fname)
    if h5_fname:
        pass