Esempio n. 1
0
    def load_track(self, miditrack):
        """
        Convert one MIDI track's events into sound events on a new InstrumentTrack.

        Walks the events in order while keeping a running absolute time:
        1. advance the running time by the event's tick delta
        2. on a note-on event, create a Note and register it as sounding
        3. on a note-off event, close the matching sounding note by
           computing its duration

        The populated track is added to ``self.transcript`` under the
        track's instrument, defaulting to piano when none is declared.

        :param miditrack: sequence of MIDI events for a single track
        """
        present_time = 0
        on_notes = {}  # pitch -> currently sounding Note awaiting its note-off
        track = InstrumentTrack()
        for event in miditrack:
            present_time += event.tick
            if MidiUtils.is_new_note(event):
                note = Note(start_time=present_time, pitch=event.pitch, volume=event.velocity)
                on_notes[note.pitch] = note
                track.add_note(note)
            if MidiUtils.has_note_ended(event):
                # pop() both closes out the note and guards against a stray
                # note-off with no matching note-on (previously a KeyError)
                note = on_notes.pop(event.pitch, None)
                if note is not None:
                    note.duration = present_time - note.start_time
        instrument = MidiUtils.get_instrument(miditrack)
        if instrument is None:
            instrument = instruments.PIANO
        self.transcript.add_track(instrument, track)
Esempio n. 2
0
 def schedule_note(self, note, start):
     """
     Translate a note into its paired MIDI events and place them on the schedule.

     The note-on event fires at the given start time and the matching
     note-off fires once the note's duration has elapsed.

     :param note: Note object to be played
     :param start: absolute start time of the note
     """
     on_event = MidiUtils.to_note_on_event(note, self.channel)
     off_event = MidiUtils.to_note_off_event(note, self.channel)
     self.schedule_event(on_event, start)
     self.schedule_event(off_event, start + note.duration)
Esempio n. 3
0
 def load_meta(self):
     """
     Build a TranscriptMeta from the meta track (track 0) and hand it to
     the transcript.

     Key- and time-signature events are stored directly on the meta
     object; tempo changes are recorded in a dict keyed by the absolute
     time at which they take effect.
     """
     meta = TranscriptMeta(midiformat=self.pattern.format, resolution=self.pattern.resolution)
     elapsed = 0
     for event in self.pattern[0]:
         elapsed += event.tick
         if MidiUtils.is_key_signature_event(event):
             meta.key_signature_event = event
         if MidiUtils.is_time_signature_event(event):
             meta.time_signature_event = event
         if MidiUtils.is_set_tempo_event(event):
             meta.tempo_dict[elapsed] = event
     self.transcript.set_transcript_meta(meta)
Esempio n. 4
0
 def load_tracks(self):
     """
     Load every non-meta track that actually contains notes.

     The first track is the meta track, so iteration skips index 0;
     tracks without any note events are ignored.
     """
     for candidate in self.pattern[1:]:
         if MidiUtils.has_notes(candidate):
             self.load_track(candidate)
Esempio n. 5
0
    def perform_analysis(self):
        # check for unprocessed events
        for track in self.pattern:
            channel = -1
            for event in track:
                if MidiUtils.is_channel_event(event):
                    if channel == -1:
                        channel = event.channel
                    if channel != event.channel:
                        print "TRACK HAS MULTIPLE CHANNELS"
                        if Analyzer.DO_EXIT:
                            sys.exit(-1)

        # global meta events should be in the first track
        for i in range(1, len(self.pattern), 1):
            for event in self.pattern[i]:
                if MidiUtils.is_song_meta_event(event):
                    print "GLOBAL META EVENTS NEED TO BE IN THE FIRST TRACK", event
                    if Analyzer.DO_EXIT:
                        sys.exit(-1)
Esempio n. 6
0
 def __init__(self, instrument=0, channel=0):
     """
     Create a scheduled track whose first event selects its instrument.

     :param instrument: MIDI program number switched to at time 0
     :param channel: MIDI channel the track plays on
     """
     super(NotesAndEventsScheduledTrack, self).__init__(channel)
     program_change = MidiUtils.to_program_change_event(instrument)
     self._scheduled_events[0] = [program_change]