Example #1
 # Event classes come from the midi package (ProgramChangeEvent is assumed 
 # to live there alongside the others)
 from midi import SetTempoEvent, NoteOnEvent, NoteOffEvent, ProgramChangeEvent

 def copy_event(event):
     """Return a fresh, independent copy of a supported MIDI event."""
     if type(event) is SetTempoEvent:
         new_event = SetTempoEvent()
         new_event.tick = event.tick
         new_event.data[0] = event.data[0]
         new_event.data[1] = event.data[1]
         new_event.data[2] = event.data[2]
         return new_event
     elif type(event) is NoteOnEvent:
         new_event = NoteOnEvent()
         new_event.tick = event.tick
         new_event.channel = event.channel
         new_event.data[0] = event.data[0]
         new_event.data[1] = event.data[1]
         return new_event
     elif type(event) is NoteOffEvent:
         new_event = NoteOffEvent()
         new_event.tick = event.tick
         new_event.channel = event.channel
         new_event.data[0] = event.data[0]
         new_event.data[1] = event.data[1]
         return new_event
     elif type(event) is ProgramChangeEvent:
         new_event = ProgramChangeEvent()
         new_event.tick = event.tick
         new_event.channel = event.channel
         new_event.data[0] = event.data[0]
         return new_event
     else:
         raise TypeError("event type %s is not supported" % type(event).__name__)
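A minimal usage sketch for copy_event follows. It assumes, as the function body itself does, that a newly constructed NoteOnEvent already carries an indexable two-element data list (pitch and velocity); the variable names are purely illustrative.

 original = NoteOnEvent()
 original.tick = 480
 original.channel = 0
 original.data[0] = 60    # pitch (middle C)
 original.data[1] = 100   # velocity

 duplicate = copy_event(original)
 assert duplicate is not original
 assert duplicate.tick == original.tick and duplicate.data == original.data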
Example #2
 def to_midi(self):
     """
     Constructs a L{MIDI EventStream<midi.EventStream>} from the 
     data in this stream.
     This can then be output to a file to be played.
     
     Note that TPCNotes will be output as normal MIDI notes. We 
     can't do any of the clever tuning that we can do with tonal 
     space coordinates, since we'd need a further step of analysis 
     to work out the fully specified TS point from the pitch class.
     
     """
     tempo = 120
     
     from midi import EventStream, NoteOffEvent, NoteOnEvent, SetTempoEvent
     mid = EventStream()
     mid.add_track()
     # Set the tempo first at the beginning
     temp = SetTempoEvent()
     temp.tempo = tempo
     temp.tick = 0
     mid.add_event(temp)
     # Work out how many ticks there are in a millisecond
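     # (resolution is ticks per quarter note and tempo is beats per minute, 
     #  and there are 60000 ms in a minute, so e.g. 480 * 120 / 60000 = 0.96)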
     ticks_per_ms = float(mid.resolution) * tempo / 60000
     # Create midi events for every event in our stream
     for ev in self.events:
         if isinstance(ev, TPCNoteEvent):
             # Create note-on and note-off events
             note = ev.note
             
             noteon = NoteOnEvent()
             noteon.pitch = note
             noteon.tick = int(ev.start * ticks_per_ms)
             noteon.velocity = 100
             mid.add_event(noteon)
             
             noteoff = NoteOffEvent()
             noteoff.pitch = note
             noteoff.tick = int(ev.end * ticks_per_ms)
             noteoff.velocity = 100
             mid.add_event(noteoff)
         elif isinstance(ev, (ChordEvent,BeatEvent)):
             # These events don't affect the midi data
             continue
         else:
             raise TypeError, "event type %s not recognised by "\
                 "MIDI converter." % type(ev).__name__
     return mid
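A hypothetical usage sketch follows. Here note_stream stands in for an instance of the class this method belongs to (the snippet does not name it), and the final write step assumes the midi package exposes a write_midifile helper, as python-midi variants commonly do; the actual function name and argument order should be checked against the package.

 mid = note_stream.to_midi()
 # Assumption: a write_midifile(stream, filename) helper exists in this midi 
 # package; verify the real writer function and its argument order.
 from midi import write_midifile
 write_midifile(mid, "output.mid")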
Example #3
 # These classes are assumed to come from the same midi package used in the 
 # other examples
 from midi import EventStream, SetTempoEvent, ProgramChangeEvent

 def from_data(data, directives):
     """
     Build a ToneMatrix and a MIDI EventStream from the lines of a harmonical 
     input file. Module-level helpers used below (ToneMatrix, SineChordEvent, 
     SineClusterEvent, ENVELOPES, events_for_chord, tonal_space_note_events 
     and the _get_*/_read_coord/_qn_to_seconds functions) are assumed to be 
     defined elsewhere in the module.
     """
     # Build the tone matrix straight up
     state = {
         'equal_temperament' : False,
         'double_root' : False,
         'envelope' : None,
         'origin' : 440,
         'time' : 0.0,
     }
     tone_matrix = ToneMatrix()
     
     #### Prepare a midi stream
     mid = EventStream()
     mid.add_track()
     # Add a tempo event
     tempo = SetTempoEvent()
     tempo.tempo = directives['tempo']
     tempo.tick = 0
     mid.add_event(tempo)
     
     # Each line represents a single chord or some kind of directive
     for line in data:
         first_word = line.split()[0].lower()
         if "=" in line:
             # This is an assignment
             key, __, value = line.partition("=")
             key = key.strip()
             value = value.strip()
             # Check it's valid
             if key == "equal_temperament":
                 if value not in ['off','on']:
                     raise HarmonicalInputFileReadError(
                         "equal_temperament must be 'off' or 'on', "
                         "not '%s'" % value)
                 value = (value == 'on')
             elif key == "origin":
                 try:
                     value = int(value)
                 except ValueError:
                     # Try interpreting as a coordinate
                     try:
                         coord = _read_coord(value)
                     except HarmonicalInputFileReadError:
                         raise HarmonicalInputFileReadError, "origin "\
                             "value must be an integer or a coordinate."
                     value = _get_pitch(coord)
             elif key == "double_root":
                 if value not in ['off','on']:
                     raise HarmonicalInputFileReadError(
                         "double_root must be 'off' or 'on', "
                         "not '%s'" % value)
                 value = (value == 'on')
             elif key == "envelope":
                 if value not in ENVELOPES:
                     raise HarmonicalInputFileReadError, "unknown "\
                         "envelope '%s'. Must be one of: %s" % \
                         (value, ", ".join(ENVELOPES.keys()))
                 value = ENVELOPES[value]()
             elif key == "program":
                 # Midi program change
                 try:
                     value = int(value)
                     if value > 127 or value < 0:
                         raise ValueError
                 except ValueError:
                     raise HarmonicalInputFileReadError, "invalid program "\
                         "change: %s. Should be an integer 0-127" % value
                 pchange = ProgramChangeEvent()
                 pchange.value = value
                 pchange.tick = int(state['time'] * mid.resolution)
                 mid.add_event(pchange)
             else:
                 raise HarmonicalInputFileReadError, "invalid "\
                     "assignment key: '%s'" % key
             # Make this assignment when we get to it in the score
             state[key] = value
         elif first_word == "rest":
             tokens = line.split()
             duration = _get_duration(tokens)
             
             # Just move the time counter on without doing anything
             state['time'] += duration
         elif first_word == "chord":
             tokens = line.lstrip("chord").split()
             duration = _get_duration(tokens)
             root = _get_root(tokens)
             volume = _get_volume(tokens)
             sec_duration = _qn_to_seconds(duration)
             
             # Must be just a chord type left
             if len(tokens) > 1:
                 raise HarmonicalInputFileReadError, "chord must "\
                     "include just a chord type"
             if len(tokens) == 0:
                 ctype = ''
             else:
                 ctype = tokens[0]
             
             # Add the chord to the tone matrix
             tone_matrix.add_tone(
                     _qn_to_seconds(state['time']), 
                     SineChordEvent(
                         _get_pitch(root), 
                         ctype,
                         duration=sec_duration, 
                         amplitude=volume,
                         equal_temperament=state['equal_temperament'],
                         double_root=state['double_root'],
                         envelope=state['envelope']
                     )
                 )
                 
             # Add the same chord to the midi file
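             # (state['time'] and duration are measured in quarter notes; 
             #  resolution is ticks per quarter note)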
             tick_time = int(mid.resolution * state['time'])
             tick_duration = int(duration * mid.resolution)
             # TODO: currently this will always treat C as the origin 
             #  even if you change it with directives
             events = events_for_chord(root, 
                                       ctype,
                                       tick_time,
                                       tick_duration, 
                                       velocity = int(volume*127),
                                       equal_temperament=state['equal_temperament'],
                                       double_root=state['double_root'])
             for ev in events:
                 mid.add_event(ev)
             
             # Move the timer on ready for the next chord
             state['time'] += duration
         elif first_word in ["tones", "t"]:
             # Must be a chord made up of coordinates
             tokens = line.lstrip("tones").split()
             duration = _get_duration(tokens)
             root = _get_root(tokens)
             volume = _get_volume(tokens)
             sec_duration = _qn_to_seconds(duration)
                 
             # The rest should be the list of coordinates
             coordinates = [_read_coord(token) for token in tokens]
             
             # Add the chord to the tone matrix
             tone_matrix.add_tone(
                     _qn_to_seconds(state['time']), 
                     SineClusterEvent(
                         _get_pitch(root), 
                         coordinates,
                         duration=sec_duration, 
                         amplitude=volume,
                         equal_temperament=state['equal_temperament'],
                         double_root=state['double_root'],
                         envelope=state['envelope']
                     )
                 )
                 
             
             # Add the same chord to the midi file
             tick_time = int(mid.resolution * state['time'])
             tick_duration = int(duration * mid.resolution)
             for note in coordinates:
                 # TODO: currently this will always treat C as the origin 
                 #  even if you change it with directives
                 events = tonal_space_note_events(note, 
                                                  tick_time,
                                                  tick_duration, 
                                                  velocity = int(volume*127))
                 if state['equal_temperament']:
                     # Omit tuning event (the first one)
                     events = events[1:]
                 for ev in events:
                     mid.add_event(ev)
             
             # Move the timer on ready for the next chord
             state['time'] += duration
         else:
             raise HarmonicalInputFileReadError, "could not make sense "\
                 "of the line: %s" % line
     return ChordInputFile(tone_matrix, midi_file=mid)
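To make the expected input concrete, here is a minimal, hypothetical sketch of calling from_data. Only assignment lines are shown, since their formats are checked explicitly by the parser above; chord, rest and tones lines are omitted because their token syntax is defined by the _get_duration, _get_root and _get_volume helpers, which are not included in this snippet. The directives dict must at least supply the tempo the function reads.

 data = [
     "equal_temperament = on",
     "double_root = off",
     "origin = 440",
     "program = 0",    # emits a MIDI program change at the current time
 ]
 directives = {'tempo': 120}
 chord_file = from_data(data, directives)  # ChordInputFile wrapping the tone matrix and MIDI stream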