def from_quantized_sequence(self, quantized_sequence, search_start_step=0, gap_bars=1, pad_end=False, ignore_is_drum=False):
  """Extract a drum track from a quantized NoteSequence into self.

  Scanning begins at `search_start_step` (assumed to fall on a bar
  boundary), so repeated calls with increasing start steps can pull
  multiple drum tracks out of one sequence. The extracted track's end
  step is recorded in `self._end_step`.

  Zero-velocity notes are dropped. Extraction stops once `gap_bars`
  bars (measures) pass with no drum hits; the bar length in steps is
  derived from the sequence's time signature. Each resulting event is
  a frozenset of the drum "pitches" struck at that step (empty when
  nothing is played).

  Args:
    quantized_sequence: A quantized NoteSequence instance.
    search_start_step: Time step at which to begin looking for drums.
        Assumed to be the beginning of a bar.
    gap_bars: End the track after this many bars (or more) of silence
        following a non-empty drum event.
    pad_end: If True, pad the track with empty events so it ends on a
        bar boundary.
    ignore_is_drum: If True, also accept notes whose `is_drum` is False.

  Raises:
    NonIntegerStepsPerBarError: If the sequence's bar length (derived
        from its time signature) is not a whole number of time steps.
  """
  sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
  self._reset()

  steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
      quantized_sequence)
  if steps_per_bar_float % 1 != 0:
    raise events_lib.NonIntegerStepsPerBarError(
        'There are %f timesteps per bar. Time signature: %d/%d' %
        (steps_per_bar_float,
         quantized_sequence.time_signatures[0].numerator,
         quantized_sequence.time_signatures[0].denominator))
  steps_per_bar = int(steps_per_bar_float)
  self._steps_per_bar = steps_per_bar
  self._steps_per_quarter = (
      quantized_sequence.quantization_info.steps_per_quarter)

  # Bucket eligible notes (drums only unless ignore_is_drum, nonzero
  # velocity, at or after the search window) by quantized start step.
  notes_by_step = collections.defaultdict(list)
  for note in quantized_sequence.notes:
    if not (note.is_drum or ignore_is_drum):
      continue
    if not note.velocity:
      continue
    if note.quantized_start_step < search_start_step:
      continue
    notes_by_step[note.quantized_start_step].append(note)

  if not notes_by_step:
    return

  ordered_steps = sorted(notes_by_step)
  first_step = ordered_steps[0]
  # Snap the track start back to the bar boundary at or before the
  # first drum hit.
  track_start_step = (
      first_step - (first_step - search_start_step) % steps_per_bar)

  gap_origin = 0
  for step in ordered_steps:
    index = step - track_start_step
    # A long enough silent stretch after a drum event ends the track.
    if len(self) and index - gap_origin >= gap_bars * steps_per_bar:  # pylint:disable=len-as-condition
      break
    # Record one event: the set of drum "pitches" struck at this step.
    self.set_length(index + 1)
    self._events[index] = frozenset(
        note.pitch for note in notes_by_step[step])
    gap_origin = index + 1

  if not self._events:
    # Nothing was added; leave `_start_step` and `_end_step` unset.
    return

  self._start_step = track_start_step

  track_length = len(self)
  # Optionally round the length up to a multiple of `steps_per_bar`.
  if pad_end:
    track_length += -track_length % steps_per_bar
  self.set_length(track_length)
def from_quantized_sequence(self, quantized_sequence, start_step, end_step):
  """Populate self with the chords from the given quantized NoteSequence.

  A chord progression is extracted from the given sequence starting at time
  step `start_step` and ending at time step `end_step`.

  The number of time steps per bar is computed from the time signature in
  `quantized_sequence`.

  Args:
    quantized_sequence: A quantized NoteSequence instance.
    start_step: Start populating chords at this time step.
    end_step: Stop populating chords at this time step.

  Raises:
    NonIntegerStepsPerBarError: If `quantized_sequence`'s bar length
        (derived from its time signature) is not an integer number of time
        steps.
    CoincidentChordsError: If any of the chords start on the same step.
  """
  sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
  self._reset()

  steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
      quantized_sequence)
  if steps_per_bar_float % 1 != 0:
    raise events_lib.NonIntegerStepsPerBarError(
        'There are %f timesteps per bar. Time signature: %d/%d' %
        (steps_per_bar_float,
         # Fix: NoteSequence has a repeated `time_signatures` field (see
         # the sibling drum/melody extractors); the singular
         # `time_signature` attribute used before does not exist and
         # raised AttributeError instead of the intended error.
         quantized_sequence.time_signatures[0].numerator,
         quantized_sequence.time_signatures[0].denominator))
  self._steps_per_bar = int(steps_per_bar_float)
  self._steps_per_quarter = (
      quantized_sequence.quantization_info.steps_per_quarter)

  # Sort chord annotations by their quantized start times.
  chords = sorted(
      [a for a in quantized_sequence.text_annotations
       if a.annotation_type == CHORD_SYMBOL],
      key=lambda chord: chord.quantized_step)

  prev_step = None
  prev_figure = NO_CHORD

  for chord in chords:
    if chord.quantized_step >= end_step:
      # No more chords within range.
      break
    elif chord.quantized_step < start_step:
      # Chord is before start of range; remember it as the chord active
      # when the range begins.
      prev_step = chord.quantized_step
      prev_figure = chord.text
      continue

    if chord.quantized_step == prev_step:
      if chord.text == prev_figure:
        # Identical coincident chords, just skip.
        continue
      else:
        # Two different chords start at the same time step.
        self._reset()
        raise CoincidentChordsError(
            'chords %s and %s are coincident' % (prev_figure, chord.text))

    if chord.quantized_step > start_step:
      # Add the previous chord, spanning up to where this one begins.
      if prev_step is None:
        start_index = 0
      else:
        start_index = max(prev_step, start_step) - start_step
      end_index = chord.quantized_step - start_step
      self._add_chord(prev_figure, start_index, end_index)

    prev_step = chord.quantized_step
    prev_figure = chord.text

  if prev_step is None or prev_step < end_step:
    # Add the last chord active before end_step.
    if prev_step is None:
      start_index = 0
    else:
      start_index = max(prev_step, start_step) - start_step
    end_index = end_step - start_step
    self._add_chord(prev_figure, start_index, end_index)

  self._start_step = start_step
  self._end_step = end_step
def from_quantized_sequence(self, quantized_sequence, search_start_step=0, instrument=0, gap_bars=1, ignore_polyphonic_notes=False, pad_end=False, filter_drums=True):
  """Extract a monophonic melody from a quantized NoteSequence into self.

  The melody is pulled from the given `instrument` starting at
  `search_start_step`; varying those two arguments lets callers mine
  several melodies out of one sequence. The extracted melody's end step
  is recorded in `self._end_step`.

  Zero-velocity notes are dropped. Extraction stops once no note has
  been held for `gap_bars` bars (measures); the bar length in steps is
  derived from the sequence's time signature.

  When several notes begin on the same step, `ignore_polyphonic_notes`
  decides the outcome: True keeps only the highest pitch, False raises.

  Args:
    quantized_sequence: A NoteSequence quantized with
        sequences_lib.quantize_note_sequence.
    search_start_step: Time step at which to begin looking for a melody.
        Assumed to be the first step of a bar.
    instrument: Search for a melody in this instrument number.
    gap_bars: End the melody after this many bars (or more) following a
        NOTE_OFF event.
    ignore_polyphonic_notes: If True, keep the highest pitch when
        multiple notes start simultaneously; if False, raise
        PolyphonicMelodyError in that situation.
    pad_end: If True, pad the melody with NO_EVENTs so it ends on a bar
        boundary.
    filter_drums: If True, skip notes whose `is_drum` is True.

  Raises:
    NonIntegerStepsPerBarError: If the sequence's bar length (derived
        from its time signature) is not a whole number of time steps.
    PolyphonicMelodyError: If any notes start on the same step and
        `ignore_polyphonic_notes` is False.
  """
  sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
  self._reset()

  steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
      quantized_sequence)
  if steps_per_bar_float % 1 != 0:
    raise events_lib.NonIntegerStepsPerBarError(
        'There are %f timesteps per bar. Time signature: %d/%d' %
        (steps_per_bar_float,
         quantized_sequence.time_signatures[0].numerator,
         quantized_sequence.time_signatures[0].denominator))
  steps_per_bar = int(steps_per_bar_float)
  self._steps_per_bar = steps_per_bar
  self._steps_per_quarter = (
      quantized_sequence.quantization_info.steps_per_quarter)

  # Candidate notes: right instrument, inside the search window, ordered
  # by start step with ties broken by descending pitch (highest first).
  candidates = sorted(
      [n for n in quantized_sequence.notes
       if n.instrument == instrument
       and n.quantized_start_step >= search_start_step],
      key=lambda n: (n.quantized_start_step, -n.pitch))
  if not candidates:
    return

  # First step of the melody, snapped back to the bar boundary at or
  # before the first candidate note.
  first_start = candidates[0].quantized_start_step
  melody_start_step = (
      first_start - (first_start - search_start_step) % steps_per_bar)

  for note in candidates:
    if filter_drums and note.is_drum:
      continue
    if not note.velocity:
      # Ignore 0 velocity notes.
      continue

    on_index = note.quantized_start_step - melody_start_step
    off_index = note.quantized_end_step - melody_start_step

    if not self._events:
      # First note added: nothing to compare against, so no polyphony
      # check is needed.
      self._add_note(note.pitch, on_index, off_index)
      continue

    last_on, last_off = self._get_last_on_off_events()
    if on_index == last_on:
      # This note starts on the same step as one already in the melody.
      if not ignore_polyphonic_notes:
        self._reset()
        raise PolyphonicMelodyError()
      # The sort order guarantees the note already at this step is the
      # highest pitch, so simply drop this one.
      continue
    if on_index < last_on:
      raise PolyphonicMelodyError(
          'Unexpected note. Not in ascending order.')

    # A long enough silent stretch after the last NOTE_OFF ends the
    # melody.
    if len(self) and on_index - last_off >= gap_bars * steps_per_bar:  # pylint:disable=len-as-condition
      break

    self._add_note(note.pitch, on_index, off_index)

  if not self._events:
    # Nothing was added; leave `_start_step` and `_end_step` unset.
    return

  self._start_step = melody_start_step

  # Strip a trailing MELODY_NOTE_OFF event.
  if self._events[-1] == MELODY_NOTE_OFF:
    del self._events[-1]

  melody_length = len(self)
  # Optionally round the length up to a multiple of `steps_per_bar`.
  if pad_end:
    melody_length += -melody_length % steps_per_bar
  self.set_length(melody_length)