Example #1
0
def infer_chords_for_sequence(sequence,
                              chords_per_bar=None,
                              key_change_prob=0.001,
                              chord_change_prob=0.5,
                              chord_pitch_out_of_key_prob=0.01,
                              chord_note_concentration=100.0,
                              add_key_signatures=False):
  """Infer chords for a NoteSequence using the Viterbi algorithm.

  This uses some heuristics to infer chords for a quantized NoteSequence. At
  each chord position a key and chord will be inferred, and the chords will be
  added (as text annotations) to the sequence.

  If the sequence is quantized relative to meter, a fixed number of chords per
  bar will be inferred. Otherwise, the sequence is expected to have beat
  annotations and one chord will be inferred per beat.

  Args:
    sequence: The NoteSequence for which to infer chords. This NoteSequence will
        be modified in place.
    chords_per_bar: If `sequence` is quantized, the number of chords per bar to
        infer. If None, use a default number of chords based on the time
        signature of `sequence`.
    key_change_prob: Probability of a key change between two adjacent frames.
    chord_change_prob: Probability of a chord change between two adjacent
        frames.
    chord_pitch_out_of_key_prob: Probability of a pitch in a chord not belonging
        to the current key.
    chord_note_concentration: Concentration parameter for the distribution of
        observed pitches played over a chord. At zero, all pitches are equally
        likely. As concentration increases, observed pitches must match the
        chord pitches more closely.
    add_key_signatures: If True, also add inferred key signatures to
        `quantized_sequence` (and remove any existing key signatures).

  Raises:
    SequenceAlreadyHasChordsError: If `sequence` already has chords.
    QuantizationStatusError: If `sequence` is not quantized relative to
        meter but `chords_per_bar` is specified or no beat annotations are
        present.
    UncommonTimeSignatureError: If `chords_per_bar` is not specified and
        `sequence` is quantized and has an uncommon time signature.
    NonIntegerStepsPerChordError: If the number of quantized steps per chord
        is not an integer.
    EmptySequenceError: If `sequence` is empty.
    SequenceTooLongError: If the number of chords to be inferred is too
        large.
  """
  # Refuse to run if chord annotations already exist, rather than silently
  # mixing inferred chords with pre-existing ones.
  for ta in sequence.text_annotations:
    if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL:
      raise SequenceAlreadyHasChordsError(
          'NoteSequence already has chord(s): %s' % ta.text)

  if sequences_lib.is_relative_quantized_sequence(sequence):
    # Infer a fixed number of chords per bar.
    if chords_per_bar is None:
      time_signature = (sequence.time_signatures[0].numerator,
                        sequence.time_signatures[0].denominator)
      if time_signature not in _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR:
        raise UncommonTimeSignatureError(
            'No default chords per bar for time signature: (%d, %d)' %
            time_signature)
      chords_per_bar = _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR[time_signature]

    # Determine the number of seconds (and steps) each chord is held.
    steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
        sequence)
    steps_per_chord_float = steps_per_bar_float / chords_per_bar
    if steps_per_chord_float != round(steps_per_chord_float):
      raise NonIntegerStepsPerChordError(
          'Non-integer number of steps per chord: %f' % steps_per_chord_float)
    steps_per_chord = int(steps_per_chord_float)
    steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
        sequence.quantization_info.steps_per_quarter, sequence.tempos[0].qpm)
    seconds_per_chord = steps_per_chord / steps_per_second

    num_chords = int(math.ceil(sequence.total_time / seconds_per_chord))
    if num_chords == 0:
      raise EmptySequenceError('NoteSequence is empty.')

  else:
    # Sequence is not quantized relative to meter; chord changes will happen at
    # annotated beat times.
    if chords_per_bar is not None:
      raise sequences_lib.QuantizationStatusError(
          'Sequence must be quantized to infer fixed number of chords per bar.')
    beats = [
        ta for ta in sequence.text_annotations
        if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.BEAT
    ]
    if not beats:
      raise sequences_lib.QuantizationStatusError(
          'Sequence must be quantized to infer chords without annotated beats.')

    # Only keep unique beats in the interior of the sequence. The first chord
    # always starts at time zero, the last chord always ends at
    # `sequence.total_time`, and we don't want any zero-length chords.
    sorted_beats = sorted(
        [beat for beat in beats if 0.0 < beat.time < sequence.total_time],
        key=lambda beat: beat.time)
    unique_sorted_beats = [sorted_beats[i] for i in range(len(sorted_beats))
                           if i == 0
                           or sorted_beats[i].time > sorted_beats[i - 1].time]

    # The interior beats divide [0, total_time] into len(beats) + 1 chord
    # frames: one before the first beat and one after each beat.
    num_chords = len(unique_sorted_beats) + 1
    sorted_beat_times = [beat.time for beat in unique_sorted_beats]
    # If the sequence is also step-quantized, record each beat's quantized
    # step so chord annotations added below can carry quantized steps too.
    if sequences_lib.is_quantized_sequence(sequence):
      sorted_beat_steps = [beat.quantized_step for beat in unique_sorted_beats]

  # Cap the number of Viterbi frames to keep memory and runtime bounded.
  if num_chords > _MAX_NUM_CHORDS:
    raise SequenceTooLongError(
        'NoteSequence too long for chord inference: %d frames' % num_chords)

  # Compute pitch vectors for each chord frame, then compute log-likelihood of
  # observing those pitch vectors under each possible chord.
  # NOTE(review): `sequence_note_pitch_vectors` appears to accept either a
  # fixed frame length in seconds or an explicit list of frame boundary
  # times — confirm against its definition.
  note_pitch_vectors = sequence_note_pitch_vectors(
      sequence,
      seconds_per_chord if chords_per_bar is not None else sorted_beat_times)
  chord_frame_loglik = _chord_frame_log_likelihood(
      note_pitch_vectors, chord_note_concentration)

  # Compute distribution over chords for each key, and transition distribution
  # between key-chord pairs. Work in log space for the Viterbi computation.
  key_chord_distribution = _key_chord_distribution(
      chord_pitch_out_of_key_prob=chord_pitch_out_of_key_prob)
  key_chord_transition_distribution = _key_chord_transition_distribution(
      key_chord_distribution,
      key_change_prob=key_change_prob,
      chord_change_prob=chord_change_prob)
  key_chord_loglik = np.log(key_chord_distribution)
  key_chord_transition_loglik = np.log(key_chord_transition_distribution)

  key_chords = _key_chord_viterbi(
      chord_frame_loglik, key_chord_loglik, key_chord_transition_loglik)

  # Drop any existing key signatures so only inferred keys remain.
  if add_key_signatures:
    del sequence.key_signatures[:]

  # Add the inferred chord changes to the sequence, optionally adding key
  # signature(s) as well.
  current_key_name = None
  current_chord_name = None
  for frame, (key, chord) in enumerate(key_chords):
    if chords_per_bar is not None:
      time = frame * seconds_per_chord
    else:
      # Chord frame i (for i > 0) starts at interior beat i - 1; frame 0
      # always starts at time zero.
      time = 0.0 if frame == 0 else sorted_beat_times[frame - 1]

    if _PITCH_CLASS_NAMES[key] != current_key_name:
      # A key change was inferred.
      if add_key_signatures:
        ks = sequence.key_signatures.add()
        ks.time = time
        ks.key = key
      else:
        if current_key_name is not None:
          tf.logging.info(
              'Sequence has key change from %s to %s at %f seconds.',
              current_key_name, _PITCH_CLASS_NAMES[key], time)

      current_key_name = _PITCH_CLASS_NAMES[key]

    if chord == constants.NO_CHORD:
      figure = constants.NO_CHORD
    else:
      root, kind = chord
      figure = '%s%s' % (_PITCH_CLASS_NAMES[root], kind)

    # Only emit a chord annotation when the chord actually changes.
    if figure != current_chord_name:
      ta = sequence.text_annotations.add()
      ta.time = time
      if sequences_lib.is_quantized_sequence(sequence):
        if chords_per_bar is not None:
          ta.quantized_step = frame * steps_per_chord
        else:
          ta.quantized_step = 0 if frame == 0 else sorted_beat_steps[frame - 1]
      ta.text = figure
      ta.annotation_type = music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL
      current_chord_name = figure
Example #2
0
def infer_chords_for_sequence(sequence,
                              chords_per_bar=None,
                              key_change_prob=0.001,
                              chord_change_prob=0.5,
                              chord_pitch_out_of_key_prob=0.01,
                              chord_note_concentration=100.0,
                              add_key_signatures=False):
    """Infer chords for a NoteSequence using the Viterbi algorithm.

  This uses some heuristics to infer chords for a quantized NoteSequence. At
  each chord position a key and chord will be inferred, and the chords will be
  added (as text annotations) to the sequence.

  If the sequence is quantized relative to meter, a fixed number of chords per
  bar will be inferred. Otherwise, the sequence is expected to have beat
  annotations and one chord will be inferred per beat.

  Args:
    sequence: The NoteSequence for which to infer chords. This NoteSequence will
        be modified in place.
    chords_per_bar: If `sequence` is quantized, the number of chords per bar to
        infer. If None, use a default number of chords based on the time
        signature of `sequence`.
    key_change_prob: Probability of a key change between two adjacent frames.
    chord_change_prob: Probability of a chord change between two adjacent
        frames.
    chord_pitch_out_of_key_prob: Probability of a pitch in a chord not belonging
        to the current key.
    chord_note_concentration: Concentration parameter for the distribution of
        observed pitches played over a chord. At zero, all pitches are equally
        likely. As concentration increases, observed pitches must match the
        chord pitches more closely.
    add_key_signatures: If True, also add inferred key signatures to
        `quantized_sequence` (and remove any existing key signatures).

  Raises:
    SequenceAlreadyHasChordsException: If `sequence` already has chords.
    QuantizationStatusException: If `sequence` is not quantized relative to
        meter but `chords_per_bar` is specified or no beat annotations are
        present.
    UncommonTimeSignatureException: If `chords_per_bar` is not specified and
        `sequence` is quantized and has an uncommon time signature.
    NonIntegerStepsPerChordException: If the number of quantized steps per chord
        is not an integer.
    EmptySequenceException: If `sequence` is empty.
    SequenceTooLongException: If the number of chords to be inferred is too
        large.
  """
    # Refuse to run if chord annotations already exist, rather than silently
    # mixing inferred chords with pre-existing ones.
    for ta in sequence.text_annotations:
        if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL:
            raise SequenceAlreadyHasChordsException(
                'NoteSequence already has chord(s): %s' % ta.text)

    if sequences_lib.is_relative_quantized_sequence(sequence):
        # Infer a fixed number of chords per bar.
        if chords_per_bar is None:
            time_signature = (sequence.time_signatures[0].numerator,
                              sequence.time_signatures[0].denominator)
            if time_signature not in _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR:
                raise UncommonTimeSignatureException(
                    'No default chords per bar for time signature: (%d, %d)' %
                    time_signature)
            chords_per_bar = _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR[
                time_signature]

        # Determine the number of seconds (and steps) each chord is held.
        steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
            sequence)
        steps_per_chord_float = steps_per_bar_float / chords_per_bar
        if steps_per_chord_float != round(steps_per_chord_float):
            raise NonIntegerStepsPerChordException(
                'Non-integer number of steps per chord: %f' %
                steps_per_chord_float)
        steps_per_chord = int(steps_per_chord_float)
        steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
            sequence.quantization_info.steps_per_quarter,
            sequence.tempos[0].qpm)
        seconds_per_chord = steps_per_chord / steps_per_second

        num_chords = int(math.ceil(sequence.total_time / seconds_per_chord))
        if num_chords == 0:
            raise EmptySequenceException('NoteSequence is empty.')

    else:
        # Sequence is not quantized relative to meter; chord changes will happen at
        # annotated beat times.
        if chords_per_bar is not None:
            raise sequences_lib.QuantizationStatusException(
                'Sequence must be quantized to infer fixed number of chords per bar.'
            )
        beats = [
            ta for ta in sequence.text_annotations
            if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.BEAT
        ]
        if not beats:
            raise sequences_lib.QuantizationStatusException(
                'Sequence must be quantized to infer chords without annotated beats.'
            )

        # Only keep unique beats in the interior of the sequence. The first chord
        # always starts at time zero, the last chord always ends at
        # `sequence.total_time`, and we don't want any zero-length chords.
        sorted_beats = sorted(
            [beat for beat in beats if 0.0 < beat.time < sequence.total_time],
            key=lambda beat: beat.time)
        unique_sorted_beats = [
            sorted_beats[i] for i in range(len(sorted_beats))
            if i == 0 or sorted_beats[i].time > sorted_beats[i - 1].time
        ]

        # The interior beats divide [0, total_time] into len(beats) + 1 chord
        # frames: one before the first beat and one after each beat.
        num_chords = len(unique_sorted_beats) + 1
        sorted_beat_times = [beat.time for beat in unique_sorted_beats]
        # If the sequence is also step-quantized, record each beat's quantized
        # step so chord annotations added below can carry quantized steps too.
        if sequences_lib.is_quantized_sequence(sequence):
            sorted_beat_steps = [
                beat.quantized_step for beat in unique_sorted_beats
            ]

    # Cap the number of Viterbi frames to keep memory and runtime bounded.
    if num_chords > _MAX_NUM_CHORDS:
        raise SequenceTooLongException(
            'NoteSequence too long for chord inference: %d frames' %
            num_chords)

    # Compute pitch vectors for each chord frame, then compute log-likelihood of
    # observing those pitch vectors under each possible chord.
    # NOTE(review): `sequence_note_pitch_vectors` appears to accept either a
    # fixed frame length in seconds or an explicit list of frame boundary
    # times — confirm against its definition.
    note_pitch_vectors = sequence_note_pitch_vectors(
        sequence,
        seconds_per_chord if chords_per_bar is not None else sorted_beat_times)
    chord_frame_loglik = _chord_frame_log_likelihood(note_pitch_vectors,
                                                     chord_note_concentration)

    # Compute distribution over chords for each key, and transition distribution
    # between key-chord pairs. Work in log space for the Viterbi computation.
    key_chord_distribution = _key_chord_distribution(
        chord_pitch_out_of_key_prob=chord_pitch_out_of_key_prob)
    key_chord_transition_distribution = _key_chord_transition_distribution(
        key_chord_distribution,
        key_change_prob=key_change_prob,
        chord_change_prob=chord_change_prob)
    key_chord_loglik = np.log(key_chord_distribution)
    key_chord_transition_loglik = np.log(key_chord_transition_distribution)

    key_chords = _key_chord_viterbi(chord_frame_loglik, key_chord_loglik,
                                    key_chord_transition_loglik)

    # Drop any existing key signatures so only inferred keys remain.
    if add_key_signatures:
        del sequence.key_signatures[:]

    # Add the inferred chord changes to the sequence, optionally adding key
    # signature(s) as well.
    current_key_name = None
    current_chord_name = None
    for frame, (key, chord) in enumerate(key_chords):
        if chords_per_bar is not None:
            time = frame * seconds_per_chord
        else:
            # Chord frame i (for i > 0) starts at interior beat i - 1; frame 0
            # always starts at time zero.
            time = 0.0 if frame == 0 else sorted_beat_times[frame - 1]

        if _PITCH_CLASS_NAMES[key] != current_key_name:
            # A key change was inferred.
            if add_key_signatures:
                ks = sequence.key_signatures.add()
                ks.time = time
                ks.key = key
            else:
                if current_key_name is not None:
                    tf.logging.info(
                        'Sequence has key change from %s to %s at %f seconds.',
                        current_key_name, _PITCH_CLASS_NAMES[key], time)

            current_key_name = _PITCH_CLASS_NAMES[key]

        if chord == constants.NO_CHORD:
            figure = constants.NO_CHORD
        else:
            root, kind = chord
            figure = '%s%s' % (_PITCH_CLASS_NAMES[root], kind)

        # Only emit a chord annotation when the chord actually changes.
        if figure != current_chord_name:
            ta = sequence.text_annotations.add()
            ta.time = time
            if sequences_lib.is_quantized_sequence(sequence):
                if chords_per_bar is not None:
                    ta.quantized_step = frame * steps_per_chord
                else:
                    ta.quantized_step = 0 if frame == 0 else sorted_beat_steps[
                        frame - 1]
            ta.text = figure
            ta.annotation_type = music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL
            current_chord_name = figure
Example #3
0
 def testStepsPerQuarterToStepsPerSecond(self):
     """At 60 qpm a quarter note lasts one second, so steps/quarter == steps/sec."""
     steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
         4, 60.0)
     self.assertEqual(4.0, steps_per_second)
Example #4
0
def infer_chords_for_sequence(quantized_sequence,
                              chords_per_bar,
                              key_change_prob=0.001,
                              chord_change_prob=0.5,
                              chord_pitch_out_of_key_prob=0.01,
                              chord_note_concentration=100.0):
    """Infer chords for a quantized NoteSequence using the Viterbi algorithm.

  This uses some heuristics to infer chords for a quantized NoteSequence. At
  each chord position a key and chord will be inferred, and the chords will be
  added (as text annotations) to the sequence.

  Args:
    quantized_sequence: The quantized NoteSequence for which to infer chords.
        This NoteSequence will be modified in place.
    chords_per_bar: The number of chords per bar to infer.
    key_change_prob: Probability of a key change between two adjacent frames.
    chord_change_prob: Probability of a chord change between two adjacent
        frames.
    chord_pitch_out_of_key_prob: Probability of a pitch in a chord not belonging
        to the current key.
    chord_note_concentration: Concentration parameter for the distribution of
        observed pitches played over a chord. At zero, all pitches are equally
        likely. As concentration increases, observed pitches must match the
        chord pitches more closely.

  Raises:
    SequenceAlreadyHasChordsException: If `quantized_sequence` already has
        chords.
    NonIntegerStepsPerChordException: If the number of quantized steps per chord
        is not an integer.
    EmptySequenceException: If `quantized_sequence` is empty.
    SequenceTooLongException: If the number of chords to be inferred is too
        large.
  """
    # Validate quantization up front; this raises if the sequence is not
    # quantized relative to meter.
    sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
    # Refuse to run if chord annotations already exist, rather than silently
    # mixing inferred chords with pre-existing ones.
    for ta in quantized_sequence.text_annotations:
        if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL:
            raise SequenceAlreadyHasChordsException(
                'NoteSequence already has chord(s): %s' % ta.text)

    # Determine the number of seconds (and steps) each chord is held.
    steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
        quantized_sequence)
    steps_per_chord_float = steps_per_bar_float / chords_per_bar
    if steps_per_chord_float != round(steps_per_chord_float):
        raise NonIntegerStepsPerChordException(
            'Non-integer number of steps per chord: %f' %
            steps_per_chord_float)
    steps_per_chord = int(steps_per_chord_float)
    steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
        quantized_sequence.quantization_info.steps_per_quarter,
        quantized_sequence.tempos[0].qpm)
    seconds_per_chord = steps_per_chord / steps_per_second

    # Cap the number of Viterbi frames to keep memory and runtime bounded.
    num_chords = int(
        math.ceil(quantized_sequence.total_time / seconds_per_chord))
    if num_chords == 0:
        raise EmptySequenceException('NoteSequence is empty.')
    if num_chords > _MAX_NUM_CHORDS:
        raise SequenceTooLongException(
            'NoteSequence too long for chord inference: %d frames' %
            num_chords)

    # Compute pitch vectors for each chord frame, then compute log-likelihood of
    # observing those pitch vectors under each possible chord.
    note_pitch_vectors = sequence_note_pitch_vectors(
        quantized_sequence, seconds_per_frame=seconds_per_chord)
    chord_frame_loglik = _chord_frame_log_likelihood(note_pitch_vectors,
                                                     chord_note_concentration)

    # Compute distribution over chords for each key, and transition distribution
    # between key-chord pairs. Work in log space for the Viterbi computation.
    key_chord_distribution = _key_chord_distribution(
        chord_pitch_out_of_key_prob=chord_pitch_out_of_key_prob)
    key_chord_transition_distribution = _key_chord_transition_distribution(
        key_chord_distribution,
        key_change_prob=key_change_prob,
        chord_change_prob=chord_change_prob)
    key_chord_loglik = np.log(key_chord_distribution)
    key_chord_transition_loglik = np.log(key_chord_transition_distribution)

    key_chords = _key_chord_viterbi(chord_frame_loglik, key_chord_loglik,
                                    key_chord_transition_loglik)

    # Add the inferred chord changes to the sequence, logging any key changes.
    current_key_name = None
    current_chord_name = None
    for frame, (key, chord) in enumerate(key_chords):
        if _PITCH_CLASS_NAMES[key] != current_key_name:
            if current_key_name is not None:
                tf.logging.info(
                    'Sequence has key change from %s to %s at %f seconds.',
                    current_key_name, _PITCH_CLASS_NAMES[key],
                    frame * seconds_per_chord)
            current_key_name = _PITCH_CLASS_NAMES[key]

        if chord == constants.NO_CHORD:
            figure = constants.NO_CHORD
        else:
            root, kind = chord
            figure = '%s%s' % (_PITCH_CLASS_NAMES[root], kind)

        # Only emit a chord annotation when the chord actually changes.
        if figure != current_chord_name:
            ta = quantized_sequence.text_annotations.add()
            ta.time = frame * seconds_per_chord
            ta.quantized_step = frame * steps_per_chord
            ta.text = figure
            ta.annotation_type = music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL
            current_chord_name = figure
def infer_chords_for_sequence(quantized_sequence,
                              chords_per_bar=None,
                              key_change_prob=0.001,
                              chord_change_prob=0.5,
                              chord_pitch_out_of_key_prob=0.01,
                              chord_note_concentration=100.0):
  """Infer chords for a quantized NoteSequence using the Viterbi algorithm.

  This uses some heuristics to infer chords for a quantized NoteSequence. At
  each chord position a key and chord will be inferred, and the chords will be
  added (as text annotations) to the sequence.

  Args:
    quantized_sequence: The quantized NoteSequence for which to infer chords.
        This NoteSequence will be modified in place.
    chords_per_bar: The number of chords per bar to infer. If None, use a
        default number of chords based on the time signature of
        `quantized_sequence`.
    key_change_prob: Probability of a key change between two adjacent frames.
    chord_change_prob: Probability of a chord change between two adjacent
        frames.
    chord_pitch_out_of_key_prob: Probability of a pitch in a chord not belonging
        to the current key.
    chord_note_concentration: Concentration parameter for the distribution of
        observed pitches played over a chord. At zero, all pitches are equally
        likely. As concentration increases, observed pitches must match the
        chord pitches more closely.

  Raises:
    SequenceAlreadyHasChordsException: If `quantized_sequence` already has
        chords.
    UncommonTimeSignatureException: If `chords_per_bar` is not specified and
        `quantized_sequence` has an uncommon time signature.
    NonIntegerStepsPerChordException: If the number of quantized steps per chord
        is not an integer.
    EmptySequenceException: If `quantized_sequence` is empty.
    SequenceTooLongException: If the number of chords to be inferred is too
        large.
  """
  # Validate quantization up front; this raises if the sequence is not
  # quantized relative to meter.
  sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence)
  # Refuse to run if chord annotations already exist, rather than silently
  # mixing inferred chords with pre-existing ones.
  for ta in quantized_sequence.text_annotations:
    if ta.annotation_type == music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL:
      raise SequenceAlreadyHasChordsException(
          'NoteSequence already has chord(s): %s' % ta.text)

  # When unspecified, pick a chord density appropriate to the time signature.
  if chords_per_bar is None:
    time_signature = (quantized_sequence.time_signatures[0].numerator,
                      quantized_sequence.time_signatures[0].denominator)
    if time_signature not in _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR:
      raise UncommonTimeSignatureException(
          'No default chords per bar for time signature: (%d, %d)' %
          time_signature)
    chords_per_bar = _DEFAULT_TIME_SIGNATURE_CHORDS_PER_BAR[time_signature]

  # Determine the number of seconds (and steps) each chord is held.
  steps_per_bar_float = sequences_lib.steps_per_bar_in_quantized_sequence(
      quantized_sequence)
  steps_per_chord_float = steps_per_bar_float / chords_per_bar
  if steps_per_chord_float != round(steps_per_chord_float):
    raise NonIntegerStepsPerChordException(
        'Non-integer number of steps per chord: %f' % steps_per_chord_float)
  steps_per_chord = int(steps_per_chord_float)
  steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
      quantized_sequence.quantization_info.steps_per_quarter,
      quantized_sequence.tempos[0].qpm)
  seconds_per_chord = steps_per_chord / steps_per_second

  # Cap the number of Viterbi frames to keep memory and runtime bounded.
  num_chords = int(math.ceil(quantized_sequence.total_time / seconds_per_chord))
  if num_chords == 0:
    raise EmptySequenceException('NoteSequence is empty.')
  if num_chords > _MAX_NUM_CHORDS:
    raise SequenceTooLongException(
        'NoteSequence too long for chord inference: %d frames' % num_chords)

  # Compute pitch vectors for each chord frame, then compute log-likelihood of
  # observing those pitch vectors under each possible chord.
  note_pitch_vectors = sequence_note_pitch_vectors(
      quantized_sequence, seconds_per_frame=seconds_per_chord)
  chord_frame_loglik = _chord_frame_log_likelihood(
      note_pitch_vectors, chord_note_concentration)

  # Compute distribution over chords for each key, and transition distribution
  # between key-chord pairs. Work in log space for the Viterbi computation.
  key_chord_distribution = _key_chord_distribution(
      chord_pitch_out_of_key_prob=chord_pitch_out_of_key_prob)
  key_chord_transition_distribution = _key_chord_transition_distribution(
      key_chord_distribution,
      key_change_prob=key_change_prob,
      chord_change_prob=chord_change_prob)
  key_chord_loglik = np.log(key_chord_distribution)
  key_chord_transition_loglik = np.log(key_chord_transition_distribution)

  key_chords = _key_chord_viterbi(
      chord_frame_loglik, key_chord_loglik, key_chord_transition_loglik)

  # Add the inferred chord changes to the sequence, logging any key changes.
  current_key_name = None
  current_chord_name = None
  for frame, (key, chord) in enumerate(key_chords):
    if _PITCH_CLASS_NAMES[key] != current_key_name:
      if current_key_name is not None:
        tf.logging.info('Sequence has key change from %s to %s at %f seconds.',
                        current_key_name, _PITCH_CLASS_NAMES[key],
                        frame * seconds_per_chord)
      current_key_name = _PITCH_CLASS_NAMES[key]

    if chord == constants.NO_CHORD:
      figure = constants.NO_CHORD
    else:
      root, kind = chord
      figure = '%s%s' % (_PITCH_CLASS_NAMES[root], kind)

    # Only emit a chord annotation when the chord actually changes.
    if figure != current_chord_name:
      ta = quantized_sequence.text_annotations.add()
      ta.time = frame * seconds_per_chord
      ta.quantized_step = frame * steps_per_chord
      ta.text = figure
      ta.annotation_type = music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL
      current_chord_name = figure
Example #6
0
 def testStepsPerQuarterToStepsPerSecond(self):
   """At 60 qpm a quarter note lasts one second, so steps/quarter == steps/sec."""
   steps_per_second = sequences_lib.steps_per_quarter_to_steps_per_second(
       4, 60.0)
   self.assertEqual(4.0, steps_per_second)