Exemplo n.º 1
0
def main(unused_argv):
    """Entry point: assemble hyperparameters and launch a training run."""
    absl.flags.FLAGS.alsologtostderr = True

    # Build the flag set in priority order: config-module hparams first,
    # then JSON overrides from the command line, then library defaults.
    flags = lib_flags.Flags()
    if FLAGS.config:
        module_name = 'magenta.models.gansynth.configs.{}'.format(FLAGS.config)
        flags.load(importlib.import_module(module_name).hparams)
    flags.load_json(FLAGS.hparams)
    lib_model.set_flags(flags)

    print('Flags:')
    flags.print_values()

    # Make sure the training directory exists before anything writes to it.
    flags['train_root_dir'] = util.expand_path(flags['train_root_dir'])
    if not tf.gfile.Exists(flags['train_root_dir']):
        tf.gfile.MakeDirs(flags['train_root_dir'])

    # Persist the resolved flags so the trained model can be reloaded later.
    experiment_path = os.path.join(flags['train_root_dir'], 'experiment.json')
    with tf.gfile.Open(experiment_path, 'w') as f:
        json.dump(flags, f)  # pytype: disable=wrong-arg-types

    # Kick off training.
    run(flags)
Exemplo n.º 2
0
    def load_from_path(cls, path, flags=None):
        """Build an eval-ready Model from a saved training directory.

        Only models produced by the experiment runner are supported: the
        Model class itself does not persist flags, so this relies on the
        runner's experiment.json living in the directory above the stages.

        Args:
          path: Path to model directory (which contains stage folders).
          flags: Additional flags for loading the model.

        Raises:
          ValueError: If folder of path contains no stage folders.

        Returns:
          model: Instantiated model with saved weights.
        """
        # Normalize the path: drop any trailing slash, expand ~ etc.
        if path.endswith('/'):
            path = path[:-1]
        path = util.expand_path(path)
        if flags is None:
            flags = lib_flags.Flags()
        flags['train_root_dir'] = path

        # Best-effort merge of the flags the experiment runner saved;
        # a missing/corrupt file is only a warning, not fatal.
        experiment_json_path = os.path.join(path, 'experiment.json')
        try:
            with tf.gfile.GFile(experiment_json_path, 'r') as f:
                flags.load(json.load(f))
        except Exception as e:  # pylint: disable=broad-except
            print("Warning! Couldn't load model flags from experiment.json")
            print(e)

        # Fill in any remaining defaults and show what we ended up with.
        set_flags(flags)
        flags.print_values()

        # Find the most recent training stage (lexicographic sort works
        # because stage ids are zero-padded in the folder names).
        stage_dirs = sorted(
            d for d in tf.gfile.ListDirectory(path) if d.startswith('stage_'))
        if not stage_dirs:
            raise ValueError(
                'No stage folders found, is %s the correct model path?' % path)
        last_stage_dir = stage_dirs[-1]
        stage_id = int(last_stage_dir.split('_')[-1])
        ckpt = tf.train.latest_checkpoint(os.path.join(path, last_stage_dir))
        print('Load model from {}'.format(ckpt))

        # Instantiate the model (prefer eval_batch_size when provided)
        # and restore the checkpoint weights into its session.
        batch_size = flags.get('eval_batch_size',
                               train_util.get_batch_size(stage_id, **flags))
        model = cls(stage_id, batch_size, flags)
        model.saver.restore(model.sess, ckpt)
        return model
Exemplo n.º 3
0
def main(unused_argv):
    """Entry point: load a trained model and synthesize audio samples."""
    absl.flags.FLAGS.alsologtostderr = True

    # Restore the model from its checkpoint directory.
    flags = lib_flags.Flags({'batch_size_schedule': [FLAGS.batch_size]})
    model = lib_model.Model.load_from_path(FLAGS.ckpt_dir, flags)

    # Audio configuration comes from the restored model.
    sample_rate = model.get_sample_rate()
    audio_length = model.get_audio_length()

    # Ensure the output directory exists.
    output_dir = util.expand_path(FLAGS.output_dir)
    if not tf.gfile.Exists(output_dir):
        tf.gfile.MakeDirs(output_dir)

    if not FLAGS.midi_file:
        # No MIDI clip supplied: just emit a batch of random sounds.
        waves = model.generate_samples(FLAGS.batch_size)
        for i, wave in enumerate(waves):
            fname = os.path.join(output_dir, 'generated_{}.wav'.format(i))
            gu.save_wav(wave, fname, sample_rate)
        return

    # A MIDI clip was supplied: synthesize interpolations across it.
    unused_ns, notes = gu.load_midi(FLAGS.midi_file)

    # Spread random instrument latents linearly over the clip's duration.
    z_instruments, t_instruments = gu.get_random_instruments(
        model,
        notes['end_times'][-1],
        secs_per_instrument=FLAGS.secs_per_instrument)

    # Interpolate a latent vector for each note's start time.
    z_notes = gu.get_z_notes(notes['start_times'], z_instruments,
                             t_instruments)

    # Render audio for every note.
    print('Generating {} samples...'.format(len(z_notes)))
    audio_notes = model.generate_samples_from_z(z_notes, notes['pitches'])

    # Mix the per-note audio into one clip with attack/release envelopes.
    audio_clip = gu.combine_notes(audio_notes, sample_rate, audio_length,
                                  FLAGS.attack_percent, FLAGS.attack_slope,
                                  FLAGS.release_percent, FLAGS.release_slope,
                                  notes['start_times'], notes['end_times'],
                                  notes['velocities'])

    # Write the rendered clip, then save the first note's latent vector.
    fname = os.path.join(output_dir, 'generated_clip.wav')
    gu.save_wav(audio_clip, fname, sample_rate)
    fname = os.path.join(output_dir, 'instrument.npy')
    np.save(fname, z_notes[0])
Exemplo n.º 4
0
def load_midi(midi_path, min_pitch=24, max_pitch=84):
    """Load midi as a notesequence."""
    ns = mm.midi_file_to_sequence_proto(util.expand_path(midi_path))
    # Pull the per-note attributes out into parallel numpy arrays.
    fields = {
        'pitches': np.array([note.pitch for note in ns.notes]),
        'velocities': np.array([note.velocity for note in ns.notes]),
        'start_times': np.array([note.start_time for note in ns.notes]),
        'end_times': np.array([note.end_time for note in ns.notes]),
    }
    # Keep only notes whose pitch lies within [min_pitch, max_pitch].
    keep = (fields['pitches'] >= min_pitch) & (fields['pitches'] <= max_pitch)
    notes = {key: values[keep] for key, values in fields.items()}
    return ns, notes
Exemplo n.º 5
0
 def __init__(self, config):
   self._config = config
   self._channel_mode = config['channel_mode']
   self._train_data_path = util.expand_path(config['train_data_path'])