Example #1
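This constructor appears to come from the NoteRNNLoader class in Magenta's RL Tuner (magenta/models/rl_tuner/note_rnn_loader.py); the body assumes that tensorflow has been imported as tf and that rl_tuner_ops has been imported from magenta.models.rl_tuner.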
    def __init__(self,
                 graph,
                 scope,
                 checkpoint_dir,
                 checkpoint_file=None,
                 midi_primer=None,
                 training_file_list=None,
                 hparams=None,
                 note_rnn_type='default',
                 checkpoint_scope='rnn_model'):
        """Initialize by building the graph and loading a previous checkpoint.

    Args:
      graph: A tensorflow graph where the MelodyRNN's graph will be added.
      scope: The tensorflow scope where this network will be saved.
      checkpoint_dir: Path to the directory where the checkpoint file is saved.
      checkpoint_file: Path to a checkpoint file to be used if none can be
        found in the checkpoint_dir
      midi_primer: Path to a single midi file that can be used to prime the
        model.
      training_file_list: List of paths to tfrecord files containing melody
        training data.
      hparams: A tf_lib.HParams object. Must match the hparams used to create
        the checkpoint file.
      note_rnn_type: If 'default', will use the basic LSTM described in the
        research paper. If 'basic_rnn', will assume the checkpoint is from a
        Magenta basic_rnn model.
      checkpoint_scope: The scope in lstm which the model was originally defined
        when it was first trained.
    """
        self.graph = graph
        self.session = None
        self.saver = None
        self.scope = scope
        self.batch_size = 1
        self.midi_primer = midi_primer
        self.checkpoint_scope = checkpoint_scope
        self.note_rnn_type = note_rnn_type
        self.training_file_list = training_file_list
        self.checkpoint_dir = checkpoint_dir
        self.checkpoint_file = checkpoint_file

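        # Use the caller-supplied hparams if given; otherwise fall back to the
        # library defaults. Either way, they must match the hparams the
        # checkpoint was trained with.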
        if hparams is not None:
            tf.logging.info('Using custom hparams')
            self.hparams = hparams
        else:
            tf.logging.info('Empty hparams string. Using defaults')
            self.hparams = rl_tuner_ops.default_hparams()

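        # Build the RNN graph inside self.graph and initialize the recurrent
        # state to zeros.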
        self.build_graph()
        self.state_value = self.get_zero_state()

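        # If a primer MIDI file was given, load it now so it can later be used
        # to prime the model's state.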
        if midi_primer is not None:
            self.load_primer()

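        # Record the names of the variables defined under this scope for later
        # use (e.g., when locating or restoring the model's weights).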
        self.variable_names = rl_tuner_ops.get_variable_names(
            self.graph, self.scope)

        self.transpose_amount = 0
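
A minimal usage sketch follows. It assumes the enclosing class is NoteRNNLoader from magenta.models.rl_tuner.note_rnn_loader, and the checkpoint directory below is a hypothetical placeholder; neither appears in the snippet above.

import tensorflow as tf

from magenta.models.rl_tuner import note_rnn_loader

graph = tf.Graph()

# Hypothetical checkpoint directory: it must contain a checkpoint trained
# with hparams matching rl_tuner_ops.default_hparams(), since no custom
# hparams are passed here.
note_rnn = note_rnn_loader.NoteRNNLoader(
    graph,
    scope='note_rnn',
    checkpoint_dir='/tmp/note_rnn_checkpoints')

# At this point the constructor has built the RNN graph inside `graph` and
# set the initial recurrent state to zeros; a tf.Session over `graph` can
# then be used to restore the checkpointed weights.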