Example #1 (score: 0)
 def forward_to_hdf(self, data, output_file, combine_labels='', batch_size=0):
   """
   Forward the whole dataset through the network and write the outputs to an HDF5 file.

   :type data: Dataset.Dataset
   :type output_file: str
   :type combine_labels: str
   :type batch_size: int

   NOTE(review): `combine_labels` is accepted but never used in this body — kept for
   interface compatibility; confirm whether callers rely on it.
   """
   cache = h5py.File(output_file, "w")
   try:
     batches = data.generate_batches(recurrent_net=self.network.recurrent,
                                     batch_size=batch_size,
                                     max_seqs=self.max_seqs,
                                     max_seq_length=self.max_seq_length_eval)
     forwarder = HDFForwardTaskThread(self.network, self.devices, data, batches, cache,
                                      "gzip" if self.compression else None)
     # The task thread presumably starts in its constructor; wait for it to finish writing.
     forwarder.join()
   finally:
     # Ensure the HDF5 file is closed even if batching or forwarding raises.
     cache.close()
Example #2 (score: 0)
 def forward_to_hdf(self, data, output_file, combine_labels='', batch_size=0):
   """
   Forward the whole dataset through the network and write the outputs to an HDF5 file.

   :type data: Dataset.Dataset
   :type output_file: str
   :type combine_labels: str
   :type batch_size: int

   NOTE(review): `combine_labels` is accepted but never used in this body — kept for
   interface compatibility; confirm whether callers rely on it.
   """
   cache = h5py.File(output_file, "w")
   try:
     batches = data.generate_batches(recurrent_net=self.network.recurrent,
                                     batch_size=batch_size,
                                     max_seqs=self.max_seqs,
                                     max_seq_length=self.max_seq_length_eval)
     forwarder = HDFForwardTaskThread(self.network, self.devices, data, batches, cache,
                                      "gzip" if self.compression else None)
     # The task thread presumably starts in its constructor; wait for it to finish writing.
     forwarder.join()
   finally:
     # Ensure the HDF5 file is closed even if batching or forwarding raises.
     cache.close()
Example #3 (score: 0)
 def forward_to_hdf(self, data, output_file, combine_labels=''):
   """
   Forward the whole dataset through the network and write the outputs to an HDF5 file.

   :type data: Dataset.Dataset
   :type output_file: str
   :type combine_labels: str

   NOTE(review): `combine_labels` is accepted but never used in this body — kept for
   interface compatibility; confirm whether callers rely on it.
   """
   cache = h5py.File(output_file, "w")
   try:
     # batch_size=0 means no size limit per batch here; max_seqs still bounds each batch.
     batches = data.generate_batches(recurrent_net=self.network.recurrent,
                                     batch_size=0,
                                     max_seqs=self.max_seqs)
     merge = {}
     forwarder = HDFForwardTaskThread(self.network, self.devices, data, batches, cache, merge)
     # The task thread presumably starts in its constructor; wait for it to finish writing.
     forwarder.join()
   finally:
     # Ensure the HDF5 file is closed even if batching or forwarding raises.
     cache.close()