def __init__(self, name):
    """Create the FlexFlow configuration and reset all model-tracking state."""
    self._ffconfig = ff.FFConfig()
    print(
        "Python API batchSize(%d) workersPerNodes(%d) numNodes(%d)"
        % (
            self._ffconfig.batch_size,
            self._ffconfig.workers_per_node,
            self._ffconfig.num_nodes,
        )
    )
    # Model / optimizer handles — created lazily elsewhere.
    self._ffmodel = None
    self._name = name
    self._ffoptimizer = None
    # Layer bookkeeping.
    self._layers = []
    self._nb_layers = 0
    # Input/output tensor handles.
    self._input_layers = []
    self._input_tensors = []
    self._output_tensor = 0
    self._label_tensor = 0
    # Dataloader state.
    self._num_samples = 0
    self._input_dataloaders = []
    self._input_dataloaders_dim = []
    self._label_dataloader = 0
    self._label_dataloader_dim = 0
    # Training configuration.
    self._loss = None
    self._metrics = []
    self._label_type = ff.DataType.DT_FLOAT
    self._layer_inited = False
    # Take the next unique trace id from the module-level counter.
    global tracing_id
    self.__tracing_id = tracing_id
    tracing_id += 1
def __init__(self, name):
    """Parse the FlexFlow runtime arguments and reset all model state."""
    self._ffconfig = ff.FFConfig()
    self._ffconfig.parse_args()
    print(
        "Python API batchSize(%d) workersPerNodes(%d) numNodes(%d)"
        % (
            self._ffconfig.get_batch_size(),
            self._ffconfig.get_workers_per_node(),
            self._ffconfig.get_num_nodes(),
        )
    )
    # Model / optimizer handles — assigned later.
    self._ffmodel = None
    self._name = name
    self._ffoptimizer = None
    # Layer bookkeeping.
    self._layers = []
    self._nb_layers = 0
    # Input/output tensor handles.
    self._input_layers = []
    self._input_tensors = []
    self._output_tensor = 0
    self._label_tensor = 0
    self._full_input_tensors = []
    self._full_label_tensor = 0
    # Dataloader state.
    self._num_samples = 0
    self._input_dataloaders = []
    self._input_dataloaders_dim = []
    self._label_dataloader = 0
    self._label_dataloader_dim = 0
    # Training configuration.
    self._loss = None
    self._metrics = []
    # Take the next unique trace id from the module-level counter.
    global tracing_id
    self.__tracing_id = tracing_id
    tracing_id += 1
def __init__(self, inputs, onnx_model):
    """Record the ONNX model to convert and build FlexFlow input tensors.

    Args:
        inputs: mapping of input name -> array-like exposing ``.shape`` and
            ``.dtype`` (presumably numpy arrays — TODO confirm at call sites).
        onnx_model: a loaded ONNX ``ModelProto``.
    """
    self._ffconfig = ff.FFConfig()
    self._ffconfig.parse_args()
    print(
        "Python API batchSize(%d) workersPerNodes(%d) numNodes(%d)"
        % (
            self._ffconfig.get_batch_size(),
            self._ffconfig.get_workers_per_node(),
            self._ffconfig.get_num_nodes(),
        )
    )
    self._ffmodel = None
    self._onnx_model = onnx_model
    # Debug dumps of the ONNX graph contents.
    for node in onnx_model.graph.node:
        print(node)
    # Loop variable renamed from `input`, which shadowed the builtin.
    for initializer in onnx_model.graph.initializer:
        print(initializer.name, initializer.dims, len(initializer.dims))
    # Wrap each user-provided input in a FlexFlow Tensor handle.
    self._input_tensors = []
    for key in inputs:
        input_tensor = inputs[key]
        t = Tensor(
            ffconfig=self._ffconfig,
            key=key,
            shape=input_tensor.shape,
            dtype=input_tensor.dtype,
        )
        self._input_tensors.append(t)
    # Training configuration. (A dead `self._label_type = None` that was
    # immediately overwritten has been removed.)
    self._loss = None
    self._metrics = []
    self._label_type = ff.DataType.DT_FLOAT
    self._my_onnx_model = None
    self._output_tensor = None
    # Dataloader state.
    self._full_input_tensors = []
    self._full_label_tensor = 0
    self._num_samples = 0
    self._input_dataloaders = []
    self._input_dataloaders_dim = []
    self._label_dataloader = 0
    self._label_dataloader_dim = 0
    # Take the next unique trace id from the module-level counter.
    global tracing_id
    self.__tracing_id = tracing_id
    tracing_id += 1
def __init__(self):
    """Create the FFConfig/FFModel pair and zero out all bookkeeping state."""
    self.ffconfig = ff.FFConfig()
    self.ffconfig.parse_args()
    print(
        "Python API batchSize(%d) workersPerNodes(%d) numNodes(%d)"
        % (
            self.ffconfig.get_batch_size(),
            self.ffconfig.get_workers_per_node(),
            self.ffconfig.get_num_nodes(),
        )
    )
    self.ffmodel = ff.FFModel(self.ffconfig)
    self.ffoptimizer = 0
    # Layer bookkeeping.
    self._layers = dict()
    self._nb_layers = 0
    # Tensor handles.
    self.input_tensors = []
    self.output_tensor = 0
    self.label_tensor = 0
    self.full_input_tensors = []
    self.full_label_tensor = 0
    # Dataloader state.
    self.num_samples = 0
    self.input_dataloaders = []
    self.input_dataloaders_dim = []
    self.label_dataloader = 0
    self.label_dataloader_dim = 0
def __init__(self):
    """Initialize the base Module, then build the FlexFlow config and model."""
    # Explicit two-argument super() kept: the enclosing class is not visible
    # here, and zero-arg super() could differ if this class is not `Module`.
    super(Module, self).__init__()
    self._ffconfig = ff.FFConfig()
    self._ffconfig.parse_args()
    self._ffmodel = ff.FFModel(self._ffconfig)
    # Populated later when the layer graph is constructed.
    self._graph = None