Example No. 1
 def __init__(self,
              X,
              Y,
              model,
              criterion,
              end_trigger,
              batch_size,
              optim_method=None,
              cores=None,
              bigdl_type="float"):
     # Normalise optim_method into a {layer_name: OptimMethod} dictionary
     if not optim_method:
         optim_methods = {model.name(): SGD()}
     elif isinstance(optim_method, OptimMethod):
         optim_methods = {model.name(): optim_method}
     elif isinstance(optim_method, JavaObject):
         optim_methods = {
             model.name(): OptimMethod(optim_method, bigdl_type)
         }
     else:
         optim_methods = optim_method
     if cores is None:
         cores = multiprocessing.cpu_count()
     JavaValue.__init__(self, None, bigdl_type,
                        [JTensor.from_ndarray(x) for x in to_list(X)],
                        JTensor.from_ndarray(Y), model.value, criterion,
                        optim_methods, end_trigger, batch_size, cores)
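All of these snippets funnel NumPy data through JTensor.from_ndarray, which wraps an ndarray (storage plus shape) for the JVM side and simply returns None when given None; that is why optional arguments such as weights=None can be forwarded through it unchanged. A minimal round-trip sketch, assuming the classic BigDL import path bigdl.util.common (it may differ in your distribution); the layer and optimizer constructors in the later examples additionally assume an initialised Spark/BigDL context (e.g. via init_engine()):

import numpy as np
from bigdl.util.common import JTensor

arr = np.random.rand(2, 3).astype("float32")
jt = JTensor.from_ndarray(arr)    # NumPy -> JTensor (storage + shape) for the JVM side
back = jt.to_ndarray()            # JTensor -> NumPy
assert np.allclose(arr, back)

assert JTensor.from_ndarray(None) is None   # None passes through untouched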
Example No. 2
 def __init__(self,
              matrix_b,
              matrix_c,
              alpha=float(1.0),
              beta=float(1.0),
              trans_a=0,
              trans_b=0,
              bigdl_type="float"):
     super(Gemm, self).__init__(None, bigdl_type, alpha, beta, trans_a,
                                trans_b, JTensor.from_ndarray(matrix_b),
                                JTensor.from_ndarray(matrix_c))
Example No. 3
 def set_running_std(self, running_std):
     """
     Set the running variance of the BatchNormalization layer.
     :param running_std: a NumPy array.
     """
     callZooFunc(self.bigdl_type, "setRunningStd", self.value,
                 JTensor.from_ndarray(running_std))
     return self
Example No. 4
 def set_running_mean(self, running_mean):
     """
     Set the running mean of the BatchNormalization layer.
     :param running_mean: a NumPy array.
     """
     callZooFunc(self.bigdl_type, "setRunningMean", self.value,
                 JTensor.from_ndarray(running_mean))
     return self
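Both setters take a plain NumPy vector (one value per feature channel) and return self, so calls can be chained. A hedged sketch, assuming the Analytics Zoo Keras-style BatchNormalization layer these methods are defined on (zoo.pipeline.api.keras.layers; the path may vary between releases):

import numpy as np
from zoo.pipeline.api.keras.layers import BatchNormalization

bn = BatchNormalization(input_shape=(4, 8, 8))        # 4 channels, assuming channel-first ordering
bn.set_running_mean(np.zeros(4, dtype="float32")) \
  .set_running_std(np.ones(4, dtype="float32"))       # chained: both setters return self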
Example No. 5
 def __init__(self,
              weights=None,
              size_average=True,
              logProbAsInput=True,
              bigdl_type="float"):
     super(ClassNLLCriterion, self).__init__(None, bigdl_type,
                                             JTensor.from_ndarray(weights),
                                             size_average, logProbAsInput)
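The weights argument is an optional per-class rescaling vector; leaving it as None skips weighting entirely because JTensor.from_ndarray(None) yields None. A hedged sketch, assuming the classic bigdl.nn.criterion import path:

import numpy as np
from bigdl.nn.criterion import ClassNLLCriterion

class_weights = np.array([1.0, 2.5, 0.5], dtype="float32")   # illustrative 3-class weighting
criterion = ClassNLLCriterion(weights=class_weights, size_average=True)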
Example No. 6
 def __init__(self,
              learningrate=1e-3,
              learningrate_decay=0.0,
              weightdecay=0.0,
              momentum=0.0,
              dampening=DOUBLEMAX,
              nesterov=False,
              leaningrate_schedule=None,
              learningrates=None,
              weightdecays=None,
              bigdl_type="float"):
     super(SGD, self).__init__(
         None, bigdl_type, learningrate, learningrate_decay, weightdecay,
         momentum, dampening, nesterov,
         leaningrate_schedule if leaningrate_schedule else Default(),
         JTensor.from_ndarray(learningrates),
         JTensor.from_ndarray(weightdecays))
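The trailing learningrates and weightdecays arguments are optional per-parameter multiplier vectors and go through JTensor.from_ndarray just like the criterion weights above; note that leaningrate_schedule is the library's own spelling of the parameter. A hedged sketch, assuming bigdl.optim.optimizer and its Poly schedule (requires an initialised BigDL context):

import numpy as np
from bigdl.optim.optimizer import SGD, Poly

optim = SGD(learningrate=0.01,
            momentum=0.9,
            leaningrate_schedule=Poly(0.5, 1000),                 # polynomial decay over 1000 iterations
            learningrates=np.array([1.0, 0.1], dtype="float32"))  # hypothetical layer-wise multipliers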
Example No. 7
 def __init__(self,
              p=1,
              weights=None,
              margin=1.0,
              size_average=True,
              bigdl_type="float"):
     super(MultiMarginCriterion,
           self).__init__(None, bigdl_type, p,
                          JTensor.from_ndarray(weights), margin,
                          size_average)
Example No. 8
 def __init__(self,
              log_prob_as_input=False,
              zero_based_label=True,
              weights=None,
              size_average=True,
              padding_value=-1,
              bigdl_type="float"):
     super(SparseCategoricalCrossEntropy,
           self).__init__(None, bigdl_type,
                          log_prob_as_input, zero_based_label,
                          JTensor.from_ndarray(weights), size_average,
                          padding_value)
Example No. 9
    def set_validation(self,
                       batch_size,
                       X_val,
                       Y_val,
                       trigger,
                       val_method=None):
        """
        Configure validation settings.

        :param batch_size: validation batch size
        :param X_val: features of validation dataset
        :param Y_val: label of validation dataset
        :param trigger: validation interval
        :param val_method: the ValidationMethod to use, e.g. "Top1Accuracy", "Top5Accuracy", "Loss"
        """
        if val_method is None:
            val_method = [Top1Accuracy()]
        callBigDlFunc(self.bigdl_type, "setValidation", self.value, batch_size,
                      trigger,
                      [JTensor.from_ndarray(X) for X in to_list(X_val)],
                      JTensor.from_ndarray(Y_val), to_list(val_method))
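Validation features and labels are handed over as plain NumPy arrays; a list of arrays is also accepted for X_val, as the to_list call shows. A hedged usage sketch, where optimizer stands for an already constructed instance of the class this method belongs to:

import numpy as np
from bigdl.optim.optimizer import EveryEpoch, Top1Accuracy

X_val = np.random.rand(128, 10).astype("float32")               # illustrative validation features
Y_val = np.random.randint(1, 3, size=(128,)).astype("float32")  # illustrative 1-based labels

optimizer.set_validation(batch_size=32,
                         X_val=X_val,
                         Y_val=Y_val,
                         trigger=EveryEpoch(),
                         val_method=[Top1Accuracy()])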
Example No. 10
def save_variable_bigdl(tensors, target_path, bigdl_type="float"):
    """
    Save a variable dictionary to a Java object file, so it can be read by BigDL

    :param tensors: tensor dictionary
    :param target_path: where the Java object file is stored
    :param bigdl_type: model variable numeric type
    :return: nothing
    """
    import numpy as np
    jtensors = {}
    for tn in tensors.keys():
        # Coerce non-ndarray values (lists, scalars) before wrapping them as JTensors
        if not isinstance(tensors[tn], np.ndarray):
            value = np.array(tensors[tn])
        else:
            value = tensors[tn]
        jtensors[tn] = JTensor.from_ndarray(value)
        
    callBigDlFunc(bigdl_type, "saveTensorDictionary", jtensors, target_path)
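Because non-ndarray values are coerced with np.array before being wrapped, plain lists and scalars are accepted as well. A short usage sketch with an illustrative variable dictionary and target path:

import numpy as np

variables = {
    "conv1/weights": np.random.rand(3, 3, 1, 16).astype("float32"),
    "conv1/bias": [0.0] * 16,     # plain list, coerced via np.array
}
save_variable_bigdl(variables, "/tmp/variables.obj")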
Example No. 11
 def __init__(self,
              input_dim,
              output_dim,
              init="uniform",
              weights=None,
              trainable=True,
              input_length=None,
              W_regularizer=None,
              input_shape=None,
              mask_zero=False,
              padding_value=0,
              zero_based_id=True,
              **kwargs):
     if input_length:
         input_shape = (input_length, )
     super(Embedding,
           self).__init__(None, input_dim, output_dim, init,
                          JTensor.from_ndarray(weights), trainable,
                          W_regularizer,
                          list(input_shape) if input_shape else None,
                          mask_zero, padding_value, zero_based_id, **kwargs)
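Here weights is a single (input_dim, output_dim) NumPy matrix rather than the list of arrays Keras expects, since it is forwarded through JTensor.from_ndarray. A hedged sketch using the Embedding layer defined above with a stand-in weight matrix:

import numpy as np

pretrained = np.random.rand(10000, 100).astype("float32")   # stand-in for real word vectors
emb = Embedding(input_dim=10000,
                output_dim=100,
                weights=pretrained,        # forwarded through JTensor.from_ndarray
                trainable=False,
                input_length=50)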
Example No. 12
 def from_pytorch(model):
     """
     Create a TorchModel directly from a PyTorch model, e.g. a model from torchvision.models.
     :param model: a PyTorch model, or a function to create PyTorch model
     """
     # Flatten every trainable parameter into one vector for the JVM side
     weights = []
     import types
     if isinstance(model, types.FunctionType) or isinstance(model, type):
         for param in trainable_param(model()):
             weights.append(param.view(-1))
     else:
         for param in trainable_param(model):
             weights.append(param.view(-1))
     flatten_weight = torch.nn.utils.parameters_to_vector(
         weights).data.numpy()
     bys = io.BytesIO()
     torch.save(model, bys, pickle_module=zoo_pickle_module)
     weights = JTensor.from_ndarray(flatten_weight)
     jvalue = callZooFunc("float", "createTorchModel", bys.getvalue(),
                          weights)
     net = TorchModel(jvalue, bys.getvalue())
     return net
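A hedged sketch wrapping a torchvision network; TorchModel refers to the class this method is defined on, and PyTorch plus torchvision are assumed to be installed:

import torchvision

net = TorchModel.from_pytorch(torchvision.models.resnet18())

# A no-argument factory is accepted too (see the isinstance check above);
# it is called once to collect the trainable parameters.
net2 = TorchModel.from_pytorch(torchvision.models.resnet18)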
Example No. 13
 def __init__(self, weights=None, size_average=True, bigdl_type="float"):
     super(MultiLabelSoftMarginCriterion,
           self).__init__(None, bigdl_type, JTensor.from_ndarray(weights),
                          size_average)
Example No. 14
 def __init__(self, weights=None, size_average=True, bigdl_type="float"):
     super(CrossEntropyCriterion,
           self).__init__(None, bigdl_type, JTensor.from_ndarray(weights),
                          size_average)
Example No. 15
 def __init__(self, data, name=None, bigdl_type="float"):
     self.data = data
     super(Constant, self).__init__(None, bigdl_type,
                                    JTensor.from_ndarray(data), name)
Example No. 16
 def __init__(self, value, bigdl_type="float"):
     super(Constant, self).__init__(None, bigdl_type,
                                    JTensor.from_ndarray(value))