예제 #1
0
 def __init__(self, upper=None, lower=None, bigdl_type="float"):
     """
     Construct the JVM counterpart; when both bounds are given they are
     promoted to float before being forwarded.
     """
     if upper is None or lower is None:
         JavaValue.__init__(self, None, bigdl_type)
     else:
         # `+ 0.0` promotes ints so the JVM receives floating-point values
         JavaValue.__init__(self, None, bigdl_type, upper + 0.0, lower + 0.0)
예제 #2
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self,
              X,
              Y,
              model,
              criterion,
              end_trigger,
              batch_size,
              optim_method=None,
              cores=None,
              bigdl_type="float"):
     """
     Create a local optimizer over in-memory ndarrays.

     :param X: input ndarray, or a list of ndarrays for multi-input models
     :param Y: label ndarray
     :param model: the neural net model
     :param criterion: the loss function
     :param end_trigger: when to end the optimization
     :param batch_size: training batch size
     :param optim_method: an OptimMethod, a raw JavaObject, or a dict of
        {module_name: OptimMethod}; defaults to SGD for the whole model
     :param cores: number of CPU cores to use; defaults to all available
     """
     # Normalize optim_method into a {module_name: OptimMethod} dict.
     if not optim_method:
         optim_methods = {model.name(): SGD()}
     elif isinstance(optim_method, OptimMethod):
         optim_methods = {model.name(): optim_method}
     elif isinstance(optim_method, JavaObject):
         optim_methods = {model.name(): OptimMethod(optim_method, bigdl_type)}
     else:
         optim_methods = optim_method
     if cores is None:
         cores = multiprocessing.cpu_count()
     # Fixed: the comprehension variable used to shadow the parameter X.
     JavaValue.__init__(self, None, bigdl_type,
                        [JTensor.from_ndarray(x) for x in to_list(X)],
                        JTensor.from_ndarray(Y),
                        model.value,
                        criterion,
                        optim_methods, end_trigger, batch_size, cores)
예제 #3
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, min, bigdl_type="float"):
        """
        Create a MinLoss trigger.


        :param min: min loss
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        # NOTE(review): the parameter name shadows the builtin `min`, but
        # renaming it would break keyword callers, so it stays.
        JavaValue.__init__(self, None, bigdl_type, min)
예제 #4
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, interval, bigdl_type="float"):
        """
        Create a SeveralIteration trigger.


        :param interval: interval is the "n" where an action is triggered every "n" iterations
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, interval)
예제 #5
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, max_epoch, bigdl_type="float"):
        """
        Create a MaxEpoch trigger.


        :param max_epoch: max_epoch
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, max_epoch)
예제 #6
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, max, bigdl_type="float"):
        """
        Create a MaxIteration trigger.


        :param max: max
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        # NOTE(review): the parameter name shadows the builtin `max`, but
        # it is part of the public signature and must stay.
        JavaValue.__init__(self, None, bigdl_type, max)
예제 #7
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, max, bigdl_type="float"):
        """
        Create a MaxScore trigger.


        :param max: max score
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        # NOTE(review): the parameter name shadows the builtin `max`, but
        # it is part of the public signature and must stay.
        JavaValue.__init__(self, None, bigdl_type, max)
예제 #8
0
    def __init__(self, log_dir, app_name, bigdl_type="float"):
        """
        Create a TrainSummary. Logs will be saved to log_dir/app_name/train.


        :param log_dir: the root dir to store the logs
        :param app_name: the application name
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, log_dir, app_name)
예제 #9
0
 def __init__(self,
              learningrate=1e-3,
              learningrate_decay=0.0,
              beta1=0.9,
              beta2=0.999,
              epsilon=1e-8,
              bigdl_type="float"):
     """
     Construct the JVM-side optim method.

     The defaults (beta1=0.9, beta2=0.999, epsilon=1e-8) match the usual
     Adam hyperparameters -- confirm against the JVM class.

     :param learningrate: learning rate
     :param learningrate_decay: learning rate decay
     :param beta1: first exponential-decay coefficient
     :param beta2: second exponential-decay coefficient
     :param epsilon: small constant for numerical stability
     """
     JavaValue.__init__(self, None, bigdl_type, learningrate,
                        learningrate_decay, beta1, beta2, epsilon)
예제 #10
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, log_dir, app_name, bigdl_type="float"):
        """
        Create a TrainSummary. Logs will be saved to log_dir/app_name/train.


        :param log_dir: the root dir to store the logs
        :param app_name: the application name
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, log_dir, app_name)
예제 #11
0
    def __init__(self, first, *other):
        """
        Create a Or trigger.


        :param first: first Trigger
        :param other: other Trigger
        """
        # bigdl_type is fixed to "float" here; the variadic triggers are
        # forwarded to the JVM as a list.
        JavaValue.__init__(self, None, "float", first, list(other))
예제 #12
0
    def __init__(self, log_dir, app_name, bigdl_type="float"):
        """
        Create a ValidationSummary. Logs will be saved to
        log_dir/app_name/train. By default, all ValidationMethod set into
        optimizer will be recorded and the recording interval is the same
        as trigger of ValidationMethod in the optimizer.

        :param log_dir: the root dir to store the logs
        :param app_name: the application name
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, log_dir, app_name)
예제 #13
0
 def __init__(self,
              monitor,
              factor=0.1,
              patience=10,
              mode="min",
              epsilon=1e-4,
              cooldown=0,
              min_lr=0.0,
              bigdl_type="float"):
     """
     Construct the JVM counterpart with the given plateau-scheduling
     hyperparameters. The names mirror Keras-style ReduceLROnPlateau;
     the actual semantics live in the JVM class -- confirm there.
     """
     JavaValue.__init__(self, None, bigdl_type, monitor, factor, patience,
                        mode, epsilon, cooldown, min_lr)
예제 #14
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self,
              monitor,
              factor=0.1,
              patience=10,
              mode="min",
              epsilon=1e-4,
              cooldown=0,
              min_lr=0.0,
              bigdl_type="float"):
     """
     Construct the JVM counterpart with the given plateau-scheduling
     hyperparameters. The names mirror Keras-style ReduceLROnPlateau;
     the actual semantics live in the JVM class -- confirm there.
     """
     JavaValue.__init__(self, None, bigdl_type, monitor, factor, patience, mode, epsilon,
                        cooldown, min_lr)
예제 #15
0
 def __init__(self, text=None, label=None, jvalue=None, bigdl_type="float"):
     """
     Wrap an existing JVM TextFeature, or build one from `text` and an
     optional `label`.
     """
     self.bigdl_type = bigdl_type
     if jvalue:
         self.value = jvalue
         return
     assert isinstance(text, six.string_types), "text of a TextFeature should be a string"
     constructor = JavaValue.jvm_class_constructor(self)
     if label is None:
         self.value = callBigDlFunc(bigdl_type, constructor, text)
     else:
         # The label is coerced to int before crossing to the JVM.
         self.value = callBigDlFunc(bigdl_type, constructor, text, int(label))
예제 #16
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self, log_dir, app_name, bigdl_type="float"):
        """
        Create a ValidationSummary. Logs will be saved to
        log_dir/app_name/train. By default, all ValidationMethod set into
        optimizer will be recorded and the recording interval is the same
        as trigger of ValidationMethod in the optimizer.


        :param log_dir: the root dir to store the logs
        :param app_name: the application name
        :param bigdl_type: numeric type used on the JVM side ("float" by default)
        """
        JavaValue.__init__(self, None, bigdl_type, log_dir, app_name)
예제 #17
0
 def __init__(self,
              classes,
              iou=0.5,
              use_voc2007=False,
              skip_class=-1,
              bigdl_type="float"):
     """
     Construct the JVM-side detection-evaluation validation method.

     :param classes: the number of classes
     :param iou: the IOU threshold
     :param use_voc2007: use validation method before voc2010 (i.e. voc2007)
     :param skip_class: skip calculation on a specific class (e.g. background)
     :param bigdl_type: numeric type used on the JVM side ("float" by default)
     """
     JavaValue.__init__(self, None, bigdl_type, classes, iou, use_voc2007,
                        skip_class)
예제 #18
0
 def __init__(self,
              learningrate=1e-3,
              learningrate_decay=0.0,
              weightdecay=0.0,
              momentum=0.0,
              dampening=DOUBLEMAX,
              nesterov=False,
              leaningrate_schedule=None,
              learningrates=None,
              weightdecays=None,
              bigdl_type="float"):
     """
     Construct the JVM-side optim method with SGD-style hyperparameters.

     NOTE(review): `leaningrate_schedule` looks like a typo for
     `learningrate_schedule`, but it is part of the public signature and
     cannot be renamed without breaking keyword callers.
     """
     # Fall back to the Default() schedule when none (or a falsy one) is given.
     schedule = leaningrate_schedule if leaningrate_schedule else Default()
     JavaValue.__init__(self, None, bigdl_type, learningrate,
                        learningrate_decay, weightdecay, momentum,
                        dampening, nesterov, schedule,
                        JTensor.from_ndarray(learningrates),
                        JTensor.from_ndarray(weightdecays))
예제 #19
0
 def __init__(self,
              max_iter=20,
              max_eval=DOUBLEMAX,
              tolfun=1e-5,
              tolx=1e-9,
              ncorrection=100,
              learningrate=1.0,
              verbose=False,
              linesearch=None,
              linesearch_options=None,
              bigdl_type="float"):
     """
     Construct the JVM-side LBFGS optim method.

     :raises ValueError: if linesearch or linesearch_options is supplied
     """
     # Custom line searches are rejected up front (guard clauses).
     if linesearch:
         raise ValueError('linesearch and linesearch_options must be None in LBFGS')
     if linesearch_options:
         raise ValueError('linesearch and linesearch_options must be None in LBFGS')
     JavaValue.__init__(self, None, bigdl_type, max_iter, max_eval,
                        tolfun, tolx, ncorrection, learningrate,
                        verbose, linesearch, linesearch_options)
예제 #20
0
 def __init__(self, model, configure=None, bigdl_type="float"):
     """
     Create the JVM object for `model`, then fetch back the Configure
     the JVM side actually uses and cache it on the instance.

     :param model: model forwarded to the JVM constructor
     :param configure: optional Configure forwarded to the JVM constructor
     :param bigdl_type: numeric type used on the JVM side ("float" by default)
     """
     self.bigdl_type = bigdl_type
     self.value = callBigDlFunc(bigdl_type,
                                JavaValue.jvm_class_constructor(self),
                                model, configure)
     # Round-trip: ask the JVM which Configure it settled on.
     self.configure = Configure(
         jvalue=callBigDlFunc(self.bigdl_type, "getConfigure", self.value))
예제 #21
0
 def __init__(self,
              X,
              y,
              model,
              criterion,
              end_trigger,
              batch_size,
              optim_method=None,
              cores=None,
              bigdl_type="float"):
     """
     Create a local optimizer over in-memory ndarrays.

     :param X: input ndarray, or a list of ndarrays for multi-input models
     :param y: label ndarray
     :param model: the neural net model
     :param criterion: the loss function
     :param end_trigger: when to end the optimization
     :param batch_size: training batch size
     :param optim_method: optimization algorithm; defaults to SGD()
     :param cores: number of CPU cores to use; defaults to all available
     """
     if cores is None:
         cores = multiprocessing.cpu_count()
     # Fixed: the comprehension variable used to shadow the parameter X.
     JavaValue.__init__(self, None, bigdl_type,
                        [JTensor.from_ndarray(x) for x in to_list(X)],
                        JTensor.from_ndarray(y), model.value, criterion,
                        optim_method if optim_method else SGD(),
                        end_trigger, batch_size, cores)
예제 #22
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, jvalue, bigdl_type, *args):
     """
     Store bigdl_type and either adopt the given JavaObject or construct
     a new JVM-side instance from *args.
     """
     self.bigdl_type = bigdl_type
     if jvalue:
         # Adopting an existing JVM handle; must be a raw py4j JavaObject.
         # NOTE(review): type(...) == rejects subclasses; kept as-is.
         assert type(jvalue) == JavaObject
         self.value = jvalue
     else:
         self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self), *args)
예제 #23
0
 def __init__(self,
              label_map,
              thresh=0.3,
              encoding="png",
              bigdl_type="float"):
     """
     Create the JVM-side visualizer.

     :param label_map: label mapping forwarded to the JVM
     :param thresh: threshold forwarded to the JVM (default 0.3)
     :param encoding: image encoding (default "png")
     :param bigdl_type: numeric type used on the JVM side
     """
     # Consistency fix: sibling wrappers keep bigdl_type on the instance
     # for later JVM calls; this one previously dropped it.
     self.bigdl_type = bigdl_type
     self.value = callBigDlFunc(bigdl_type,
                                JavaValue.jvm_class_constructor(self),
                                label_map, thresh, encoding)
예제 #24
0
파일: optimizer.py 프로젝트: zhaonaiy/BigDL
 def __init__(self, jvalue, bigdl_type, *args):
     """
     Wrap an existing py4j JavaObject, or construct a new JVM object
     from *args via callBigDlFunc.
     """
     if not jvalue:
         self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self), *args)
     else:
         # NOTE(review): type(...) == rejects subclasses; kept as-is.
         assert type(jvalue) == JavaObject
         self.value = jvalue
     self.bigdl_type = bigdl_type
예제 #25
0
파일: image.py 프로젝트: doc-vu/BigDL
 def __init__(self, image=None, label=None, path=None, bigdl_type="float"):
     """
     Build the JVM image feature from an optional ndarray image, an
     optional ndarray label, and an optional source path.
     """
     # None passes straight through; ndarrays are wrapped as JTensors.
     image_tensor = None if image is None else JTensor.from_ndarray(image)
     label_tensor = None if label is None else JTensor.from_ndarray(label)
     self.bigdl_type = bigdl_type
     self.value = callBigDlFunc(bigdl_type,
                                JavaValue.jvm_class_constructor(self),
                                image_tensor, label_tensor, path)
예제 #26
0
파일: image.py 프로젝트: ysluodz/BigDL
    def __init__(self, image_rdd=None, label_rdd=None, jvalue=None, bigdl_type="float"):
        """
        Wrap an existing JVM object, or build one from ndarray RDDs.

        :param image_rdd: RDD of image ndarrays (required when jvalue is None)
        :param label_rdd: optional RDD of label ndarrays
        :param jvalue: existing JVM object to wrap
        :param bigdl_type: numeric type used on the JVM side
        """
        assert jvalue or image_rdd, "jvalue and image_rdd cannot be None in the same time"
        if jvalue:
            self.value = jvalue
        else:
            # init from image ndarray rdd and label rdd(optional)
            image_tensor_rdd = image_rdd.map(lambda image: JTensor.from_ndarray(image))
            label_tensor_rdd = label_rdd.map(lambda label: JTensor.from_ndarray(label)) if label_rdd else None
            self.value = callBigDlFunc(bigdl_type, JavaValue.jvm_class_constructor(self),
                                       image_tensor_rdd, label_tensor_rdd)

        self.bigdl_type = bigdl_type
예제 #27
0
    def __init__(self,
                 model,
                 training_rdd,
                 criterion,
                 end_trigger,
                 batch_size,
                 optim_method=None,
                 bigdl_type="float"):
        """
        Create an optimizer.


        :param model: the neural net model
        :param training_rdd: the training dataset (an RDD or a DataSet)
        :param criterion: the loss function
        :param optim_method: the algorithm to use for optimization,
           e.g. SGD, Adagrad, etc. If optim_method is None, the default algorithm is SGD.
        :param end_trigger: when to end the optimization
        :param batch_size: training batch size
        :raises TypeError: if training_rdd is neither an RDD nor a DataSet
        """
        # Normalize optim_method into a {module_name: OptimMethod} dict.
        if not optim_method:
            optim_methods = {model.name(): SGD()}
        elif isinstance(optim_method, OptimMethod):
            optim_methods = {model.name(): optim_method}
        elif isinstance(optim_method, JavaObject):
            optim_methods = {
                model.name(): OptimMethod(optim_method, bigdl_type)
            }
        else:
            optim_methods = optim_method
        if isinstance(training_rdd, RDD):
            JavaValue.__init__(self, None, bigdl_type, model.value,
                               training_rdd, criterion, optim_methods,
                               end_trigger, batch_size)
        elif isinstance(training_rdd, DataSet):
            self.bigdl_type = bigdl_type
            self.value = callBigDlFunc(self.bigdl_type,
                                       "createDistriOptimizerFromDataSet",
                                       model.value, training_rdd, criterion,
                                       optim_methods, end_trigger, batch_size)
        else:
            # Previously an unsupported type fell through silently, leaving
            # the object half-initialized (no self.value); fail fast instead.
            raise TypeError("training_rdd should be an RDD or a DataSet, "
                            "but got %s" % type(training_rdd))
예제 #28
0
    def __init__(self,
                 model,
                 training_rdd,
                 criterion,
                 end_trigger,
                 batch_size,
                 optim_method=None,
                 bigdl_type="float"):
        """
        Create an optimizer.

        :param model: the neural net model
        :param training_rdd: the training dataset
        :param criterion: the loss function
        :param optim_method: the algorithm to use for optimization,
           e.g. SGD, Adagrad, etc. If optim_method is None, the default algorithm is SGD.
        :param end_trigger: when to end the optimization
        :param batch_size: training batch size
        """
        JavaValue.__init__(self, None, bigdl_type, model.value, training_rdd,
                           criterion, optim_method if optim_method else SGD(),
                           end_trigger, batch_size)
예제 #29
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
    def __init__(self,
                 model,
                 training_rdd,
                 criterion,
                 end_trigger,
                 batch_size,
                 optim_method=None,
                 bigdl_type="float"):
        """
        Create an optimizer.


        :param model: the neural net model
        :param training_rdd: the training dataset (an RDD or a DataSet)
        :param criterion: the loss function
        :param optim_method: the algorithm to use for optimization,
           e.g. SGD, Adagrad, etc. If optim_method is None, the default algorithm is SGD.
        :param end_trigger: when to end the optimization
        :param batch_size: training batch size
        :raises TypeError: if training_rdd is neither an RDD nor a DataSet
        """
        # Normalize optim_method into a {module_name: OptimMethod} dict.
        if not optim_method:
            optim_methods = {model.name(): SGD()}
        elif isinstance(optim_method, OptimMethod):
            optim_methods = {model.name(): optim_method}
        elif isinstance(optim_method, JavaObject):
            optim_methods = {model.name(): OptimMethod(optim_method, bigdl_type)}
        else:
            optim_methods = optim_method
        if isinstance(training_rdd, RDD):
            JavaValue.__init__(self, None, bigdl_type, model.value,
                               training_rdd, criterion,
                               optim_methods, end_trigger, batch_size)
        elif isinstance(training_rdd, DataSet):
            self.bigdl_type = bigdl_type
            self.value = callBigDlFunc(self.bigdl_type, "createDistriOptimizerFromDataSet",
                                       model.value, training_rdd, criterion,
                                       optim_methods, end_trigger, batch_size)
        else:
            # Previously an unsupported type fell through silently, leaving
            # self.value unset; fail fast with a clear error instead.
            raise TypeError("training_rdd should be an RDD or a DataSet, "
                            "but got %s" % type(training_rdd))
예제 #30
0
 def __init__(self, pre_processor=None,
              post_processor=None,
              batch_per_partition=4,
              label_map=None, feature_padding_param=None, jvalue=None, bigdl_type="float"):
     """
     Create the JVM-side configuration, or wrap an existing jvalue.

     :param pre_processor: optional transformer; must subclass FeatureTransformer
     :param post_processor: optional transformer; must subclass FeatureTransformer
     :param batch_per_partition: batch size per partition (default 4)
     :param label_map: label mapping forwarded to the JVM
     :param feature_padding_param: padding parameters forwarded to the JVM
     :param jvalue: existing JVM object to wrap; skips construction when given
     """
     self.bigdl_type = bigdl_type
     if jvalue:
         self.value = jvalue
     else:
         # NOTE(review): checking only the first base class's name misses
         # deeper subclassing; confirm before tightening.
         if pre_processor:
             assert pre_processor.__class__.__bases__[0].__name__ == "FeatureTransformer",\
                 "the pre_processor should be subclass of FeatureTransformer"
         if post_processor:
             # Fixed copy-paste: this message used to say "pre_processor".
             assert post_processor.__class__.__bases__[0].__name__ == "FeatureTransformer", \
                 "the post_processor should be subclass of FeatureTransformer"
         self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self),
             pre_processor,
             post_processor,
             batch_per_partition,
             label_map,
             feature_padding_param)
예제 #31
0
 def __init__(self, pre_processor=None,
              post_processor=None,
              batch_per_partition=4,
              label_map=None, feature_padding_param=None, jvalue=None, bigdl_type="float"):
     """
     Create the JVM-side configuration, or wrap an existing jvalue.

     :param pre_processor: optional transformer; must subclass Preprocessing
     :param post_processor: optional transformer; must subclass Preprocessing
     :param batch_per_partition: batch size per partition (default 4)
     :param label_map: label mapping forwarded to the JVM
     :param feature_padding_param: padding parameters forwarded to the JVM
     :param jvalue: existing JVM object to wrap; skips construction when given
     """
     self.bigdl_type = bigdl_type
     if jvalue:
         self.value = jvalue
     else:
         # isinstance is the idiomatic equivalent of issubclass(x.__class__, T).
         if pre_processor:
             assert isinstance(pre_processor, Preprocessing), \
                 "the pre_processor should be subclass of Preprocessing"
         if post_processor:
             # Fixed copy-paste: this message used to say "pre_processor".
             assert isinstance(post_processor, Preprocessing), \
                 "the post_processor should be subclass of Preprocessing"
         self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self),
             pre_processor,
             post_processor,
             batch_per_partition,
             label_map,
             feature_padding_param)
예제 #32
0
 def __init__(self, varianceNormAverage=True, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param varianceNormAverage: flag forwarded to the JVM constructor
     :param bigdl_type: numeric type used on the JVM side ("float" by default)
     """
     JavaValue.__init__(self, None, bigdl_type, varianceNormAverage)
예제 #33
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, bigdl_type="float"):
     """
     Create a EveryEpoch trigger.

     :param bigdl_type: numeric type used on the JVM side ("float" by default)
     """
     JavaValue.__init__(self, None, bigdl_type)
예제 #34
0
 def __init__(self, metric_name, idx):
     """
     Construct the JVM counterpart; bigdl_type is fixed to "float".

     :param metric_name: name of the metric
     :param idx: index forwarded to the JVM constructor
     """
     JavaValue.__init__(self, None, "float", metric_name, idx)
예제 #35
0
 def __init__(self, mean, stdv, bigdl_type="float"):
     """
     Construct the JVM counterpart from mean and stdv, promoting both
     to float before forwarding.
     """
     # `+ 0.0` promotes ints so the JVM receives floating-point values.
     JavaValue.__init__(self, None, bigdl_type, mean + 0.0, stdv + 0.0)
예제 #36
0
 def __init__(self, bigdl_type="float"):
     """
     Create a EveryEpoch trigger.

     :param bigdl_type: numeric type used on the JVM side ("float" by default)
     """
     JavaValue.__init__(self, None, bigdl_type)
예제 #37
0
 def __init__(self, value, bigdl_type="float"):
     """
     Construct the JVM counterpart, promoting `value` to float first.
     """
     # `+ 0.0` promotes ints so the JVM receives a floating-point value.
     JavaValue.__init__(self, None, bigdl_type, value + 0.0)
예제 #38
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, step_sizes, gamma, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param step_sizes: step sizes forwarded to the JVM constructor
     :param gamma: factor forwarded to the JVM constructor
     """
     JavaValue.__init__(self, None, bigdl_type, step_sizes, gamma)
예제 #39
0
파일: criterion.py 프로젝트: ru003ar/BigDL
 def __init__(self, jvalue, bigdl_type, *args):
     """
     Adopt `jvalue` when given; otherwise build the JVM object from *args.
     """
     self.bigdl_type = bigdl_type
     if jvalue:
         self.value = jvalue
     else:
         self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self), *args)
예제 #40
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, cri=None, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param cri: criterion to forward; defaults to ClassNLLCriterion()
     """
     # Local import -- presumably avoids a circular dependency with
     # bigdl.nn.criterion; confirm before moving to module level.
     from bigdl.nn.criterion import ClassNLLCriterion
     if cri is None:
         cri = ClassNLLCriterion()
     JavaValue.__init__(self, None, bigdl_type, cri)
예제 #41
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, bigdl_type="float"):
     """
     Construct the JVM counterpart with no extra arguments.
     """
     JavaValue.__init__(self, None, bigdl_type)
예제 #42
0
 def __init__(self, val_method, output_length, target_length):
     """
     Wrap `val_method` with output/target lengths (JVM-side semantics);
     bigdl_type is fixed to "float".
     """
     JavaValue.__init__(self, None, "float",
                        val_method, output_length, target_length)
예제 #43
0
파일: image.py 프로젝트: doc-vu/BigDL
 def __init__(self, bigdl_type="float", *args):
     """
     Create the JVM object, forwarding *args to the JVM constructor.

     NOTE(review): because bigdl_type precedes *args, the first
     positional argument a caller passes lands in bigdl_type; extra
     args require an explicit bigdl_type first.
     """
     self.value = callBigDlFunc(bigdl_type,
                                JavaValue.jvm_class_constructor(self),
                                *args)
예제 #44
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, iteration_per_epoch, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param iteration_per_epoch: iterations making up one epoch
     """
     JavaValue.__init__(self, None, bigdl_type, iteration_per_epoch)
예제 #45
0
 def __init__(self, metric_name, idx, count_idx):
     """
     Record the metric name and indices locally and build the JVM
     object; bigdl_type is fixed to "float".
     """
     self.name = metric_name
     self.idx = idx
     self.count_idx = count_idx
     JavaValue.__init__(self, None, "float", metric_name, idx, count_idx)
예제 #46
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, decay_step, decay_rate, stair_case=False, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param decay_step: forwarded to the JVM constructor
     :param decay_rate: forwarded to the JVM constructor
     :param stair_case: forwarded to the JVM constructor (default False)
     """
     JavaValue.__init__(self, None, bigdl_type, decay_step, decay_rate, stair_case)
예제 #47
0
 def __init__(self, power, max_iteration, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param power: forwarded to the JVM constructor
     :param max_iteration: forwarded to the JVM constructor
     """
     JavaValue.__init__(self, None, bigdl_type, power, max_iteration)
예제 #48
0
파일: optimizer.py 프로젝트: ru003ar/BigDL
 def __init__(self, power, max_iteration, bigdl_type="float"):
     """Construct the JVM counterpart from power and max_iteration."""
     JavaValue.__init__(self, None, bigdl_type, power, max_iteration)
예제 #49
0
 def __init__(self, step_sizes, gamma, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param step_sizes: step sizes forwarded to the JVM constructor
     :param gamma: factor forwarded to the JVM constructor
     """
     JavaValue.__init__(self, None, bigdl_type, step_sizes, gamma)
예제 #50
0
 def __init__(self, bigdl_type="float"):
     """
     Construct the JVM counterpart with no extra arguments.
     """
     JavaValue.__init__(self, None, bigdl_type)
예제 #51
0
 def __init__(self, cri=None, bigdl_type="float"):
     """
     Construct the JVM counterpart.

     :param cri: criterion to forward; defaults to ClassNLLCriterion()
     """
     # Local import -- presumably avoids a circular dependency with
     # bigdl.nn.criterion; confirm before moving to module level.
     from bigdl.nn.criterion import ClassNLLCriterion
     if cri is None:
         cri = ClassNLLCriterion()
     JavaValue.__init__(self, None, bigdl_type, cri)
예제 #52
0
파일: image.py 프로젝트: ru003ar/BigDL
 def __init__(self, bigdl_type="float", *args):
     """
     Create the JVM object, forwarding *args to the JVM constructor.

     NOTE(review): because bigdl_type precedes *args, the first
     positional argument a caller passes lands in bigdl_type.
     """
     self.value = callBigDlFunc(
             bigdl_type, JavaValue.jvm_class_constructor(self), *args)
예제 #53
0
 def __init__(self, val_method, name, output_indices, label_indices):
     """
     Wrap `val_method` with a name and output/label indices (JVM-side
     semantics); bigdl_type is fixed to "float".
     """
     JavaValue.__init__(self, None, "float", val_method, name,
                        output_indices, label_indices)
예제 #54
0
파일: image.py 프로젝트: ru003ar/BigDL
 def __init__(self, image=None, label=None, path=None, bigdl_type="float"):
     """
     Create the JVM image feature from an optional ndarray image, an
     optional ndarray label, and an optional source path.

     :param image: image ndarray, or None
     :param label: label ndarray, or None
     :param path: source path, or None
     """
     image_tensor = JTensor.from_ndarray(image) if image is not None else None
     label_tensor = JTensor.from_ndarray(label) if label is not None else None
     self.bigdl_type = bigdl_type
     self.value = callBigDlFunc(
         bigdl_type, JavaValue.jvm_class_constructor(self), image_tensor, label_tensor, path)
예제 #55
0
 def __init__(self, jvalue, bigdl_type, *args):
     """
     Adopt `jvalue` when given; otherwise build the JVM object from *args.
     """
     self.value = jvalue if jvalue else callBigDlFunc(
         bigdl_type, JavaValue.jvm_class_constructor(self), *args)
     self.bigdl_type = bigdl_type
예제 #56
0
 def __init__(self, label_map, thresh=0.3, encoding="png",
              bigdl_type="float"):
     """
     Create the JVM-side visualizer.

     NOTE(review): unlike sibling wrappers, bigdl_type is not stored on
     the instance here -- confirm that no later call needs it.

     :param label_map: label mapping forwarded to the JVM
     :param thresh: threshold forwarded to the JVM (default 0.3)
     :param encoding: image encoding (default "png")
     """
     self.value = callBigDlFunc(
         bigdl_type, JavaValue.jvm_class_constructor(self), label_map, thresh, encoding)