Example #1
def fromJson(obj, secuml_conf):
    # Rebuild a ClassificationExperiment from its serialized JSON description.
    conf = ClassifierConfFactory.getFactory().fromJson(
        obj['classification_conf'])
    experiment = ClassificationExperiment(secuml_conf)
    experiment.initExperiment(obj['project'], obj['dataset'], create=False)
    Experiment.expParamFromJson(experiment, obj, conf)
    return experiment
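For context, this is how such a deserializer might be driven once an experiment description has been saved to disk. The file name conf.json, the placement of fromJson as a static method of ClassificationExperiment, and the pre-existing secuml_conf object are assumptions made for illustration, not details taken from the listing.

import json

# Hypothetical driver (assumed names: conf.json, secuml_conf).
with open('conf.json') as f:
    obj = json.load(f)
experiment = ClassificationExperiment.fromJson(obj, secuml_conf)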
Example #2
def fromJson(obj):
    classification_factory = ClassifierConfFactory.getFactory()
    classification_conf = classification_factory.fromJson(
        obj['classification_conf'])
    conf = RareCategoryDetectionStrategy(classification_conf,
                                         obj['cluster_strategy'],
                                         obj['num_annotations'],
                                         obj['cluster_weights'])
    return conf
Example #3
def fromJson(obj):
    # Rebuild a RandomSamplingConfiguration, including its optional
    # validation dataset and its binary classifier configuration.
    validation_conf = None
    if obj['validation_conf'] is not None:
        validation_conf = ValidationDatasetConf.fromJson(
            obj['validation_conf'])
    binary_model_conf = ClassifierConfFactory.getFactory().fromJson(
        obj['models_conf']['binary'])
    conf = RandomSamplingConfiguration(obj['auto'], obj['budget'], obj['batch'],
                                       binary_model_conf, validation_conf)
    return conf
Example #4
def gornitzBinaryModelConf(logger):
    classifier_args = {}
    classifier_args['num_folds'] = 4
    classifier_args['sample_weight'] = False
    classifier_args['families_supervision'] = False
    test_conf = UnlabeledLabeledConf()
    classifier_args['test_conf'] = test_conf
    binary_model_conf = ClassifierConfFactory.getFactory().fromParam(
        'Sssvdd', classifier_args, logger=logger)
    return binary_model_conf
Example #5
def fromJson(obj):
    validation_conf = None
    if obj['validation_conf'] is not None:
        validation_conf = ValidationDatasetConf.fromJson(
            obj['validation_conf'])
    binary_model_conf = ClassifierConfFactory.getFactory().fromJson(
        obj['models_conf']['binary'])
    conf = AladinConfiguration(obj['auto'], obj['budget'],
                               obj['num_annotations'], binary_model_conf,
                               validation_conf)
    return conf
Example #6
def setExperimentFromArgs(self, args):
    # Build the classifier configuration either from a previously trained
    # model or from the command-line arguments of the selected model.
    factory = ClassifierConfFactory.getFactory()
    if args.model == 'AlreadyTrained':
        self.already_trained = args.model_exp_id
        conf = self.generateAlreadyTrainedConf(factory, args, self.logger)
    else:
        conf = factory.fromArgs(args.model, args, logger=self.logger)
    self.setConf(conf,
                 args.features_file,
                 annotations_filename='ground_truth.csv')
    self.export()
Example #7
def generateParamsFromArgs(args):
    supervised_args = {}
    supervised_args['num_folds'] = 4
    supervised_args['sample_weight'] = False
    supervised_args['families_supervision'] = False
    test_conf = UnlabeledLabeledConf()
    supervised_args['test_conf'] = test_conf
    binary_model_conf = ClassifierConfFactory.getFactory().fromParam(
        'Sssvdd', supervised_args)
    params = ActiveLearningConfiguration.generateParamsFromArgs(
        args, binary_model_conf=binary_model_conf)
    params['batch'] = args.batch
    return params
Example #8
def getMulticlassModel(self):
    # Configure a multiclass (families_supervision=True) logistic regression
    # and instantiate the corresponding model class.
    params = {}
    params['num_folds'] = 4
    params['sample_weight'] = False
    params['families_supervision'] = True
    params['optim_algo'] = 'liblinear'
    params['alerts_conf'] = None
    test_conf = UnlabeledLabeledConf(logger=self.alerts_conf.logger)
    params['test_conf'] = test_conf
    conf = ClassifierConfFactory.getFactory().fromParam(
        'LogisticRegression', params, logger=self.alerts_conf.logger)
    model = conf.model_class(conf)
    return model
Example #9
def fromJson(obj):
    validation_conf = None
    if obj['validation_conf'] is not None:
        validation_conf = ValidationDatasetConf.fromJson(
            obj['validation_conf'])
    multiclass_model_conf = ClassifierConfFactory.getFactory().fromJson(
        obj['models_conf']['multiclass'])
    rare_category_detection_conf = RareCategoryDetectionStrategy.fromJson(
        obj['rare_category_detection_conf'])
    conf = RareCategoryDetectionConfiguration(
        obj['auto'], obj['budget'], rare_category_detection_conf,
        multiclass_model_conf, validation_conf)
    return conf
Example #10
def generateParamsFromArgs(args, logger=None):
    supervised_args = {}
    supervised_args['num_folds'] = 4
    supervised_args['sample_weight'] = False
    supervised_args['families_supervision'] = False
    test_conf = UnlabeledLabeledConf(logger=logger)
    supervised_args['test_conf'] = test_conf
    binary_model_conf = ClassifierConfFactory.getFactory().fromParam(
        'LogisticRegression', supervised_args, logger=logger)
    params = ActiveLearningConfiguration.generateParamsFromArgs(
        args, binary_model_conf=binary_model_conf, logger=logger)
    params['num_annotations'] = args.num_annotations
    return params
Example #11
def aladinMulticlassModelConf(logger):
    classifier_args = {}
    classifier_args['num_folds'] = 4
    classifier_args['sample_weight'] = False
    classifier_args['families_supervision'] = True
    classifier_args['alerts_conf'] = None
    classifier_args['optim_algo'] = 'liblinear'
    test_conf = UnlabeledLabeledConf(logger=logger)
    classifier_args['test_conf'] = test_conf
    factory = ClassifierConfFactory.getFactory()
    multiclass_model_conf = factory.fromParam('LogisticRegression',
                                              classifier_args,
                                              logger=logger)
    return multiclass_model_conf
Example #12
def fromJson(obj):
    validation_conf = None
    if obj['validation_conf'] is not None:
        validation_conf = ValidationDatasetConf.fromJson(
            obj['validation_conf'])
    rare_category_detection_conf = RareCategoryDetectionStrategy.fromJson(
        obj['rare_category_detection_conf'])
    binary_model_conf = ClassifierConfFactory.getFactory().fromJson(
        obj['models_conf']['binary'])
    conf = IlabConfiguration(obj['auto'], obj['budget'],
                             rare_category_detection_conf,
                             obj['num_uncertain'], obj['eps'],
                             binary_model_conf,
                             validation_conf)
    return conf
Example #13
def generateParamsFromArgs(args):
    params = ActiveLearningConfiguration.generateParamsFromArgs(args)
    multiclass_classifier_args = {}
    multiclass_classifier_args['num_folds'] = args.num_folds
    multiclass_classifier_args['sample_weight'] = False
    multiclass_classifier_args['families_supervision'] = True
    multiclass_classifier_args['alerts_conf'] = None
    test_conf = UnlabeledLabeledConf()
    multiclass_classifier_args['test_conf'] = test_conf
    multiclass_conf = ClassifierConfFactory.getFactory().fromParam(
        args.model_class, multiclass_classifier_args)
    rare_category_detection_conf = RareCategoryDetectionStrategy(
        multiclass_conf, args.cluster_strategy, args.num_annotations,
        'uniform')
    params['rare_category_detection_conf'] = rare_category_detection_conf
    params['num_annotations'] = args.num_annotations
    params['multiclass_model_conf'] = multiclass_conf
    return params
Example #14
def generateParamsFromArgs(args):
    params = ActiveLearningConfiguration.generateParamsFromArgs(args)
    multiclass_classifier_args = {}
    multiclass_classifier_args['num_folds'] = args.num_folds
    multiclass_classifier_args['sample_weight'] = False
    multiclass_classifier_args['families_supervision'] = True
    multiclass_classifier_args['optim_algo'] = 'liblinear'
    test_conf = UnlabeledLabeledConf()
    multiclass_classifier_args['test_conf'] = test_conf
    multiclass_conf = ClassifierConfFactory.getFactory().fromParam(
        'LogisticRegression', multiclass_classifier_args)
    rare_category_detection_conf = RareCategoryDetectionStrategy(
        multiclass_conf, args.cluster_strategy, args.num_annotations,
        'uniform')
    params['rare_category_detection_conf'] = rare_category_detection_conf
    params['num_uncertain'] = args.num_uncertain
    params['eps'] = 0.49
    return params
Example #15
def generateParser():
    parser = argparse.ArgumentParser(
        description='Learn a detection model. ' +
        'The ground-truth must be stored in annotations/ground_truth.csv.')
    Experiment.projectDatasetFeturesParser(parser)
    models = [
        'LogisticRegression', 'Svc', 'GaussianNaiveBayes', 'DecisionTree',
        'RandomForest', 'GradientBoosting'
    ]
    subparsers = parser.add_subparsers(dest='model')
    factory = ClassifierConfFactory.getFactory()
    for model in models:
        model_parser = subparsers.add_parser(model)
        factory.generateParser(model, model_parser)
    # Add a subparser for reusing an already trained model.
    already_trained = subparsers.add_parser('AlreadyTrained')
    factory.generateParser('AlreadyTrained', already_trained)
    return parser
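A minimal usage sketch for this parser, assuming it is invoked from a command-line entry point and that a logging.Logger named logger is already configured; both are assumptions, and the model-specific options added by factory.generateParser are not documented in this listing.

# Hypothetical entry point built around generateParser().
parser = generateParser()
args = parser.parse_args()
factory = ClassifierConfFactory.getFactory()
if args.model == 'AlreadyTrained':
    pass  # reuse a previously trained model, as in setExperimentFromArgs above
else:
    conf = factory.fromArgs(args.model, args, logger=logger)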
Example #16
def generateParamsFromArgs(args, binary_model_conf=None):
    # Build a binary classifier configuration from the command-line
    # arguments when none is supplied by the caller.
    if binary_model_conf is None:
        supervised_args = {}
        supervised_args['num_folds'] = args.num_folds
        supervised_args['sample_weight'] = args.sample_weight
        supervised_args['families_supervision'] = False
        supervised_args['test_conf'] = UnlabeledLabeledConf()
        binary_model_conf = ClassifierConfFactory.getFactory().fromParam(
            args.model_class, supervised_args)

    active_learning_params = {}
    active_learning_params['auto'] = args.auto
    active_learning_params['budget'] = args.budget
    active_learning_params['binary_model_conf'] = binary_model_conf

    validation_conf = None
    if args.validation_dataset is not None:
        validation_conf = ValidationDatasetConf(args.validation_dataset)
    active_learning_params['validation_conf'] = validation_conf

    return active_learning_params
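All of the snippets above obtain their configurations through ClassifierConfFactory.getFactory(), whose implementation the listing never shows. The following is a generic sketch of the registration-based factory pattern these calls suggest; the class name ConfFactory, the registerClass method, and the '__type__' dispatch key are illustrative assumptions, not SecuML's actual code.

class ConfFactory(object):

    def __init__(self):
        # Maps a classifier name to its configuration class.
        self.register = {}

    def registerClass(self, name, conf_class):
        self.register[name] = conf_class

    def fromParam(self, name, params, logger=None):
        # Build a configuration object from a dictionary of parameters.
        return self.register[name](logger=logger, **params)

    def fromJson(self, obj, logger=None):
        # Dispatch on a type key assumed to be stored in the serialized dict.
        return self.register[obj['__type__']].fromJson(obj)

conf_factory = ConfFactory()

def getFactory():
    # Single shared factory instance, mirroring ClassifierConfFactory.getFactory().
    return conf_factory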