def __init__(self, data_path, **kwargs):
        """Load one pickled feature file per LJ40K emotion from *data_path*.

        Builds ``self.X``: an ``OrderedDict`` mapping each emotion name to a
        float32 matrix of shape ``(n_docs, n_features)``, one row per document.

        Parameters
        ----------
        data_path : str
            Folder containing the per-emotion pickled feature files
            (resolved via ``FeatureList.get_full_data_path``).
        **kwargs :
            ``loglevel`` -- logging level for this instance's logger
            (default: ``logging.ERROR``).
        """
        # Idiomatic default lookup instead of an `'x' not in kwargs` conditional.
        loglevel = kwargs.get('loglevel', logging.ERROR)
        logging.basicConfig(format='[%(levelname)s][%(name)s] %(message)s')
        self.logger = logging.getLogger(__name__+'.'+self.__class__.__name__)
        self.logger.setLevel(loglevel)

        # only use 40 emotions
        self.emotions = filename.emotions['LJ40K']
        self.X = OrderedDict()

        for emotion in self.emotions:

            fpath = FeatureList.get_full_data_path(emotion, data_path)
            self.logger.info("load features from %s", fpath)
            Xy = utils.load_pkl_file(fpath)

            # Pre-allocate one row per document; feature width is taken from the
            # first document (assumes all docs share that width -- TODO confirm).
            self.X[emotion] = np.zeros((len(Xy), Xy[0]['X'].shape[1]), dtype="float32")
            for i, doc in enumerate(Xy):
                # make sure only one feature vector in each doc
                assert doc['X'].shape[0] == 1
                self.X[emotion][i] = doc['X']
    def __init__(self, file_path, **kwargs):
        loglevel = logging.ERROR if 'loglevel' not in kwargs else kwargs['loglevel']
        logging.basicConfig(format='[%(levelname)s][%(name)s] %(message)s')
        self.logger = logging.getLogger(__name__+'.'+self.__class__.__name__)
        self.logger.setLevel(loglevel)

        self.logger.info('load feature file %s' % (file_path))
        Xy = utils.load_pkl_file(file_path)
        n_doc = len(Xy)
        self.X = [Xy[idoc]['X'] for idoc in range(n_doc)]
    else:
        # NOTE(review): dangling `else:` -- the matching `if`/`elif` that picks a
        # debug/verbose loglevel was truncated above this chunk; only the
        # default branch survives here.
        loglevel = logging.ERROR
    # Configure the root logger once for the whole script run.
    logging.basicConfig(format='[%(levelname)s][%(name)s] %(message)s', level=loglevel) 
    logger = logging.getLogger(__name__)

    # pre-checking (left disabled: output-folder creation is commented out here,
    # though an identical active version exists elsewhere in this file)
    # if not os.path.exists(args.output_folder):
    #     logger.info('create output folder %s' % (args.output_folder))
    #     os.makedirs(args.output_folder)

    # load feature list (name -> data-path pairs)
    feature_list = preprocessing.FeatureList(args.feature_list_file)
    emotions = filename.emotions['LJ40K']

    # create fused dataset; idxs is optional -- None means "use all documents"
    idxs = utils.load_pkl_file(args.index_file) if args.index_file is not None else None
    fused_dataset = preprocessing.FusedDataset(idxs, loglevel=loglevel)

    # Register every listed feature set with the fused dataset.
    for feature_name, data_path in feature_list:
        dataset = preprocessing.Dataset(data_path, loglevel=loglevel)
        fused_dataset.add_feature(feature_name, dataset)


    # load models: choose the linear-SVM or kernel-SVM implementation by flag
    if args.linear:
        from model import linearsvm
    else:
        from model import svm

    # main loop
    # NOTE(review): this chunk is truncated -- output_dict is created but the
    # loop that fills it is not visible here.
    output_dict = OrderedDict()
    # Map CLI verbosity flags onto a logging level (debug > verbose > default).
    if args.debug:
        loglevel = logging.DEBUG
    elif args.verbose:
        loglevel = logging.INFO
    else:
        loglevel = logging.ERROR
    logging.basicConfig(format='[%(levelname)s][%(name)s] %(message)s', level=loglevel) 
    logger = logging.getLogger(__name__)

    # pre-checking: make sure the output folder exists before writing anything
    if not os.path.exists(args.output_folder):
        logger.info('create output folder %s' % (args.output_folder))
        os.makedirs(args.output_folder)


    # Optional train/test index file; None means "use every document".
    all_idxs = utils.load_pkl_file(args.index_file) if args.index_file is not None else None
    emotions = filename.emotions['LJ40K']

    for emotion_id in args.emotion_ids:

        emotion_name = emotions[emotion_id]

        # Resolve and load the pickled feature file for this emotion.
        fname = filename.get_filename_by_emotion(emotion_name, args.input_folder)
        fpath = os.path.join(args.input_folder, fname)
        logger.info("load features from %s", fpath)
        Xy = utils.load_pkl_file(fpath)

        # NOTE(review): the emotion name is used as the key twice
        # (['train'][emotion_name][emotion_name]) -- looks intentional for this
        # index-file schema, but verify against the file that produced it.
        idxs = all_idxs['train'][emotion_name][emotion_name] if all_idxs is not None else range(len(Xy))

        # Pre-allocate the training matrix: one row per selected document,
        # feature width taken from the first document.
        X = np.zeros((len(idxs), Xy[0]['X'].shape[1]), dtype="float32")
        logger.info('X.shape = (%u, %u)' % (X.shape[0], X.shape[1]))
        # NOTE(review): chunk truncated here -- the code that fills X is missing.
        # NOTE(review): truncated fragment -- this line was the body of an
        # `elif args.verbose:` whose header was cut off above this chunk.
        loglevel = logging.INFO
    else:
        loglevel = logging.ERROR
    logging.basicConfig(format='[%(levelname)s][%(name)s] %(message)s', level=loglevel) 
    logger = logging.getLogger(__name__)

    # pre-checking: make sure the output folder exists before writing anything
    if not os.path.exists(args.output_folder):
        logger.info('create output folder %s' % (args.output_folder))
        os.makedirs(args.output_folder)

    # load features (name -> data-path pairs)
    feature_list = preprocessing.FeatureList(args.feature_list_file)

    # load the index file (required in this code path, unlike the others)
    idxs = utils.load_pkl_file(args.index_file)

    # create fused dataset
    fused_dataset = preprocessing.FusedDataset(idxs, loglevel=loglevel)

    # Register every listed feature set with the fused dataset.
    for feature_name, data_path in feature_list:
        dataset = preprocessing.Dataset(data_path, loglevel=loglevel)
        fused_dataset.add_feature(feature_name, dataset)

    # read parameter file
    # NOTE(review): `!= None` should idiomatically be `is not None`; also
    # param_dict is only bound when the flag is given -- later uses must guard.
    if args.parameter_file != None:
        param_dict = utils.read_parameter_file(args.parameter_file)

    # main loop
    best_res = {}
    emotions = filename.emotions['LJ40K']

    # load models: choose the linear-SVM or kernel-SVM implementation by flag
    if args.linear:
        from model import linearsvm
    else:
        from model import svm

    # Build one learner (and optionally one scaler) per emotion, then restore
    # its trained model from disk.
    learners = {}
    scalers = {}
    for emotion in emotions:
        learners[emotion] = linearsvm.LinearSVM(loglevel=loglevel) if args.linear else svm.SVM(loglevel=loglevel)
        
        if args.scaler_folder != None:
            fpath = os.path.join(args.scaler_folder, filename.get_filename_by_emotion(emotion, args.scaler_folder))
            scalers[emotion] = utils.load_pkl_file(fpath)

        fpath = os.path.join(args.model_folder, filename.get_filename_by_emotion(emotion, args.model_folder))
        logger.info('loading model for emotion %s' % (emotion))
        learners[emotion].load_model(fpath)


    # main loop: predict per requested emotion
    for emotion_id in args.emotion_ids:

        emotion_name = emotions[emotion_id]
        logger.info('predicting model for emotion "%s"' % emotion_name)

        # create output dir
        emotion_dir =  os.path.join(args.output_folder, emotion_name)
        logger.info('create output folder %s' % (emotion_dir))
        # NOTE(review): chunk truncated here -- the folder creation and the
        # actual prediction/serialization code are not visible.
 def load_results(self, result_files=None):
     """Load pickled result dicts and merge them into ``self.results``.

     Parameters
     ----------
     result_files : iterable of str, optional
         Paths of pickled dicts; later files overwrite earlier keys.
         Default was a mutable ``[]`` -- replaced by the ``None`` sentinel
         to avoid the shared-mutable-default pitfall (behavior unchanged).
     """
     self.results = {}
     for fpath in (result_files or ()):
         res = utils.load_pkl_file(fpath)
         self.results.update(res)
 def load(self, filename):
     """Restore this object's state from a pickled instance at *filename*.

     BUG FIX: the original did ``self = utils.load_pkl_file(filename)``,
     which only rebinds the local name ``self`` and leaves the caller's
     instance untouched.  We instead copy the loaded object's attributes
     onto this instance (assumes load_pkl_file returns an instance with a
     ``__dict__`` -- TODO confirm against the pickling side).
     """
     loaded = utils.load_pkl_file(filename)
     self.__dict__.update(vars(loaded))
 def load_results(self, result_files=None):
     """Load pickled result dicts and merge them into ``self.results``.

     Parameters
     ----------
     result_files : iterable of str, optional
         Paths of pickled dicts; later files overwrite earlier keys.
         Default was a mutable ``[]`` -- replaced by the ``None`` sentinel
         to avoid the shared-mutable-default pitfall (behavior unchanged).
     """
     self.results = {}
     for fpath in (result_files or ()):
         res = utils.load_pkl_file(fpath)
         self.results.update(res)
# NOTE(review): scraper artifact ("Example #10" / "0") -- not Python code;
# commented out so the surrounding definitions remain parseable.
 def load(self, filename):
     """Restore this object's state from a pickled instance at *filename*.

     BUG FIX: the original did ``self = utils.load_pkl_file(filename)``,
     which only rebinds the local name ``self`` and leaves the caller's
     instance untouched.  We instead copy the loaded object's attributes
     onto this instance (assumes load_pkl_file returns an instance with a
     ``__dict__`` -- TODO confirm against the pickling side).
     """
     loaded = utils.load_pkl_file(filename)
     self.__dict__.update(vars(loaded))