Example #1
 def __init__(self, D, H, W, K, iternum):
     Classifier.__init__(self, D, H, W, K, iternum)
     self.L = 100  # size of hidden layer
     """ Layer 1 Parameters """
     # weight matrix: [M * L]
     self.A1 = 0.01 * np.random.randn(self.M, self.L)
     # bias: [1 * L]
     self.b1 = np.zeros((1, self.L))
     """ Layer 3 Parameters """
     # weight matrix: [L * K]
     self.A3 = 0.01 * np.random.randn(self.L, K)
     # bias: [1 * K]
     self.b3 = np.zeros((1, K))
     """ Hyperparams """
     # learning rate
     self.rho = 1e-2
     # momentum
     self.mu = 0.9
     # reg strength
     self.lam = 0.1
     # velocity for A1: [M * L]
     self.v1 = np.zeros((self.M, self.L))
     # velocity for A3: [L * K]
     self.v3 = np.zeros((self.L, K))
     return
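
The velocity buffers v1 and v3, together with rho (learning rate), mu (momentum) and lam (regularization strength), suggest a classical momentum-SGD update in the training loop, which is not shown in this snippet. A minimal sketch of such an update step, assuming gradients dA1 and dA3 with the same shapes as A1 and A3 (these names are assumptions, not part of the source):

    # Hypothetical momentum-SGD step; dA1 and dA3 are assumed gradients
    # of the data loss with respect to A1 and A3.
    self.v1 = self.mu * self.v1 - self.rho * (dA1 + self.lam * self.A1)
    self.A1 += self.v1
    self.v3 = self.mu * self.v3 - self.rho * (dA3 + self.lam * self.A3)
    self.A3 += self.v3
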
Example #2
  def __init__(self, D, H, W, K, iternum):
    Classifier.__init__(self, D, H, W, K, iternum)
    self.L = 100 # size of hidden layer

    """ Layer 1 Parameters """
    # weight matrix: [M * L]
    self.A1 = 0.01 * np.random.randn(self.M, self.L)
    # bias: [1 * L]
    self.b1 = np.zeros((1,self.L))

    """ Layer 3 Parameters """
    # weight matrix: [L * K]
    self.A3 = 0.01 * np.random.randn(self.L, K)
    # bias: [1 * K]
    self.b3 = np.zeros((1,K))

    """ Hyperparams """
    # learning rate
    self.rho = 1e-2
    # momentum
    self.mu = 0.9
    # reg strength
    self.lam = 0.1
    # velocity for A1: [M * L]
    self.v1 = np.zeros((self.M, self.L))
    # velocity for A3: [L * K] 
    self.v3 = np.zeros((self.L, K))
    return
Example #3
    def __init__(self,
                 engine,
                 recorder,
                 window_size=2.0,
                 window_step=0.5,
                 freqs=[60 / 4., 60 / 5., 60 / 6., 60 / 7.],
                 bandpass=[2, 45],
                 cl_type='MNEC',
                 nharmonics=3):
        """ Constructor.

        Required parameters:
        recorder: The Recorder object to read data from

        Keyword parameters (configures the classifier): 
        window_size: The window size in seconds to use on the data
        window_step: The window step in seconds to use on the data
        freqs: The frequencies in Hertz of the SSVEP stimuli to look for.
        bandpass: [lo, hi] cutoff frequencies for the bandpass filter to use on the data
        """
        self.window_size = window_size
        self.window_step = window_step
        self.freqs = freqs
        self.bandpass = bandpass
        self.nharmonics = nharmonics
        self.pipeline = None
        self.cl_type = cl_type

        # Figure out a sane target sample rate, using only a decimation factor
        self.target_sample_rate = np.floor(
            recorder.sample_rate /
            np.max([1, np.floor(recorder.sample_rate / 200)]))

        Classifier.__init__(self, engine, recorder)
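
The target-rate computation above picks the largest integer decimation factor that keeps the resulting rate at or above roughly 200 Hz. For instance (values assumed, not from the source): with recorder.sample_rate = 1000 Hz the factor is floor(1000 / 200) = 5, giving a target rate of floor(1000 / 5) = 200 Hz; with 250 Hz the factor is max(1, floor(250 / 200)) = 1, so the rate stays at 250 Hz.
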
Example #4
    def __init__(self, fname, *args, **kargs):
        Classifier.__init__(self, fname, *args, **kargs)

        # sometimes a threshold value is trained during Bayesian
        # classification to avoid classifying too many 'documents' as
        # one kind or the other
        self.thresholds = [1.0, 1.0]
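
The comment describes a common guard in Bayesian text classification: a document is only assigned a class if that class beats the alternative by some margin. One way such per-class thresholds are often applied (a sketch of an assumed decision rule; self.prob is a hypothetical per-class scoring helper, not part of this snippet):

    # Hypothetical decision rule using the thresholds above (not from the source):
    # accept class i only if its score exceeds thresholds[i] times the score of
    # the competing class; otherwise fall back to a neutral default.
    def classify(self, document, default=None):
        p = [self.prob(document, c) for c in (0, 1)]  # assumed scoring helper
        if p[0] > self.thresholds[0] * p[1]:
            return 0
        if p[1] > self.thresholds[1] * p[0]:
            return 1
        return default
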
Example #5
    def __init__(self, engine, recorder, window=(0, 1.0), bandpass=[0.5, 15]):
        """ Constructor.

        Required parameters:
        recorder: The imec.Recorder object to read data from

        Keyword parameters (configures the classifier): 
        window: A pair (from, to) in samples of the window to extract around the stimulation onset
        bandpass: [lo, hi] cutoff frequencies for the bandpass filter to use on the data
        """
        assert len(window) == 2

        # Create pipeline
        self.bp_node = psychic.nodes.OnlineFilter(
            lambda s: scipy.signal.iirfilter(
                3, [bandpass[0] / (s / 2.0), bandpass[1] / (s / 2.0)]))
        self.preprocessing = psychic.nodes.Chain([self.bp_node])

        self.window = window
        self.cl_lab = None
        self.format = 'png'

        Classifier.__init__(self, engine, recorder)

        self.logger.info('sample_rate: %d Hz' % recorder.sample_rate)
        self.logger.info('bandpass: %s' % bandpass)
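
The lambda handed to OnlineFilter receives a sample rate s (as the s / 2.0 Nyquist normalization suggests) and returns IIR filter coefficients with the cutoffs expressed as fractions of the Nyquist frequency. A standalone sketch of the same filter design outside the pipeline, with assumed values that are not part of the source:

    import scipy.signal

    s = 256.0         # sample rate in Hz (assumed)
    lo, hi = 0.5, 15  # bandpass cutoffs in Hz, matching the default argument
    # 3rd-order Butterworth bandpass (iirfilter defaults), cutoffs in units of Nyquist
    b, a = scipy.signal.iirfilter(3, [lo / (s / 2.0), hi / (s / 2.0)])
    # filtered = scipy.signal.lfilter(b, a, data)  # 'data' is a hypothetical 1-D sample array
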
Example #6
    def __init__(self, engine, recorder, window_size=2.0, window_step=0.5, ncomp=4, bandpass=[8, 15]):
        """ Constructor.

        Required parameters:
        recorder: The Recorder object to read data from

        Keyword parameters (configures the classifier): 
        window_size: The window size in seconds to use on the data
        window_step: The window step in seconds to use on the data
        ncomp: Number of ICA components to use
        bandpass: [lo, hi] cutoff frequencies for the bandpass filter to use on the data
        """
        self.window_size = window_size
        self.window_step = window_step
        self.ncomp = ncomp
        self.bandpass = bandpass
        self.pipeline = None
        self.target_sample_rate = 256
    
        Classifier.__init__(self, engine, recorder)

        self.logger.info("sample_rate: %f" % recorder.sample_rate)
        self.logger.info("window_size: %f" % window_size)
        self.logger.info("window_step: %f" % window_step)
        self.logger.info("ncomp: %d" % ncomp)
        self.logger.info("bandpass: %s" % bandpass)
Example #7
    def __init__(self, engine, recorder, num_repetitions=10, num_options=7, window=(0.0, 1.0), bandpass=[0.5, 15]):
        """ Constructor.

        Required parameters:
        recorder: The imec.Recorder object to read data from

        Keyword parameters (configures the classifier): 
        window: A pair (from, to) in seconds of the window to extract around the stimulation onset
        bandpass: [lo, hi] cutoff frequencies for the bandpass filter to use on the data
        """

        assert len(window) == 2

        self.num_repetitions = num_repetitions
        self.num_options = num_options
        self.target_sample_rate = 128
        self.window = window
        self.window_samples = (int(recorder.sample_rate*window[0]), int(recorder.sample_rate*window[1]))
        self.target_window = (int(self.target_sample_rate*window[0]), int(self.target_sample_rate*window[1]))

        self.mdict = {}
        for i in range(1,self.num_options+1):
            self.mdict[i] = 'target %02d' % i

        # Create pipelines
        self.preprocessing = psychic.nodes.Chain([
            psychic.nodes.OnlineFilter( lambda s : scipy.signal.iirfilter(3, [bandpass[0]/(s/2.0), bandpass[1]/(s/2.0)]) ),
            psychic.nodes.Resample(self.target_sample_rate, max_marker_delay=1),
        ])

        self.slice_node = psychic.nodes.OnlineSlice(self.mdict, window)

        self.classification = psychic.nodes.Chain([
            #psychic.nodes.Blowup(100),
            psychic.nodes.Mean(axis=2),
            sklearn.grid_search.GridSearchCV(
                sklearn.svm.LinearSVC(),
                {'C': numpy.logspace(-3, 5, 10)},
                cv=5,
            )
        ])

        Classifier.__init__(self, engine, recorder)

        self.logger.info('sample_rate: %d Hz' % recorder.sample_rate)
        self.logger.info('bandpass: %s' % bandpass)
        self.logger.info('window: %s' % str(window))
        self.logger.info('num_repetitions: %d' % num_repetitions)

        # Construct feature labels
        self.channel_labels = recorder.channel_names
        self.time_labels = (\
                (numpy.arange(self.target_window[1]-self.target_window[0])
                  + self.target_window[0]) \
                / float(self.target_sample_rate)).tolist()
        self.repetition_labels = list(range(self.num_repetitions))
        self.feat_lab = [
            [self.channel_labels[i] for i in self.recorder.target_channels],
            self.time_labels,
            self.repetition_labels]
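
The window is converted from seconds into sample indices twice: once at the recorder's native rate and once at the 128 Hz target rate used after resampling. For instance (the recorder rate is assumed, not from the source), with recorder.sample_rate = 512 Hz and the default window (0.0, 1.0), window_samples becomes (0, 512), target_window becomes (0, 128), and time_labels runs from 0 to 127/128 s in steps of 1/128 s.
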
Example #8
    def __init__(self, fname, *args, **kargs):
        Classifier.__init__(self, fname, *args, **kargs)

        # sometimes a threshold value is trained during Bayesian
        # classification to avoid classifying too many 'documents' as
        # one kind or the other
        self.thresholds = [1.0, 1.0]
Example #9
 def __init__(self,
              train_set,
              epochs=10,
              eta=None,
              ratio=None,
              validate_set=None,
              num_classes=3):
     Classifier.__init__(self, train_set, epochs, eta, ratio, validate_set,
                         num_classes)
Example #10
    def __init__(self,
                 chars,
                 maxsentlen,
                 maxwordlen,
                 use_alphabets=False,
                 hyper_parameters=classifier.DEFAULT_HYPER_PARAMETERS,
                 model=None):

        Classifier.__init__(self, chars, self.labels, maxsentlen, maxwordlen,
                            use_alphabets, hyper_parameters, model)
Example #11
 def __init__(self,
              train_set,
              test_set,
              epochs=10,
              eta=0.1,
              lamda=0.1,
              ratio=None,
              validate_set=None,
              num_classes=3):
     Classifier.__init__(self, train_set, test_set, epochs, eta, ratio,
                         validate_set, num_classes)
     self.lamda = lamda
Example #12
    def __init__(self, rawfname, min_occurences=5, **kargs):
        Classifier.__init__(self, rawfname, **kargs)

        self.min_occurences = min_occurences

        self.all_training_examples = []

        self.all_features = {}
        self.model = None

        self.filesubset = kargs.get('filesubset', 3000)

        self.max_iter = kargs.get('max_iter', 4)
Example #13
    def __init__(self):
        ''' Constructor. '''

        Classifier.__init__(self)

        self.name = 'VGG19 (Imagenet pre-trained) + MLP'

        self._VGG19 = None
        self._MLP = None

        self._VGG19_features_for_kfold = None
        self._VGG19_features_calculated = False
Example #14
 def __init__(self, D, H, W, K, iternum):
     Classifier.__init__(self, D, H, W, K, iternum)
     """ Parameters """
     # weight matrix: [M * K]
     self.A = 0.01 * np.random.randn(self.M, K)
     # bias: [1 * K]
     self.b = np.zeros((1, K))
     """ Hyperparams """
     # learning rate
     self.rho = 1e-5
     # momentum
     self.mu = 0.9
     # reg strength
     self.lam = 1e1
     # velocity for A: [M * K]
     self.v = np.zeros((self.M, K))
     return
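
With a weight matrix A of shape [M x K] and a bias of shape [1 x K], this sets up a single linear layer mapping M input features to K class scores. A sketch of the corresponding forward pass, where X is an assumed [N x M] data matrix that is not part of the snippet:

    # Hypothetical forward pass: N examples with M features -> [N x K] class scores
    scores = X.dot(self.A) + self.b   # the [1 x K] bias row broadcasts over all N rows
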
Example #15
  def __init__(self, D, H, W, K, iternum):
    Classifier.__init__(self, D, H, W, K, iternum)
    """ Parameters """
    # weight matrix: [M * K]
    self.A = 0.01 * np.random.randn(self.M, K)
    # bias: [1 * K]
    self.b = np.zeros((1,K))

    """ Hyperparams """
    # learning rate
    self.rho = 1e-5
    # momentum
    self.mu = 0.9
    # reg strength
    self.lam = 1e1
    # velocity for A: [M * K]
    self.v = np.zeros((self.M, K))
    return
Example #16
    def __init__(self, rawfname, min_occurences=5, **kargs):
        Classifier.__init__(self, rawfname, **kargs)

        self.min_occurences = min_occurences

        # Maintains all training examples
        self.all_training_examples = []

        # Each example contains only keys for features which occurred more
        # than <min_occurences> times in the training set
        self.shrunk_training_examples = []

        # { feature -> num times <feature> was seen }
        self.all_features = {}
        self.model = None

        self.filesubset = kargs.get('filesubset', 3000)

        self.max_iter = kargs.get('max_iter', 4)
Example #17
    def __init__(self, rawfname, min_occurences=5, **kargs):
        Classifier.__init__(self, rawfname, **kargs)

        self.min_occurences = min_occurences

        # Maintains all training examples
        self.all_training_examples = []

        # Each example contains only keys for features which occurred more
        # than <min_occurences> times in the training set
        self.shrunk_training_examples = []

        # { feature -> num times <feature> was seen }
        self.all_features = {}
        self.model = None

        self.filesubset = kargs.get('filesubset', 3000)

        self.max_iter = kargs.get('max_iter', 4)
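
The comments describe a two-stage feature store: all_features counts how often each feature occurs, and shrunk_training_examples keeps only the features that clear the min_occurences cutoff. A sketch of that pruning step, assuming each training example is a dict mapping feature names to values (an assumption, not shown in the snippet):

    # Hypothetical pruning pass over the collected examples (not from the source):
    frequent = set(f for f, n in self.all_features.items() if n > self.min_occurences)
    self.shrunk_training_examples = [
        dict((f, v) for f, v in example.items() if f in frequent)
        for example in self.all_training_examples
    ]
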
Example #18
 def __init__(self, feature):
     Classifier.__init__(self, feature)
     self.threshold = None
Example #19
 def __init__(self, microDataLoc, estimators=55, depth=7):
     Classifier.__init__(self, microDataLoc)
     self.estimators = estimators
     self.depth = depth
Example #20
 def __init__(self,
              microDataLoc,
              clusterNum=1,
              macroDataLoc="data/clusterData.txt"):
     Classifier.__init__(self, microDataLoc, clusterNum, macroDataLoc)
Example #21
 def __init__(self, fname, *args, **kargs):
     # type: (object, object, object) -> object
     Classifier.__init__(self, fname, *args, **kargs)
     self.thresholds = [1.0, 1.0]
Example #22
	def __init__(self, root_dir, input_text, config_dirs):
		Classifier.__init__(self, input_text)

		self.master_word_list = []
		self.word_features = []
		self.configs = utils.load_json_file(config_dirs)
Example #23
  def __init__(self, D, H, W, K, iternum):
    Classifier.__init__(self, D, H, W, K, iternum)

    """ 
    Layer 1 Parameters (Conv 32 x 32 x 16) 
    K = 16, F = 5, S = 1, P = 2
    weight matrix: [K1 * D * F1 * F1]
    bias: [K1 * 1]
    """
    K1, F1, self.S1, self.P1 = 16, 5, 1, 2
    self.A1 = 0.01 * np.random.randn(K1, D, F1, F1)
    self.b1 = np.zeros((K1, 1))
    H1 = (H - F1 + 2*self.P1) // self.S1 + 1
    W1 = (W - F1 + 2*self.P1) // self.S1 + 1

    """ 
    Layer 3 Parameters (Pool 16 x 16 x 16) 
    K = 16, F = 2, S = 2
    """
    K3, self.F3, self.S3 = K1, 2, 2
    H3 = (H1 - self.F3) // self.S3 + 1
    W3 = (W1 - self.F3) // self.S3 + 1
 
    """ 
    Layer 4 Parameters (Conv 16 x 16 x 20) 
    K = 20, F = 5, S = 1, P = 2
    weight matrix: [K4 * K3 * F4 * F4]
    bias: [K4 * 1]
    """
    K4, F4, self.S4, self.P4 = 20, 5, 1, 2
    self.A4 = 0.01 * np.random.randn(K4, K3, F4, F4)
    self.b4 = np.zeros((K4, 1))
    H4 = (H3 - F4 + 2*self.P4) // self.S4 + 1
    W4 = (W3 - F4 + 2*self.P4) // self.S4 + 1

    """ 
    Layer 6 Parameters (Pool 8 x 8 x 20) 
    K = 20, F = 2, S = 2
    """
    K6, self.F6, self.S6 = K4, 2, 2
    H6 = (H4 - self.F6) // self.S6 + 1
    W6 = (W4 - self.F6) // self.S6 + 1

    """ 
    Layer 7 Parameters (Conv 8 x 8 x 20) 
    K = 20, F = 5, S = 1, P = 2
    weight matrix: [K7 * K6 * F7 * F7]
    bias: [K7 * 1]
    """
    K7, F7, self.S7, self.P7 = 20, 5, 1, 2
    self.A7 = 0.01 * np.random.randn(K7, K6, F7, F7)
    self.b7 = np.zeros((K7, 1))
    H7 = (H6 - F7 + 2*self.P7) // self.S7 + 1
    W7 = (W6 - F7 + 2*self.P7) // self.S7 + 1

    """ 
    Layer 9 Parameters (Pool 4 x 4 x 20) 
    K = 20, F = 2, S = 2
    """
    K9, self.F9, self.S9 = K7, 2, 2
    H9 = (H7 - self.F9) // self.S9 + 1
    W9 = (W7 - self.F9) // self.S9 + 1

    """ 
    Layer 10 Parameters (FC 1 x 1 x K)
    weight matrix: [(K6 * H_6 * W_6) * K] 
    bias: [1 * K]
    """
    self.A10 = 0.01 * np.random.randn(K9 * H9 * W9, K)
    self.b10 = np.zeros((1, K))

    """ Hyperparams """
    # learning rate
    self.rho = 1e-2
    # momentum
    self.mu = 0.9
    # reg strength
    self.lam = 0.1
    # velocity for A1: [K1 * D * F1 * F1]
    self.v1 = np.zeros((K1, D, F1, F1))
    # velocity for A4: [K4 * K3 * F4 * F4]
    self.v4 = np.zeros((K4, K3, F4, F4))
    # velocity for A7: [K7 * K6 * F7 * F7]
    self.v7 = np.zeros((K7, K6, F7, F7))
    # velocity for A10: [(K9 * H9 * W9) * K]   
    self.v10 = np.zeros((K9 * H9 * W9, K))
 
    return
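
The layer-size comments can be checked against the dimension formulas above. Assuming 32 x 32 inputs as the comments indicate: each convolution uses F = 5, S = 1, P = 2, so (32 - 5 + 2*2) // 1 + 1 = 32 and the spatial size is preserved, while each 2 x 2 stride-2 pool gives (32 - 2) // 2 + 1 = 16, then 8, then 4. The fully connected layer therefore sees K9 * H9 * W9 = 20 * 4 * 4 = 320 inputs per example.
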
Example #24
 def __init__(self):
     Classifier.__init__(self)
     self.class_priors = None
     self.conditional_probabilities = None
     self.class_indices = {}
     self.class_term_probability = {}
Example #25
    def __init__(self, D, H, W, K, iternum, verbose=False):
        Classifier.__init__(self, D, H, W, K, iternum)
        self.verbose = verbose
        """ 
    Layer 1 Parameters (Conv 32 x 32 x 16) 
    K = 16, F = 5, S = 1, P = 2
    weight matrix: [K1 * D * F1 * F1]
    bias: [K1 * 1]
    """
        K1, F1, self.S1, self.P1 = 16, 5, 1, 2
        self.A1 = 0.01 * np.random.randn(K1, D, F1, F1)
        self.b1 = np.zeros((K1, 1))
        H1 = (H - F1 + 2 * self.P1) // self.S1 + 1
        W1 = (W - F1 + 2 * self.P1) // self.S1 + 1
        """ 
    Layer 3 Parameters (Pool 16 x 16 x 16) 
    K = 16, F = 2, S = 2
    """
        K3, self.F3, self.S3 = K1, 2, 2
        H3 = (H1 - self.F3) // self.S3 + 1
        W3 = (W1 - self.F3) // self.S3 + 1
        """ 
    Layer 4 Parameters (Conv 16 x 16 x 20) 
    K = 20, F = 5, S = 1, P = 2
    weight matrix: [K4 * K3 * F4 * F4]
    bias: [K4 * 1]
    """
        K4, F4, self.S4, self.P4 = 20, 5, 1, 2
        self.A4 = 0.01 * np.random.randn(K4, K3, F4, F4)
        self.b4 = np.zeros((K4, 1))
        H4 = (H3 - F4 + 2 * self.P4) // self.S4 + 1
        W4 = (W3 - F4 + 2 * self.P4) // self.S4 + 1
        """ 
    Layer 6 Parameters (Pool 8 x 8 x 20) 
    K = 20, F = 2, S = 2
    """
        K6, self.F6, self.S6 = K4, 2, 2
        H6 = (H4 - self.F6) // self.S6 + 1
        W6 = (W4 - self.F6) // self.S6 + 1
        """ 
    Layer 7 Parameters (Conv 8 x 8 x 20) 
    K = 20, F = 5, S = 1, P = 2
    weight matrix: [K7 * K6 * F7 * F7]
    bias: [K7 * 1]
    """
        K7, F7, self.S7, self.P7 = 20, 5, 1, 2
        self.A7 = 0.01 * np.random.randn(K7, K6, F7, F7)
        self.b7 = np.zeros((K7, 1))
        H7 = (H6 - F7 + 2 * self.P7) // self.S7 + 1
        W7 = (W6 - F7 + 2 * self.P7) // self.S7 + 1
        """ 
    Layer 9 Parameters (Pool 4 x 4 x 20) 
    K = 20, F = 2, S = 2
    """
        K9, self.F9, self.S9 = K7, 2, 2
        H9 = (H7 - self.F9) // self.S9 + 1
        W9 = (W7 - self.F9) // self.S9 + 1
        """ 
    Layer 10 Parameters (FC 1 x 1 x K)
    weight matrix: [(K6 * H_6 * W_6) * K] 
    bias: [1 * K]
    """
        self.A10 = 0.01 * np.random.randn(K9 * H9 * W9, K)
        self.b10 = np.zeros((1, K))
        """ Hyperparams """
        # learning rate
        self.rho = 1e-2
        # momentum
        self.mu = 0.9
        # reg strength
        self.lam = 0.1
        # velocity for A1: [K1 * D * F1 * F1]
        self.v1 = np.zeros((K1, D, F1, F1))
        # velocity for A4: [K4 * K3 * F4 * F4]
        self.v4 = np.zeros((K4, K3, F4, F4))
        # velocity for A7: [K7 * K6 * F7 * F7]
        self.v7 = np.zeros((K7, K6, F7, F7))
        # velocity for A10: [(K9 * H9 * W9) * K]
        self.v10 = np.zeros((K9 * H9 * W9, K))

        return
Example #26
    def __init__(self, root_dir, input_text, config_dirs):
        Classifier.__init__(self, input_text)

        self.master_word_list = []
        self.word_features = []
        self.configs = utils.load_json_file(config_dirs)