def getLabel(self, value):
    """Predict the label for *value* with the trained ``self.model``.

    Builds a one-row feature vector via FeatureFactory, wraps it in the
    ``{'attributes': {index-str: value}}`` record shape the model expects,
    and returns the class whose predicted score is highest.
    """
    feature = FeatureFactory()
    feature.createFeature(value, "")
    row = feature.datatable[0]
    # Avoid shadowing the builtin `dict`; index keys are stringified ints.
    record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
    res = self.model.predict(record)
    # Iterating the mapping directly replaces Py2-only res.iterkeys() and
    # behaves identically on both Python 2 and 3.
    return max(res, key=res.get)
def getLabel(self, value):
    """Predict the label for *value* with the trained ``self.model``.

    Converts *value* to a single feature row via FeatureFactory, packages
    it as the ``{'attributes': {...}}`` record the model's ``predict``
    expects, and returns the highest-scoring class.
    """
    feature = FeatureFactory()
    feature.createFeature(value, "")
    row = feature.datatable[0]
    # Named `record` (not `dict`) to avoid shadowing the builtin.
    record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
    res = self.model.predict(record)
    # max over the mapping's keys; replaces Py2-only res.iterkeys() and the
    # unused `attributes` accumulator the original built.
    return max(res, key=res.get)
def getClass(setting, value):
    """Classify *value* with a classifier deserialized from *setting*.

    *setting* is a string-escaped pickle of a trained classifier (the
    format produced by ``IDCTClassifier.learnClassifer``). Returns the
    highest-scoring predicted class label.
    """
    # NOTE(review): "string-escape" is a Python 2-only codec, and
    # pickle.loads executes arbitrary code if *setting* comes from an
    # untrusted source -- confirm provenance before relying on this.
    setting = setting.decode("string-escape")
    classifier = pickle.loads(setting)
    feature = FeatureFactory()
    feature.createFeature(value, "")
    row = feature.datatable[0]
    # Avoid shadowing builtin `dict`; drop the unused `attributes` list.
    record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
    res = classifier.predict(record)
    # Cross-version replacement for Py2-only res.iterkeys().
    return max(res, key=res.get)
def getClass(setting, value):
    """Classify *value* using a classifier unpickled from *setting*.

    *setting* holds a string-escaped pickle (as emitted by
    ``IDCTClassifier.learnClassifer``); the decoded classifier's
    ``predict`` result is reduced to the best-scoring label.
    """
    # NOTE(review): Python 2-only codec; pickle.loads on untrusted input is
    # a code-execution risk -- verify where *setting* originates.
    setting = setting.decode("string-escape")
    classifier = pickle.loads(setting)
    feature = FeatureFactory()
    feature.createFeature(value, "")
    row = feature.datatable[0]
    # `record` avoids shadowing the builtin `dict`; the original's unused
    # `attributes` accumulator is removed.
    record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
    res = classifier.predict(record)
    return max(res, key=res.get)
class IDCTClassifier(PartitionClassifierType):
    """Naive-Bayes partition classifier over FeatureFactory feature rows."""

    def __init__(self):
        self.path = "./"
        self.featureFactory = FeatureFactory()

    def addTrainingData(self, value, label):
        # Accumulate one training row (features + class label) into the factory.
        self.featureFactory.createFeature(value, label)

    def learnClassifer(self):
        """Train a NaiveBayes model on the accumulated rows.

        Stores the trained model on ``self.model`` and returns it as a
        string-escaped pickle (Python 2 ``str.encode('string_escape')``).
        """
        model = NaiveBayes()
        attributes = []
        seen = set()
        for j, row in enumerate(self.featureFactory.datatable):
            # Avoid shadowing builtin `dict`; one instance record per row.
            instance = {'cases': 1, 'attributes': {}}
            for i, cell in enumerate(row):
                key = str(i)
                instance['attributes'][key] = cell
                # Register each attribute name once. The original appended a
                # duplicate per row; set_real only needs each name once.
                if key not in seen:
                    seen.add(key)
                    attributes.append(key)
            instance['label'] = self.featureFactory.classes[j]
            model.add_instances(instance)
        model.set_real(attributes)
        model.train()
        self.model = model
        # NOTE(review): 'string_escape' is Python 2-only; kept to preserve
        # the serialized format consumed by getClass.
        return pickle.dumps(model).encode('string_escape')

    def getLabel(self, value):
        """Return the highest-scoring predicted label for *value*."""
        feature = FeatureFactory()
        feature.createFeature(value, "")
        row = feature.datatable[0]
        record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
        res = self.model.predict(record)
        # Cross-version replacement for Py2-only res.iterkeys().
        return max(res, key=res.get)
class IDCTClassifier(PartitionClassifierType):
    """Naive-Bayes partition classifier built from FeatureFactory rows."""

    def __init__(self):
        self.path = "./"
        # Parenthesized print: identical output on Python 2, valid on Python 3.
        print("building classifier")
        self.featureFactory = FeatureFactory()

    def addTrainingData(self, value, label):
        # Feed one labeled example into the feature factory.
        self.featureFactory.createFeature(value, label)

    def learnClassifer(self):
        """Train a NaiveBayes model over all accumulated training rows.

        Sets ``self.model`` and returns the model as a string-escaped
        pickle string (Python 2 ``str.encode('string_escape')``).
        """
        model = NaiveBayes()
        attributes = []
        seen = set()
        for j, row in enumerate(self.featureFactory.datatable):
            # `instance` (not `dict`) avoids shadowing the builtin; the
            # original also built a throwaway dict before the loop.
            instance = {'cases': 1, 'attributes': {}}
            for i, cell in enumerate(row):
                key = str(i)
                instance['attributes'][key] = cell
                # Each attribute name is registered once; the original
                # appended a duplicate per training row.
                if key not in seen:
                    seen.add(key)
                    attributes.append(key)
            instance['label'] = self.featureFactory.classes[j]
            model.add_instances(instance)
        model.set_real(attributes)
        model.train()
        self.model = model
        # NOTE(review): 'string_escape' is a Python 2-only codec; retained
        # so consumers of the serialized form keep working.
        return pickle.dumps(model).encode('string_escape')

    def getLabel(self, value):
        """Return the best-scoring predicted label for *value*."""
        feature = FeatureFactory()
        feature.createFeature(value, "")
        row = feature.datatable[0]
        record = {'attributes': {str(i): cell for i, cell in enumerate(row)}}
        res = self.model.predict(record)
        # Iterating the mapping replaces Py2-only res.iterkeys().
        return max(res, key=res.get)