class Processor(object):
    """Cluster and classify data files by FFT-derived statistics.

    Relies on external ``Analyzer`` (feature extraction) and
    ``Classificator`` (network persistence / construction) classes and on
    ``np`` (NumPy), all expected to be in scope at module level.
    """

    def __init__(self, datafile='data.txt', statsNum=3):
        # datafile: path of the intermediate text file holding one
        #           space-separated stats row per processed input file.
        # statsNum: period of the normalization groups used by normalize().
        self.analyzer = Analyzer()
        self.classificator = Classificator()
        self.datafile = datafile
        self.statsNum = statsNum

    def writeStats(self, files):
        """Extract stats for each input file and write one text row each.

        Overwrites self.datafile; one space-separated line per file.
        """
        # 'with' guarantees the handle is closed even if analysis raises
        # (the original used open()/close() and leaked on error).
        with open(self.datafile, 'w') as out:
            for name in files:  # renamed from 'file' (shadowed a builtin)
                ffts = self.analyzer.getFFTs(name)
                stats = self.analyzer.getStats(ffts)
                out.write(' '.join(str(x) for x in stats) + '\n')

    def normalize(self, data):
        """Scale feature columns of ``data`` in place by per-group means.

        Every statsNum-th column starts a group; that column's mean divides
        it and the following statsNum-1 columns (meanVal deliberately
        carries over between iterations). Returns the same, mutated array.
        NOTE(review): a group-leading column with mean 0 divides by zero —
        preserved from the original; confirm inputs exclude that case.
        """
        transposed = data.transpose()
        meanVal = 1
        for i in range(len(transposed)):
            if i % self.statsNum == 0:
                meanVal = np.mean(transposed[i])
            transposed[i] /= meanVal
        return transposed.transpose()

    def cluster(self, files, clustersNum):
        """Cluster ``files`` into ``clustersNum`` groups.

        Writes stats to self.datafile, reads them back, normalizes,
        trains a fresh network, saves it, and returns the grouped result.
        """
        self.writeStats(files)
        # BUG FIX: the original called .split('\n') on the file object
        # itself (file objects have no split method and this raised
        # AttributeError) — read the contents first, and close the handle.
        with open(self.datafile) as datafile:
            rows = datafile.read().split('\n')
        # The trailing '\n' leaves one empty string at the end; drop it.
        data = np.array([[float(x) for x in row.split(' ')]
                         for row in rows[:-1]])
        data = self.normalize(data)
        net = self.classificator.newnet(clustersNum)
        net.train(data, epochs=500)
        result = net.sim(data)
        self.classificator.savenet(net)
        return self.classificator.getGroupedResult(result)

    def classify(self, file):
        """Classify a single file with the previously saved network."""
        ffts = self.analyzer.getFFTs(file)
        stats = self.analyzer.getStats(ffts)
        net = self.classificator.loadnet()
        return net.sim(stats)