def flockFinder(self, filename):
    """Benchmark BFE disk-center generation for a single timestamp.

    Reads the comma-separated dataset *filename*, computes the candidate
    disk centers for timestamp ``'0'`` only, and prints one CSV summary
    line: ``BFE,<epsilon>,<tag>,<center count>,<elapsed seconds>``.

    Unlike the full BFE/LCM variants, this method intentionally skips
    flock chaining and database output — it only times center creation.
    """
    global delta
    global mu
    Maximal.epsilon = self.epsilon
    mu = Maximal.mu = self.mu
    delta = self.delta
    Maximal.precision = 0.0
    # Dataset tag: basename without extension, first character dropped
    # (presumably dataset names carry a one-char prefix — confirm naming).
    tag = filename.split(".")[0].split("/")[-1][1:]
    t1 = time.time()
    # Context manager closes the input file instead of leaking the handle
    # (the original passed open() straight into csv.reader).
    with open(filename, 'r') as infile:
        dataset = csv.reader(infile, delimiter=',')
        points = Maximal.pointTimestamp(dataset)
    # Raises KeyError if the dataset has no timestamp '0' — kept as an
    # early sanity check on the input.
    input_size = len(points['0'])
    # Only the center-generation step is measured; this variant of
    # disksTimestamp returns (centers, treeCenters) for one timestamp.
    centers, treeCenters = Maximal.disksTimestamp(points, '0')
    centers_size = len(centers)
    t2 = round(time.time() - t1, 3)
    print("BFE,{3},{0},{1},{2}".format(tag, centers_size, t2, float(Maximal.epsilon)))
def flockFinder(self, filename, tag):
    """Find flocks with the BFE algorithm over every timestamp of a dataset.

    Loads ``Datasets/<filename>`` (tab-separated, first row skipped as a
    header), builds maximal disks per timestamp, chains them into flocks,
    stores the result set in the ``flocksBFE`` table, and records the run
    (parameters, elapsed time, flock count, *tag*) in the test table.
    """
    global delta
    global mu
    Maximal.epsilon = self.epsilon
    mu = Maximal.mu = self.mu
    delta = self.delta
    Maximal.precision = 0.001
    # Context manager closes the dataset file instead of leaking the
    # handle (the original passed open() straight into csv.reader).
    with open('Datasets/' + filename, 'r') as infile:
        dataset = csv.reader(infile, delimiter='\t')
        next(dataset)  # skip header row
        t1 = time.time()
        points = Maximal.pointTimestamp(dataset)
    timestamps = sorted(map(int, points.keys()))
    previousFlocks = []
    keyFlock = 1
    diskID = 1
    stdin = []
    for timestamp in timestamps:
        centersDiskCompare, treeCenters, disksTime = Maximal.disksTimestamp(
            points, timestamp)
        # disksTimestamp apparently signals "no candidate centers" with a
        # literal 0 — TODO confirm; `== 0` is deliberate, not truthiness.
        if centersDiskCompare == 0:
            continue
        maximalDisks, diskID = Maximal.maximalDisksTimestamp(
            centersDiskCompare, treeCenters, disksTime, timestamp, diskID)
        previousFlocks, keyFlock, stdin = BFEFlock.flocks(
            maximalDisks, previousFlocks, int(timestamp), keyFlock, stdin)
    table = ('flocksBFE')
    print("Flocks: ", len(stdin))
    flocks = len(stdin)
    stdin = '\n'.join(stdin)
    db = Pdbc.DBConnector()
    db.createTableFlock(table)
    # NOTE(review): `table` contains no format placeholders, so
    # .format(filename) is a no-op; kept for call-site compatibility.
    db.resetTable(table.format(filename))
    db.copyToTable(table, io.StringIO(stdin))
    t2 = round(time.time() - t1, 3)
    print("\nTime: ", t2)
    db.createTableTest()
    db.insertTest(filename, self.epsilon, mu, delta, t2, flocks, tag)
def flockFinder(self, filename, tag):
    """Find flocks with the BFE algorithm over every timestamp of a dataset.

    Loads ``Datasets/<filename>`` (tab-separated, first row skipped as a
    header), builds maximal disks per timestamp, chains them into flocks,
    stores the result set in the ``flocksBFE`` table, and records the run
    (parameters, elapsed time, flock count, *tag*) in the test table.
    """
    global delta
    global mu
    Maximal.epsilon = self.epsilon
    mu = Maximal.mu = self.mu
    delta = self.delta
    Maximal.precision = 0.001
    # Close the dataset file deterministically instead of leaking the
    # handle the way csv.reader(open(...)) did.
    with open('Datasets/' + filename, 'r') as infile:
        dataset = csv.reader(infile, delimiter='\t')
        next(dataset)  # skip header row
        t1 = time.time()
        points = Maximal.pointTimestamp(dataset)
    timestamps = sorted(map(int, points.keys()))
    previousFlocks = []
    keyFlock = 1
    diskID = 1
    stdin = []
    for timestamp in timestamps:
        centersDiskCompare, treeCenters, disksTime = Maximal.disksTimestamp(
            points, timestamp)
        # disksTimestamp apparently signals "no candidate centers" with a
        # literal 0 — TODO confirm; `== 0` is deliberate, not truthiness.
        if centersDiskCompare == 0:
            continue
        maximalDisks, diskID = Maximal.maximalDisksTimestamp(
            centersDiskCompare, treeCenters, disksTime, timestamp, diskID)
        previousFlocks, keyFlock, stdin = BFEFlock.flocks(
            maximalDisks, previousFlocks, int(timestamp), keyFlock, stdin)
    table = ('flocksBFE')
    print("Flocks: ", len(stdin))
    flocks = len(stdin)
    stdin = '\n'.join(stdin)
    db = Pdbc.DBConnector()
    db.createTableFlock(table)
    # NOTE(review): `table` contains no format placeholders, so
    # .format(filename) is a no-op; kept for call-site compatibility.
    db.resetTable(table.format(filename))
    db.copyToTable(table, io.StringIO(stdin))
    t2 = round(time.time() - t1, 3)
    print("\nTime: ", t2)
    db.createTableTest()
    db.insertTest(filename, self.epsilon, mu, delta, t2, flocks, tag)
def flockFinder(self, filename, tag):
    """Find flocks with the LCM (closed frequent-itemset) algorithm.

    Loads ``Datasets/<filename>`` (tab-separated, first row skipped as a
    header), builds maximal disks per timestamp, dumps per-trajectory
    disk transactions to ``output.dat``, runs the external ``fim_closed``
    miner to produce ``output.mfi``, converts the itemsets back into
    flocks, stores them in the ``flocksLCM`` table, and records the run
    (parameters, elapsed time, flock count, *tag*) in the test table.
    """
    global traj
    global stdin
    global delta
    Maximal.epsilon = self.epsilon
    Maximal.mu = self.mu
    delta = self.delta
    Maximal.precision = 0.001
    # Remove stale intermediate files via the stdlib instead of shelling
    # out to `rm` (portable, no subprocess per file).
    for stale in ('output.dat', 'output.mfi'):
        if os.path.exists(stale):
            os.remove(stale)
    # Context manager closes the dataset file instead of leaking the
    # handle (the original passed open() straight into csv.reader).
    with open('Datasets/' + filename, 'r') as infile:
        dataset = csv.reader(infile, delimiter='\t')
        next(dataset)  # skip header row
        t1 = time.time()
        points = Maximal.pointTimestamp(dataset)
    timestamps = sorted(map(int, points.keys()))
    keyFlock = 1
    diskID = 1
    traj = {}
    totalMaximalDisks = {}
    stdin = []
    for timestamp in timestamps:
        centersDiskCompare, treeCenters, disksTime = Maximal.disksTimestamp(
            points, timestamp)
        # disksTimestamp apparently signals "no candidate centers" with a
        # literal 0 — TODO confirm; `== 0` is deliberate, not truthiness.
        if centersDiskCompare == 0:
            continue
        maximalDisks, diskID = Maximal.maximalDisksTimestamp(
            centersDiskCompare, treeCenters, disksTime, timestamp, diskID)
        totalMaximalDisks.update(maximalDisks)
        # getTransactions populates the module-global `traj` as a side
        # effect (trajectory id -> disks it belongs to).
        LCMFlock.getTransactions(maximalDisks)
    # Write the transaction file and close it before handing it to the
    # external miner (the original never closed `output`).
    with open('output.dat', 'w') as output:
        for i in traj:
            # A trajectory seen in only one disk cannot form a flock.
            if len(traj[i]) == 1:
                continue
            output.write(str(traj[i]) + '\n')
    # NOTE(review): external closed-itemset miner invoked via the shell;
    # consider subprocess.run with stdout=DEVNULL if this path ever takes
    # untrusted input.
    os.system("./fim_closed output.dat " + str(Maximal.mu) + " output.mfi > /dev/null")
    if os.path.exists('output.mfi'):
        with open('output.mfi', 'r') as output1:
            stdin = LCMFlock.flocks(output1, totalMaximalDisks, keyFlock)
    table = ('flocksLCM')
    print("Flocks: ", len(stdin))
    flocks = len(stdin)
    stdin = '\n'.join(stdin)
    db = Pdbc.DBConnector()
    db.createTableFlock(table)
    # NOTE(review): `table` contains no format placeholders, so
    # .format(filename) is a no-op; kept for call-site compatibility.
    db.resetTable(table.format(filename))
    db.copyToTable(table, io.StringIO(stdin))
    t2 = round(time.time() - t1, 3)
    print("\nTime: ", t2)
    db.createTableTest()
    db.insertTest(filename, self.epsilon, self.mu, delta, t2, flocks, tag)
def flockFinder(self, filename, tag):
    """Find flocks with the LCM (closed frequent-itemset) algorithm.

    Loads ``Datasets/<filename>`` (tab-separated, first row skipped as a
    header), builds maximal disks per timestamp, dumps per-trajectory
    disk transactions to ``output.dat``, runs the external ``fim_closed``
    miner to produce ``output.mfi``, converts the itemsets back into
    flocks, stores them in the ``flocksLCM`` table, and records the run
    (parameters, elapsed time, flock count, *tag*) in the test table.
    """
    global traj
    global stdin
    global delta
    Maximal.epsilon = self.epsilon
    Maximal.mu = self.mu
    delta = self.delta
    Maximal.precision = 0.001
    # Remove stale intermediate files with os.remove rather than shelling
    # out to `rm` (portable, no subprocess per file).
    for stale in ('output.dat', 'output.mfi'):
        if os.path.exists(stale):
            os.remove(stale)
    # Close the dataset file deterministically instead of leaking the
    # handle the way csv.reader(open(...)) did.
    with open('Datasets/' + filename, 'r') as infile:
        dataset = csv.reader(infile, delimiter='\t')
        next(dataset)  # skip header row
        t1 = time.time()
        points = Maximal.pointTimestamp(dataset)
    timestamps = sorted(map(int, points.keys()))
    keyFlock = 1
    diskID = 1
    traj = {}
    totalMaximalDisks = {}
    stdin = []
    for timestamp in timestamps:
        centersDiskCompare, treeCenters, disksTime = Maximal.disksTimestamp(
            points, timestamp)
        # disksTimestamp apparently signals "no candidate centers" with a
        # literal 0 — TODO confirm; `== 0` is deliberate, not truthiness.
        if centersDiskCompare == 0:
            continue
        maximalDisks, diskID = Maximal.maximalDisksTimestamp(
            centersDiskCompare, treeCenters, disksTime, timestamp, diskID)
        totalMaximalDisks.update(maximalDisks)
        # getTransactions populates the module-global `traj` as a side
        # effect (trajectory id -> disks it belongs to).
        LCMFlock.getTransactions(maximalDisks)
    # Write the transaction file and close it before handing it to the
    # external miner (the original never closed `output`).
    with open('output.dat', 'w') as output:
        for i in traj:
            # A trajectory seen in only one disk cannot form a flock.
            if len(traj[i]) == 1:
                continue
            output.write(str(traj[i]) + '\n')
    # NOTE(review): external closed-itemset miner invoked via the shell;
    # consider subprocess.run with stdout=DEVNULL if this path ever takes
    # untrusted input.
    os.system("./fim_closed output.dat " + str(Maximal.mu) + " output.mfi > /dev/null")
    if os.path.exists('output.mfi'):
        with open('output.mfi', 'r') as output1:
            stdin = LCMFlock.flocks(output1, totalMaximalDisks, keyFlock)
    table = ('flocksLCM')
    print("Flocks: ", len(stdin))
    flocks = len(stdin)
    stdin = '\n'.join(stdin)
    db = Pdbc.DBConnector()
    db.createTableFlock(table)
    # NOTE(review): `table` contains no format placeholders, so
    # .format(filename) is a no-op; kept for call-site compatibility.
    db.resetTable(table.format(filename))
    db.copyToTable(table, io.StringIO(stdin))
    t2 = round(time.time() - t1, 3)
    print("\nTime: ", t2)
    db.createTableTest()
    db.insertTest(filename, self.epsilon, self.mu, delta, t2, flocks, tag)