def Logistic_regression(modelpath=None):
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_data()
    if modelpath is not None:
        lr = joblib.load(modelpath)
    else:
        lr = LogisticRegression(penalty='l2', solver='liblinear',
                                class_weight="balanced")
        lr.fit(train_data, train_flag)
        joblib.dump(lr, "params/LR/LR.model")
    # val: pick the decision threshold on the validation set
    val_output_prob = lr.predict_proba(val_data)
    val_output_prob = np.array(val_output_prob)[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_prob)
    # test: apply the threshold on the test set
    test_output_proba = lr.predict_proba(test_data)
    test_output_proba = np.array(test_output_proba)[:, 1]
    test_output = np.zeros(test_output_proba.shape)
    test_output[test_output_proba > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "precision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_proba)
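# NOTE: every classifier in this section picks its decision threshold via
# Measure().get_pr_curve(flags, probs) on the validation split; that helper's
# implementation is not shown here. Below is a minimal standalone sketch of one
# plausible criterion -- the threshold maximizing F1 along the precision-recall
# curve, built on sklearn's precision_recall_curve. The exact rule used by
# Measure.get_pr_curve is an assumption.
import numpy as np
from sklearn.metrics import precision_recall_curve

def get_pr_curve_sketch(flags, probs):
    precision, recall, thresholds = precision_recall_curve(flags, probs)
    # precision/recall carry one more point than thresholds; drop the last
    f1 = 2 * precision[:-1] * recall[:-1] / np.maximum(precision[:-1] + recall[:-1], 1e-12)
    return precision, recall, thresholds[np.argmax(f1)]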
def SVM(modelpath=None):
    # e.g. modelpath="params/SVM/svm_balanced.model"
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_data()
    train_data = train_data[:40000]
    train_flag = train_flag[:40000]
    if modelpath is not None:
        svm_ = joblib.load(modelpath)
    else:
        svm_ = SVC(kernel='rbf', probability=True)
        svm_.fit(train_data, train_flag)
        joblib.dump(svm_, "params/SVM/svm_balanced.model")
    # val: pick the decision threshold on the validation set
    val_output_prob = svm_.predict_proba(val_data)
    val_output_prob = np.array(val_output_prob)[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_prob)
    # test
    test_output_prob = svm_.predict_proba(test_data)
    test_output_prob = np.array(test_output_prob)[:, 1]
    test_output = np.zeros(test_output_prob.shape)
    test_output[test_output_prob > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "model:SVM:\nprecision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_prob)
def __init__(self, pattern, period, simulationTime, units):
    Measure.__init__(self, period, simulationTime, units)
    self.__periodicValues = PeriodicValues(0, period, simulationTime)
    self.__prog = re.compile(pattern)
    self.__units = units
def insertMeasureByPosition(self, width, position=None, counter=None):
    if position is None:
        if self.numStaffs() == 0:
            self._addStaff()
        position = NotePosition(self.numStaffs() - 1)
        staff = self.getStaff(self.numStaffs() - 1)
        position.measureIndex = staff.numMeasures()
    self._checkStaffIndex(position.staffIndex)
    newMeasure = Measure(width)
    newMeasure.counter = counter
    staff = self.getStaff(position.staffIndex)
    staff.insertMeasure(position, newMeasure)
    return newMeasure
def model_agg_svm(model_path_dir="params/SVM/Agg_svm"):
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_downsampling_data()
    model_li = []
    for i in range(10):
        model_name = "svm-%d.model" % i
        path = os.path.join(model_path_dir, model_name)
        if os.path.exists(path):
            svm_ = joblib.load(path)
        else:
            print "train submodel-%d" % i
            svm_ = SVC(kernel='rbf', probability=True,
                       class_weight={0: 0.45, 1: 0.55})
            svm_.fit(train_data[i], train_flag[i])
            joblib.dump(svm_, path)
        model_li.append(svm_)
    # val: average submodel probabilities, then pick a threshold
    val_output_li = []
    for i in range(10):
        val_output = model_li[i].predict_proba(val_data)
        val_output_li.append(val_output)
    val_output_li = np.array(val_output_li)
    val_output_li = val_output_li.sum(0) / 10
    val_output_proba = val_output_li[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_proba)
    # test
    test_output_li = []
    for i in range(10):
        test_output = model_li[i].predict_proba(test_data)
        test_output_li.append(test_output)
    test_output_li = np.array(test_output_li)
    test_output_li = test_output_li.sum(0) / 10
    test_output_proba = test_output_li[:, 1]
    test_output = np.zeros(test_output_proba.shape)
    test_output[test_output_proba > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "model:multi-SVM:\nprecision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_proba)
def insertMeasureByIndex(self, width, index=None, counter=None):
    if index is None:
        index = self.numMeasures()
    if self.numStaffs() == 0:
        self._addStaff()
        staff = self.getStaff(0)
    elif index == self.numMeasures():
        staff = self.getStaff(-1)
        index = staff.numMeasures()
    else:
        staff, index = self._staffContainingMeasure(index)
    newMeasure = Measure(width)
    newMeasure.counter = counter
    staff.insertMeasure(NotePosition(measureIndex=index), newMeasure)
    return newMeasure
def model_agg_lr(model_path_dir="params/LR/Agg_lr"):
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_downsampling_data()
    model_li = []
    for i in range(10):
        model_name = "lr-%d.model" % i
        path = os.path.join(model_path_dir, model_name)
        if os.path.exists(path):
            lr = joblib.load(path)
        else:
            print "train submodel-%d" % i
            lr = LogisticRegression(solver='liblinear', class_weight="balanced")
            lr.fit(train_data[i], train_flag[i])
            joblib.dump(lr, path)
        model_li.append(lr)
    # val: average submodel probabilities, then pick a threshold
    val_output_li = []
    for i in range(10):
        val_output = model_li[i].predict_proba(val_data)
        val_output_li.append(val_output)
    val_output_li = np.array(val_output_li)
    val_output_li = val_output_li.sum(0) / 10
    val_output_proba = val_output_li[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_proba)
    # test
    test_output_li = []
    for i in range(10):
        test_output = model_li[i].predict_proba(test_data)
        test_output_li.append(test_output)
    test_output_li = np.array(test_output_li)
    test_output_li = test_output_li.sum(0) / 10
    test_output_proba = test_output_li[:, 1]
    test_output = np.zeros(test_output_proba.shape)
    test_output[test_output_proba > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "precision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_proba)
def get_mean(self):
    """
    Get mean of distribution with error from delta theorem
    :return: Mean as a Measure
    """
    self.calc_cent_moments(1, 2)
    val = self.raw_moments[1]
    err = (self.cent_moments[2] / self.total_counts)**0.5
    return Measure(val, err)
def get_cumulant(self, order):
    """
    Calculate cumulant of distribution for given order from central moments,
    with error from delta theorem
    :param order: Order of cumulant to calculate
    :return: Cumulant value and error as Measure object
    """
    self.calc_cent_moments(1, 2 * order)
    # Maybe write in Bell polynomials later
    cm = self.cent_moments
    n = self.total_counts
    if order == 1:
        val = self.raw_moments[1]
        err = (cm[2] / n)**0.5
    elif order == 2:
        val = cm[2]
        err = ((cm[4] - cm[2]**2) / n)**0.5
    elif order == 3:
        val = cm[3]
        err = ((cm[6] - cm[3]**2 + 9 * cm[2]**3 - 6 * cm[2] * cm[4]) / n)**0.5
    elif order == 4:
        val = cm[4] - 3 * cm[2]**2
        err = ((cm[8] - 12 * cm[6] * cm[2] - 8 * cm[5] * cm[3] - cm[4]**2
                + 48 * cm[4] * cm[2]**2 + 64 * cm[3]**2 * cm[2]
                - 36 * cm[2]**4) / n)**0.5
    elif order == 5:
        val = cm[5] - 10 * cm[2] * cm[3]
        err = ((cm[10] - cm[5]**2 - 10 * cm[4] * cm[6] + 900 * cm[2]**5
                - 20 * cm[3] * cm[7] - 20 * cm[8] * cm[2] + 125 * cm[2] * cm[4]**2
                + 200 * cm[4] * cm[3]**2 - 1000 * (cm[3] * cm[2])**2
                + 160 * cm[6] * cm[2]**2 - 900 * cm[4] * cm[2]**3
                + 240 * cm[2] * cm[3] * cm[5]) / n)**0.5
    elif order == 6:
        val = cm[6] - 15 * cm[4] * cm[2] - 10 * cm[3]**2 + 30 * cm[2]**3
        err = ((-30 * cm[4] * cm[8] + 510 * cm[4] * cm[2] * cm[6]
                + 1020 * cm[4] * cm[3] * cm[5] + 405 * cm[8] * cm[2]**2
                - 2880 * cm[6] * cm[2]**3 - 9720 * cm[3] * cm[5] * cm[2]**2
                - 30 * cm[2] * cm[10] + 840 * cm[2] * cm[3] * cm[7]
                + 216 * cm[2] * cm[5]**2 - 40 * cm[3] * cm[9] + 440 * cm[6] * cm[3]**2
                - 3600 * (cm[2] * cm[4])**2 - 9600 * cm[2] * cm[4] * cm[3]**2
                + 13500 * cm[4] * cm[2]**4 + 39600 * cm[2]**3 * cm[3]**2
                + cm[12] - cm[6]**2 - 12 * cm[5] * cm[7] + 225 * cm[4]**3
                - 8100 * cm[2]**6 - 400 * cm[3]**4) / n)**0.5
    else:
        print(f'{order}th order cumulant not implemented, returning nan!')
        val = float('nan')
        err = float('nan')
    return Measure(val, err)
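# Sanity check for the cumulant formulas above (kappa_3 = cm[3],
# kappa_4 = cm[4] - 3*cm[2]**2, ...): for normally distributed data every
# cumulant beyond the second vanishes. A standalone sketch using plain numpy,
# independent of the statistics class above:
import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(loc=2.0, scale=3.0, size=1_000_000)
cm = {k: np.mean((x - x.mean())**k) for k in range(2, 5)}
k1 = x.mean()              # ~2.0 (the mean)
k2 = cm[2]                 # ~9.0 (the variance, sigma**2)
k3 = cm[3]                 # ~0 for a normal distribution
k4 = cm[4] - 3 * cm[2]**2  # ~0 for a normal distribution
print(k1, k2, k3, k4)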
def test():
    dst = Dataset(split="test")
    valdst = Dataset(split="val")
    model = Classifier().cuda()
    testloader = data.DataLoader(dst, batch_size=1)
    valloader = data.DataLoader(valdst, batch_size=1)
    model.load_state_dict(torch.load('params/NN/params2.pkl'))
    model.eval()
    # val: collect probabilities, then pick the decision threshold
    val_flag = []
    val_output_prob = []
    for i, _data in enumerate(valloader):
        feature, flag = _data
        feature = Variable(feature).cuda()
        output = model(feature)
        output_prob = F.softmax(output, dim=1).detach().cpu().numpy()
        val_output_prob.append(output_prob[0][1])
        val_flag.append(flag.numpy()[0])
    val_flag = np.array(val_flag)
    val_output_prob = np.array(val_output_prob)
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_prob)
    # test
    test_flag = []
    test_output_prob = []
    for i, _data in enumerate(testloader):
        feature, flag = _data
        feature = Variable(feature).cuda()
        output = model(feature)
        output_prob = F.softmax(output, dim=1).detach().cpu().numpy()
        test_output_prob.append(output_prob[0][1])
        test_flag.append(flag.numpy()[0])
    test_flag = np.array(test_flag)
    test_output_prob = np.array(test_output_prob)
    test_output = np.zeros(test_output_prob.shape)
    test_output[test_output_prob > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "precision:%f\nrecall:%f\nf1:%f\nacc:%f\n" % (precision, recall, f1, acc)
    print "auc:%f" % roc_auc_score(test_flag, test_output_prob)
def model_agg_dt(model_path_dir="params/DecisionTree/Agg_DT"):
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_downsampling_data()
    model_li = []
    for i in range(10):
        model_name = "DT-%d.model" % i
        path = os.path.join(model_path_dir, model_name)
        if os.path.exists(path):
            dt = joblib.load(path)
        else:
            print "train submodel-%d" % i
            dt = tree.DecisionTreeClassifier(max_depth=6, class_weight="balanced")
            dt.fit(train_data[i], train_flag[i])
            joblib.dump(dt, path)
        model_li.append(dt)
    # val: average submodel probabilities, then pick a threshold
    val_output_li = []
    for i in range(10):
        val_output = model_li[i].predict_proba(val_data)
        val_output_li.append(val_output)
    val_output_li = np.array(val_output_li)
    val_output_li = val_output_li.sum(0) / 10
    val_output_proba = val_output_li[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_proba)
    # test
    test_output_li = []
    for i in range(10):
        test_output = model_li[i].predict_proba(test_data)
        test_output_li.append(test_output)
    test_output_li = np.array(test_output_li)
    test_output_li = test_output_li.sum(0) / 10
    test_output_proba = test_output_li[:, 1]
    test_output = np.zeros(test_output_proba.shape)
    test_output[test_output_proba > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "model:multi-DT:\nprecision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_proba)
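# model_agg_svm, model_agg_lr and model_agg_dt above share one pattern: fit one
# submodel per downsampled training split, then soft-vote by averaging
# predict_proba over the ensemble. A compact generic sketch of that pattern
# (make_estimator and the split lists are placeholders, not names from the
# original code):
import numpy as np

def fit_ensemble(make_estimator, train_splits, flag_splits):
    # sklearn's fit() returns the estimator, so this builds the model list
    return [make_estimator().fit(X_i, y_i)
            for X_i, y_i in zip(train_splits, flag_splits)]

def soft_vote_proba(models, X):
    # average class probabilities over submodels, return P(y=1) per sample
    probs = np.array([m.predict_proba(X) for m in models])
    return probs.mean(axis=0)[:, 1]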
def Decision_tree():
    train_data, train_flag, val_data, val_flag, test_data, test_flag = get_data()
    dt = tree.DecisionTreeClassifier(max_depth=6, class_weight="balanced")
    dt = dt.fit(train_data, train_flag)
    # val: pick the decision threshold on the validation set
    val_output_prob = dt.predict_proba(val_data)
    val_output_prob = np.array(val_output_prob)[:, 1]
    _, _, threshold = Measure().get_pr_curve(val_flag, val_output_prob)
    # test
    test_output_prob = dt.predict_proba(test_data)
    test_output_prob = np.array(test_output_prob)[:, 1]
    test_output = np.zeros(test_output_prob.shape)
    test_output[test_output_prob > threshold] = 1
    precision = Measure().Precision(test_flag, test_output)
    recall = Measure().Recall(test_flag, test_output)
    f1 = Measure().F1_score(test_flag, test_output)
    acc = Measure().Accuracy(test_flag, test_output)
    print "precision:%.2f\nrecall:%.2f\nf1:%.2f\nacc:%.2f\n" % (
        precision, recall, f1, acc)
    print "auc:%.2f" % roc_auc_score(test_flag, test_output_prob)
def get_sd(self):
    """
    Get standard deviation of distribution with error from delta theorem
    :return: Standard deviation as a Measure
    """
    self.calc_cent_moments(1, 4)
    val = self.cent_moments[2]**0.5
    err = (self.m[4] - 1) * self.cent_moments[2]
    if err >= 0:
        err = (err / (4 * self.total_counts))**0.5
    else:
        err = float('nan')
    return Measure(val, err)
def parse_activebandwidthreply(jsonData, clientT_observerT, clientT_observerR,
                               observerT_remoteR, observerR_remoteR):
    bucket = ""
    for line in jsonData:
        isLocal = 'local' in line['Keyword']
        direction = line["Direction"]
        receiver_identity = line["ReceiverIdentity"]
        sender_identity = line["SenderIdentity"]
        timestamp = line["Timestamp"]
        timestamp_d = datetime.datetime.strptime(
            timestamp.replace(".000000", ""), '%Y-%m-%d %H:%M:%S')
        bandwidthbps = float(line["Bandwidth [bit/s]"])
        bandwidthKbps = bandwidthbps / 1024
        bandwidthMbps = bandwidthKbps / 1024
        keyword = line['Keyword']
        measure = Measure(timestamp_d, bandwidthMbps,
                          keyword.split("_")[3].replace("noise", "").strip())

        if ((direction == "Upstream" and sender_identity == "Client") or
                (direction == "Downstream" and receiver_identity == "Client")) and isLocal:
            clientT_observerT.append(measure)
            bucket = "clientT_observerT"
        elif ((direction == "Upstream" and sender_identity == "Client") or
                (direction == "Downstream" and receiver_identity == "Client")) and not isLocal:
            clientT_observerR.append(measure)
            bucket = "clientT_observerR"
        elif ((direction == "Upstream" and sender_identity == "Observer") or
                (direction == "Downstream" and receiver_identity == "Observer")) and isLocal:
            observerT_remoteR.append(measure)
            bucket = "observerT_remoteR_"
        elif ((direction == "Upstream" and sender_identity == "Observer") or
                (direction == "Downstream" and receiver_identity == "Observer")) and not isLocal:
            observerR_remoteR.append(measure)
            bucket = "observerR_remoteR_"
        assert bucket != ""
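# The parser above indexes each JSON record by the fields "Keyword",
# "Direction", "ReceiverIdentity", "SenderIdentity", "Timestamp" and
# "Bandwidth [bit/s]". A hypothetical record consistent with those accesses
# (field values are illustrative, not taken from the real measurement tool):
example_record = {
    "Keyword": "local_run_client_noise10",  # contains 'local' -> local leg; split("_")[3] -> "noise10" -> "10"
    "Direction": "Upstream",
    "SenderIdentity": "Client",
    "ReceiverIdentity": "Observer",
    "Timestamp": "2020-01-01 12:00:00.000000",  # the .000000 suffix is stripped before strptime
    "Bandwidth [bit/s]": "8388608",  # parsed with float(), then divided by 1024*1024 -> 8.0 Mbit/s
}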
def get_skewness(self):
    """
    Get skewness of distribution with error from delta theorem
    :return: Skewness as a Measure
    """
    self.calc_cent_moments(1, 6)
    m = self.m
    if self.cent_moments[2] == 0:
        val = float('nan')
        err = float('nan')
    else:
        val = self.cent_moments[3] / self.cent_moments[2]**1.5
        err = 9 - 6 * m[4] + m[3]**2 * (35 + 9 * m[4]) / 4 - 3 * m[3] * m[5] + m[6]
        if err >= 0:
            err = (err / self.total_counts)**0.5
        else:
            err = float('nan')
    return Measure(val, err)
def get_kurtosis(self):
    """
    Get kurtosis of distribution with error from delta theorem
    :return: Kurtosis as a Measure
    """
    self.calc_cent_moments(1, 8)
    m = self.m
    if self.cent_moments[2] == 0:
        val = float('nan')
        err = float('nan')
    else:
        val = self.cent_moments[4] / self.cent_moments[2]**2 - 3
        err = (-m[4]**2 + 4 * m[4]**3 + 16 * m[3]**2 * (1 + m[4])
               - 8 * m[3] * m[5] - 4 * m[4] * m[6] + m[8])
        if err >= 0:
            err = (err / self.total_counts)**0.5
        else:
            err = float('nan')
    return Measure(val, err)
def __init__(self):
    '''
    Load the first .tif image found under ./data (used as the image id),
    normalise it, and initialise the viewer state and the Measure helper.
    '''
    self.listFile = [os.path.join(dp, f)
                     for dp, dn, filenames in os.walk(os.getcwd() + '/data')
                     for f in filenames
                     if os.path.splitext(f)[1] == '.tif']
    QtCore.QObject.__init__(self)
    self.parameter = DEFAULT
    self.id = self.listFile[0]
    logging.info('Started wood')
    self.Image = get_image(self.id)
    self.Image = self.Image * 1.0 / self.Image.max()
    self.index = npy.array([self.parameter['FirstLine'],
                            self.parameter['FirstLine'] + self.parameter['VisualWidth'],
                            self.parameter['FirstColumn'],
                            self.parameter['FirstColumn'] + self.parameter['VisualHeight']])
    self.updateImg()
    self.initToShow()
    self.measure = Measure()
    '''
    if len(self.Image.shape) == 3:
        self.Image = npy.sum(self.Image, axis=2) / 3
    self.Image = (self.Image - npy.min(self.Image)) * 255.0 / (npy.max(self.Image) - npy.min(self.Image))
    self.Image = self.Image.astype(npy.uint8)
    self.Image = self.Image[0:2000, 0:10000]
    self.set_parameter('DEFAULT')
    self.mask = get_mask(self.Image, selemMask=self.get_parameter('selemMask'),
                         low_res=self.get_parameter('low_res'))
    self.labels, self.Seg = get_label(self.Image, seg=True,
                                      min_size_th=self.get_parameter('min_size_th'),
                                      max_size_th=self.get_parameter('max_size_th'),
                                      radius=self.get_parameter('radius'),
                                      p0=self.get_parameter('p0'),
                                      iter=self.get_parameter('iter'),
                                      selemSeg=self.get_parameter('selemSeg'))
    self.center, self.tree = get_tree_center(self.mask * self.Seg, self.mask * self.labels)
    self.cellsRows = list()
    self.selected = npy.zeros((self.center.shape[0], 1))
    self.compute_Row()
    '''
    logging.info('Finished')
def get_kurt_var(self):
    """
    Get kurtosis*variance of distribution with error from delta theorem
    :return: Kurtosis*variance as a Measure
    """
    self.calc_cent_moments(1, 8)
    m = self.m
    if self.cent_moments[2] == 0:
        val = float('nan')
        err = float('nan')
    else:
        val = self.cent_moments[4] / self.cent_moments[2] - 3 * self.cent_moments[2]
        err = (-9 + 6 * m[4]**2 + m[4]**3 + 8 * m[3]**2 * (5 + m[4]) - 8 * m[3] * m[5]
               + m[4] * (9 - 2 * m[6]) - 6 * m[6] + m[8])
        if err >= 0:
            err = (err * self.cent_moments[2]**2 / self.total_counts)**0.5
        else:
            err = float('nan')
    return Measure(val, err)
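# The values above are the standard moment ratios (skewness = cm3/cm2**1.5,
# excess kurtosis = cm4/cm2**2 - 3). A quick cross-check against scipy on raw
# data; scipy.stats.skew and scipy.stats.kurtosis use the same biased
# definitions by default, so each printed pair should agree:
import numpy as np
from scipy import stats

rng = np.random.default_rng(1)
x = rng.exponential(scale=2.0, size=100_000)
cm = {k: np.mean((x - x.mean())**k) for k in (2, 3, 4)}
print(cm[3] / cm[2]**1.5, stats.skew(x))        # ~2 for an exponential
print(cm[4] / cm[2]**2 - 3, stats.kurtosis(x))  # ~6 for an exponential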
def get_k_stat(self, order):
    """
    Calculate k statistic of distribution for given order from central moments.
    NOTE: errors are estimated via the delta theorem, identically to the
    corresponding cumulants.
    :param order: Order of k statistic to calculate
    :return: K statistic value and error as Measure object
    """
    self.calc_cent_moments(1, 2 * order)
    # Maybe try to find analytical formula later
    n = self.total_counts
    cm = self.cent_moments
    if n < order:  # Will get /0 errors
        print(f'Too few entries ({n}) for {order} order k-statistic!')
        return Measure(float('NaN'), float('NaN'))
    if order == 1:
        val = self.raw_moments[1]
        err = (cm[2] / n)**0.5
    elif order == 2:
        val = n / (n - 1) * cm[2]
        err = ((cm[4] - cm[2]**2) / n)**0.5
    elif order == 3:
        val = n**2 / ((n - 1) * (n - 2)) * cm[3]
        err = ((cm[6] - cm[3]**2 + 9 * cm[2]**3 - 6 * cm[2] * cm[4]) / n)**0.5
    elif order == 4:
        val = n**2 * ((n + 1) * cm[4] - 3 * (n - 1) * cm[2]**2) / ((n - 1) * (n - 2) * (n - 3))
        err = ((cm[8] - 12 * cm[6] * cm[2] - 8 * cm[5] * cm[3] - cm[4]**2
                + 48 * cm[4] * cm[2]**2 + 64 * cm[3]**2 * cm[2] - 36 * cm[2]**4) / n)**0.5
    elif order == 5:
        val = n**3 * ((n + 5) * cm[5] - 10 * (n - 1) * cm[2] * cm[3]) \
            / ((n - 1) * (n - 2) * (n - 3) * (n - 4))
        err = ((cm[10] - cm[5]**2 - 10 * cm[4] * cm[6] + 900 * cm[2]**5
                - 20 * cm[3] * cm[7] - 20 * cm[8] * cm[2] + 125 * cm[2] * cm[4]**2
                + 200 * cm[4] * cm[3]**2 - 1000 * (cm[3] * cm[2])**2
                + 160 * cm[6] * cm[2]**2 - 900 * cm[4] * cm[2]**3
                + 240 * cm[2] * cm[3] * cm[5]) / n)**0.5
    elif order == 6:
        val = n**2 * ((n + 1) * (n**2 + 15 * n - 4) * cm[6]
                      - 15 * (n - 1)**2 * (n + 4) * cm[2] * cm[4]
                      - 10 * (n - 1) * (n**2 - n + 4) * cm[3]**2
                      + 30 * n * (n - 1) * (n - 2) * cm[2]**3) \
            / ((n - 1) * (n - 2) * (n - 3) * (n - 4) * (n - 5))
        err = ((-30 * cm[4] * cm[8] + 510 * cm[4] * cm[2] * cm[6]
                + 1020 * cm[4] * cm[3] * cm[5] + 405 * cm[8] * cm[2]**2
                - 2880 * cm[6] * cm[2]**3 - 9720 * cm[3] * cm[5] * cm[2]**2
                - 30 * cm[2] * cm[10] + 840 * cm[2] * cm[3] * cm[7]
                + 216 * cm[2] * cm[5]**2 - 40 * cm[3] * cm[9] + 440 * cm[6] * cm[3]**2
                - 3600 * (cm[2] * cm[4])**2 - 9600 * cm[2] * cm[4] * cm[3]**2
                + 13500 * cm[4] * cm[2]**4 + 39600 * cm[2]**3 * cm[3]**2
                + cm[12] - cm[6]**2 - 12 * cm[5] * cm[7] + 225 * cm[4]**3
                - 8100 * cm[2]**6 - 400 * cm[3]**4) / n)**0.5
    else:
        print(f'{order}th order k-statistic not implemented, returning nan!')
        val = float('nan')
        err = float('nan')
    return Measure(val, err)
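# scipy ships unbiased k-statistics up to fourth order, so the low-order value
# branches above can be verified directly (the delta-theorem error formulas
# have no scipy counterpart); any differences are float round-off only:
import numpy as np
from scipy import stats

rng = np.random.default_rng(2)
x = rng.normal(size=10_000)
n = len(x)
cm = {k: np.mean((x - x.mean())**k) for k in (2, 3, 4)}
print(n / (n - 1) * cm[2], stats.kstat(x, 2))
print(n**2 / ((n - 1) * (n - 2)) * cm[3], stats.kstat(x, 3))
k4 = n**2 * ((n + 1) * cm[4] - 3 * (n - 1) * cm[2]**2) / ((n - 1) * (n - 2) * (n - 3))
print(k4, stats.kstat(x, 4))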
class Bt:
    def __init__(self, config, eventSched, httpRequester, ownAddrFunc, peerId, persister,
                 pInMeasure, pOutMeasure, peerPool, connBuilder, connListener, connHandler,
                 choker, torrent, torrentIdent, torrentDataPath, version):
        ##global stuff
        self.config = config
        self.version = version
        self.peerPool = peerPool
        self.connBuilder = connBuilder
        self.connListener = connListener
        self.connHandler = connHandler
        self.choker = choker

        ##own stuff
        self.log = Logger('Bt', '%-6s - ', torrentIdent)
        self.torrent = torrent
        self.torrentIdent = torrentIdent

        self.log.debug("Creating object persister")
        self.btPersister = BtObjectPersister(persister, torrentIdent)

        self.log.debug("Creating measure classes")
        self.inRate = Measure(eventSched, 60, [pInMeasure])
        self.outRate = Measure(eventSched, 60, [pOutMeasure])
        self.inRate.stop()
        self.outRate.stop()

        self.log.debug("Creating storage class")
        self.storage = Storage(self.config, self.btPersister, torrentIdent,
                               self.torrent, torrentDataPath)

        self.log.debug("Creating global status class")
        self.pieceStatus = PieceStatus(self.torrent.getTotalAmountOfPieces())

        self.log.debug("Creating file priority class")
        self.filePrio = FilePriority(self.btPersister, self.version, self.pieceStatus,
                                     self.storage.getStatus(), self.torrent, torrentIdent)

        self.log.debug("Creating requester class")
        self.requester = Requester(self.config, self.torrentIdent, self.pieceStatus,
                                   self.storage, self.torrent)

        self.log.debug("Creating tracker requester class")
        self.trackerRequester = TrackerRequester(self.config, self.btPersister, eventSched,
                                                 peerId, self.peerPool, ownAddrFunc,
                                                 httpRequester, self.inRate, self.outRate,
                                                 self.storage, self.torrent,
                                                 self.torrentIdent, self.version)

        self.log.debug("Creating superseeding handler class")
        self.superSeedingHandler = SuperSeedingHandler(self.torrentIdent, self.btPersister,
                                                       self.storage.getStatus(),
                                                       self.pieceStatus)

        ##callbacks
        self.log.debug("Adding callbacks")
        self._addCallbacks()

        ##status
        self.state = 'stopped'
        self.started = False
        self.paused = True

        ##lock
        self.lock = threading.Lock()

    ##internal functions - callbacks

    def _addCallbacks(self):
        ownStatus = self.storage.getStatus()
        self.persistentStatusCallback = self.config.addCallback(
            (('storage', 'persistPieceStatus'),), ownStatus.enablePersisting)

    def _removeCallbacks(self):
        self.config.removeCallback(self.persistentStatusCallback)

    ##internal functions - start/pause/stop - common

    def _halt(self, targetState):
        if self.paused and targetState in ('shutdown', 'remove'):
            #stopping and already paused, only need to stop the tracker requester and the callbacks
            self.log.debug("Removing callbacks")
            self._removeCallbacks()
            self.log.debug("Stopping tracker requester")
            self.trackerRequester.stop()
        else:
            #either stopping, removing or shutdown and still running or loading
            self.log.debug("Aborting storage loading just in case")
            self.storage.abortLoad()

            if self.started:
                #were already running
                self.started = False

                if targetState == 'stop':
                    self.log.debug("Pausing tracker requester")
                    self.trackerRequester.pause()
                else:
                    self.log.debug("Removing callbacks")
                    self._removeCallbacks()
                    self.log.debug("Stopping tracker requester")
                    self.trackerRequester.stop()

                self.log.debug("Removing us from choker")
                self.choker.removeTorrent(self.torrentIdent)
                self.log.debug("Removing us from connection builder")
                self.connBuilder.removeTorrent(self.torrentIdent)
                self.log.debug("Removing us from connection listener")
                self.connListener.removeTorrent(self.torrent.getTorrentHash())
                self.log.debug("Removing us from connection handler")
                self.connHandler.removeTorrent(self.torrentIdent)
                self.log.debug("Stopping transfer measurement")
                self.inRate.stop()
                self.outRate.stop()

        #shutdown/removal specific tasks which need to be done regardless of current status
        if targetState in ('shutdown', 'remove'):
            self.log.debug("Removing all infos related to us from connection pool")
            self.peerPool.clear(self.torrentIdent)

            if targetState == 'remove':
                self.log.debug('Removing all persisted objects of this torrent')
                self.btPersister.removeAll()

    ##internal functions - start/pause/stop - specific

    def _start(self, loadSuccess):
        try:
            if loadSuccess:
                #loading was successful, add to handlers
                self.log.debug("Resetting requester")
                self.requester.reset()

                self.log.debug("Starting transfer measurement")
                self.inRate.start()
                self.outRate.start()

                self.log.debug("Adding us to connection handler")
                self.connHandler.addTorrent(self.torrentIdent, self.torrent, self.pieceStatus,
                                            self.inRate, self.outRate, self.storage,
                                            self.filePrio, self.requester,
                                            self.superSeedingHandler)
                self.log.debug("Adding us to connection listener")
                self.connListener.addTorrent(self.torrentIdent, self.torrent.getTorrentHash())
                self.log.debug("Adding us to connection builder")
                self.connBuilder.addTorrent(self.torrentIdent, self.torrent.getTorrentHash())
                self.log.debug("Adding us to choker")
                self.choker.addTorrent(self.torrentIdent, self.storage.getStatus(),
                                       self.superSeedingHandler)
                self.log.debug("Starting tracker requester")
                self.trackerRequester.start()

                self.started = True
                self.state = 'running'
        except:
            #something failed - hard
            self.log.error("Error in load function:\n%s", logTraceback())

    ##external functions - state

    def start(self):
        #called when torrent is started
        self.lock.acquire()
        if self.paused:
            self.paused = False
            if self.storage.isLoaded():
                self.log.debug("Storage already loaded, skipping hashing")
                self._start(True)
            else:
                self.storage.load(self._start)
                self.state = 'loading'
        self.lock.release()

    def stop(self):
        #called when torrent is stopped
        self.lock.acquire()
        if not self.paused:
            self._halt('stop')
            self.paused = True
            self.state = 'stopped'
        self.lock.release()

    def shutdown(self):
        #called on shutdown
        self.lock.acquire()
        self._halt('shutdown')
        self.paused = False
        self.state = 'stopped'
        self.lock.release()

    def remove(self):
        #called when torrent is removed
        self.lock.acquire()
        self._halt('remove')
        self.paused = False
        self.state = 'stopped'
        self.lock.release()

    ##external functions - stats

    def getStats(self, wantedStats):
        self.lock.acquire()
        stats = {}
        if wantedStats.get('state', False):
            stats['state'] = self.state

        #connections
        if wantedStats.get('connections', False):
            stats.update(self.connHandler.getStats(self.torrentIdent, connDetails=True))

        #files
        if wantedStats.get('files', False):
            stats['files'] = self.filePrio.getStats()

        #peers
        if wantedStats.get('peers', False) or wantedStats.get('connectionAverages', False):
            #get peer stats
            connAverages = wantedStats.get('connectionAverages', False)
            stats.update(self.peerPool.getStats(self.torrentIdent))
            stats.update(self.connHandler.getStats(self.torrentIdent, connSummary=True,
                                                   connAverages=connAverages))
            stats.update(self.trackerRequester.getStats(trackerSummary=True))

            #normalise peer stats
            if stats['connectedLeeches'] > stats['knownLeeches']:
                stats['knownLeeches'] = stats['connectedLeeches']
            if stats['connectedSeeds'] > stats['knownSeeds']:
                stats['knownSeeds'] = stats['connectedSeeds']
            if stats['knownLeeches'] + stats['knownSeeds'] > stats['knownPeers']:
                stats['knownPeers'] = stats['knownLeeches'] + stats['knownSeeds']
            elif stats['knownLeeches'] + stats['knownSeeds'] < stats['knownPeers']:
                stats['knownLeeches'] += stats['knownPeers'] - stats['knownSeeds']

            #generate additional conn stats if necessary
            if connAverages:
                if stats['knownSeeds'] == 0:
                    stats['knownLeechesPerSeed'] = 0
                else:
                    stats['knownLeechesPerSeed'] = (stats['knownLeeches'] * 1.0) / stats['knownSeeds']

        #pieces
        if wantedStats.get('pieceAverages', False):
            stats.update(self.pieceStatus.getStats(pieceAverages=True))

        #progress stats
        if wantedStats.get('progress', False):
            stats.update(self.storage.getStats())

        #requests
        if wantedStats.get('requests', False) or wantedStats.get('pieceAverages', False):
            reqDetails = wantedStats.get('requests', False)
            pieceAverages = wantedStats.get('pieceAverages', False)
            stats.update(self.connHandler.getRequesterStats(self.torrentIdent,
                                                            requestDetails=reqDetails,
                                                            pieceAverages=pieceAverages))

        #tracker
        if wantedStats.get('tracker', False):
            stats.update(self.trackerRequester.getStats(trackerDetails=True))
        if wantedStats.get('trackerStatus', False):
            stats.update(self.trackerRequester.getStats(trackerStatus=True))

        #transfer stats
        if wantedStats.get('transfer', False):
            stats['inRawBytes'] = self.inRate.getTotalTransferedBytes()
            stats['outRawBytes'] = self.outRate.getTotalTransferedBytes()
            stats['inPayloadBytes'] = self.inRate.getTotalTransferedPayloadBytes()
            stats['outPayloadBytes'] = self.outRate.getTotalTransferedPayloadBytes()
            stats['inRawSpeed'] = self.inRate.getCurrentRate()
            stats['outRawSpeed'] = self.outRate.getCurrentRate()
            stats['protocolOverhead'] = (100.0 * (stats['inRawBytes'] + stats['outRawBytes']
                                                  - stats['inPayloadBytes'] - stats['outPayloadBytes'])) \
                                        / max(stats['inPayloadBytes'] + stats['outPayloadBytes'], 1.0)

        if wantedStats.get('transferAverages', False):
            stats['avgInRawSpeed'] = self.inRate.getAverageRate() * 1024
            stats['avgOutRawSpeed'] = self.outRate.getAverageRate() * 1024
            stats['avgInPayloadSpeed'] = self.inRate.getAveragePayloadRate() * 1024
            stats['avgOutPayloadSpeed'] = self.outRate.getAveragePayloadRate() * 1024

        #torrent stats
        if wantedStats.get('torrent', False):
            stats.update(self.torrent.getStats())
            stats['superSeeding'] = self.superSeedingHandler.isEnabled()

        self.lock.release()
        return stats

    ##external funcs - actions

    def setFilePriority(self, fileIds, priority):
        self.lock.acquire()
        for fileId in fileIds:
            self.filePrio.setFilePriority(fileId, priority)
        self.lock.release()

    def setFileWantedFlag(self, fileIds, wanted):
        self.lock.acquire()
        if self.started:
            #already running, need to go through the connection handler because of syncing issues
            self.connHandler.setFileWantedFlag(self.torrentIdent, fileIds, wanted)
        else:
            #not running
            for fileId in fileIds:
                self.filePrio.setFileWantedFlag(fileId, wanted)
        self.lock.release()

    def setSuperSeeding(self, enabled):
        self.lock.acquire()
        if not enabled == self.superSeedingHandler.isEnabled():
            if self.started:
                self.connHandler.setSuperSeeding(self.torrentIdent, enabled)
            else:
                self.superSeedingHandler.setEnabled(enabled)
        self.lock.release()

    ##external funcs - tracker actions

    def getTrackerInfo(self):
        self.lock.acquire()
        trackerInfo = self.trackerRequester.getTrackerInfo()
        self.lock.release()
        return trackerInfo

    def setTrackerInfo(self, newTrackerInfo):
        self.lock.acquire()
        self.trackerRequester.setTrackerInfo(newTrackerInfo)
        self.lock.release()

    ##external funcs - other

    def getInfohash(self):
        self.lock.acquire()
        infohash = self.torrent.getTorrentHash()
        self.lock.release()
        return infohash
class Connection:
    def __init__(self, connStatus, scheduler, conn, direction, remotePeerAddr,
                 inMeasure, outMeasure, inMeasureParent, outMeasureParent,
                 outLimiter, inLimiter, msgLenFunc, msgDecodeFunc,
                 msgLengthFieldLen, maxMsgLength, keepaliveMsgFunc, log):
        self.sched = scheduler

        # connection
        self.conn = conn
        self.connIdent = self.conn.fileno()
        self.connectTime = time()
        self.direction = direction
        self.inMsgCount = 0
        self.closed = False

        # peer
        self.remotePeerAddr = remotePeerAddr

        # conn status
        self.connStatus = connStatus
        self.connStatus.addConn(self.connIdent)

        # limiter
        self.outLimiter = outLimiter
        self.outLimiter.addUser(self.connIdent, callback=self.connStatus.allowedToSend,
                                callbackArgs=[self.connIdent])
        self.inLimiter = inLimiter
        self.inLimiter.addUser(self.connIdent, callback=self.connStatus.allowedToRecv,
                               callbackArgs=[self.connIdent])

        # rate
        if inMeasure is None:
            self.inRate = Measure(self.sched, 60, [inMeasureParent])
        else:
            self.inRate = inMeasure
            self.inRate.start()
        if outMeasure is None:
            self.outRate = Measure(self.sched, 60, [outMeasureParent])
        else:
            self.outRate = outMeasure
            self.outRate.start()

        # data buffer
        self.inBuffer = []
        self.outBufferQueue = deque()
        self.outBufferMessages = {}
        self.outBufferMessageId = 0

        # messages
        self.msgLenFunc = msgLenFunc
        self.msgDecodeFunc = msgDecodeFunc
        self.msgLengthFieldLen = msgLengthFieldLen
        self.maxMsgLength = maxMsgLength

        # log
        self.log = log

        # lock
        self.lock = threading.RLock()

        # events
        self.sendTimeoutEvent = self.sched.scheduleEvent(self.timeout, timedelta=300,
                                                         funcArgs=["send timed out"])
        self.recvTimeoutEvent = self.sched.scheduleEvent(self.timeout, timedelta=300,
                                                         funcArgs=["read timed out"])
        self.keepaliveEvent = self.sched.scheduleEvent(self.send, timedelta=100,
                                                       funcArgs=[keepaliveMsgFunc()],
                                                       repeatdelta=100)

    ##internal functions - socket

    def _recv(self):
        # really recv data - or at least try to
        msgs = []
        self.sched.rescheduleEvent(self.recvTimeoutEvent, timedelta=300)
        wantedBytes = self.conn.getUsedInBufferSpace()
        allowedBytes = self.inLimiter.claimUnits(self.connIdent, wantedBytes)

        if not allowedBytes == 0:
            # may receive something, recv data
            data = self.conn.recv(allowedBytes)
            self.inRate.updateRate(len(data))
            self.inBuffer.append(data)
            data = "".join(self.inBuffer)

            # process data
            msgLen = self.msgLenFunc(data)
            while msgLen is not None:
                msgLen += self.msgLengthFieldLen  # because of the length field
                if msgLen > self.maxMsgLength:
                    # way too large
                    self._fail("message from peer exceeds size limit (%i bytes)" % (self.maxMsgLength,))
                    msgLen = None
                elif len(data) < msgLen:
                    # incomplete message
                    msgLen = None
                else:
                    # finished a message
                    msg = self.msgDecodeFunc(data[:msgLen])
                    self._gotMessage(msg)
                    msgs.append((self.inMsgCount, msg))
                    self.inMsgCount += 1
                    data = data[msgLen:]
                    msgLen = self.msgLenFunc(data)

            if data == "":
                # all processed
                self.inBuffer = []
            else:
                # still data left
                self.inBuffer = [data]
        return msgs

    def _gotMessage(self, msg):
        pass

    def _send(self):
        # really send the buffered data - or at least try to
        self.sched.rescheduleEvent(self.sendTimeoutEvent, timedelta=300)
        self.sched.rescheduleEvent(self.keepaliveEvent, timedelta=100)
        while len(self.outBufferQueue) > 0:
            messageId = self.outBufferQueue[0]
            if not messageId in self.outBufferMessages:
                # message send got aborted
                self.outBufferQueue.popleft()
                if len(self.outBufferQueue) == 0:
                    # nothing to send anymore, notify
                    self.connStatus.wantsToSend(False, self.connIdent)
            else:
                # something to send
                message = self.outBufferMessages[messageId]
                messageLen = len(message[1])
                wantedBytes = min(messageLen, self.conn.getFreeOutBufferSpace())
                allowedBytes = self.outLimiter.claimUnits(self.connIdent, wantedBytes)

                if allowedBytes == 0:
                    # may not even send a single byte ...
                    break
                else:
                    # at least something may be send
                    sendBytes = self.conn.send(message[1][:allowedBytes])
                    self.outRate.updateRate(sendBytes)
                    message[2] = True
                    if sendBytes < messageLen:
                        # but not all was send
                        message[1] = message[1][sendBytes:]
                        break
                    else:
                        # all was send
                        self.outBufferQueue.popleft()
                        del self.outBufferMessages[messageId]
                        if len(self.outBufferQueue) == 0:
                            # nothing to send anymore, notify
                            self.connStatus.wantsToSend(False, self.connIdent)
                        if message[0] is not None:
                            # execute
                            message[0][0](*message[0][1], **message[0][2])

    def _queueSend(self, data, sendFinishedFunc=None, sendFinishedArgs=[], sendFinishedKws={}):
        if len(self.outBufferQueue) == 0:
            # first queued item, notify about send interest
            self.connStatus.wantsToSend(True, self.connIdent)
        messageId = self.outBufferMessageId
        if sendFinishedFunc is None:
            sendFinishedHandle = None
        else:
            sendFinishedHandle = (sendFinishedFunc, sendFinishedArgs, sendFinishedKws)
        self.outBufferMessageId += 1
        self.outBufferQueue.append(messageId)
        self.outBufferMessages[messageId] = [sendFinishedHandle, data, False]
        return messageId

    def _abortSend(self, messageId):
        aborted = False
        message = self.outBufferMessages.get(messageId, None)
        if message is not None:
            # still queued
            if not message[2]:
                # send did not start up to now
                aborted = True
                del self.outBufferMessages[messageId]
        return aborted

    def _fail(self, reason=""):
        # cause the conn to fail
        self.log.info("Conn failed: %s", reason)
        self.conn.close(force=True)

    def _close(self):
        # set flag
        self.closed = True

        # close conn, update conn state
        self.conn.close(force=True)
        self.connStatus.removeConn(self.connIdent)

        # stop rate measurement
        self.inRate.stop()
        self.outRate.stop()

        # remove conn from limiter
        self.outLimiter.removeUser(self.connIdent)
        self.inLimiter.removeUser(self.connIdent)

        # clear buffers
        self.inBuffer = []
        self.outBufferQueue.clear()
        self.outBufferMessages.clear()
        self.outBufferMessageId = 0

        # remove events
        self.sched.removeEvent(self.sendTimeoutEvent)
        self.sched.removeEvent(self.recvTimeoutEvent)
        self.sched.removeEvent(self.keepaliveEvent)

    ##internal functions - other

    def _getPayloadRatio(self):
        inPayload = self.inRate.getTotalTransferedPayloadBytes()
        outPayload = self.outRate.getTotalTransferedPayloadBytes()
        if inPayload == 0 and outPayload == 0:
            ratio = 1.0
        elif inPayload == 0 and outPayload != 0:
            ratio = 1.0 / outPayload
        elif inPayload != 0 and outPayload == 0:
            ratio = inPayload / 1.0
        else:
            ratio = inPayload / (outPayload * 1.0)
        return ratio

    ##external functions - socket

    def recv(self):
        self.lock.acquire()
        msgs = []
        if not self.closed:
            msgs = self._recv()
        self.lock.release()
        return msgs

    def send(self, data):
        self.lock.acquire()
        if not self.closed:
            self._queueSend(data)
        self.lock.release()

    def sendEvent(self):
        self.lock.acquire()
        if not self.closed:
            self._send()
        self.lock.release()

    def fileno(self):
        self.lock.acquire()
        value = self.connIdent
        self.lock.release()
        return value

    def timeout(self, reason):
        self.lock.acquire()
        if not self.closed:
            self._fail(reason)
        self.lock.release()

    def close(self):
        self.lock.acquire()
        if not self.closed:
            self._close()
        self.lock.release()

    ##external functions - get info

    def getInRate(self):
        self.lock.acquire()
        rate = self.inRate
        self.lock.release()
        return rate

    def getOutRate(self):
        self.lock.acquire()
        rate = self.outRate
        self.lock.release()
        return rate

    def getPayloadRatio(self):
        self.lock.acquire()
        ratio = self._getPayloadRatio()
        self.lock.release()
        return ratio

    def getRemotePeerAddr(self):
        self.lock.acquire()
        value = self.remotePeerAddr
        self.lock.release()
        return value

    def getShortRemotePeerAddr(self):
        self.lock.acquire()
        value = self.remotePeerAddr[:10]
        self.lock.release()
        return value
def new_meas(self):
    meas = Measure(
        angles=self.angles,
        dists=np.ones(self.M) * self.max_range,
        idxs=np.zeros(self.M, dtype=int) - 9999999)
    return meas
def __init__(self, period, simulationTime, units, maintainLastValue=False):
    Measure.__init__(self, period, simulationTime, units)
    self.periodicAvgValues = PeriodicAvgValues(period, simulationTime,
                                               maintainLastValue)
    self.__units = units
def main():
    conf_path = sys.argv[1]
    conf = conf_path.split('/')[-1].replace('.yml', '')
    model, opt = load_model(conf_path)

    lr_dir = opt['dataroot_LR']
    hr_dir = opt['dataroot_GT']
    lr_paths = fiFindByWildcard(os.path.join(lr_dir, '*.png'))
    hr_paths = fiFindByWildcard(os.path.join(hr_dir, '*.png'))

    this_dir = os.path.dirname(os.path.realpath(__file__))
    test_dir = os.path.join(this_dir, '..', 'results', conf)
    print(f"Out dir: {test_dir}")

    measure = Measure(use_gpu=False)

    fname = 'measure_full.csv'
    fname_tmp = fname + "_"
    path_out_measures = os.path.join(test_dir, fname_tmp)
    path_out_measures_final = os.path.join(test_dir, fname)

    if os.path.isfile(path_out_measures_final):
        df = pd.read_csv(path_out_measures_final)
    elif os.path.isfile(path_out_measures):
        df = pd.read_csv(path_out_measures)
    else:
        df = None

    scale = opt['scale']
    pad_factor = 2

    for lr_path, hr_path, idx_test in zip(lr_paths, hr_paths, range(len(lr_paths))):
        lr = imread(lr_path)
        hr = imread(hr_path)

        # Pad image to be a multiple of pad_factor
        h, w, c = lr.shape
        lq_orig = lr.copy()
        lr = impad(lr,
                   bottom=int(np.ceil(h / pad_factor) * pad_factor - h),
                   right=int(np.ceil(w / pad_factor) * pad_factor - w))

        lr_t = t(lr)

        heat = opt['heat']

        if df is not None and len(df[(df['heat'] == heat) & (df['name'] == idx_test)]) == 1:
            continue

        sr_t = model.get_sr(lq=lr_t, heat=heat)

        sr = rgb(torch.clamp(sr_t, 0, 1))
        sr = sr[:h * scale, :w * scale]

        path_out_sr = os.path.join(test_dir, "{:0.2f}".format(heat).replace('.', ''),
                                   "{:06d}.png".format(idx_test))
        imwrite(path_out_sr, sr)

        meas = OrderedDict(conf=conf, heat=heat, name=idx_test)
        meas['PSNR'], meas['SSIM'], meas['LPIPS'] = measure.measure(sr, hr)

        lr_reconstruct_rgb = imresize(sr, 1 / opt['scale'])
        meas['LRC PSNR'] = psnr(lq_orig, lr_reconstruct_rgb)

        str_out = format_measurements(meas)
        print(str_out)

        df = pd.DataFrame([meas]) if df is None else pd.concat([pd.DataFrame([meas]), df])

        df.to_csv(path_out_measures + "_", index=False)
        os.rename(path_out_measures + "_", path_out_measures)

    df.to_csv(path_out_measures, index=False)
    os.rename(path_out_measures, path_out_measures_final)

    str_out = format_measurements(df.mean())
    print(f"Results in: {path_out_measures_final}")
    print('Mean: ' + str_out)
def main():
    conf_path = sys.argv[1]
    conf = conf_path.split('/')[-1].replace('.yml', '')
    model, opt = load_model(conf_path)

    lr_dir = opt['dataroot_LR']
    # hr_dir = opt['dataroot_GT']
    lr_paths = fiFindByWildcard(os.path.join(lr_dir, '*.png'))
    # hr_paths = fiFindByWildcard(os.path.join(hr_dir, '*.png'))

    # this_dir = os.path.dirname(os.path.realpath(__file__))
    # test_dir = os.path.join(this_dir, '..', 'results', conf)
    this_dir = opt['path']['root']
    test_dir = os.path.join(this_dir, 'results', conf)
    print(f"Out dir: {test_dir}")

    measure = Measure(use_gpu=False)

    fname = 'measure_full.csv'
    fname_tmp = fname + "_"
    path_out_measures = os.path.join(test_dir, fname_tmp)
    path_out_measures_final = os.path.join(test_dir, fname)

    if os.path.isfile(path_out_measures_final):
        df = pd.read_csv(path_out_measures_final)
    elif os.path.isfile(path_out_measures):
        df = pd.read_csv(path_out_measures)
    else:
        df = None

    scale = opt['scale']
    pad_factor = 2

    pbar = tqdm(total=len(lr_paths) * opt['n_sample'])
    print(f"Total {len(lr_paths)} input images, each generates {opt['n_sample']} output images.")
    # for lr_path, hr_path, idx_test in zip(lr_paths, hr_paths, range(len(lr_paths))):
    for lr_path, idx_test in zip(lr_paths, range(len(lr_paths))):
        lr = imread(lr_path)
        # hr = imread(hr_path)

        # Pad image to be a multiple of pad_factor
        h, w, c = lr.shape
        lq_orig = lr.copy()
        lr = impad(lr,
                   bottom=int(np.ceil(h / pad_factor) * pad_factor - h),
                   right=int(np.ceil(w / pad_factor) * pad_factor - w))

        lr_t = t(lr)

        heat = opt['heat']

        if df is not None and len(df[(df['heat'] == heat) & (df['name'] == idx_test)]) == 1:
            continue

        for r in range(opt['n_sample']):
            sr_t = model.get_sr(lq=lr_t, heat=heat)

            sr = rgb(torch.clamp(sr_t, 0, 1))
            sr = sr[:h * scale, :w * scale]

            path_out_sr = os.path.join(test_dir, "{:0.2f}".format(heat).replace('.', ''),
                                       "{:06d}_sample{:05d}.png".format(idx_test, r))
            imwrite(path_out_sr, sr)
            pbar.update(1)
def main():
    conf_path = sys.argv[1]
    conf = conf_path.split('/')[-1].replace('.yml', '')
    model, opt = load_model(conf_path)

    lr_dir = opt['dataroot_LR']
    hr_dir = opt['dataroot_GT']
    lr_paths = fiFindByWildcard(os.path.join(lr_dir, '*.png'))
    hr_paths = fiFindByWildcard(os.path.join(hr_dir, '*.png'))

    # this_dir = os.path.dirname(os.path.realpath(__file__))
    # test_dir = os.path.join(this_dir, '..', 'results', conf)
    test_dir = opt['path']['results_root'] + '_' + conf
    if not os.path.exists(test_dir):
        os.makedirs(test_dir)
    print(f"Out dir: {test_dir}")

    measure = Measure(use_gpu=False)

    fname = 'measure_full.csv'
    fname_tmp = fname + "_"
    path_out_measures = os.path.join(test_dir, fname_tmp)
    path_out_measures_final = os.path.join(test_dir, fname)

    if os.path.isfile(path_out_measures_final):
        df = pd.read_csv(path_out_measures_final)
    elif os.path.isfile(path_out_measures):
        df = pd.read_csv(path_out_measures)
    else:
        df = None

    scale = opt['scale']
    pad_factor = 2

    for lr_path, hr_path, idx_test in zip(lr_paths, hr_paths, range(len(lr_paths))):
        print('>>process %s' % (lr_path))
        lr = imread(lr_path)
        hr = imread(hr_path)
        print('\tlr: ', lr.shape)
        print('\thr: ', hr.shape)
        _, img_name = os.path.split(lr_path)

        # Pad image to be a multiple of pad_factor
        h, w, c = lr.shape
        if h >= 338:
            print('\tskip ...')
            continue
        lq_orig = lr.copy()
        lr = impad(lr,
                   bottom=int(np.ceil(h / pad_factor) * pad_factor - h),
                   right=int(np.ceil(w / pad_factor) * pad_factor - w))
        print('\tlr: ', lr.shape)

        lr_t = t(lr)
        print('\tlr_t: ', lr_t.shape, type(lr_t))

        # heat = opt['heat']
        for heat in np.linspace(0, 1, num=11):
            # Sample a super-resolution for a low-resolution image
            print('\theat: ', heat)
            if df is not None and len(df[(df['heat'] == heat) & (df['name'] == idx_test)]) == 1:
                continue

            sr_t = model.get_sr(lq=lr_t, heat=heat)
            print('\tsr_t: ', sr_t.shape, type(sr_t))

            sr = rgb(torch.clamp(sr_t, 0, 1))
            sr = sr[:h * scale, :w * scale]
            print('\tsr: ', sr.shape, type(sr))

            save_name = '%s_%s.png' % (img_name.replace('.png', ''), str(heat))
            # path_out_sr = os.path.join(test_dir, "{:0.2f}".format(heat).replace('.', ''), save_name)
            path_out_sr = os.path.join(test_dir, save_name)
            print('\tsave to %s' % (path_out_sr))
            imwrite(path_out_sr, sr)

            # Build a side-by-side lr/sr/hr comparison image; use separate
            # variables for the hr shape so the lr height/width used for
            # cropping above are not clobbered between heat iterations
            hr_h, hr_w, _ = np.shape(hr)
            lr_resize = cv2.resize(lr, (hr_w, hr_h))
            print('\tlr_resize: ', lr_resize.shape)

            # TypeError: Expected Ptr<cv::UMat> for argument 'img'
            sr = np.array(sr).astype('uint8')
            sr = cv2.cvtColor(np.array(sr), cv2.COLOR_RGB2BGR)
            print('\tsr: ', sr.shape, type(sr), np.min(sr[:]), np.max(sr[:]))
            hr_copy = np.array(hr).astype('uint8')
            hr_copy = cv2.cvtColor(np.array(hr_copy), cv2.COLOR_RGB2BGR)
            print('\thr: ', sr.shape, type(hr_copy), np.min(hr_copy[:]), np.max(hr_copy[:]))

            start_x = 100
            start_y = 100
            font_scale = 5
            thickness = 6
            # lr = lr_resize[:, :, [2, 1, 0]]
            lr_copy = np.array(lr_resize).astype('uint8')
            lr_copy = cv2.cvtColor(np.array(lr_copy), cv2.COLOR_RGB2BGR)
            color = (0, 0, 255)
            cv2.putText(lr_copy, 'lr', (start_x, start_y), cv2.FONT_HERSHEY_SIMPLEX,
                        font_scale, color, thickness)
            cv2.putText(sr, 'sr', (start_x, start_y), cv2.FONT_HERSHEY_SIMPLEX,
                        font_scale, color, thickness)
            cv2.putText(hr_copy, 'hr', (start_x, start_y), cv2.FONT_HERSHEY_SIMPLEX,
                        font_scale, color, thickness)

            save_name = 'compare_%s_%s.png' % (img_name.replace('.png', ''), str(heat))
            # path_out_sr_compare = os.path.join(test_dir, "{:0.2f}".format(heat).replace('.', ''), save_name)
            path_out_sr_compare = os.path.join(test_dir, save_name)
            compare = np.hstack([lr_copy, sr, hr_copy])
            cv2.imwrite(path_out_sr_compare, compare)
            print('\tsave to %s' % (path_out_sr_compare))

            meas = OrderedDict(conf=conf, heat=heat, name=idx_test)
            meas['PSNR'], meas['SSIM'], meas['LPIPS'] = measure.measure(sr, hr)

            lr_reconstruct_rgb = imresize(sr, 1 / opt['scale'])
            meas['LRC PSNR'] = psnr(lq_orig, lr_reconstruct_rgb)

            str_out = format_measurements(meas)
            print(str_out)

            df = pd.DataFrame([meas]) if df is None else pd.concat([pd.DataFrame([meas]), df])

            df.to_csv(path_out_measures + "_", index=False)
            os.rename(path_out_measures + "_", path_out_measures)

    df.to_csv(path_out_measures, index=False)
    os.rename(path_out_measures, path_out_measures_final)

    str_out = format_measurements(df.mean())
    print(f"Results in: {path_out_measures_final}")
    print('Mean: ' + str_out)
def measure(name, type):
    from Measure import Measure
    return Measure(name, type)
def get_mean(xs):
    return Measure(np.mean(xs), np.std(xs) / np.sqrt(len(xs)))
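# Usage sketch for the helper above; Measure is assumed to be a simple
# value-plus-error container, as in the other snippets in this section.
# np.std defaults to ddof=0, and std/sqrt(n) is the standard error of the mean.
import numpy as np

xs = np.random.default_rng(3).normal(loc=5.0, scale=2.0, size=400)
m = get_mean(xs)  # value ~5.0, error ~2.0/sqrt(400) = 0.1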