def main():
    snesorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(snesorDict)
    # outputDataSigma = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()

    # receive data in a new process
    pRec = Process(target=readObj.receive, args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()

    pTrack3D = multiprocessing.Process(target=track3D, args=(state,))
    pTrack3D.daemon = True
    pTrack3D.start()

    mp = MahonyPredictor(q=state[3:], Kp=100, Ki=0.01, dt=0.002)
    while True:
        # print("a={}, w={}".format(np.round(outputData[:3], 2), np.round(outputData[3:6], 2)))
        mp.getGyroOffset(outputData[3:6])
        mp.IMUupdate(outputData[:3], outputData[3:6])
        state[3:] = mp.q
        time.sleep(0.08)
def runReadData(printBool, maxIter=50):
    '''
    Run localization on real sensor data.
    :param printBool: [bool] whether to print the output
    :param maxIter: [int] maximum number of iterations
    :return:
    '''
    snesorDict = {'imu': 'LSM6DS3TR-C', 'magSensor': 'AK09970d'}
    readObj = ReadData(snesorDict)  # create the data-reading object
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    magBg = multiprocessing.Array('f', [0] * 6)
    state0 = multiprocessing.Array('f', [0, 0, 0.01, 1, 0, 0, 0])

    readObj.send()
    pRec = Process(target=readObj.receive, args=(outputData, magBg, None))
    # pRec.daemon = True
    pRec.start()
    time.sleep(2)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state0,))
    pTrack3D.daemon = True
    pTrack3D.start()

    while True:
        measureData = np.concatenate((outputData[:3], outputData[6:9]))
        LM(state0, measureData, 7, maxIter, printBool)
        time.sleep(0.1)
def insert(self, fileRoot, project, file, arg):
    Read = ReadData(fileRoot + arg)
    DataClass, Data = Read.readCsvInput()
    Insert = InsertData(self.user, self.host)
    Insert.dataName = arg[:file]
    Insert.dataClass = DataClass
    Insert.data = Data
    Insert.createTable(project)
    Insert.insertData(project)
    return arg, DataClass
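# Hypothetical usage sketch for insert() above (the object name, project name,
# and file values are assumptions, not taken from the project): `file` is used
# as a slice index into the file name, so passing 4 with 'iris.csv' would store
# the table under the name 'iris' and return ('iris.csv', DataClass).
#
#   name, data_class = loader.insert('/data/raw/', 'irisProject', 4, 'iris.csv')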
def __init__(self, file):
    """
    Initialize: instance file, distance matrix, and size
    """
    self.file = file
    self.instance = ReadData(self.file)
    self.size = self.instance.size
    self.dis_mat = self.instance.GetDistanceMat()
    self.time_read = self.instance.time_to_read
    self.time_algo = 0
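# Minimal usage sketch for the class this __init__ belongs to (the class name
# 'TSPInstance' and the file name are assumptions): ReadData is expected to
# expose size, GetDistanceMat(), and time_to_read, matching how it is used in
# the genetic-algorithm and simulated-annealing drivers later in this section.
#
#   instance = TSPInstance('berlin52.txt')
#   print(instance.size, len(instance.dis_mat))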
def main():
    snesorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(snesorDict)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()

    # receive data in a new process
    pRec = Process(target=readObj.receive, args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()
    time.sleep(0.5)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state,))
    pTrack3D.daemon = True
    pTrack3D.start()

    i = 0
    bw = np.zeros(3)
    qEKF = QEKF()
    while True:
        for j in range(4):
            # print("w={}".format(np.round(outputData[3+6*j:6*(j+1)], 2)))
            if i < 100:
                # accumulate gyroscope samples to estimate the bias
                bw += outputData[3 + 6 * j:6 * (j + 1)]
                i += 1
                if i == 100:
                    bw /= i
                    qEKF.bw = bw
                    print("get gyroscope bias:{}deg/s".format(bw))
            else:
                w = outputData[3 + 6 * j:6 * (j + 1)]
                wb = w - bw
                qEKF.F = qEKF.Fx(qEKF.dt, wb)
                print('time={:.4f}: wb={}, q={}'.format(
                    time.time(), np.round(qEKF.wb, 2), np.round(qEKF.x, 3)))
                qEKF.predict()
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]

                aNorm = np.linalg.norm(outputData[6 * j:6 * j + 3])
                qEKF.z = np.array(outputData[6 * j:6 * j + 3]) / aNorm
                qEKF.update(qEKF.z, HJacobian, Hx, qEKF.R)
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]
        time.sleep(0.037)
def initilizeData(self):
    chosenFeatures = self.chosenFeatures.get()
    feature1 = int(chosenFeatures[1])
    feature2 = int(chosenFeatures[6])
    learnRate = float(self.learnRate.get())
    epochsNo = int(self.epochsNo.get())
    bias = self.bias.get()

    rd = ReadData()
    rd.readData()
    featureX = self.returnFeature(feature1, rd)
    featureY = self.returnFeature(feature2, rd)
    return (featureX, featureY)
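# A minimal sketch of the returnFeature helper assumed by initilizeData above
# (hypothetical implementation, not taken from the project): it maps a 1-based
# feature index to the corresponding Iris column exposed by ReadData
# (IrisX1..IrisX4), matching how those attributes are used elsewhere in this
# section.
def returnFeature(self, index, rd):
    features = {1: rd.IrisX1, 2: rd.IrisX2, 3: rd.IrisX3, 4: rd.IrisX4}
    return features[index]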
def main(args):
    # config
    batch_size = 10
    nb_epoch = 1000
    train_file = "dataset/train.csv"
    pred_file = "dataset/test.csv"
    colList = [
        "LotFrontage", "MSSubClass", "LotArea", "YearRemodAdd", "MasVnrArea",
        "BsmtFinSF1", "BsmtUnfSF", "TotalBsmtSF", "1stFlrSF", "BedroomAbvGr",
        "SalePrice"
    ]  # [x1, x2, ..., xn, y]
    save_dir = "./result/"

    # read dataset
    rd = ReadData()
    train_df = rd.readCSV(train_file)
    pred_df = rd.readCSV(pred_file)
    t_data = rd.getCol(train_df, colList)
    t_data = rd.preprocess(t_data, colList)
    p_data = rd.getCol(pred_df, colList)
    p_data = rd.preprocess(p_data, colList)
    p_data = np.asarray(rd.getCol(pred_df, colList[:-1]))
    x_data = np.asarray(rd.getCol(t_data, colList[:-1]))
    y_data = np.asarray(rd.getCol(t_data, [colList[-1]]))

    # create model
    md = Model()
    input_shape = x_data.shape
    output_shape = 1
    md.create_model(input_shape, output_shape)

    # train
    """
    md.train(x_data, y_data, batch_size, nb_epoch, verbose=1)
    md.save(save_dir, save_dir)
    """

    # predict
    md.predict(p_data, save_dir)
def manageTrainingFeatures(self):
    # initialize X1 & X2 & X3 & X4 & Y
    rd = ReadData()
    rd.readData()

    # Reading first chunk of training data
    self.training_features['X1'] = rd.IrisX1[0:30]
    self.training_features['X2'] = rd.IrisX2[0:30]
    self.training_features['X3'] = rd.IrisX3[0:30]
    self.training_features['X4'] = rd.IrisX4[0:30]
    self.training_features['Y'] = [1 for i in range(0, 30)]

    # Reading second chunk of training data
    self.training_features['X1'].extend(rd.IrisX1[50:80])
    self.training_features['X2'].extend(rd.IrisX2[50:80])
    self.training_features['X3'].extend(rd.IrisX3[50:80])
    self.training_features['X4'].extend(rd.IrisX4[50:80])
    self.training_features['Y'].extend([2 for i in range(50, 80)])

    # Reading third chunk of training data
    self.training_features['X1'].extend(rd.IrisX1[100:130])
    self.training_features['X2'].extend(rd.IrisX2[100:130])
    self.training_features['X3'].extend(rd.IrisX3[100:130])
    self.training_features['X4'].extend(rd.IrisX4[100:130])
    self.training_features['Y'].extend([3 for i in range(100, 130)])

    # Reading first chunk of testing data
    self.testing_features['X1'] = rd.IrisX1[30:50]
    self.testing_features['X2'] = rd.IrisX2[30:50]
    self.testing_features['X3'] = rd.IrisX3[30:50]
    self.testing_features['X4'] = rd.IrisX4[30:50]
    self.testing_features['Y'] = [1 for i in range(30, 50)]

    # Reading second chunk of testing data
    self.testing_features['X1'].extend(rd.IrisX1[80:100])
    self.testing_features['X2'].extend(rd.IrisX2[80:100])
    self.testing_features['X3'].extend(rd.IrisX3[80:100])
    self.testing_features['X4'].extend(rd.IrisX4[80:100])
    self.testing_features['Y'].extend([2 for i in range(80, 100)])

    # Reading third chunk of testing data
    self.testing_features['X1'].extend(rd.IrisX1[130:150])
    self.testing_features['X2'].extend(rd.IrisX2[130:150])
    self.testing_features['X3'].extend(rd.IrisX3[130:150])
    self.testing_features['X4'].extend(rd.IrisX4[130:150])
    self.testing_features['Y'].extend([3 for i in range(130, 150)])
from readData import ReadData
from imageReg import ImageReg
from interact import Interact
import os
import SimpleITK as sitk

# Change working directory such that it can access data
os.chdir("..")
# Print current working directory
cwd = os.getcwd()
print(cwd)

# Paths
pct_path = ".\\Patients\\HN-CHUM-001\\08-27-1885-TomoTherapy Patient Disease-00441\\112161818-kVCT Image Set-62659\\000000.dcm"
dvf_path = "E:\\Mphys\\ElastixReg\\DVF\\HN-CHUM-001\\deformationField.nii"
petct_path = ".\\Patients\\HN-CHUM-001\\08-27-1885-PANC. avec C.A. SPHRE ORL tte et cou -TP-74220\\3-StandardFull-07232"
struct_path = '.\\Patients\\HN-CHUM-001\\08-27-1885-TomoTherapy Patient Disease-00441\\114120634-TomoTherapy Structure Set-68567\\000000.dcm'

ReadData = ReadData()
ImageReg = ImageReg()
Interact = Interact()


def read_dicom(dicom_path):
    # Function that reads a dicom file and writes to a text file
    dataset = ReadData.read_dicom(dicom_path)
    # vector_grid = dataset.DeformableRegistrationSequence[1].DeformableRegistrationGridSequence[0].VectorGridData
    # vector_grid = np.array(vector_grid).astype(np.float64)
    #
    # with open("dvf.raw", "wb") as f:
    #     f.write(vector_grid)
    ReadData.write_dicom(dataset, "image_test")
#!/usr/bin/env python
# coding: utf-8
from readData import ReadData
import numpy as np
import cv2
import math

path = '/home/aviad/Desktop/src/data/Images/odo360nodoor/odo360nodoor_orginal'
images = ReadData(path).exportNameImages()
print(len(images))

# prevImg = cv2.imread(path + '/' + images[0], 0)
# nextImg = cv2.imread(path + '/' + images[1], 0)
prevImg = cv2.imread(images[0], 0)
nextImg = cv2.imread(images[1], 0)


def createLineIterator(P1, P2, img):
    imageH = img.shape[0]
    imageW = img.shape[1]
    P1X = P1[0]
    P1Y = P1[1]
    P2X = P2[0]
    P2Y = P2[1]

    # difference and absolute difference between points
    # used to calculate slope and relative location between points
    dX = P2X - P1X
plt.plot(progress)
plt.ylabel('Distance')
plt.xlabel('Generation')

plt.subplot(212)
plt.plot(progress_min)
plt.ylabel('Minimum Distance')
plt.xlabel('Generation')
plt.show()

print("Minimum distance :", min_dist)
print("Best route :", bestRoute)

# INPUT
import sys

if len(sys.argv) < 2:
    print("need input file")
    sys.exit(1)

r = ReadData(sys.argv[1])
print(r.size)
size = r.size
dist_matrix = r.GetDistanceMat()

cityList = []
for i in range(size):
    cityList.append(City(label=i + 1, distance_list=dist_matrix[i]))

geneticAlgorithmPlot(population=cityList, popSize=500, eliteSize=100,
                     mutationRate=0.01, generations=100)
        'Implied Volatility put JD'] = callJumpDiffusion, putJumpDiffusion, impVolJDCall, impVolJDPut
        self.df['Call Price SVJD'], self.df['Put Price SVJD'], self.df[
            'Implied Volatility call SVJD'], self.df[
                'Implied Volatility put SVJD'] = callStoVolStoJump, putStoVolStoJump, impVolSVJDCall, impVolSVJDPut
        self.df.to_csv('M:/Master thesis/Data/splitdata/results/6000.csv')

        print('Estimation time: ', time.clock() - start_time, "seconds")
        return self.df


if __name__ == "__main__":
    path = 'M:/Master thesis/Data/splitdata/simulationparameters_1000.csv'
    header = None
    index_col = None
    df = pd.DataFrame(ReadData(path, header, index_col).readFile())

    # General
    rf = 0.012986
    iterations = 100000
    periods = 200
    tick = 0.025  # 1/2 of the smallest tick size on the SPX

    # For Stochastic Volatility
    longvol = 0.10
    gamma = 0.5
    kappa = 3.5
    rho = -0.7

    # For JumpDiffusion
    lambda_j = 0.5  # jump frequency
from points import *
from readData import ReadData
import csv
from kmeans import *

# read the data first
r = ReadData('exercise-1.csv')
data = r.read()
# print(data)

# make points array
points = []
for arr in data:
    p = Point(arr[0], arr[1])
    points.append(p)

# generate random centroids
k = Kmeans(2, points)
k.gen_random_centroids()

while not k.converge():
    k.group_points()
    k.reassign()

for c in k.centroids:
    x = []
    y = []
    for p in k.data:
        if p.get_centroid().get_x() == c.get_x() and p.get_centroid().get_y() == c.get_y():
from readData import ReadData
from SA2opt import SimAnneal
import matplotlib.pyplot as plt
import random
import numpy as np
import sys

if len(sys.argv) < 2:
    print("need input file")
    sys.exit(1)

filename = sys.argv[1]
D = ReadData(filename)

if __name__ == "__main__":
    # coords = [[random.uniform(-1000, 1000), random.uniform(-1000, 1000)] for i in range(100)]
    sa = SimAnneal(D.GetDistanceMat(), filename)  # , stopping_iter=2)
    sa.anneal()
    # sa.batch_anneal()
    # sa.visualize_routes()
    sa.plot_learning()
def slope(b):
    # derivative of the loss (b - 4)**2 with respect to b
    return 2 * (b - 4)


w1 = numpy.random.randn()
w2 = numpy.random.randn()
b = numpy.random.randn()

o = NN(3, 1, w1, w2, b)
print(o)
print(b)

# simple gradient descent on b with learning rate 0.1
for i in range(10):
    b = b - .1 * slope(b)
    print(b)

dataset = ReadData()
dataset.readData()
X1_training = dataset.IrisX1[0:30]
X1_training.extend(dataset.IrisX1[50:80])
X1_training.extend(dataset.IrisX1[100:130])
vector = [X1_training[0], X1_training[1]]
print(vector)

x = [1 for i in range(0, 5)]
x.extend([0 for i in range(1, 5)])
x.extend([2 for i in range(1, 5)])
print(x)

# pi = PlotIris()
# pi.plot(rd.IrisX1, rd.IrisX2, 'X1', 'X2')
'''
IrisX1 = []
IrisX2 = []