def test():
    left, right = -1, 1
    f1 = lambda t: sin(exp(2 * t))
    # f1 = lambda t : exp(-5*(t-0.5)**2) - exp(-10*(t+0.1)**2) + exp(-t**2)
    f2 = lambda t: sin(1.0 / (t**3)) if t != 0 else 0
    f3 = lambda t: (1 if t > 0 else -1) * exp(t**2)

    main_grid = generate_uniform_grid(left, right, 10000)
    small_grid = generate_uniform_grid(left, right, 10)
    large_grid = generate_uniform_grid(left, right, 100)
    # larger_grid = generate_uniform_grid(left, right, 1000)

    plt.rc('text', usetex=True)
    for (f, f_name, filename) in [(f1, r'$\sin(e^{2t})$', 'spline_plot1'),
                                  (f2, r'$\sin(t^{-3})$', 'spline_plot2'),
                                  (f3, r'$sign(t) e^{t^2}$', 'spline_plot3')]:
        main_f = tabulate(f, main_grid)
        small_f = tabulate(interpolate(tabulate(f, small_grid)), main_grid)
        large_f = tabulate(interpolate(tabulate(f, large_grid)), main_grid)
        # larger_f = tabulate(interpolate(tabulate(f, larger_grid)), main_grid)

        plt.title(f_name)
        plt.plot(main_grid, main_f.values)
        plt.plot(main_grid, small_f.values)
        plt.plot(main_grid, large_f.values, 'k')
        # plt.plot(main_grid, larger_f.values, 'm', label='sign(t)')
        # plt.legend()
        plt.savefig('img/' + filename + '.eps', bbox_inches='tight', dpi=300)
        plt.clf()

        plt.title(f_name)
        plt.plot(main_grid, np.array(main_f.values) - np.array(large_f.values))
        plt.savefig('img/' + filename + '_error.eps', bbox_inches='tight', dpi=300)
        plt.clf()
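# The helpers above (generate_uniform_grid, tabulate, interpolate) are not
# defined in this snippet. A minimal sketch of what they might look like,
# assuming tabulate returns an object with grid/values fields and interpolate
# builds a cubic spline through the tabulated points:
from collections import namedtuple

import numpy as np
from scipy.interpolate import CubicSpline

TabulatedFunction = namedtuple('TabulatedFunction', ['grid', 'values'])

def generate_uniform_grid(left, right, n):
    # n + 1 equally spaced nodes on [left, right]
    return np.linspace(left, right, n + 1)

def tabulate(f, grid):
    # Evaluate f (a scalar function or spline) at every grid node
    return TabulatedFunction(grid, [f(t) for t in grid])

def interpolate(tabulated):
    # Cubic spline through the tabulated values; callable on new points
    return CubicSpline(tabulated.grid, tabulated.values)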
def test2():
    tabulated_rho = read_tabulated_function_from_file('txts/tabulated_rho.txt')
    tabulated_integral = integrate_tabulated(interpolate(tabulated_rho), 0, 1, 1000)
    integral_interpolation = interpolate(tabulated_integral)
    save_interpolation_to_file(integral_interpolation, 'txts/U_interpolation.txt')
def lk_tracker(Ig, Jg, Jgdx, Jgdy, tracking_point):
    tx = tracking_point[0]
    ty = tracking_point[1]
    w_size = [100, 100]

    T = estimate_T.estimate_T(Jgdx, Jgdy, tx, ty, w_size)
    e = estimate_e.estimate_e(Ig, Jg, Jgdx, Jgdy, tx, ty, w_size)

    # Find an appropriate displacement d by iterating d = T^-1 * e
    d_tot = np.zeros((2, 1))
    d = np.linalg.solve(T, e)
    counter = 0
    # Initialize the returned displacement in case the loop body never runs
    dx, dy = d_tot[0][0], d_tot[1][0]

    interpolator_Jg = RectBivariateSpline(np.arange(Jg.shape[0]),
                                          np.arange(Jg.shape[1]), Jg)
    interpolator_Jgdx = RectBivariateSpline(np.arange(Jgdx.shape[0]),
                                            np.arange(Jgdx.shape[1]), Jgdx)
    interpolator_Jgdy = RectBivariateSpline(np.arange(Jgdy.shape[0]),
                                            np.arange(Jgdy.shape[1]), Jgdy)

    while np.linalg.norm(d) > 0.0001 and counter < 100:
        d_tot = d_tot + d
        dx = d_tot[0][0]
        dy = d_tot[1][0]
        Jg_interpolated = interpolate(interpolator_Jg, Jg, dx, dy)
        Jgdx_interpolated = interpolate(interpolator_Jgdx, Jgdx, dx, dy)
        Jgdy_interpolated = interpolate(interpolator_Jgdy, Jgdy, dx, dy)
        T = estimate_T.estimate_T(Jgdx_interpolated, Jgdy_interpolated,
                                  tx, ty, w_size)
        e = estimate_e.estimate_e(Ig, Jg_interpolated, Jgdx_interpolated,
                                  Jgdy_interpolated, tx, ty, w_size)
        d = np.linalg.solve(T, e)
        counter = counter + 1

    return (dx, dy)
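# The image-warping `interpolate` helper used by lk_tracker is not shown.
# A minimal sketch, assuming it resamples the image on a grid shifted by the
# accumulated displacement (dx, dy); the sign convention of the shift is an
# assumption and depends on how estimate_e defines the error image:
def interpolate(interpolator, image, dx, dy):
    rows = np.arange(image.shape[0]) + dy
    cols = np.arange(image.shape[1]) + dx
    # RectBivariateSpline evaluates on the outer-product grid of the supplied
    # coordinates, giving a resampled image of the same shape
    return interpolator(rows, cols)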
def K(t, gamma):
    dE = deltaE(t, gamma)
    Q = [ip.interpolate(Td, Qd[i], 4, t) for i in range(len(Qd))]
    return [2 * 2.415 * 10**-3 * (Q[i + 1] / Q[i]) * t**(3 / 2) *
            math.exp(-(E[i] - dE[i]) * 11603 / t)
            for i in range(len(E))]
def render_video():
    file_name = time.strftime('%y_%m_%d_%H_%M_%S')
    video = CvVideo(tracer.screen_width, tracer.screen_height, file_name)  # render.ray_tracer.c_

    positions = [frame.position for frame in frames]
    up = [frame.up for frame in frames]
    back = [frame.back for frame in frames]

    pos = interpolate(positions, range(len(positions)), 30)
    up = interpolate(up, range(len(positions)), 30)
    back = interpolate(back, range(len(positions)), 30)

    for index, (center, up, back) in enumerate(zip(pos, up, back)):
        print (index + 1) * 1.0 / len(pos) * 100, '%'
        print center, up, back
        render.ray_tracer.c_center = center
        render.ray_tracer.set_up_and_back(up, back)
        start_rendering(1)
        video.write(render.ray_tracer.get_current_mat())
def build_instances(filename, master_dir, instance_dir, italic=False):
    """Write and return UFOs from the instances defined in a .glyphs file."""
    from interpolation import interpolate

    designspace_path = filename.replace('.glyphs', '.designspace')
    master_ufos, instance_data = load_to_ufos(
        filename, italic, include_instances=True)
    return interpolate(
        master_ufos, master_dir, instance_dir, designspace_path, instance_data)
def _generate_path(self, departure):
    self.path_segments = self.navigator.generate_path(
        departure,
        num_segments=10,
        novelty=self._get_slider_value("novelty"),
        extension=self._get_slider_value("extension"),
        location_preference=self._get_slider_value("location_preference"))
    self.path = interpolation.interpolate(self.path_segments, resolution=100)
    self.path_followers = [
        PathFollower(self.path, dynamics=dynamics.constant_dynamics()),
        PathFollower(self.path, dynamics=dynamics.sine_dynamics()),
    ]
def main(dir_path, output_dir):
    '''Run the pipeline of processes on each file, one by one.'''
    files = os.listdir(dir_path)
    for file_name in files:
        file_dataframe = pd.read_csv(os.path.join(dir_path, file_name))
        cols = ['high', 'open', 'low', 'close', 'volume', 'adj_close']
        file_dataframe = interpolate(file_dataframe, cols)
        file_dataframe = normalize(file_dataframe, cols)
        file_dataframe.to_csv(
            os.path.join(output_dir, file_name), encoding='utf-8')
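# The `interpolate` and `normalize` helpers used by these pipelines come from
# local modules (imported elsewhere as `from interpolation import interpolate`
# and `from normalisation import normalize`). A minimal sketch of plausible
# implementations, assuming linear gap-filling and min-max scaling:
def interpolate(df, cols):
    # Fill missing values in the named columns by linear interpolation
    df[cols] = df[cols].interpolate(method='linear', limit_direction='both')
    return df

def normalize(df, cols):
    # Min-max scale the named columns to [0, 1]
    df[cols] = (df[cols] - df[cols].min()) / (df[cols].max() - df[cols].min())
    return df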
def moveLoop(req):
    global prev_p
    frq = 30
    rate = rospy.Rate(frq)
    cur_time = 0
    new_p = [req.x, req.y, req.z, req.qx, req.qy, req.qz, req.qw]
    for i in range(int(frq * req.time)):
        cur_p = []
        for j in range(0, 7):
            cur_p.append(
                interpolate(prev_p[j], new_p[j], cur_time, req.time, req.type))
        ps = createPoseStamped(cur_p)
        pose_pub.publish(ps)
        path_pub.publish(createPath(ps))
        cur_time += float(1) / frq
        rate.sleep()
    prev_p = new_p
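# The scalar `interpolate(start, end, cur_time, total_time, interp_type)`
# helper is not shown. A minimal sketch, assuming `interp_type` selects a
# linear or cosine-eased ramp; note that interpolating quaternion components
# independently like this is a simplification (a slerp would be more correct):
import math

def interpolate(start, end, cur_time, total_time, interp_type):
    alpha = min(max(cur_time / total_time, 0.0), 1.0)
    if interp_type == 'cosine':
        # Ease in/out: zero slope at both endpoints
        alpha = (1 - math.cos(alpha * math.pi)) / 2
    return start + (end - start) * alpha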
interpolator_Jg = RectBivariateSpline(np.arange(Jg.shape[0]),
                                      np.arange(Jg.shape[1]), Jg)
interpolator_Jgdx = RectBivariateSpline(np.arange(Jgdx.shape[0]),
                                        np.arange(Jgdx.shape[1]), Jgdx)
interpolator_Jgdy = RectBivariateSpline(np.arange(Jgdy.shape[0]),
                                        np.arange(Jgdy.shape[1]), Jgdy)

while np.linalg.norm(d) > 0.0001 and counter < 10:
    d_tot = d_tot + d
    dx = d_tot[0][0]
    dy = d_tot[1][0]
    Jg_interpolated = interpolate(interpolator_Jg, Jg, dx, dy)
    Jgdx_interpolated = interpolate(interpolator_Jgdx, Jgdx, dx, dy)
    Jgdy_interpolated = interpolate(interpolator_Jgdy, Jgdy, dx, dy)
    T = estimate_T.estimate_T(Jgdx_interpolated, Jgdy_interpolated,
                              52, 114, w_size)
    e = estimate_e.estimate_e(Ig, Jg_interpolated, Jgdx_interpolated,
                              Jgdy_interpolated, 52, 114, w_size)
    d = np.linalg.solve(T, e)
    d_diff = d_tot - np.asarray(d_true)
df2 = pd.read_csv(filenames[1])

# Calculate PC_open and PC_close
df2['shift_close'] = df2['Close'].shift(-1)
df2['shift_open'] = df2['Open'].shift(-1)
df2['PC_open'] = (df2['shift_open'] - df2['Open']) / df2['Open']
df2['PC_close'] = (df2['Open'] - df2['Close']) / df2['Close']
# df2['PC_open'] = (df2['shift_open'] - df2['Open']) / df2['Open']
# df2['PC_close'] = (df2['shift_close'] - df2['Close']) / df2['Close']
# df2.drop(['shift_open'], 1)

df2 = df2.set_index('Date')
df2 = interpolate(df2, list(df2))
df2 = normalize(df2, list(df2))

df3 = df1.join(df2, how="inner")
# df3 = pd.concat([df1, df2], axis='1', join='inner')
# print(df3[[0]])  # fix first col
    array = vtk.vtkDoubleArray()
    array.SetNumberOfComponents(1)
    array.SetNumberOfTuples(grid.GetNumberOfPoints())
    idx = 0
    for i in range(0, len(gradient_map)):
        for j in range(0, len(gradient_map[i])):
            array.SetValue(idx, gradient_map[i][j])
            idx = idx + 1
    grid.GetPointData().AddArray(array)
    return grid


data = np.random.rand(20)
injter.interpolate(data, 1000)

file_name = 'face.jpg'
img = cv2.imread(file_name)
img2gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
cv2.imshow('img', img)
cv2.waitKey()

img_laplace = cv2.Laplacian(img2gray, cv2.CV_64F)
cv2.imshow('img', img_laplace)
cv2.waitKey()

shifted = shift_laplace(img_laplace)
cv2.imshow('img', shifted)
cv2.waitKey()

##### VTK CRAP
x_vec = []
for i in range(Nx + 1):
    x_local = (la / 4) * ((1 - m.cos(i * m.pi / (Nx + 1))) +
                          (1 - m.cos((i + 1) * m.pi / (Nx + 1))))
    x_vec.append(x_local)

z_vec = []
for i in range(Nz + 1):
    z_local = -(Ca / 4) * ((1 - m.cos(i * m.pi / (Nz + 1))) +
                           (1 - m.cos((i + 1) * m.pi / (Nz + 1))))
    z_vec.append(z_local)
z_vec = z_vec[::-1]

## Creating some new arrays to test the interpolation with
abcd = interpolate(x_vec, z_vec, filename)

qx = FI.integrate_z1(x_vec, z_vec, abcd, len(x_vec) - 1)
qx2 = FI.integrate_z2(x_vec, z_vec, abcd, len(x_vec) - 1, zsc)
qsy = FI.integratefinal(x_vec, qx, x_vec[-1], 1)
qmz = FI.integratefinal(x_vec, qx, x_vec[-1], 2)
qt = FI.integratefinal(x_vec, qx2, x_vec[-1], 1)
qvy1 = FI.integratefinal(x_vec, qx, x1, 4)
qvy2 = FI.integratefinal(x_vec, qx, x2, 4)
qvyII = FI.integratefinal(x_vec, qx, xII, 4)
qvy3 = FI.integratefinal(x_vec, qx, x3, 4)
qtheta1 = FI.integratefinal(x_vec, qx2, x1, 2)
qtheta2 = FI.integratefinal(x_vec, qx2, x2, 2)
qthetaII = FI.integratefinal(x_vec, qx2, xII, 2)
def main(Args):
    '''
    Main function for stock price prediction
    :param Args: arguments acquired from the command line (refer to ParseArgs() for the list of args)
    '''
    stockFilePath = Args.stockfilepath
    newsFilePath = Args.newsfilepath
    nCpuCores = Args.ncpucores
    testStockFilePath = Args.teststockfilepath
    testNewsFilePath = Args.testnewsfilepath
    modelTimeSeries = Args.modeltimeseries
    modelNews = Args.modelnews
    Logger.debug(
        "StockDataPath: {}, NewsDataPath: {}, NCpuCores: {}, TestStockFilePath: {}, TestNewsFilePath: {}"
        .format(stockFilePath, newsFilePath, nCpuCores, testStockFilePath,
                testNewsFilePath))

    # Time series analysis
    dataFrame = pd.read_csv(stockFilePath, parse_dates=True, index_col="date")
    testDataFrame = pd.read_csv(testStockFilePath, parse_dates=True,
                                index_col="date")
    colsToInterpolate = ['open', 'high', 'low', 'close', 'adj_close', 'volume']

    # Interpolate missing values
    dataFrame = interpolate(dataFrame, colsToInterpolate)
    testDataFrame = interpolate(testDataFrame, colsToInterpolate)

    # Preprocessing
    dataFrame = preprocessing(dataFrame)
    attributes = dataFrame.drop('close', axis=1)
    target = dataFrame.loc[:, 'close']
    testDataFrame = preprocessing(testDataFrame)
    testAttributes = testDataFrame.drop('close', axis=1)
    testTarget = testDataFrame.loc[:, 'close']

    # Normalization - converting values to a comparable range
    attributes['volume'] /= 100000
    testAttributes['volume'] /= 100000

    # Predictions and errors of the different time series algorithms
    errorTimeSeries, predictionTimeSeries = predictFromTimeSeries(
        attributes, target, testAttributes, testTarget, nCpuCores)

    # News analysis
    newsDataFrame = pd.read_csv(newsFilePath, parse_dates=True, index_col='date')
    testNewsDataFrame = pd.read_csv(testNewsFilePath, parse_dates=True,
                                    index_col='date')

    # Cleaning of textual data
    newsAttributes, newsTarget = clean(newsDataFrame)
    newsTestAttributes, newsTestTarget = clean(testNewsDataFrame)

    # Embeddings
    newsAttributes, newsTestAttributes = embeddings(newsAttributes,
                                                    newsTestAttributes)

    # Predictions and errors of the different news algorithms
    errorNews, predictionNews = predictFromNews(newsAttributes, newsTarget,
                                                newsTestAttributes,
                                                newsTestTarget, nCpuCores)

    combinePredictions(errorTimeSeries, errorNews, predictionTimeSeries,
                       predictionNews, testTarget)
import interpolation

RADAR_FILE1_path = "/home/Radar/data/RAW/VAN/2010-08-08/201008081800_VAN.PPI1_A.raw"
RADAR_FILE2_path = "/home/Radar/data/RAW/VAN/2010-08-08/201008081810_VAN.PPI3_A.raw"
timestep = 11
# images = 'images/'
# morh = 'morph/'
filename = 'VAN10008081800-1810'
output = '/home/Radar/Int_Data/'

# interpolation.interpolate(RADAR_FILE1_path, RADAR_FILE2_path, timestep, images, morh, filename, 0, ['DBZ2', 'HCLASS2', 'KDP2', 'VEL2', 'RHOHV2'], output)
interpolation.interpolate(RADAR_FILE1_path, RADAR_FILE2_path, timestep,
                          filename, 0,
                          ['DBZ2', 'HCLASS2', 'KDP2', 'VEL2', 'RHOHV2'],
                          output)
def predictDay(timeSeriesDataFrame, newsDataFrame, modelTimeSeries, modelNews):
    '''
    Predicts the stock price of a certain day as per the given inputs
    :param timeSeriesDataFrame: data frame containing the time series inputs of the days for which prediction is desired
    :param newsDataFrame: data frame containing the news inputs of the days for which prediction is desired
    :param modelTimeSeries: trained model for prediction from time series, if provided
    :param modelNews: trained model for prediction from news, if provided
    :return: the final predicted value
    '''
    # Loading the dictionaries of Mean Absolute Errors
    with open('..\\errors.json', 'r') as errorFile:
        errors = json.load(errorFile)
    errorTimeSeries = errors['TimeSeries']
    errorNews = errors['News']
    with open(str('..\\features.json'), 'r') as featureFile:
        features = json.load(featureFile)

    # Selecting the best time series model if not given by the user
    if not modelTimeSeries:
        minKeyTimeSeries = min(errorTimeSeries, key=errorTimeSeries.get)
        modelName = getModelFromKey(minKeyTimeSeries)
        timeSeriesModel = joblib.load(
            str("..\\Models\\" + modelName + "TimeSeries.sav"))
        timeSeriesError = errorTimeSeries[minKeyTimeSeries]
        timeSeriesFeatures = features[minKeyTimeSeries]
    else:
        timeSeriesModel = joblib.load(modelTimeSeries)
        key = getKeyFromModel(modelTimeSeries)
        timeSeriesError = errorTimeSeries[key]
        timeSeriesFeatures = features[key]

    # Selecting the best news model if not given by the user
    if not modelNews:
        minKeyNews = min(errorNews, key=errorNews.get)
        modelName = getModelFromKey(minKeyNews)
        newsModel = joblib.load(str("..\\Models\\" + modelName + "News.sav"))
        newsError = errorNews[minKeyNews]
    else:
        newsModel = joblib.load(modelNews)
        newsError = errorNews[getKeyFromModel(modelNews)]

    # Processing the time series data frame
    colsToInterpolate = ['open', 'high', 'low', 'close', 'adj_close', 'volume']
    timeSeriesDataFrame = interpolate(timeSeriesDataFrame, colsToInterpolate)
    timeSeriesDataFrame = preprocessing(timeSeriesDataFrame)
    timeSeriesDataFrame = timeSeriesDataFrame.drop('close', axis=1)
    timeSeriesDataFrame['volume'] /= 100000

    # Prediction of the stock price using the time series model
    timeSeriesPrediction = timeSeriesModel.predict(
        timeSeriesDataFrame[timeSeriesFeatures])
    timeSeriesPrediction = pd.Series(timeSeriesPrediction,
                                     index=timeSeriesDataFrame.index)

    # Cleaning of textual data
    newsDataFrame = cleanForPrediction(newsDataFrame)
    # Finding the word embeddings of the textual data
    newsData = embeddings(newsDataFrame)
    newsData = newsData[0]
    newsDataFrame = pd.DataFrame(newsData, index=newsDataFrame.index)

    # Prediction of the stock price using the news model
    newsPrediction = newsModel.predict(newsDataFrame)
    newsPrediction = pd.Series(newsPrediction, index=newsDataFrame.index)

    # Combining the predictions, weighting each model by the other's error
    value1 = timeSeriesPrediction * newsError
    value2 = newsPrediction * timeSeriesError
    value = value1.add(value2)
    value /= (timeSeriesError + newsError)
    print(value)
    return value
    x.append(x_local)

z = []
for i in range(Nz + 1):
    z_local = (Ca / 4) * ((1 - m.cos(i * m.pi / (Nz + 1))) +
                          (1 - m.cos((i + 1) * m.pi / (Nz + 1))))
    z.append(z_local)

### Creating some new arrays to test the interpolation with
x_new = np.linspace(x[0], x[-1], 100)
z_new = np.linspace(z[0], z[-1], 100)

### Doing the actual interpolation with Scipy! ###
################################################
### Call the created interpolation function
result = interpolate(x_new, z_new)[0][3]

### Plot it...
plt.subplot(131)
plt.title("Current model")
plt.imshow(result, cmap='gnuplot2')

### Check it by making scipy do the exact same kind of interpolation ###
##################################################
### First interpolate in the x-direction, as we did
result_np = np.zeros((len(x_new), len(z)))
for j in range(len(z)):
    y = d_x[:, j]
    ### This specific interpolation is also 1d cubic interpolation!!
    tck = sc.interpolate.splrep(x, y, s=0)
    y_new = sc.interpolate.splev(x_new, tck, der=0)
    for i in range(len(y_new)):
    altitude = [0, 0]
    velocity = 0
    host = None
    energy = 0

    def potential(self, alt):
        return -constants['G'] * self.host.mass / (alt + self.host.radius)

    # Mission from alt_from to alt_to, attaining a velocity of vel, or else orbital;
    # launching from and orbiting round host, with extra energy E
    def __init__(self, alt_from, alt_to, vel=False, host=None, E=0):
        self.altitude = [alt_from, alt_to]
        if host:
            self.host = host
        self.velocity = constants['G'] * self.host.mass / (self.altitude[1] +
                                                           self.host.radius)
        if vel:
            self.velocity = vel**2
        self.energy = .5 * self.velocity
        self.energy += self.potential(self.altitude[1]) - self.potential(self.altitude[0])
        self.energy += E


# Defaults and predefined data
Sun = body(1.9891e30, 6.96342e8)
Earth = body(5.972e24, 6373319, 'earth.csv')
Moon = body(7.3477e22, 1.7371e6)
CD = interpolate([[0, 0.3], [0.2, 0.25], [0.5, 0.2], [0.78, 0.25], [0.84, 0.30],
                  [1.0, 0.38], [1.2, 0.5], [1.5, 0.56], [2.0, 0.52], [2.5, 0.45],
                  [3.0, 0.43], [3.5, 0.42], [4.0, 0.42], [1e26, 0.42]])
mission.host = Earth
LEO = mission(0, 1160000)
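# The `interpolate` used to build CD takes a table of [Mach, CD] pairs and is
# later called as a function. A minimal sketch, assuming a piecewise-linear
# interpolant; the sentinel point at 1e26 in the table suggests the original
# implementation may not clamp outside the tabulated range, whereas np.interp
# clamps to the end values by default:
import numpy as np

def interpolate(table):
    xs = np.array([p[0] for p in table])
    ys = np.array([p[1] for p in table])
    return lambda x: float(np.interp(x, xs, ys))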
            plt_cut_y[l][j].append(ncuts[l + 2].tolist())

# Create 'len(plt_cut_y)' figures with cuts plotted in 'ncols' columns
cuts.plot_cuts(len(plt_cut_y), ncols, plt_cut_x, plt_cut_y)

############################# COLORMAPS ########################################
if prm.make_cm:
    print("Plotting colormaps...")

    # Interpolating and transposing i,j -> j,i
    z_old = [[None for j in range(len(values_local[0][0]) - 4)]
             for i in range(num_plots)]
    for i in range(nrows):
        for j in range(ncols):
            # x[ij_to_arg[i,j]], y[ij_to_arg[i,j]], z_old[ij_to_arg[i,j]] = interpolation.interpolate(values_local[ij_to_arg[i,j]])
            x[ij_to_arg[j, i]], y[ij_to_arg[j, i]], z_old[ij_to_arg[j, i]] = \
                interpolation.interpolate(values_local[ij_to_arg[i, j]])

    # Reshaping z[numplots][numfigs] -> z[numfigs][numplots]
    z = [[None for i in range(num_plots)]
         for j in range(0, len(values_local[0][0]) - 4)]
    for i in range(nrows):
        for j in range(ncols):
            for fg in range(len(values_local[0][0]) - 4):
                z[fg][ij_to_arg[i, j]] = z_old[ij_to_arg[i, j]][fg]

    # Getting the minimum and maximum for each row
    zmin, zmax = ([None for i in range(len(values_local[0][0]) - 4)]
                  for _ in range(2))
    for k in range(len(values_local[0][0]) - 4):
        zmin[k] = [
                    metavar='PATH')
parser.add_argument('-plotdir', type=str, default='',
                    help='Folder to hold output plots', metavar='DIRPATH')
parser.add_argument('-out', type=str, help='Name of vtk output file')
args = parser.parse_args()

if args.plotdir and not os.path.isdir(args.plotdir):
    print('plotdir doesn\'t exist or is not a directory')
    exit()

data = file_loading.load_data_from_file(args.file)
ans2 = interpolation.interpolate(data, args.grid, args.p)
ans = ans2['values']

x = np.array([i[0] for i in data['points']] +
             [i[0] for i in data['internal_points']] +
             [i[0] for i in ans2['points']])
y = np.array([i[1] for i in data['points']] +
             [i[1] for i in data['internal_points']] +
             [i[1] for i in ans2['points']])
Sxx = np.array(data['Sxx'] + data['inSxx'] + ans['Sxx'])
Syy = np.array(data['Syy'] + data['inSyy'] + ans['Syy'])
Sxy = np.array(data['Sxy'] + data['inSxy'] + ans['Sxy'])
Szz = np.array(data['Szz'] + data['inSzz'] + ans['Szz'])
SE = np.array(data['SE'] + data['inSE'] + ans['SE'])
UX = np.array(data['UX'] + data['inUX'] + ans['UX'])
name = str(start) + "-" + str(end)
filename = site + "_" + date + '_' + 'hours' + str(start) + '-' + str(end) + '_TS_' + str(step)
fo = open(filename + ".log", "w")

print "starting interpolation:"
print "site: " + site
print "timestep: " + str(timesteps)
print "name: " + name
print "Sweep: " + str(sweep)
print "RADAR_FILE1: " + RADAR_FILE1
print "RADAR_FILE2: " + RADAR_FILE2
print "filename: " + filename

tmp = interpolation.interpolate(RADAR_FILE1, RADAR_FILE2, timesteps, filename,
                                sweep,
                                ['DBZ2', 'HCLASS2', 'KDP2', 'RHOHV2', 'VEL2', 'ZDR2'],
                                data_path, fo)
if tmp is None:
    print "Error in interpolation"
    continue

# Write log
fo.write("site: " + site + "\n")
fo.write("timestep: " + str(timesteps) + "\n")
fo.write("name: " + name + "\n")
fo.write("Sweep: " + str(sweep) + "\n")
fo.write("RADAR_FILE1: " + RADAR_FILE1 + "\n")
fo.write("RADAR_FILE2: " + RADAR_FILE2 + "\n")
fo.write("filename: " + filename)
fo.close()
def _set_particles(self, locations):
    """
    **Initializes particle location, velocity, and additional fields.**

    Inputs:
      - pyticleClass and an array of particle locations
    """
    particles = container()
    locations = np.atleast_2d(locations)

    if self.opt.useLL:
        particles.lon = locations[:, 0]
        particles.lat = locations[:, 1]
        x, y = self.grid.proj(particles.lon, particles.lat)
        particles.x = x
        particles.y = y
    else:
        particles.x = locations[:, 0]
        particles.y = locations[:, 1]

    particles.xpt = particles.x
    particles.ypt = particles.y

    particles.indomain = self.grid.finder.__call__(particles.x, particles.y)
    if np.sum(particles.indomain != -1) == 0:
        print('No particles are initially in the domain.')
        sys.exit()

    if '3D' in self.opt.gridDim:
        particles.z = locations[:, 2]
        particles.zpt = particles.z
        # Find particle height
        particles.hpt = interpolate(self, self.grid.h, particles)
        particles.ept = interpolate(self, self.grid.zeta[self.time.starttime,],
                                    particles)
        # If particles are above the water, place them in the water
        particles.zpt = np.min([particles.zpt, particles.ept], axis=0)
        # If a particle is within cutoff (default 1 cm) of the bottom, stop movement
        particles.inwater = (particles.zpt + particles.hpt) > self.opt.cutoff
        # And if they are at the bottom, put them at the bottom, not below it
        particles.zpt = np.max([particles.zpt, -particles.hpt], axis=0)
        # Finally, update the sigma position of the particle
        # for layer interpolation of the velocity
        particles.sigpt = np.divide(particles.zpt,
                                    -1 * (particles.hpt + particles.ept))
    else:
        # If 2D, then particles are always *vertically* in the water column
        particles.inwater = (particles.x * 0 + 1).astype(bool)

    particles.time = self.time.time
    particles.npts = len(particles.x)

    # Run the interpolation code here to get initial particle velocities
    particles.u = interpolate(self, self.grid.u[self.time.starttime,], particles)
    particles.v = interpolate(self, self.grid.v[self.time.starttime,], particles)
    if '3D' in self.opt.gridDim:
        particles.w = interpolate(self, self.grid.ww[self.time.starttime,],
                                  particles)
    particles.loop = 0

    return particles
import quandl
import math
import numpy as np
from sklearn import preprocessing, cross_validation, svm
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestClassifier
import matplotlib.pyplot as plt

from interpolation import interpolate
from normalisation import normalize

df = quandl.get('WIKI/GOOGL', api_key='pYaKjEHyu4Tje_VTzHu6',
                start_date='2016-12-22', end_date='2018-03-23')
df = interpolate(df, list(df))
df = normalize(df, list(df))

result = df.corr(method='pearson', min_periods=1)
print(result)
result = df.cov()
print(result)
def _interpolate_segment(self, segment):
    try:
        return interpolation.interpolate(
            segment, self._args.interpolation_resolution)
    except interpolation.InterpolationException as exception:
        print "WARNING: interpolation failed: %s" % exception
        return segment