def get_ray_out(ray_out_dir):
    # Collect rayfiles for one run mode from the output directory,
    # skipping the '_0.ray' file and stopping after 16 worker files.
    file_titles = os.listdir(ray_out_dir)
    mode_name = 'mode7'
    # sort by worker number, e.g. '..._worker_<n>.ray'
    file_titles.sort(key=lambda x: int(x.split('worker_')[1][:-4]))

    raylist = []
    fcount = 0
    for filename in file_titles:
        if '.ray' in filename and mode_name in filename:
            if '_0.ray' in filename:
                pass
            else:
                print(filename)
                raylist += read_rayfile(os.path.join(ray_out_dir, filename))
                fcount += 1
        if fcount == 16:
            break
    return raylist
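# A minimal usage sketch (illustrative only): the directory below is a
# hypothetical rayfile output folder, not a path taken from this code.
def demo_get_ray_out():
    rays = get_ray_out('/tmp/ray_out/2020-01-01 00:00:00')
    print('loaded', len(rays), 'rays from up to 16 mode7 worker files')
    return rays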
def getBdir(ray_start, ray_datenum, rayfile_directory, thetas, phis, md,
            select_random=False):
    # returns unit vectors (SM cartesian) of ray directions corresponding to
    # the input theta and phi values, measured from the local B-field direction
    positions = ray_start
    directions = [(0, 0, 0)]
    freqs = [15e3]

    # going to run a ray real quick to sample B0 at the start point
    single_run_rays(ray_datenum, positions, directions, freqs,
                    rayfile_directory, md, runmodeldump=False)

    # Load all the rayfiles in the output directory
    ray_out_dir = rayfile_directory + '/' + dt.datetime.strftime(
        ray_datenum, '%Y-%m-%d %H:%M:%S')
    file_titles = os.listdir(ray_out_dir)

    # create empty lists to fill with ray files and damp files
    raylist = []
    for filename in file_titles:
        if '.ray' in filename and str(md) in filename and 'Main' in filename:
            print(filename)
            raylist += read_rayfile(os.path.join(ray_out_dir, filename))

    # get b direction for this ray
    for r in raylist:
        B0 = [r['B0'].x[0], r['B0'].y[0], r['B0'].z[0]]  # vec in T in SM car coordinates
        # create unit vector
        Bmag = np.sqrt(r['B0'].x[0]**2 + r['B0'].y[0]**2 + r['B0'].z[0]**2)
        Bunit = [r['B0'].x[0] / Bmag, r['B0'].y[0] / Bmag, r['B0'].z[0] / Bmag]

    # now we have Bunit in SM car
    # let's put it in spherical (easier for changing the wavenormal)
    sph_dir = convert2([Bunit], ray_datenum, 'SM', 'car', ['Re', 'Re', 'Re'],
                       'SM', 'sph', ['Re', 'deg', 'deg'])

    # also return resonance angle, can be useful for initializing rays
    from ray_plots import stix_parameters
    R, L, P, S, D = stix_parameters(r, 0, r['w'])  # get stix params for initial time point
    resangle = np.arctan(np.sqrt(-P / S))

    converted_dirs = []

    # if select_random was chosen, thetas and phis are passed in as a list of
    # zeros of length nrays (thetas[0] carries the hemisphere flag)
    if select_random == True:
        nrays = len(thetas)
        hemi_mult = thetas[0]
        thetas = []
        phis = []
        resangle_deg = resangle * 180 / np.pi

        for n in range(0, nrays):
            # sample theta as concentric circles around the z axis, max at resonance angle
            thetas.append(random.random() * (resangle_deg - 3))
            # uniform azimuth around the z axis
            phis.append(random.random() * 360)

        # build an orthonormal frame (r1, r2, Bunit) with Bunit as the third axis
        if Bunit[0] == 0 or Bunit[2] == 0:
            r1 = [1, (-1 * Bunit[0] - Bunit[2]) / Bunit[1], 1]
        else:
            r1 = [1, 1, (-1 * Bunit[1] - Bunit[0]) / Bunit[2]]
        r1 = np.array(r1) / np.linalg.norm(np.array(r1))
        r2 = np.cross(r1, Bunit)
        T_rotate = np.column_stack((r1, r2, Bunit))

        #ax = plt.axes(projection='3d')
        for th, ph in zip(thetas, phis):
            r = 1 / np.cos(th * D2R)
            cone_vec = np.array([r * np.sin(th * D2R) * np.cos(ph * D2R),
                                 r * np.sin(th * D2R) * np.sin(ph * D2R),
                                 r * np.cos(th * D2R)])
            # rotate from the field-aligned frame into SM cartesian
            cone_vec = np.matmul(T_rotate, np.transpose(cone_vec))

            # flip to the opposite hemisphere if requested
            if hemi_mult == 180:
                zsign = -1
            else:
                zsign = 1

            cone_vec = cone_vec / np.linalg.norm(cone_vec)
            converted_dirs.append(zsign * cone_vec)

            #ax.quiver(0,0,0,zsign*cone_vec[0],zsign*cone_vec[1],zsign*cone_vec[2],length=1)
            #ax.quiver(0,0,0,Bunit[0],Bunit[1],Bunit[2],length=1.25)
        #ax.axes.set_xlim3d(left=0, right=1.5)
        #ax.axes.set_ylim3d(bottom=0, top=1.5)
        #ax.axes.set_zlim3d(bottom=0, top=1.5)
        #plt.show()
        #plt.close()

    # add theta and phi as desired
    else:
        for theta, phi in zip(thetas, phis):
            new_dir = [sph_dir[0][0], sph_dir[0][1] + theta, sph_dir[0][2] + phi]
            converted_dir = convert2([new_dir], ray_datenum, 'SM', 'sph',
                                     ['Re', 'deg', 'deg'], 'SM', 'car',
                                     ['Re', 'Re', 'Re'])
            converted_dirs.append(converted_dir[0])

    # returns unit vectors of directions corresponding to input theta and phi vals
    return converted_dirs, resangle, thetas, phis
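# A minimal, self-contained sketch of the rotation used above: build a unit
# vector at a chosen angle th (deg) away from an arbitrary field direction
# Bunit by stacking two perpendicular unit vectors with Bunit as the third
# column of a rotation matrix. The Bunit, th, and ph values are made up for
# illustration; np is assumed imported as in the functions above.
def demo_cone_about_B(Bunit=(0.3, -0.5, 0.8), th=20.0, ph=45.0):
    d2r = np.pi / 180.0
    Bunit = np.array(Bunit) / np.linalg.norm(Bunit)
    # any vector perpendicular to Bunit works as r1 (this choice fails if Bunit || z)
    r1 = np.cross(Bunit, [0.0, 0.0, 1.0])
    r1 = r1 / np.linalg.norm(r1)
    r2 = np.cross(r1, Bunit)
    T_rotate = np.column_stack((r1, r2, Bunit))
    cone_vec = np.array([np.sin(th * d2r) * np.cos(ph * d2r),
                         np.sin(th * d2r) * np.sin(ph * d2r),
                         np.cos(th * d2r)])
    out = T_rotate @ cone_vec
    # the angle between the result and Bunit recovers th
    angle_deg = np.arccos(np.dot(out, Bunit)) / d2r
    return out, angle_deg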
f = open(th_file)
thetas_save = []
for line in f:
    thetas_save.append(float(line))
f.close()

mode_name = 'mode' + str(md) + '.'

raylist = []
r_savex = []
r_savey = []

# use mode name to avoid workers of the same label
for filename in file_titles:
    if '.ray' in filename and mode_name in filename:
        raylist += read_rayfile(os.path.join(ray_out_dir, filename))
        print(filename)

# lets chunk into a time vector
t = np.linspace(0, 0.4, num=1)
t = [0.2]  # overrides the linspace above -- use a single 0.2 s snapshot
imgs = []

# we need the positions of the satellites -- use the sat class
dsx = sat()              # define a satellite object
dsx.catnmbr = 44344      # provide NORAD ID
dsx.time = ray_datenum   # set time
dsx.getTLE_ephem()       # get TLEs nearest to this time -- sometimes this will lag

# propagate the orbit! setting sec=0 will give you just the position at that time
dsx.propagatefromTLE(sec=0,
def find_crossings(ray_dir='/shared/users/asousa/WIPP/rays/2d/nightside/gcpm_kp0/',
                   mlt=0,
                   tmax=10,
                   dt=0.1,
                   lat_low=None,
                   f_low=200,
                   f_hi=30000,
                   center_lon=None,
                   lon_spacing=None,
                   itime=datetime.datetime(2010, 1, 1, 0, 0, 0),
                   lat_step_size=1,
                   n_sub_freqs=10,
                   Llims=[1.2, 8],
                   L_step=0.2,
                   dlat_fieldline=1,
                   frame_directory=None,
                   DAMP_THRESHOLD=1e-3):

    # Constants
    Hz2Rad = 2. * np.pi
    D2R = np.pi / 180.
    R2D = 180. / np.pi
    H_IONO_BOTTOM = 1e5
    H_IONO_TOP = 1e6
    R_E = 6371e3
    C = 2.997956376932163e8

    # DAMP_THRESHOLD = 1e-3  # Threshold below which we don't log a crossing

    lat_hi = lat_low + lat_step_size
    Lshells = np.arange(Llims[0], Llims[1], L_step)
    L_MARGIN = L_step / 2.0
    # print "doing Lshells ", Lshells

    # Coordinate transform tools
    xf = xflib.xflib(
        lib_path='/shared/users/asousa/WIPP/WIPP_stencils/python/methods/libxformd.so')

    t = np.arange(0, tmax, dt)
    itime = datetime.datetime(2010, 1, 1, 0, 0, 0)  # note: this overrides the itime argument

    # Find available rays
    d = os.listdir(ray_dir)
    freqs = sorted([int(f[2:]) for f in d if f.startswith('f_')])
    d = os.listdir(os.path.join(ray_dir, 'f_%d' % freqs[0]))
    lons = sorted([float(f[4:]) for f in d if f.startswith('lon_')])
    d = os.listdir(os.path.join(ray_dir, 'f_%d' % freqs[0], 'lon_%d' % lons[0]))
    lats = sorted([float(s.split('_')[2]) for s in d if s.startswith('ray_')])

    # Latitude spacing:
    latln_pairs = [(lat_low, lat_hi)]

    # Adjacent frequencies to iterate over
    freqs = [f for f in freqs if f >= f_low and f <= f_hi]
    freq_pairs = list(zip(freqs[0:-1], freqs[1:]))  # list() so it can be reused and len()'d

    # --------------- Load and interpolate the center longitude entries ------------------------------------
    center_data = dict()
    for freq in freqs:
        logging.info("Loading freq: %d" % freq)
        for lat in [lat_low, lat_hi]:
            lon = center_lon
            filename = os.path.join(ray_dir, 'f_%d' % freq, 'lon_%d' % lon,
                                    'ray_%d_%d_%d.ray' % (freq, lat, lon))
            # print filename
            rf = read_rayfile(filename)[0]

            filename = os.path.join(ray_dir, 'f_%d' % freq, 'lon_%d' % lon,
                                    'damp_%d_%d_%d.ray' % (freq, lat, lon))
            df = read_damp(filename)[0]

            t_cur = t[t <= rf['time'].iloc[-1]]

            # Interpolate onto our new time axis:
            x = interpolate.interp1d(rf['time'], rf['pos']['x']).__call__(t_cur) / R_E
            y = interpolate.interp1d(rf['time'], rf['pos']['y']).__call__(t_cur) / R_E
            z = interpolate.interp1d(rf['time'], rf['pos']['z']).__call__(t_cur) / R_E
            d = interpolate.interp1d(df['time'], df['damping'],
                                     bounds_error=False, fill_value=0).__call__(t_cur)
            v = interpolate.interp1d(df['time'], rf['vgrel'], axis=0).__call__(t_cur)
            vmag = np.linalg.norm(v, axis=1)
            B = interpolate.interp1d(rf['time'], rf['B0'], axis=0).__call__(t_cur)
            Bnorm = np.linalg.norm(B, axis=1)
            Bhat = B / Bnorm[:, np.newaxis]

            stixR, stixL, stixP = calc_stix_parameters(rf, t_cur)

            n = interpolate.interp1d(df['time'], rf['n'], axis=0).__call__(t_cur)
            mu = np.linalg.norm(n, axis=1)

            # kvec = n*rf['w']/C
            # kz = -1.0*np.sum(kvec*Bhat, axis=1)  # dot product of rows
            # kx = np.linalg.norm(kvec + Bhat*kz[:,np.newaxis], axis=1)
            # psi = R2D*np.arctan2(-kx, kz)

            # kvec = n*rf['w']/C
            # kz = np.sum(kvec*Bhat, axis=1)  # dot product of rows
            # kx = np.linalg.norm(kvec - Bhat*kz[:,np.newaxis], axis=1)
            # psi = np.arctan2(kx, kz)
            # psi = R2D*np.arctan2(kx, kz)

            kvec = n * rf['w'] / C
            kz = np.sum(kvec * Bhat, axis=1)  # dot product of rows
            kx = np.linalg.norm(np.cross(kvec, Bhat), axis=1)  # cross product of rows
            psi = np.arctan2(kx, kz)

            # Stash it somewhere:
            key = (freq, lat, lon)
            curdata = dict()

            # Flatten out any longitude variation, just to be sure:
            curdata['pos'] = flatten_longitude_variation(np.vstack([x, y, z]), itime, xf=xf)
            # curdata['pos'] = np.vstack([x, y, z])
            curdata['damp'] = d
            curdata['nt'] = len(t_cur)
            curdata['stixR'] = stixR
            curdata['stixP'] = stixP
            curdata['stixL'] = stixL
            curdata['mu'] = mu
            curdata['psi'] = psi
            curdata['vgrel'] = vmag

            center_data[key] = curdata

    #------------ Rotate center_longitude rays to new longitudes ---------------------------
    logging.info("Rotating to new longitudes")
    ray_data = dict()
    for key in center_data.keys():
        for lon in [center_lon - lon_spacing / 2., center_lon + lon_spacing / 2.]:
            newkey = (key[0], key[1], lon)
            dlon = lon - key[2]
            d = dict()
            d['pos'] = rotate_latlon(center_data[key]['pos'], itime, 0, dlon, xf)
            d['damp'] = center_data[key]['damp']
            d['stixR'] = center_data[key]['stixR']
            d['stixL'] = center_data[key]['stixL']
            d['stixP'] = center_data[key]['stixP']
            d['mu'] = center_data[key]['mu']
            d['psi'] = center_data[key]['psi']
            d['vgrel'] = center_data[key]['vgrel']

            ray_data[newkey] = d

    # ------------------ Set up field lines ----------------------------
    logging.info("Setting up EA grid")
    fieldlines = gen_EA_array(Lshells, dlat_fieldline, lon, itime, L_MARGIN, xf=xf)

    #----------- Step through and fill in the voxels (the main event) ---------------------
    logging.info("Starting interpolation")

    lat_pairs = [(lat_low, lat_hi)]
    lon_pairs = [(center_lon - lon_spacing / 2., center_lon + lon_spacing / 2.)]

    # output space
    nfl = len(fieldlines)
    nlons = 1
    nt = len(t)
    n_freq_pairs = len(freq_pairs)
    data_total = np.zeros([nfl, n_freq_pairs, nlons, nt])

    lon1 = center_lon - lon_spacing / 2.
    lon2 = center_lon + lon_spacing / 2.

    for t_ind in np.arange(nt - 1):
        # Per frequency
        data_cur = np.zeros(nfl)
        logging.info("t = %g" % (t_ind * dt))

        for freq_ind, (f1, f2) in enumerate(freq_pairs):
            # print "doing freqs between ", f1, "and", f2

            # Loop over adjacent sets:
            if n_sub_freqs == 0:
                ff = np.arange(0, (f2 - f1), 1)   # This version for uniform in frequency
            else:
                ff = np.arange(0, n_sub_freqs, 1)  # This version for constant steps per pair
            nf = len(ff)
            fine_freqs = f1 + (f2 - f1) * ff / nf
            # print fine_freqs

            for lat1, lat2 in lat_pairs:
                k0 = (f1, lat1, lon1)
                k1 = (f1, lat2, lon1)
                k2 = (f2, lat1, lon1)
                k3 = (f2, lat2, lon1)
                k4 = (f1, lat1, lon2)
                k5 = (f1, lat2, lon2)
                k6 = (f2, lat1, lon2)
                k7 = (f2, lat2, lon2)

                clat = (lat1 + lat2) / 2.
                f_center = (f1 + f2) / 2.

                tmax_local = min(np.shape(ray_data[k0]['pos'])[1],
                                 np.shape(ray_data[k1]['pos'])[1],
                                 np.shape(ray_data[k2]['pos'])[1],
                                 np.shape(ray_data[k3]['pos'])[1],
                                 np.shape(ray_data[k4]['pos'])[1],
                                 np.shape(ray_data[k5]['pos'])[1],
                                 np.shape(ray_data[k6]['pos'])[1],
                                 np.shape(ray_data[k7]['pos'])[1])

                if (t_ind < tmax_local - 1):
                    points_4d = np.hstack([
                        np.vstack([ray_data[k0]['pos'][:, t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k1]['pos'][:, t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k2]['pos'][:, t_ind:t_ind + 2], np.ones([1, 2]) * nf]),
                        np.vstack([ray_data[k3]['pos'][:, t_ind:t_ind + 2], np.ones([1, 2]) * nf]),
                        np.vstack([ray_data[k4]['pos'][:, t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k5]['pos'][:, t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k6]['pos'][:, t_ind:t_ind + 2], np.ones([1, 2]) * nf]),
                        np.vstack([ray_data[k7]['pos'][:, t_ind:t_ind + 2], np.ones([1, 2]) * nf])
                    ])

                    voxel_vol = voxel_vol_nd(points_4d) * pow(R_E, 3.)
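                    # k0..k7 are the eight (freq, lat, lon) corner rays of this cell;
                    # points_4d stacks each corner's position at t_ind and t_ind+1 plus a
                    # sub-frequency coordinate (0 for f1, nf for f2), giving 16 points in
                    # (x, y, z, frequency) space. voxel_vol_nd() gives the cell volume
                    # spanned by those points, scaled to physical units with R_E^3.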
                    # damps_2d = np.hstack([ray_data[k0]['damp'][t_ind:t_ind+2],
                    #                       ray_data[k1]['damp'][t_ind:t_ind+2],
                    #                       ray_data[k2]['damp'][t_ind:t_ind+2],
                    #                       ray_data[k3]['damp'][t_ind:t_ind+2]])
                    # damping_avg = np.mean(damps_2d)

                    damping_pts = np.hstack([ray_data[kk]['damp'][t_ind:t_ind + 2]
                                             for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    damp_interp = interpolate.NearestNDInterpolator(points_4d.T, damping_pts)

                    points_2d = np.hstack([
                        np.vstack([ray_data[k4]['pos'][[0, 2], t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k5]['pos'][[0, 2], t_ind:t_ind + 2], np.zeros([1, 2])]),
                        np.vstack([ray_data[k6]['pos'][[0, 2], t_ind:t_ind + 2], np.ones([1, 2]) * nf]),
                        np.vstack([ray_data[k7]['pos'][[0, 2], t_ind:t_ind + 2], np.ones([1, 2]) * nf])
                    ])

                    # We really should interpolate these 16 corner points instead of just averaging them.
                    stixR_pts = np.hstack([ray_data[kk]['stixR'][t_ind:t_ind + 2]
                                           for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    stixL_pts = np.hstack([ray_data[kk]['stixL'][t_ind:t_ind + 2]
                                           for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    stixP_pts = np.hstack([ray_data[kk]['stixP'][t_ind:t_ind + 2]
                                           for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    mu_pts = np.hstack([ray_data[kk]['mu'][t_ind:t_ind + 2]
                                        for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    psi_pts = np.hstack([ray_data[kk]['psi'][t_ind:t_ind + 2]
                                         for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])
                    vel_pts = np.hstack([ray_data[kk]['vgrel'][t_ind:t_ind + 2]
                                         for kk in [k0, k1, k2, k3, k4, k5, k6, k7]])

                    stixR_interp = interpolate.NearestNDInterpolator(points_4d.T, stixR_pts)
                    stixL_interp = interpolate.NearestNDInterpolator(points_4d.T, stixL_pts)
                    stixP_interp = interpolate.NearestNDInterpolator(points_4d.T, stixP_pts)
                    mu_interp = interpolate.NearestNDInterpolator(points_4d.T, mu_pts)
                    psi_interp = interpolate.NearestNDInterpolator(points_4d.T, psi_pts)
                    vel_interp = interpolate.NearestNDInterpolator(points_4d.T, vel_pts)

                    # tri = Delaunay(points_2d.T, qhull_options='QJ')
                    tri = Delaunay(points_4d.T, qhull_options='QJ')

                    # Loop through the output fieldlines
                    for fl_ind, fl in enumerate(fieldlines):
                        ix = np.arange(0, len(fl['pos']))
                        ief = np.arange(0, nf)
                        px, pf = np.meshgrid(ix, ief, indexing='ij')  # in 3d, ij gives xyz, xy gives yxz. dumb.

                        # newpoints = np.hstack([fl['pos'][px.ravel(),:][:,[0,2]], np.atleast_2d(ff[pf.ravel()]).T])
                        newpoints = np.hstack([fl['pos'][px.ravel(), :],
                                               np.atleast_2d(ff[pf.ravel()]).T])

                        mask = (tri.find_simplex(newpoints) >= 0) * 1.0
                        # mask = mask.reshape([len(ix), len(ief)])

                        # Entries in newpoints are inside the volume if mask is nonzero
                        # (Mask gives the index of the triangular element which contains it)
                        # for row in newpoints[mask > 0]:
                        #     print "L:", fl['L'], xf.sm2rllmag(row[:-1], itime)
                        #     fieldlines[fl_ind]['crossings'].append(xf.sm2rllmag(row[:-1], itime))

                        mask = mask.reshape([len(ix), len(ief)])
                        minds = np.nonzero(mask)

                        if len(minds[0]) > 0:
                            # unscaled_pwr = (damping_avg/voxel_vol)
                            hit_lats = fl['lat'][minds[0]]
                            hit_freqs = fine_freqs[minds[1]]
                            # print "t = ", t_ind, "L = ", fl['L']
                            # print hit_lats, hit_freqs

                            # hit latitude, hit frequency (indices)
                            for hl, hf in zip(minds[0], minds[1]):
                                cur_pos = np.hstack([fl['pos'][hl, :], ff[hf]])
                                psi = psi_interp(cur_pos)[0]
                                mu = mu_interp(cur_pos)[0]
                                damp = damp_interp(cur_pos)[0]
                                vel = vel_interp(cur_pos)[0] * C

                                # [unitless][m/s][1/m^3] ~ 1/m^2/sec. Multiply by total input energy.
                                if (damp > DAMP_THRESHOLD):
                                    pwr_scale_factor = damp * vel / voxel_vol
                                    tt = np.round(100. * t_ind * dt) / 100.
                                    fieldlines[fl_ind]['crossings'][hl].append(
                                        (tt, fine_freqs[hf], pwr_scale_factor, psi, mu, damp, vel))
                                    # fl['crossings'].append([fl['L'], fl['lat'][hl], t_ind*dt, fine_freqs[hf]])

                                    # Stix parameters are functions of the background medium only,
                                    # but we'll average them because we're grabbing them from the
                                    # rays at slightly different locations within the cell.
                                    # print np.shape(fl['pos'])
                                    fieldlines[fl_ind]['stixR'][hl] += stixR_interp(cur_pos)[0]
                                    fieldlines[fl_ind]['stixL'][hl] += stixL_interp(cur_pos)[0]
                                    fieldlines[fl_ind]['stixP'][hl] += stixP_interp(cur_pos)[0]

                                    fieldlines[fl_ind]['hit_counts'][hl] += 1

    # logging.info("finished with interpolation")
    logging.info("finished with interpolation")

    # Average the background medium parameters:
    for fl_ind, fl in enumerate(fieldlines):
        for lat_ind in range(len(fl['crossings'])):
            n_hits = fl['hit_counts'][lat_ind]
            if n_hits > 0:
                # print fl['L'], ":", fl['lat'][lat_ind], ": hit count: ", fl['hit_counts'][lat_ind]
                # average stixR, stixL, stixP
                fl['stixP'][lat_ind] /= n_hits
                fl['stixR'][lat_ind] /= n_hits
                fl['stixL'][lat_ind] /= n_hits
                fl['hit_counts'][lat_ind] = 1

    out_data = dict()
    out_data['fieldlines'] = fieldlines
    out_data['time'] = t
    out_data['Lshells'] = Lshells
    out_data['lat_low'] = lat_low
    out_data['lat_hi'] = lat_hi
    out_data['fmin'] = f_low
    out_data['fmax'] = f_hi
    out_data['freq_pairs'] = freq_pairs

    return out_data
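# An illustrative call of find_crossings (the latitude, longitude, and spacing
# values here are placeholders, not taken from an actual run; ray_dir falls back
# to the default path defined above):
def demo_find_crossings():
    out = find_crossings(lat_low=35, center_lon=0, lon_spacing=1,
                         tmax=10, dt=0.1, f_low=200, f_hi=30000)
    # each fieldline's 'crossings' entry holds, per latitude bin, a list of
    # (t, frequency, pwr_scale_factor, psi, mu, damp, vel) tuples
    return out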
def dopp_delay(nrays, rayfile_directory, tnt_times_shift, dur_shift, startf_shift, stopf_shift):

    find_tle_time = tnt_times_shift[0]
    find_tle_time = find_tle_time.replace(tzinfo=dt.timezone.utc)

    # get angle defs
    thetas, phis = antenna_MC(nrays)
    thetas = []
    for nr in range(nrays // 2):
        th = random.randrange(60, 90)
        th = random.randrange(-90, -60)  # note: overwrites the value drawn above
        thetas.append(th)
    phis = np.zeros(nrays)

    # change dirs to SR interface
    cwd = os.getcwd()
    os.chdir('/home/rileyannereid/workspace/SR_interface')

    # define the satellite objects
    dsx = sat()
    dsx.catnmbr = 44344
    dsx.time = find_tle_time
    dsx.getTLE_ephem()

    vpm = sat()
    vpm.catnmbr = 45120
    vpm.time = find_tle_time
    vpm.getTLE_ephem()

    # loop through tnt times
    tnt_dop = []
    tnt_t = []

    # record all shifts
    alldop = []
    allsec = []
    allthetas = []

    for tim, dur, strf, stpf in zip(tnt_times_shift, dur_shift, startf_shift, stopf_shift):
        pulse_t = []
        pulse_freqs = []

        if dur == 150:
            pulse_t.append(tim)
            pulse_t.append(tim + dt.timedelta(microseconds=75e3))
            pulse_t.append(tim + dt.timedelta(microseconds=150e3))
            pulse_freqs.append(strf)
            pulse_freqs.append(strf + 100)
            pulse_freqs.append(strf - 100)
        elif dur == 250:  # exclude large f ramps
            pulse_t.append(tim)
            pulse_t.append(tim + dt.timedelta(microseconds=dur * 1e3))
            pulse_freqs.append(strf)
            pulse_freqs.append(stpf)

        # loop through 'pulses'
        pulse_dop = []
        pulse_tdelay = []
        for t_time, freq in zip(pulse_t, pulse_freqs):
            dsx.time = t_time
            vpm.time = t_time
            vpm.propagatefromTLE(sec=0, orbit_dir='future', crs='SM', carsph='car', units=['m', 'm', 'm'])
            dsx.propagatefromTLE(sec=0, orbit_dir='future', crs='SM', carsph='car', units=['m', 'm', 'm'])

            ray_start = dsx.pos
            ray_start_vel = dsx.vel[0]
            ray_end_vel = vpm.vel[0]

            # returns a vector of directions (thetas and phis must be same length)
            directions = getBdir(ray_start, t_time, rayfile_directory, thetas, phis)
            positions = [ray_start[0] for n in range(nrays)]
            freqs = [freq for n in range(nrays)]

            single_run_rays(t_time, positions, directions, freqs, rayfile_directory)

            # Load all the rayfiles in the output directory
            ray_out_dir = rayfile_directory + '/' + dt.datetime.strftime(t_time, '%Y-%m-%d %H:%M:%S')
            file_titles = os.listdir(ray_out_dir)

            # create empty lists to fill with ray files and damp files
            raylist = []
            for filename in file_titles:
                if '.ray' in filename:
                    raylist += read_rayfile(os.path.join(ray_out_dir, filename))

            doppler_shifted = []
            time_shifted = []
            for ri, r in enumerate(raylist):
                rn = r['n']
                first_ind = rn.index[0]
                final_n = rn.index[-1]
                rtime = r['time']

                # check for bad rays
                if rtime[final_n] < 0.01:  # likely did not propagate then (NEED TO CONFIRM THIS)
                    continue  # go to next ray

                # initial shift (at the transmitter, DSX)
                nmag = np.sqrt(rn.x[first_ind]**2 + rn.y[first_ind]**2 + rn.z[first_ind]**2)
                vmag = np.sqrt(ray_start_vel[0]**2 + ray_start_vel[1]**2 + ray_start_vel[2]**2)
                n_d0t_v = (rn.x[first_ind] * ray_start_vel[0]
                           + rn.y[first_ind] * ray_start_vel[1]
                           + rn.z[first_ind] * ray_start_vel[2])
                fshift = freq * (1 - n_d0t_v / C)

                # final shift (at the receiver, VPM)
                nmag = np.sqrt(rn.x[final_n]**2 + rn.y[final_n]**2 + rn.z[final_n]**2)
                vmag = np.sqrt(ray_end_vel[0]**2 + ray_end_vel[1]**2 + ray_end_vel[2]**2)
                n_d0t_v = (rn.x[final_n] * ray_end_vel[0]
                           + rn.y[final_n] * ray_end_vel[1]
                           + rn.z[final_n] * ray_end_vel[2])
                fshift = fshift * (1 - n_d0t_v / C)

                doppler_shifted.append(fshift / 1e3)
                time_shifted.append(dt.timedelta(seconds=rtime[final_n]) + t_time)

                allsec.append(rtime[final_n])
                alldop.append(fshift - freq)
                allthetas.append(thetas[ri])
                ray = r

            pulse_dop.append(doppler_shifted)
            pulse_tdelay.append(time_shifted)

        # last level
        tnt_dop.append(pulse_dop)
        tnt_t.append(pulse_tdelay)
        print('tnt time is', tim)

    return tnt_dop, tnt_t, alldop, allsec, allthetas, ray
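# A small standalone sketch of the two-step Doppler arithmetic used in
# dopp_delay(): the transmitted frequency is shifted once using the refractive
# index vector and spacecraft velocity at the start of the ray (DSX), and again
# at the end (VPM). All numbers below are made up for illustration.
def demo_doppler(freq=28e3,
                 n_start=np.array([10.0, 2.0, -5.0]),   # refractive index vector at DSX (as r['n'] above)
                 v_start=np.array([7000.0, 0.0, 0.0]),  # DSX velocity, m/s
                 n_end=np.array([15.0, -3.0, 8.0]),     # refractive index vector at VPM
                 v_end=np.array([0.0, 7500.0, 0.0])):   # VPM velocity, m/s
    c_light = 2.998e8  # speed of light, m/s
    fshift = freq * (1 - np.dot(n_start, v_start) / c_light)   # shift at the transmitter
    fshift = fshift * (1 - np.dot(n_end, v_end) / c_light)     # shift at the receiver
    return fshift, fshift - freq  # shifted frequency and net Doppler shift, Hz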