def tempo_search(db, Key, tempo):
    """
    ::
        Static tempo-invariant search
        Returns search results for query resampled over a range of tempos.
    """
    if not db.configCheck():
        print("Failed configCheck in query spec.")
        print(db.configQuery)
        return None
    prop = 1.0 / tempo  # the proportion of original samples required for new tempo
    qconf = db.configQuery.copy()
    X = db.retrieve_datum(Key)
    P = db.retrieve_datum(Key, powers=True)
    X_m = pylab.mat(X.mean(0))
    X_resamp = pylab.array(
        adb.resample_vector(X - pylab.mat(pylab.ones(X.shape[0])).T * X_m, prop))
    X_resamp += pylab.mat(pylab.ones(X_resamp.shape[0])).T * X_m
    P_resamp = pylab.array(adb.resample_vector(P, prop))
    seqStart = int(pylab.around(qconf['seqStart'] * prop))
    qconf['seqStart'] = seqStart
    seqLength = int(pylab.around(qconf['seqLength'] * prop))
    qconf['seqLength'] = seqLength
    tmpconf = db.configQuery
    db.configQuery = qconf
    res = db.query_data(featData=X_resamp, powerData=P_resamp)
    res_resorted = adb.sort_search_result(res.rawData)
    db.configQuery = tmpconf
    return res_resorted
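# A minimal usage sketch for tempo_search, assuming `db` is an open audioDB-style
# instance and `adb` provides the helpers used above; the key '000001' and the
# tempo grid are hypothetical.
results_by_tempo = {}
for tempo in (0.8, 0.9, 1.0, 1.1, 1.25):
    res = tempo_search(db, '000001', tempo)
    if res is not None:
        results_by_tempo[tempo] = res  # per-tempo ranked result lists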
def gabor_patch(
        sigma_deg=2,
        radius_deg=6,
        px_deg=50,
        sf_cyc_deg=2,
        phase_deg=0,  # phase of cosine in degrees
        contrast=1.0):
    """Return a gabor patch texture of the given dimensions and parameters."""
    height = width = radius_deg * px_deg
    x = pylab.linspace(-radius_deg, radius_deg, width)
    X, Y = pylab.meshgrid(x, x)
    L = pylab.exp(-(X**2 + Y**2) / sigma_deg**2)  # gaussian envelope
    # Use around to round to the nearest integer (rather than letting the int
    # cast truncate), otherwise you will get banding artifacts.
    # dtype must be int for proper conversion to int and init of image data.
    # max_range and neutral_gray are assumed module-level display constants.
    # I = pylab.array(-pylab.zeros(X.size)*max_range + neutral_gray, dtype='int')
    I = pylab.array(
        pylab.around(
            contrast * pylab.cos(2 * pylab.pi * sf_cyc_deg * X +
                                 phase_deg * pylab.pi / 180.) * L * max_range)
        + neutral_gray, dtype='int').ravel()
    IA = pylab.ones(I.size * 2, dtype='int') * 255
    IA[:-1:2] = I  # need alpha=255, otherwise the image is mixed with the background
    # Data format for image:
    # http://www.pyglet.org/doc/programming_guide/accessing_or_providing_pixel_data.html
    data = array.array('B', IA)
    gabor = pyglet.image.ImageData(width, height, 'IA', data.tobytes())
    return gabor
def resample_vector(data, prop):
    """
    ::
        Resample the columns of data by a factor of prop (e.g. 0.75, 1.25, ...)
    """
    new_features = resample(data, int(pylab.around(data.shape[0] * prop)))
    return new_features
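# A minimal sketch of resample_vector, assuming `resample` is
# scipy.signal.resample and `data` is a (frames x features) array;
# the feature matrix here is synthetic.
import pylab
from scipy.signal import resample

features = pylab.rand(100, 12)             # 100 frames of 12-dim features
slower = resample_vector(features, 1.25)   # ~125 frames: slower tempo
faster = resample_vector(features, 0.75)   # ~75 frames: faster tempo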
def set_seq_length(self, seq_length, query_duration):
    """
    ::
        Check if we are using duration (in seconds) and set seq_length
        according to adb.delta_time, otherwise just use seq_length.
    """
    if query_duration:
        seq_length = int(pylab.around(query_duration / self.adb.delta_time))
    if not seq_length:
        print("ERROR: You must specify a sequence length or query_duration")
        raise ValueError("You must specify a sequence length or query_duration")
    return seq_length
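# A worked sketch of the conversion in set_seq_length, with a hypothetical
# hop size: query_duration / delta_time rounds to a number of frames.
import pylab

delta_time = 0.1      # hypothetical analysis hop in seconds
query_duration = 5.0  # seconds
seq_length = int(pylab.around(query_duration / delta_time))  # -> 50 frames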
def utc_to_onboardTime(utc_date):
    """Function which converts a date in utc into onboard time (GPS) in seconds
    and rounds to the nearest 10th of a second.

    Arguments:
        utc_date (:obj:`ephem.Date`): The date as an ephem.Date object.

    Returns:
        (float): Onboard GPS time in seconds.
    """
    utc_TimeObject = astropy.time.Time(utc_date.datetime(), scale="utc")
    onboardGPSTime = around(utc_TimeObject.gps, 1)
    return onboardGPSTime
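# A minimal usage sketch, assuming the imports used above
# (ephem, astropy.time, and numpy/pylab's `around`):
import ephem

date = ephem.Date('2018/9/3 12:00:00')
gps_seconds = utc_to_onboardTime(date)  # GPS seconds, rounded to 0.1 s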
def initialize_search(self, t_chan, seq_length, tempo=1.0):
    """
    Initialize search parameters: include list for t_chan, exclude list for
    !t_chan. Key format is nnnnnncc, where nnnnnn is the track id and cc is
    the tc id.
    """
    adb = self.adb
    if tempo != 1.0:
        seq_lower_bound = int(pylab.around(seq_length / tempo))
    else:
        seq_lower_bound = seq_length
    # print("sequence-lower-bound = ", seq_lower_bound)
    gt_orig_keys, gt_orig_len = self.get_gt_lists(t_chan)
    gt_orig = list(zip(gt_orig_keys, gt_orig_len))
    gt_list_keys, gt_list_len = self.lower_bound_list_by_length(
        gt_orig, seq_lower_bound, tempo)
    gt_list = list(zip(gt_list_keys, gt_list_len))
    # print("GT query / retrieval list length = ", len(gt_list_keys))
    tc_keys, tc_lens = self.get_adb_lists(t_chan)
    excl_keys, excl_lengths = self.upper_bound_list_by_length(
        list(zip(tc_keys, tc_lens)), seq_lower_bound)
    # print("Database exclude list length = ", len(excl_keys))
    includeKeys = list(pylab.setdiff1d(tc_keys, excl_keys))
    adb.configQuery['absThres'] = 0.0  # probability threshold is handled in the distance
    adb.configQuery['accumulation'] = 'track'  # per-track accumulation
    adb.configQuery['npoints'] = 1  # closest matching shingle
    adb.configQuery['ntracks'] = len(includeKeys)  # how many tracks to report
    adb.configQuery['distance'] = 'euclidean'
    adb.configQuery['radius'] = 0.0
    adb.configQuery['seqLength'] = seq_length
    adb.configQuery['seqStart'] = 0
    adb.configQuery['exhaustive'] = True  # all sub-sequences search
    adb.configQuery['hopSize'] = 1  # all sub-sequences search with hop 1
    adb.configQuery['includeKeys'] = includeKeys  # include the non GT_ITEMs in search
    adb.configQuery['excludeKeys'] = []  # excludeKeys # exclude the GT_ITEM from search
    if not self.adb.configCheck():
        print("Invalid query configuration")
        raise ValueError("Invalid query configuration")
    return gt_list, gt_orig
def segment(data, dt, interval):
    """
    `reshape with one floating point index`

    Reshape the given one-dimensional array `data` with the given `interval`.
    `interval` can be a floating-point number; each row of the result is
    aligned to the nearest possible integer value of the shift.
    """
    segment_len = int(p.ceil(interval / dt))
    n_segments = len(data) // segment_len
    result = p.empty((n_segments, segment_len))
    # "fuzzy reshape": round each row's offset to the nearest sample
    for i in range(n_segments):
        offset = int(p.around(i * interval / dt))
        result[i, :] = data[offset:offset + segment_len]
    return result
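# A minimal sketch of segment(), assuming `p` is pylab: fold a sampled sine
# wave at a non-integer period (in samples) so the rows line up phase-wise.
import pylab as p

dt = 0.01                           # sample spacing in seconds
t = p.arange(0, 10, dt)
data = p.sin(2 * p.pi * t / 0.333)  # period of 0.333 s = 33.3 samples
rows = segment(data, dt, 0.333)     # each row starts at the nearest sample
print(rows.shape)                   # (n_segments, ceil(0.333 / dt))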
def initialize_search(self, seq_length, tempo=1.0):
    """
    ::
        Initializes the evaluation loop search parameters:
        builds a sequence-length lower-bound list of included GT items >= seq_length,
        builds a sequence-length upper-bound list of excluded database items < seq_length,
        sets adb.configQuery parameters based on seq_length, tempo, and ground truth.
        Returns gt_lower_bound_list, gt_orig_list.
    """
    if tempo != 1.0:
        seq_lower_bound = int(pylab.around(seq_length / tempo))
    else:
        seq_lower_bound = seq_length
    print("sequence-lower-bound = ", seq_lower_bound)
    gt_orig_keys, gt_orig_len = self.get_gt_lists()
    gt_orig = list(zip(gt_orig_keys, gt_orig_len))
    gt_list_keys, gt_list_len = self.lower_bound_list_by_length(
        gt_orig, seq_lower_bound, tempo)
    gt_list = list(zip(gt_list_keys, gt_list_len))
    print("GT query / retrieval list length = ", len(gt_list_keys))
    excl_keys, excl_lengths = self.upper_bound_list_by_length(
        self.adb.liszt(), seq_lower_bound)
    print("Database exclude list length = ", len(excl_keys))
    if len(excl_keys):
        self.adb.configQuery['excludeKeys'] = list(excl_keys)
    else:
        self.adb.configQuery['excludeKeys'] = []
    self.adb.configQuery['seqStart'] = 0
    self.adb.configQuery['seqLength'] = seq_length
    self.adb.configQuery['accumulation'] = 'track'
    self.adb.configQuery['distance'] = 'euclidean'
    self.adb.configQuery['radius'] = 0.0
    self.adb.configQuery['ntracks'] = len(self.adb.liszt())
    self.adb.configQuery['npoints'] = 1
    if not self.adb.configCheck():
        print("Invalid query configuration")
        raise ValueError("Invalid query configuration")
    return gt_list, gt_orig
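# A minimal sketch of how the initialized query is then run, assuming an
# evaluator instance exposing the methods used above; only calls that appear
# elsewhere in this file (retrieve_datum, query_data) are used, and the
# seq_length value is hypothetical.
gt_list, gt_orig = evaluator.initialize_search(seq_length=30, tempo=1.0)
for key, length in gt_list:
    X = evaluator.adb.retrieve_datum(key)
    P = evaluator.adb.retrieve_datum(key, powers=True)
    res = evaluator.adb.query_data(featData=X, powerData=P)  # honors configQuery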
j = 0
while j < len(eta):
    i = 0
    while i < len(r):
        rconcatenated[i + j * len(r)] = r[i]
        i += 1
    j += 1

vals = pylab.zeros_like(x)
chi2 = sum(dist2(x)**2)
print("beginning least squares fit, chi^2 initial: ", chi2)
vals, mesg = leastsq(dist2, x)

# round fitted numbers
digits = 3
vals = pylab.around(vals, digits)
chi2 = sum(dist2(vals)**2)
print("original fit complete, chi^2: %g" % chi2)
toprint = True
for i in range(len(x)):
    print("vals[%i]: %.*f\t x[%i]: %g" % (i, digits, vals[i], i, x[i]))
g = dist(vals)
gdifference = dist2(vals)
chisq = (gdifference**2).sum()
maxerr = abs(gdifference).max()
etamaxerr = 0
rmaxerr = 0
for i in range(len(gdifference)):
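# A self-contained sketch of the scipy.optimize.leastsq pattern used in the
# (truncated) fragment above, with a hypothetical residual function and
# synthetic data standing in for dist2 and x.
import pylab
from scipy.optimize import leastsq

xdata = pylab.linspace(0, 1, 50)
ydata = 2.0 * xdata + 0.5 + 0.01 * pylab.randn(50)

def residuals(params):
    a, b = params
    return ydata - (a * xdata + b)

p0 = pylab.array([1.0, 0.0])
fit, mesg = leastsq(residuals, p0)
fit = pylab.around(fit, 3)  # round fitted numbers, as above
print("chi^2: %g" % (residuals(fit)**2).sum())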
def cs_hsigma(eta):
    """Carnahan-Starling hard-sphere contact value g(sigma) minus 1,
    rounded to 2 decimals, as a function of packing fraction eta."""
    return pylab.around((1 - eta / 2) / (1 - eta)**3 - 1, 2)
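# A worked value for the expression above: at packing fraction eta = 0.3,
# (1 - 0.15) / 0.7**3 - 1 = 0.85 / 0.343 - 1 ~= 1.478, rounded to 1.48.
print(cs_hsigma(0.3))  # 1.48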
files.sort()
chi = []
Cv = []
Smean = []
Emean = []
Ts = []
J1 = 0
N = 0
for f in files:
    n, state, J, T = fileio.parsefilename(f)
    J1 = J
    N = n
    Ts = [pl.around(0.01 * (x + 1), 3) for x in range(0, 500)]
    ns = [20]
    Js = [1]
    states = [1]
    # This is where results could be filtered according to parameters if necessary
    if fileio.checkparameters([ns, states, Js, Ts], [n, state, J, T]):
        print("Current file: %s" % f)
        sys.stdout.flush()
        Etotals, Stotals = fileio.readdata(join("results", f))
        Eaverages = pl.array(Etotals) / n**2
        Saverages = pl.array(Stotals) / n**2
        chi.append(1 / T * pl.var(Saverages))
        Cv.append(1 / T**2 * pl.var(Eaverages))
        Smean.append(pl.absolute(pl.mean(Saverages)))
        Emean.append(pl.mean(Eaverages))
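# A minimal sketch of the fluctuation estimators used above, with synthetic
# per-site energy and magnetization samples in place of the file data;
# the temperature value is hypothetical.
import pylab as pl

T = 2.27                   # temperature
E = pl.randn(1000) - 1.5   # per-site energy samples (synthetic)
S = pl.randn(1000) * 0.1   # per-site magnetization samples (synthetic)
chi = 1 / T * pl.var(S)    # susceptibility from magnetization fluctuations
Cv = 1 / T**2 * pl.var(E)  # heat capacity from energy fluctuations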
def interp(datain, lonsin, latsin, lonsout, latsout,
           checkbounds=False, mode='nearest', cval=0.0, order=3):
    """
    dataout = interp(datain,lonsin,latsin,lonsout,latsout,mode='nearest',cval=0.0,order=3)

    Interpolate data (datain) on a rectilinear lat/lon grid (with lons=lonsin,
    lats=latsin) to a grid with lons=lonsout, lats=latsout.

    datain is a rank-2 array with 1st dimension corresponding to latitude,
    2nd dimension longitude (matching the [lat, lon] indexing used below).

    lonsin, latsin are rank-1 Numeric arrays containing longitudes and latitudes
    of the datain grid in increasing order (i.e. from the Greenwich meridian
    eastward, and the South Pole northward).

    lonsout, latsout are rank-2 Numeric arrays containing lons and lats of the
    desired output grid (typically a native map projection grid).

    If checkbounds=True, values of lonsout and latsout are checked to see that
    they lie within the range specified by lonsin and latsin. Default is False,
    and values outside the borders are handled in the manner described by the
    'mode' parameter (default mode='nearest', which means the nearest boundary
    value is used). See section 20.2 of the numarray docs for information on
    the 'mode' keyword.

    See the numarray.nd_image.map_coordinates documentation for information on
    the other optional keyword parameters. The order keyword can be 0 for
    nearest-neighbor interpolation (nd_image only allows 1-6); if order=0,
    bounds checking is done even if checkbounds=False.
    """
    # lonsin and latsin must be monotonically increasing.
    if lonsin[-1] - lonsin[0] < 0 or latsin[-1] - latsin[0] < 0:
        raise ValueError('lonsin and latsin must be increasing!')
    # Optionally, check that lonsout, latsout are within the region defined
    # by lonsin, latsin. (This check is always done if nearest-neighbor
    # interpolation (order=0) is requested.)
    if checkbounds or order == 0:
        if min(pylab.ravel(lonsout)) < min(lonsin) or \
           max(pylab.ravel(lonsout)) > max(lonsin) or \
           min(pylab.ravel(latsout)) < min(latsin) or \
           max(pylab.ravel(latsout)) > max(latsin):
            raise ValueError('latsout or lonsout outside range of latsin or lonsin')
    # Compute grid coordinates of output grid.
    delon = lonsin[1:] - lonsin[0:-1]
    delat = latsin[1:] - latsin[0:-1]
    if max(delat) - min(delat) < 1.e-4 and max(delon) - min(delon) < 1.e-4:
        # Regular input grid.
        xcoords = (len(lonsin) - 1) * (lonsout - lonsin[0]) / (lonsin[-1] - lonsin[0])
        ycoords = (len(latsin) - 1) * (latsout - latsin[0]) / (latsin[-1] - latsin[0])
    else:
        # Irregular (but still rectilinear) input grid.
        lonsoutflat = pylab.ravel(lonsout)
        latsoutflat = pylab.ravel(latsout)
        ix = pylab.searchsorted(lonsin, lonsoutflat) - 1
        iy = pylab.searchsorted(latsin, latsoutflat) - 1
        xcoords = pylab.zeros(ix.shape, 'f')
        ycoords = pylab.zeros(iy.shape, 'f')
        for n, i in enumerate(ix):
            if i < 0:
                xcoords[n] = -1  # outside of range of lonsin (on lower end)
            elif i >= len(lonsin) - 1:
                xcoords[n] = len(lonsin)  # outside range on upper end
            else:
                xcoords[n] = float(i) + (lonsoutflat[n] - lonsin[i]) / (lonsin[i + 1] - lonsin[i])
        xcoords = pylab.reshape(xcoords, lonsout.shape)
        for m, j in enumerate(iy):
            if j < 0:
                ycoords[m] = -1  # outside of range of latsin (on lower end)
            elif j >= len(latsin) - 1:
                ycoords[m] = len(latsin)  # outside range on upper end
            else:
                ycoords[m] = float(j) + (latsoutflat[m] - latsin[j]) / (latsin[j + 1] - latsin[j])
        ycoords = pylab.reshape(ycoords, latsout.shape)
    coords = [ycoords, xcoords]
    # Interpolate to the output grid using the numarray.nd_image spline filter.
    if order:
        return nd_image.map_coordinates(datain, coords, mode=mode, cval=cval, order=order)
    else:
        # Nearest-neighbor interpolation if order=0;
        # uses index arrays, so first convert to numarray.
        datatmp = pylab.array(datain, datain.typecode())
        xi = pylab.around(xcoords).astype('i')
        yi = pylab.around(ycoords).astype('i')
        return datatmp[yi, xi]
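# A minimal usage sketch for interp() on a synthetic regular grid; assumes
# pylab and an nd_image module as imported by this file. The grid sizes and
# output region are hypothetical.
import pylab

lonsin = pylab.linspace(-180, 180, 73)
latsin = pylab.linspace(-90, 90, 37)
datain = pylab.rand(37, 73)  # (lat, lon), matching the [ycoords, xcoords] order
lonsout, latsout = pylab.meshgrid(pylab.linspace(-120, -60, 50),
                                  pylab.linspace(20, 50, 40))
dataout = interp(datain, lonsin, latsin, lonsout, latsout, order=1)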
lats = latitudes[:]
lons = longitudes[:]
lons, lats = meshgrid(lons, lats)
# unpack 2-meter temp forecast data.
t2mvar = data['tmp2m']
missval = t2mvar.missing_value
t2m = t2mvar[:, :, :]
if missval < 0:
    t2m = ma.masked_values(where(t2m > -1.e20, t2m, 1.e20), 1.e20)
else:
    t2m = ma.masked_values(where(t2m < 1.e20, t2m, 1.e20), 1.e20)
t2min = amin(t2m.compressed())
t2max = amax(t2m.compressed())
print(t2min, t2max)
clevs = frange(around(t2min / 10.) * 10. - 5., around(t2max / 10.) * 10. + 5., 4)
print(clevs[0], clevs[-1])
llcrnrlat = 22.0
urcrnrlat = 48.0
latminout = 22.0
llcrnrlon = -125.0
urcrnrlon = -60.0
standardpar = 50.0
centerlon = -105.
# create Basemap instance for Lambert Conformal Conic projection.
m = Basemap(llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
            urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
            rsphere=6371200., resolution='l', area_thresh=5000.,
            projection='lcc', lat_1=standardpar, lon_0=centerlon)
x, y = m(lons, lats)
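# A worked sketch of the contour-level rounding above: around() snaps the data
# range to the nearest 10 before padding it by 5, so the levels come out tidy.
# With hypothetical values t2min = 263.7, t2max = 304.2:
#   around(263.7 / 10.) * 10. - 5. = 26.0 * 10. - 5. = 255.0
#   around(304.2 / 10.) * 10. + 5. = 30.0 * 10. + 5. = 305.0
# frange(255.0, 305.0, 4) then spaces the contour levels 4 units apart.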
def Mode120_date_calculator():
    Logger = logging.getLogger(Logger_name())
    log_timestep = 3600

    "Simulation length and timestep"
    duration = Timeline_params()['duration']
    Logger.info('Duration set to: ' + str(duration) + ' s')
    timestep = Mode120_calculator_defaults()['timestep']  # in seconds
    Logger.info('timestep set to: ' + str(timestep) + ' s')
    timesteps = int(floor(duration / timestep))

    date = Timeline_params()['start_time']
    Logger.info('date set to: ' + str(date))

    "Get relevant stars"
    result = Vizier(columns=['all'], row_limit=200).query_constraints(
        catalog='I/239/hip_main', Vmag=Mode120_calculator_defaults()['Vmag'])

    star_cat = result[0]
    ROWS = star_cat[0][:].count()
    stars = []
    stars_dec = zeros((ROWS, 1))
    stars_ra = zeros((ROWS, 1))

    "Insert stars into Pyephem"
    for t in range(ROWS):
        s = "{},f|M|F7,{},{},{},2000"
        s = s.format(star_cat[t]['HIP'],
                     deg2HMS(ra=star_cat[t]['_RA.icrs']),
                     deg2HMS(dec=star_cat[t]['_DE.icrs']),
                     star_cat[t]['Vmag'])
        stars.append(ephem.readdb(s))
        stars[t].compute(epoch='2018')
        stars_dec[t] = stars[t].dec
        stars_ra[t] = stars[t].ra

    Logger.debug('List of stars used: ' + str(star_cat))

    "Calculate unit-vectors of stars"
    stars_x = cos(stars_dec) * cos(stars_ra)
    stars_y = cos(stars_dec) * sin(stars_ra)
    stars_z = sin(stars_dec)
    stars_r = array([stars_x, stars_y, stars_z])
    stars_r = stars_r.transpose()

    "Prepare the excel file output"
    star_list_excel = []
    star_list_excel.append(['Name;'])
    star_list_excel.append(['t1;'])
    star_list_excel.append(['t2;'])
    star_list_excel.append(['long1;'])
    star_list_excel.append(['lat1;'])
    star_list_excel.append(['long2;'])
    star_list_excel.append(['lat2;'])
    star_list_excel.append(['mag;'])
    star_list_excel.append(['H_offset;'])
    star_list_excel.append(['V_offset;'])
    star_list_excel.append(['H_offset2;'])
    star_list_excel.append(['V_offset2;'])
    star_list_excel.append(['e_Hpmag;'])
    star_list_excel.append(['Hpscat;'])
    star_list_excel.append(['o_Hpmag;'])
    star_list_excel.append(['Classification;'])

    "Prepare the output"
    star_list = []

    "Pre-allocate space"
    lat_MATS = zeros((timesteps, 1))
    long_MATS = zeros((timesteps, 1))
    altitude_MATS = zeros((timesteps, 1))
    g_ra_MATS = zeros((timesteps, 1))
    g_dec_MATS = zeros((timesteps, 1))
    x_MATS = zeros((timesteps, 1))
    y_MATS = zeros((timesteps, 1))
    z_MATS = zeros((timesteps, 1))
    r_MATS = zeros((timesteps, 3))
    r_FOV = zeros((timesteps, 3))
    r_FOV_unit_vector = zeros((timesteps, 3))
    r_FOV_norm = zeros((timesteps, 3))
    r_azi_norm = zeros((timesteps, 3))
    stars_r_V_offset_plane = zeros((ROWS, 3))
    stars_r_H_offset_plane = zeros((ROWS, 3))
    stars_vert_offset = zeros((timesteps, ROWS))
    stars_hori_offset = zeros((timesteps, ROWS))
    stars_offset = zeros((timesteps, ROWS))
    normal_orbital = zeros((timesteps, 3))
    r_V_offset_normal = zeros((timesteps, 3))
    r_H_offset_normal = zeros((timesteps, 3))
    pitch_sensor_array = zeros((timesteps, 1))

    star_counter = 0
    spotted_star_name = []
    spotted_star_timestamp = []
    spotted_star_timecounter = []
    skip_star_list = []
    MATS_p = zeros((timesteps, 1))
    MATS_P = zeros((timesteps, 1))

    angle_between_orbital_plane_and_star = zeros((timesteps, ROWS))

    "Constants"
    R_mean = 6371  # Earth radius
    Logger.info('Earth radius used [km]: ' + str(R_mean))
    U = 398600.4418  # Earth gravitational parameter
    # Altitude at which MATS center of FOV is looking
    FOV_altitude = Mode120_calculator_defaults()['default_pointing_altitude'] / 1000
    Logger.info('FOV_altitude set to [km]: ' + str(FOV_altitude))
    pointing_adjustment = 3  # Angle in degrees that the pointing can be adjusted
    V_FOV = Mode120_calculator_defaults()['V_FOV']  # 0.91 is actual V_FOV
    H_FOV = Mode120_calculator_defaults()['H_FOV']  # 5.67 is actual H_FOV
    Logger.info('V_FOV set to [degrees]: ' + str(V_FOV))
    Logger.info('H_FOV set to [degrees]: ' + str(H_FOV))
    pitch_offset_angle = 0
    yaw_offset_angle = 0

    Logger.info('TLE used: ' + getTLE()[0] + getTLE()[1])
    MATS = ephem.readtle('MATS', getTLE()[0], getTLE()[1])

    Logger.info('')
    Logger.info('Start of simulation of MATS for Mode120')

    ################## Start of Simulation ########################################

    "Loop and calculate the relevant angle of each star to each direction of MATS's FOV"
    for t in range(timesteps):
        current_time = ephem.Date(date + ephem.second * timestep * t)
        MATS.compute(current_time)

        (lat_MATS[t], long_MATS[t], altitude_MATS[t], g_ra_MATS[t],
         g_dec_MATS[t]) = (MATS.sublat, MATS.sublong, MATS.elevation / 1000,
                           MATS.g_ra, MATS.g_dec)

        R = lat_2_R(lat_MATS[t])

        z_MATS[t] = sin(g_dec_MATS[t]) * (altitude_MATS[t] + R)
        x_MATS[t] = cos(g_dec_MATS[t]) * (altitude_MATS[t] + R) * cos(g_ra_MATS[t])
        y_MATS[t] = cos(g_dec_MATS[t]) * (altitude_MATS[t] + R) * sin(g_ra_MATS[t])

        r_MATS[t, 0:3] = [x_MATS[t], y_MATS[t], z_MATS[t]]

        # Semi-major axis of MATS, assuming circular orbit
        MATS_p[t] = norm(r_MATS[t, 0:3])

        # Orbital period of MATS
        MATS_P[t] = 2 * pi * sqrt(MATS_p[t]**3 / U)

        # Estimated pitch or elevation angle for MATS pointing
        pitch_sensor_array[t] = array(
            arccos((R_mean + FOV_altitude) / (R + altitude_MATS[t])) / pi * 180)
        pitch_sensor = pitch_sensor_array[t][0]

        if t * timestep % log_timestep == 0:
            Logger.debug('')
            Logger.debug('log_timestep: ' + str(log_timestep))
            Logger.debug('timestep: ' + str(timestep))
            Logger.debug('t (loop iteration number): ' + str(t))
            Logger.debug('Current time: ' + str(current_time))
            Logger.debug('Semimajor axis in km: ' + str(MATS_p[t]))
            Logger.debug('Orbital Period in s: ' + str(MATS_P[t]))
            Logger.debug('Vector to MATS [km]: ' + str(r_MATS[t, 0:3]))
            Logger.debug('Latitude in radians: ' + str(lat_MATS[t]))
            Logger.debug('Longitude in radians: ' + str(long_MATS[t]))
            Logger.debug('Altitude in km: ' + str(altitude_MATS[t]))
            Logger.debug('FOV pitch in degrees: ' + str(pitch_sensor))

        if t != 0:
            ############# Calculations of orbital and pointing vectors ############
            "Vector normal to the orbital plane of MATS"
            normal_orbital[t, 0:3] = cross(r_MATS[t], r_MATS[t - 1])
            normal_orbital[t, 0:3] = normal_orbital[t, 0:3] / norm(normal_orbital[t, 0:3])

            "Rotate 'vector to MATS' to represent the pointing direction, including the vertical offset change (parallax is negligible)"
            rot_mat = rot_arbit(
                -pi / 2 + (-pitch_sensor + pitch_offset_angle) / 180 * pi,
                normal_orbital[t, 0:3])
            r_FOV[t, 0:3] = (r_MATS[t] @ rot_mat) / 2

            "Rotate yaw of pointing direction, meaning to rotate around the vector to MATS"
            rot_mat = rot_arbit(yaw_offset_angle / 180 * pi, r_MATS[t, 0:3])
            r_FOV[t, 0:3] = (r_FOV[t, 0:3] @ rot_mat)
            r_FOV_unit_vector[t, 0:3] = r_FOV[t, 0:3] / norm(r_FOV[t, 0:3])

            '''Rotate 'vector to MATS' to represent the vector normal to the
            satellite H-offset plane; stars will be projected onto this plane,
            which allows the H-offset of stars to be found'''
            rot_mat = rot_arbit((-pitch_sensor) / 180 * pi, normal_orbital[t, 0:3])
            r_H_offset_normal[t, 0:3] = (r_MATS[t] @ rot_mat)
            r_H_offset_normal[t, 0:3] = r_H_offset_normal[t, 0:3] / norm(r_H_offset_normal[t, 0:3])

            "If the pointing direction has a yaw defined, rotate the yaw of the normal to the pointing-direction H-offset plane, i.e. rotate around the vector to MATS"
            rot_mat = rot_arbit(yaw_offset_angle / 180 * pi, r_MATS[t, 0:3])
            r_H_offset_normal[t, 0:3] = (r_H_offset_normal[t, 0:3] @ rot_mat)
            r_H_offset_normal[t, 0:3] = r_H_offset_normal[t, 0:3] / norm(r_H_offset_normal[t, 0:3])

            "Rotate the orbital-plane normal to make it the pointing V-offset plane normal"
            r_V_offset_normal[t, 0:3] = (normal_orbital[t, 0:3] @ rot_mat)
            r_V_offset_normal[t, 0:3] = r_V_offset_normal[t, 0:3] / norm(r_V_offset_normal[t, 0:3])

            if t * timestep % log_timestep == 0 or t == 1:
                Logger.debug('Pointing direction of FOV: ' + str(r_FOV_unit_vector[t, 0:3]))
                Logger.debug('Orthogonal direction to H-offset plane: ' + str(r_H_offset_normal[t, 0:3]))
                Logger.debug('Orthogonal direction to V-offset plane: ' + str(r_V_offset_normal[t, 0:3]))
                Logger.debug('Orthogonal direction to the orbital plane: ' + str(normal_orbital[t, 0:3]))
                Logger.debug('')

            # '''Rotate 'vector to MATS' to represent the vector normal to the satellite yaw plane,
            # which will be used to rotate the yaw of the pointing'''
            # rot_mat = rot_arbit((-pitch_sensor)/180*pi, normal_orbital[t,0:3])
            # r_azi_norm[t,0:3] = (r_MATS[t] @ rot_mat)
            # r_azi_norm[t,0:3] = r_azi_norm[t,0:3] / norm(r_azi_norm[t,0:3])
            #
            # "Rotate horizontal offset of pointing direction, around satellite yaw plane"
            # rot_mat = rot_arbit(yaw_offset_angle/180*pi, r_azi_norm[t,0:3])
            # r_FOV[t,0:3] = (r_FOV[t,0:3] @ rot_mat)
            # r_FOV_unit_vector[t,0:3] = r_FOV[t,0:3]/norm(r_FOV[t,0:3])/2
            #
            # "Rotate orbital plane normal to match pointing V-offset plane normal"
            # r_V_offset_normal[t,0:3] = (normal_orbital[t,0:3] @ rot_mat)
            #
            # '''Rotate pointing vector 90 degrees in the pointing elevation plane to get a vector
            # normal to the pointing azimuth plane'''
            # rot_mat = rot_arbit(pi/2, r_V_offset_normal[t,0:3])
            # r_FOV_norm[t,0:3] = (r_FOV[t,0:3] @ rot_mat)
            # r_FOV_norm[t,0:3] = r_FOV_norm[t,0:3] / norm(r_FOV_norm[t,0:3])

            ############# End of Calculations of orbital and pointing vectors #####

            ###################### Star-mapper ####################################

            "Check position of stars relevant to pointing direction"
            for x in range(ROWS):
                "Skip star if it is not visible during this epoch"
                if stars[x].name in skip_star_list:
                    continue

                "Check if a star has already been spotted during this orbit."
                if stars[x].name in spotted_star_name:
                    '''Check if not enough time has passed, so that the star
                    has not yet left the FOV'''
                    if ((current_time - spotted_star_timestamp[spotted_star_name.index(stars[x].name)])
                            < ephem.second * (V_FOV * 2 * MATS_P[t] / 360)):
                        '''Check if enough time has passed so that the star is
                        roughly in the same direction as the original FOV, and
                        save lat, long, H-offset, V-offset and time.
                        Otherwise skip the star.'''
                        if ((t - spotted_star_timecounter[spotted_star_name.index(stars[x].name)]) * timestep
                                == around(MATS_P[t] * (pitch_offset_angle + V_FOV / 2) / 360)):

                            "Project 'star vectors' onto the pointing H-offset and V-offset planes"
                            stars_r_V_offset_plane[x] = stars_r[0][x] - dot(
                                stars_r[0][x], r_V_offset_normal[t, 0:3]) * r_V_offset_normal[t, 0:3]
                            stars_r_H_offset_plane[x] = stars_r[0][x] - (
                                (dot(stars_r[0][x], r_H_offset_normal[t]) * r_H_offset_normal[t])
                                / ((norm(r_H_offset_normal[t]))**2))

                            "Dot product to get the vertical and horizontal angle offset of the star in the FOV"
                            stars_vert_offset[t][x] = arccos(
                                dot(r_FOV[t], stars_r_V_offset_plane[x])
                                / (norm(r_FOV[t]) * norm(stars_r_V_offset_plane[x]))) / pi * 180
                            stars_hori_offset[t][x] = arccos(
                                dot(r_FOV[t], stars_r_H_offset_plane[x])
                                / (norm(r_FOV[t]) * norm(stars_r_H_offset_plane[x]))) / pi * 180

                            "Determine the sign of the offset angle, where a positive V-offset angle means looking at a higher altitude"
                            if dot(cross(r_FOV[t], stars_r_V_offset_plane[x]),
                                   r_V_offset_normal[t, 0:3]) > 0:
                                stars_vert_offset[t][x] = -stars_vert_offset[t][x]
                            if dot(cross(r_FOV[t], stars_r_H_offset_plane[x]),
                                   r_H_offset_normal[t]) > 0:
                                stars_hori_offset[t][x] = -stars_hori_offset[t][x]

                            star_list_excel[2].append(str(current_time) + ';')
                            star_list_excel[5].append(str(float(long_MATS[t] / pi * 180)) + ';')
                            star_list_excel[6].append(str(float(lat_MATS[t] / pi * 180)) + ';')
                            star_list_excel[10].append(str(stars_hori_offset[t][x]) + ';')
                            star_list_excel[11].append(str(stars_vert_offset[t][x]) + ';')

                        continue

                    "If enough time has passed (half an orbit), the star can be removed from the exception list"
                    elif ((current_time - spotted_star_timestamp[spotted_star_name.index(stars[x].name)])
                          >= ephem.second * (180 * MATS_P[t] / 360)):
                        spotted_star_timestamp.pop(spotted_star_name.index(stars[x].name))
                        spotted_star_timecounter.pop(spotted_star_name.index(stars[x].name))
                        spotted_star_name.remove(stars[x].name)

                "Total angle offset of the star compared to MATS's FOV"
                stars_offset[t][x] = arccos(
                    dot(r_FOV[t], stars_r[0][x])
                    / (norm(r_FOV[t]) * norm(stars_r[0][x]))) / pi * 180

                "Project 'star vectors' onto the pointing H-offset and V-offset planes"
                stars_r_V_offset_plane[x] = stars_r[0][x] - (
                    dot(stars_r[0][x], r_V_offset_normal[t, 0:3]) * r_V_offset_normal[t, 0:3])
                stars_r_H_offset_plane[x] = stars_r[0][x] - (
                    dot(stars_r[0][x], r_H_offset_normal[t]) * r_H_offset_normal[t])

                "Dot product to get the vertical and horizontal angle offset of the star in the FOV"
                stars_vert_offset[t][x] = arccos(
                    dot(r_FOV[t], stars_r_V_offset_plane[x])
                    / (norm(r_FOV[t]) * norm(stars_r_V_offset_plane[x]))) / pi * 180
                stars_hori_offset[t][x] = arccos(
                    dot(r_FOV[t], stars_r_H_offset_plane[x])
                    / (norm(r_FOV[t]) * norm(stars_r_H_offset_plane[x]))) / pi * 180

                "Determine the sign of the offset angle, where a positive V-offset angle means looking at a higher altitude"
                if dot(cross(r_FOV[t], stars_r_V_offset_plane[x]),
                       r_V_offset_normal[t, 0:3]) > 0:
                    stars_vert_offset[t][x] = -stars_vert_offset[t][x]
                if dot(cross(r_FOV[t], stars_r_H_offset_plane[x]),
                       r_H_offset_normal[t]) > 0:
                    stars_hori_offset[t][x] = -stars_hori_offset[t][x]

                "To be able to skip stars far outside the orbital plane of MATS"
                angle_between_orbital_plane_and_star[t][x] = arccos(
                    dot(stars_r[0][x], stars_r_V_offset_plane[x])
                    / norm(stars_r_V_offset_plane[x])) / pi * 180

                "For the first loop over the stars, make an exception list of stars not visible during this epoch"
                if (t == 1 and abs(angle_between_orbital_plane_and_star[t][x])
                        > H_FOV / 2 + (duration * 2) / (365 * 24 * 3600) * 360):
                    Logger.debug('Skip star: ' + stars[x].name + ', with H-offset of: '
                                 + str(angle_between_orbital_plane_and_star[t][x]) + ' degrees')
                    skip_star_list.append(stars[x].name)
                    continue

                "Check if star is in FOV"
                if (abs(stars_vert_offset[t][x]) < V_FOV / 2
                        and abs(stars_hori_offset[t][x]) < H_FOV / 2):
                    # print('Star number:', stars[x].name, 'is visible at', stars_vert_offset[t][x],
                    #       'degrees VFOV and', stars_hori_offset[t][x], 'degrees HFOV',
                    #       'during', ephem.Date(current_time))

                    if t % log_timestep == 0:
                        Logger.debug('Star: ' + stars[x].name + ', with H-offset: '
                                     + str(stars_hori_offset[t][x]) + ' V-offset: '
                                     + str(stars_vert_offset[t][x]) + ' in degrees is visible')

                    "Add the spotted star to the exception list and timestamp it"
                    spotted_star_name.append(stars[x].name)
                    spotted_star_timestamp.append(current_time)
                    spotted_star_timecounter.append(t)

                    "Log all relevant data for the star"
                    star_list_excel[0].append(stars[x].name + ';')
                    star_list_excel[1].append(str(current_time) + ';')
                    star_list_excel[3].append(str(float(long_MATS[t] / pi * 180)) + ';')
                    star_list_excel[4].append(str(float(lat_MATS[t] / pi * 180)) + ';')
                    star_list_excel[7].append(str(stars[x].mag) + ';')
                    star_list_excel[8].append(str(stars_hori_offset[t][x]) + ';')
                    star_list_excel[9].append(str(stars_vert_offset[t][x]) + ';')
                    star_list_excel[12].append(str(star_cat[x]['e_Hpmag']) + ';')
                    star_list_excel[13].append(str(star_cat[x]['Hpscat']) + ';')
                    star_list_excel[14].append(str(star_cat[x]['o_Hpmag']) + ';')
                    star_list_excel[15].append(str(star_cat[x]['SpType']) + ';')

                    "Log data of the star relevant to the filtering process"
                    star_list.append({
                        'Date': str(current_time),
                        'V-offset': stars_vert_offset[t][x],
                        'H-offset': stars_hori_offset[t][x],
                        'long_MATS': float(long_MATS[t] / pi * 180),
                        'lat_MATS': float(lat_MATS[t] / pi * 180),
                        'Vmag': stars[x].mag,
                        'Name': stars[x].name
                    })
                    star_counter = star_counter + 1

    ######################### End of star_mapper #############################

    ########################## Optional plotter ###########################################
    '''
    from mpl_toolkits.mplot3d import axes3d

    "Orbital points to plot"
    points_2_plot_start = 0  # 0*24*120
    points_2_plot = points_2_plot_start + 200

    "Plotting of orbit and FOV"
    fig = figure(1)
    ax = fig.add_subplot(111, projection='3d')
    ax.set_xlim3d(-7000, 7000)
    ax.set_ylim3d(-7000, 7000)
    ax.set_zlim3d(-7000, 7000)

    ax.scatter(x_MATS[points_2_plot_start:points_2_plot],
               y_MATS[points_2_plot_start:points_2_plot],
               z_MATS[points_2_plot_start:points_2_plot])
    ax.scatter(r_FOV[points_2_plot_start:points_2_plot, 0],
               r_FOV[points_2_plot_start:points_2_plot, 1],
               r_FOV[points_2_plot_start:points_2_plot, 2])

    "Plotting of stars and FOV unit-vectors"
    fig = figure(2)
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(stars_r[0][:, 0], stars_r[0][:, 1], stars_r[0][:, 2])
    ax.scatter(r_FOV_unit_vector[points_2_plot_start:points_2_plot, 0],
               r_FOV_unit_vector[points_2_plot_start:points_2_plot, 1],
               r_FOV_unit_vector[points_2_plot_start:points_2_plot, 2])
    ax.scatter(r_V_offset_normal[points_2_plot_start:points_2_plot, 0] / 2,
               r_V_offset_normal[points_2_plot_start:points_2_plot, 1] / 2,
               r_V_offset_normal[points_2_plot_start:points_2_plot, 2] / 2)
    ax.scatter(normal_orbital[points_2_plot_start:points_2_plot, 0] / 2,
               normal_orbital[points_2_plot_start:points_2_plot, 1] / 2,
               normal_orbital[points_2_plot_start:points_2_plot, 2] / 2)
    ax.scatter(r_H_offset_normal[points_2_plot_start:points_2_plot, 0] / 2,
               r_H_offset_normal[points_2_plot_start:points_2_plot, 1] / 2,
               r_H_offset_normal[points_2_plot_start:points_2_plot, 2] / 2)
    '''
    ########################### END of Optional plotter ########################################

    "Write spotted stars to file"
    with open('MATS_Visible_Stars.csv', 'w', newline='') as write_file:
        writer = csv.writer(write_file, dialect='excel-tab')
        writer.writerows(star_list_excel)

    Logger.debug('Visible star list to be filtered:')
    Logger.debug(str(star_list))
    Logger.debug('')

    Logger.debug('Exit ' + str(__name__))
    Logger.debug('')

    return star_list
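# rot_arbit (rotation about an arbitrary axis) is used above but not defined in
# this file; below is a minimal sketch using Rodrigues' rotation formula, with
# `angle` in radians and `axis` a 3-vector. Note the multiplication convention
# (row vector @ matrix, as in `r_MATS[t] @ rot_mat` above) must match whichever
# definition the real rot_arbit uses.
from numpy import array, cos, sin
from numpy.linalg import norm

def rot_arbit(angle, axis):
    """Return a 3x3 matrix rotating by `angle` radians around `axis`."""
    ux, uy, uz = axis / norm(axis)  # normalize the rotation axis
    c, s = cos(angle), sin(angle)
    # Rodrigues' formula: R = c*I + s*[u]_x + (1 - c)*u*u^T
    return array([
        [c + ux**2 * (1 - c),        ux * uy * (1 - c) - uz * s,  ux * uz * (1 - c) + uy * s],
        [uy * ux * (1 - c) + uz * s, c + uy**2 * (1 - c),         uy * uz * (1 - c) - ux * s],
        [uz * ux * (1 - c) - uy * s, uz * uy * (1 - c) + ux * s,  c + uz**2 * (1 - c)],
    ])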