Example #1
def tab1_update_addStrID():
    global dftmp, tab1_tim_square, timestart, database_dir, tab1_selected_SRC_dspec_square
    if tab1_selected_SRC_dspec_square:
        time0, time1 = dftmp['time'].min() + timestart, dftmp['time'].max() + timestart
        freq0, freq1 = dftmp['freq'].min(), dftmp['freq'].max()
        date_char = jdutil.jd_to_datetime(timestart / 3600. / 24.)
        date_char = date_char.strftime('%Y-%b-%d')
        t0_char = jdutil.jd_to_datetime(time0 / 3600. / 24.)
        t0_char = t0_char.strftime('%H:%M:%S') + '.{:03d}'.format(int(round(t0_char.microsecond / 1e3)))
        t1_char = jdutil.jd_to_datetime(time1 / 3600. / 24.)
        t1_char = t1_char.strftime('%H:%M:%S') + '.{:03d}'.format(int(round(t1_char.microsecond / 1e3)))
        time0, time1 = (time0 / 86400. - 2400000.5) * 24. * 3600., (time1 / 86400. - 2400000.5) * 24. * 3600.
        StrID = pd.DataFrame(dict(time=[[time0, time1, time1, time0]], freq=[[freq0, freq0, freq1, freq1]],
            str_id=[[tab1_input_StrID.value]], date=[[date_char]], timeran=[[t0_char + '~' + t1_char]],
            freqran=[["{:.3f}~{:.3f} GHz".format(freq0, freq1)]]))
        StrIDList = pd.read_json(database_dir + event_id + 'StrID_list_tmp.json')
        StrIDList = pd.concat([StrIDList, StrID])
        StrIDList = StrIDList.sort_values(by='timeran', ascending=True)
        StrIDList.index = range(StrIDList.index.size)
        StrIDList.to_json(database_dir + event_id + 'StrID_list_tmp.json')
        StrIDList['time'] = [ll - tab1_tim_square[0] for ll in StrIDList['time']]
        tab1_render_patch.data_source.data = ColumnDataSource(StrIDList).data
        tab1_Div_Tb.text = """<p>added <b>""" + tab1_input_StrID.value + """</b>  to the list</p>"""
    else:
        tab1_Div_Tb.text = """<p><b>Warning: No time & freq range selected.</b></p>"""
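
The callback above treats timestart, time0, and time1 as Julian dates expressed in seconds, so dividing by 86400 (3600*24) recovers a plain JD before calling jdutil.jd_to_datetime. A minimal stand-alone sketch of that labelling step, assuming only that jdutil.jd_to_datetime accepts a JD float as in the snippets on this page; the helper name and the sample value are illustrative:

import jdutil

def jd_seconds_to_label(jd_seconds):
    # hypothetical helper: format a Julian date given in seconds as 'HH:MM:SS.mmm'
    dt = jdutil.jd_to_datetime(jd_seconds / 3600. / 24.)  # seconds -> days (JD)
    return dt.strftime('%H:%M:%S') + '.{:03d}'.format(int(round(dt.microsecond / 1e3)))

print(jd_seconds_to_label(2456962.5 * 86400.))  # JD 2456962.5 is 2014-Nov-01 00:00 UTC -> '00:00:00.000'
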
def BckGrdDataGrab(hdf):
	dat=tables.openFile(hdf)
	zen = dat.root.SplineMPEMod.col('zenith')
	azi = dat.root.SplineMPEMod.col('azimuth')
	err = dat.root.ReScaled_Paraboloid_Sigma_SplineMPEMod.col('value')
	nch = dat.root.FinalLevelNch.col('value')
	run = dat.root.I3EventHeader.col('Run')
	time_mjd = dat.root.timeMJD.col('value')
	time_jd = time_mjd + 2400000.5
	time=[]
	for temp in time_jd:
		time.append(jdutil.jd_to_datetime(temp))
	time = numpy.array(time)
	ontime_bool = numpy.ones(len(nch)) == 1.
	for indy,event_time in enumerate(time_mjd):
		flip_bool=False
		jdevent=event_time+astrodate.MJD_0
		for burst in grbs.grbs:
			jdburst_start = astrodate.JulianDate(burst.t_start)
			jdburst_end = astrodate.JulianDate(burst.t_end)
			cleared = False
			if jdevent - jdburst_start.jd < -0.0833:
				cleared=True
			if jdevent - jdburst_end.jd > 0.0833:
				cleared=True
			if not cleared:
				flip_bool = True
		if flip_bool:
			ontime_bool[indy] = False

	data_vars = Vars()
	data_vars.zenith = zen[ontime_bool]
	data_vars.azimuth = azi[ontime_bool]
	data_vars.error = err[ontime_bool]
	data_vars.nchan = nch[ontime_bool]
	data_vars.timeMJD = time_mjd[ontime_bool]
	data_vars.run = run[ontime_bool]
	data_vars.time = time[ontime_bool]
	data_vars.eproxy = 5.0*data_vars.nchan
	return data_vars
def DataGrab(hdf):
	dat=tables.openFile(hdf)
	zen = dat.root.SplineMPEMod.col('zenith')
	azi = dat.root.SplineMPEMod.col('azimuth')
	err = dat.root.ReScaled_Paraboloid_Sigma_SplineMPEMod.col('value')
	nch = dat.root.FinalLevelNch.col('value')
	run = dat.root.I3EventHeader.col('Run')
	time_mjd = dat.root.timeMJD.col('value')
	time_jd = time_mjd + 2400000.5
	time=[]
	for temp in time_jd:
		time.append(jdutil.jd_to_datetime(temp))
	time = numpy.array(time)
	data_vars = Vars()
	data_vars.zenith = zen
	data_vars.azimuth = azi
	data_vars.error = err
	data_vars.nchan = nch
	data_vars.run = run
	data_vars.timeMJD = time_mjd
	data_vars.time = time
	data_vars.eproxy = 5.0*data_vars.nchan
	return data_vars
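
Both grab functions convert MJD to JD by adding the standard offset of 2400000.5 days and then map jdutil.jd_to_datetime over the resulting array. A minimal sketch of that step with made-up MJD values:

import numpy
import jdutil

event_mjd = numpy.array([56000.0, 56000.25, 56000.5])  # illustrative MJDs (2012-Mar-14 UTC)
event_jd = event_mjd + 2400000.5                        # MJD -> JD
event_time = numpy.array([jdutil.jd_to_datetime(jd) for jd in event_jd])
print(event_time[1])                                    # 2012-03-14 06:00:00
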
Example #4
def Add_gattrs(ff):
     lon_min = min(data['LON'])
     lon_max = max(data['LON'])
     lat_min = min(data['LAT'])
     lat_max = max(data['LAT'])
     start_time = min(data.data['Julian'])
     end_time = max(data.data['Julian'])
     dur_time = (end_time - start_time) * 24.0 * 3600.0
     start_time = jdutil.jd_to_datetime(start_time)
     start_time_s = "%s-%02d-%02dT%02d:%02d:%02dZ" % (
         start_time.year, start_time.month, start_time.day, start_time.hour,
         start_time.minute, start_time.second)
     end_time = jdutil.jd_to_datetime(end_time)
     end_time_s = "%s-%02d-%02dT%02d:%02d:%02dZ" % (
         end_time.year, end_time.month, end_time.day, end_time.hour,
         end_time.minute, end_time.second)
     version = out_file.split('_')[1]
     #start_time_s = time.strftime(time_fmt,time.gmtime(float(start_time)))
     #end_time_s = time.strftime(time_fmt,time.gmtime(float(end_time)))
     ff.ncei_template_version = "NCEI_NetCDF_Point_Template_v2.0"
     ff.featureType = "point"
     ff.title = "International Comprehensive Ocean-Atmosphere Data Set (ICOADS) %s data collected from %s to %s." % (
         version, start_time_s, end_time_s)
     ff.summary = "This file contains ICOADS %s data in netCDF4 format collected from %s to %s. The International Comprehensive Ocean-Atmosphere Data Set (ICOADS) offers surface marine data spanning the past three centuries, and simple gridded monthly summary products for 2-degree latitude x 2-degree longitude boxes back to 1800 (and 1degreex1degree boxes since 1960)--these data and products are freely distributed worldwide. As it contains observations from many different observing systems encompassing the evolution of measurement technology over hundreds of years, ICOADS is probably the most complete and heterogeneous collection of surface marine data in existence." % (
         version, start_time_s, end_time_s)
     ff.keywords = get_keywords(data)
     ff.Conventions = "CF-1.6, ACDD-1.3"
     ff.id = out_file.split('.nc')[0].replace('IMMA1', 'ICOADS')
     ff.naming_authority = "gov.noaa.ncei"
     #ff.source = "http://rda.ucar.edu/data/ds548.0/imma1_r3.0.0/%s.tar" %out_file.split('-')[0]
     ff.source = "%s.gz" % out_file.split('.nc')[0]
     ff.processing_level = "Restructured from IMMA1 format to NetCDF4 format."
     ff.acknowledgement = "Conversion of ICOADS data from IMMA1 to netCDF format by NCEI is supported by the NOAA Big Earth Data Initiative (BEDI)."
     ff.license = "These data may be redistributed and used without restriction."
     ff.standard_name_vocabulary = "CF Standard Name Table v31"
     ff.date_created = time.strftime(time_fmt, time.gmtime())
     ff.creator_name = "NCEI"
     ff.creator_email = "*****@*****.**"
     ff.creator_url = "https://www.ncei.noaa.gov/"
     ff.institution = "National Centers for Environmental Information (NCEI), NOAA"
     ff.project = "International Comprehensive Ocean-Atmosphere Data Set (ICOADS) Project"
     ff.publisher_name = "NCEI"
     ff.publisher_email = "*****@*****.**"
     ff.publisher_url = "https://www.ncei.noaa.gov/"
     ff.geospatial_bounds = "POLYGON ((%.4f %.4f,%.4f %.4f,%.4f %.4f,%.4f %.4f,%.4f %.4f))" % (
         lon_min, lat_min, lon_min, lat_max, lon_max, lat_max, lon_max,
         lat_min, lon_min, lat_min)
     ff.geospatial_bounds_crs = "EPSG:4326"
     ff.geospatial_lat_min = float("%.4f" % (lat_min))
     ff.geospatial_lat_max = float("%.4f" % (lat_max))
     ff.geospatial_lon_min = float("%.4f" % (lon_min))
     ff.geospatial_lon_max = float("%.4f" % (lon_max))
     ff.geospatial_lat_units = "degrees_north"
     ff.geospatial_lon_units = "degrees_east"
     ff.time_coverage_start = start_time_s
     ff.time_coverage_end = end_time_s
     ff.time_coverage_duration = 'P' + duration(dur_time)
     ff.time_coverage_resolution = "vary"
     ff.uuid = str(uuid.uuid4())
     ff.sea_name = "World-Wide Distribution"
     ff.creator_type = "group"
     ff.creator_institution = "NOAA National Centers for Environmental Information (NCEI)"
     ff.publisher_type = "institution"
     ff.publisher_institution = "NOAA National Centers for Environmental Information (NCEI)"
     ff.program = ""
     ff.contributor_name = "Zhankun Wang; ICOADS team"
     ff.contributor_role = "ICOADS Data Conversion to NetCDF; ICOADS IMMA1 Data Provider"
     ff.date_modified = time.strftime(time_fmt, time.gmtime())
     ff.date_issued = time.strftime(time_fmt, time.gmtime())
     ff.date_metadata_modified = time.strftime(time_fmt, time.gmtime())
     ff.product_version = "ICOADS %s netCDF4" % version
     ff.keywords_vocabulary = "Global Change Master Directory (GCMD) 2015. GCMD Keywords, Version 8.1."
     ff.cdm_data_type = 'Point'
     #ff.metadata_link = 'http://rda.ucar.edu/datasets/ds548.0/#!docs'
     ff.metadata_link = ''
     if len(set(data.data['IM'])) == 1:
         ff.IMMA_Version = str(data.data['IM'][0])
     else:
         print('%s: check IMMA version' % out_file)
     if len(set(data.data['RN1'])) == 1:
         ff.Release_Number_Primary = str(data.data['RN1'][0])
     else:
         print('%s: check Release_Number_Primary' % out_file)
     if len(set(data.data['RN2'])) == 1:
         ff.Release_Number_Secondary = str(data.data['RN2'][0])
     else:
         print('%s: check Release_Number_Secondary' % out_file)
     if len(set(data.data['RN3'])) == 1:
         ff.Release_Number_Tertiary = str(data.data['RN3'][0])
     else:
         print('%s: check Release_Number_Tertiary' % out_file)
     if len(set(data.data['RSA'])) == 1:
         ff.Release_status_indicator = str(data.data['RSA'][0])
     else:
         print('%s: check RSA' % out_file)
     #ff.comment = ""
     ff.references = 'http://rda.ucar.edu/datasets/ds548.0/docs/R3.0-citation.pdf'
     ff.history = time.strftime(time_fmt, time.gmtime(
     )) + ": Converted from IMMA1 format to netCDF4 format by Z.W. "
Example #5
File: dspec.py  Project: dvesecky/suncasa
def plt_dspec_old(mspath=None,
                  specfile=None,
                  pol='I',
                  dmin=None,
                  dmax=None,
                  fig=None):
    # Set up variables
    if pol not in ('RR', 'LL', 'RRLL', 'I', 'V', 'IV'):
        print("Please enter 'RR', 'LL', 'RRLL', 'I', 'V', 'IV' for pol")
        return 0

    if not mspath:
        mspath = '.'
    specdata = np.load(mspath + '/' + specfile)
    spec = specdata['spec']
    tim = specdata['tim']
    freq = specdata['freq']

    # setup plot parameters
    print('plotting dynamic spectrum...')
    # mask the channels from 512 to 519 (no observation)
    spec = np.ma.array(spec)
    spec[:, 512:519, :] = np.ma.masked
    spec_med = np.median(np.absolute(spec))
    # set the time axis
    ntim = tim.shape
    if ntim[0] < 20:
        xticks = np.arange((ntim[0] - 1) // 2 + 1) * 2
    elif ntim[0] < 100:
        xticks = np.arange((ntim[0] - 1) // 20 + 1) * 20
    elif ntim[0] < 600:
        xticks = np.arange((ntim[0] - 1) // 100 + 1) * 100
    elif ntim[0] < 2400:
        xticks = np.arange((ntim[0] - 1) // 400 + 1) * 400
    elif ntim[0] < 12000:
        # 1 min per step
        tstart = np.fix(tim[0] / 60.) + 1
        xstart = np.abs(tim - tstart * 60.).argmin()
        xticks = np.arange(ntim[0] // 1200) * 1200 + xstart
    else:
        xticks = np.arange(ntim[0] // 6000 + 1) * 6000
    nticks = xticks.shape
    xticktims = []
    for i in range(nticks[0]):
        # xticktim0=qa.time(qa.quantity(tim[xticks[i]],'s'))
        tims = tim[xticks[i]]  # in seconds
        tims_jd = jdutil.mjd_to_jd(tims / 3600. / 24.)  # to julian date
        tims_dt = jdutil.jd_to_datetime(tims_jd)
        tims_dt2 = tims_dt + datetime.timedelta(
            seconds=round(tims_dt.microsecond / 1e6))
        tims_char = tims_dt2.strftime('%H:%M:%S')
        xticktims.append(tims_char)
    xticks = list(xticks)
    # do the plot
    f = plt.figure(figsize=(10, 6), dpi=100)
    if not dmin:
        dmin = spec_med / 20.
    if not dmax:
        dmax = spec_med * 5.
    if pol != 'RRLL' and pol != 'IV':
        ax = f.add_subplot(111)
        if pol == 'RR':
            spec_plt = np.absolute(spec[0, :, :])
        elif pol == 'LL':
            spec_plt = np.absolute(spec[1, :, :])
        elif pol == 'I':
            spec_plt = (np.absolute(spec[0, :, :]) +
                        np.absolute(spec[1, :, :])) / 2.
        elif pol == 'V':
            spec_plt = (np.absolute(spec[0, :, :]) -
                        np.absolute(spec[1, :, :])) / 2.
        # ax.imshow(spec_plt,aspect='auto',origin='lower',extent=[0,ntim[0]-1,np.min(freq)/1e6,np.max(freq)/1e6],\
        #          vmin=dmin,vmax=dmax,interpolation='none')
        # ax.set_xticks(xticks)
        # ax.set_xticklabels(xticktims)
        tim_ = tim - tim[0]
        freqghz = freq / 1e9
        ax.pcolormesh(tim_,
                      freqghz,
                      spec_plt,
                      cmap='jet',
                      vmin=dmin,
                      vmax=dmax)
        ax.set_xlim([tim_[0], tim_[-1]])
        ax.set_ylim([freqghz[0], freqghz[-1]])
        ax.set_xlabel('Time (pixel)')
        ax.set_ylabel('Frequency (GHz)')
        ax.set_title('VLA dynamic spectrum for pol ' + pol)
        ax.set_autoscale_on(False)
    else:
        R_plot = np.absolute(spec[0, :, :])
        L_plot = np.absolute(spec[1, :, :])
        I_plot = (R_plot + L_plot) / 2.
        V_plot = (R_plot - L_plot) / 2.
        if pol == 'RRLL':
            spec_plt_1 = R_plot
            spec_plt_2 = L_plot
        if pol == 'IV':
            spec_plt_1 = I_plot
            spec_plt_2 = V_plot

        ax1 = f.add_subplot(211)
        ax1.imshow(spec_plt_1, aspect='auto', origin='lower',
                   extent=[0, ntim[0] - 1, np.min(freq) / 1e6, np.max(freq) / 1e6], \
                   vmin=dmin, vmax=dmax, interpolation='none')
        ax1.set_xticks(xticks)
        ax1.set_xticklabels(xticktims)
        ax1.set_xlabel('Universal Time (50 ms/pixel)')
        ax1.set_ylabel('Frequency (MHz)')
        ax1.set_title('VLA dynm spec for pol ' + pol[0])
        ax1.set_autoscale_on(False)
        ax2 = f.add_subplot(212)
        ax2.imshow(spec_plt_2, aspect='auto', origin='lower',
                   extent=[0, ntim[0] - 1, np.min(freq) / 1e6, np.max(freq) / 1e6], \
                   vmin=dmin, vmax=dmax, interpolation='none')
        ax2.set_xticks(xticks)
        ax2.set_xticklabels(xticktims)
        ax2.set_xlabel('Universal Time (50 ms/pixel)')
        ax2.set_ylabel('Frequency (MHz)')
        ax2.set_title('VLA dynm spec for pol ' + pol[-1])
        ax2.set_autoscale_on(False)
        ax = (ax1, ax2)  # so the final return has a defined 'ax' in the two-panel case
    if fig:
        figfile = specfile[:(specfile.find('spec'))] + pol + '.pdf'
        f.savefig(mspath + '/' + figfile)
    # plt.close()
    return (f, ax)
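
The tick labels in plt_dspec_old come from time values stored as seconds of MJD: dividing by 86400 gives MJD days, jdutil.mjd_to_jd adds the 2400000.5 offset, and jdutil.jd_to_datetime returns a datetime that is then rounded to the nearest second. A minimal sketch of a single label, with an illustrative time value:

import datetime
import jdutil

tims = 56000.0 * 86400. + 3723.0                # MJD 56000 plus 01:02:03 (illustrative)
tims_dt = jdutil.jd_to_datetime(jdutil.mjd_to_jd(tims / 3600. / 24.))
tims_dt2 = tims_dt + datetime.timedelta(seconds=round(tims_dt.microsecond / 1e6))
print(tims_dt2.strftime('%H:%M:%S'))            # expected '01:02:03'
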
Example #6
                           freq=[[freq[0], freq[0], freq[-1], freq[-1]]],
                           str_id=[['U01']])

    with open('structure_id_list', 'wb') as f:
        pickle.dump(StructureIdList, f)

    # pb = progressbar(idx_tim.size, "*")
    # for ii in range(8405,len(idx_selec[0])):
    # for ii in range(0,idx_tim.size):

    timestrs = []
    for ii in range(len(xx)):
        t_int = 0.05
        f_int = 2.0  # MHz
        t0 = xx[ii]  # -0.5*t_int
        timestr0 = jdutil.jd_to_datetime(t0 / 3600. / 24.)
        timestr = timestr0.strftime('%Y-%m-%dT%H%M%S') + '.{:03d}'.format(
            int(round(timestr0.microsecond / 1e3)))
        timestrs.append(
            timestr0.strftime('%H:%M:%S') +
            '.{:03d}'.format(int(round(timestr0.microsecond / 1e3))))
        f0 = yy[ii] * 1e3
        freqstr = '{:d}MHz'.format(int(round(f0)))
        fits_local.append(structure_id + '_' + timestr + '_' + freqstr +
                          '.slfcal.image.cutout.fits')
        fits_global.append(structure_id + '_' + timestr + '_' + freqstr +
                           '.slfcal.image.fits')

    Gauss_params = []
    start_timestamp = time.time()
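
Example #6 builds per-pixel FITS file names from a selected time (a Julian date in seconds) and frequency. A small sketch of one such name under the same conventions; structure_id and the sample time/frequency values are placeholders:

import jdutil

structure_id = 'U01'                           # placeholder, as in the snippet above
t0 = 2456962.75 * 86400.                       # JD in seconds: 2014-Nov-01 06:00 UTC (illustrative)
f0 = 1.2 * 1e3                                 # 1.2 GHz expressed in MHz (illustrative)
dt0 = jdutil.jd_to_datetime(t0 / 3600. / 24.)
timestr = dt0.strftime('%Y-%m-%dT%H%M%S') + '.{:03d}'.format(int(round(dt0.microsecond / 1e3)))
freqstr = '{:d}MHz'.format(int(round(f0)))
print(structure_id + '_' + timestr + '_' + freqstr + '.slfcal.image.cutout.fits')
# expected: U01_2014-11-01T060000.000_1200MHz.slfcal.image.cutout.fits
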
Example #7
    elif tab1_pol == 'V':
        tab1_spec_plt = (tab1_spec[0, bl_index, :, :] - tab1_spec[1, bl_index, :, :]) / 2.
    tab1_dtim = tab1_tim - tab1_tim[0]
    # dmax = np.amax(tab1_spec_plt)
    # dmin = np.amin(tab1_spec_plt)
    # tab1_spec_plt = (tab1_spec_plt - dmin) / (dmax - dmin) * 255.


load_specdata(specfile)
TOOLS = "pan,wheel_zoom,box_zoom,reset,save"
'''create the dynamic spectrum plot'''
tab1_p_dspec = figure(tools=TOOLS, webgl=config_plot['plot_config']['WebGL'],
    plot_width=config_plot['plot_config']['tab1']['dspec_wdth'],
    plot_height=config_plot['plot_config']['tab1']['dspec_hght'], x_range=(tab1_dtim[0], tab1_dtim[-1]),
    y_range=(tab1_freq[0], tab1_freq[-1]), toolbar_location="above")
tim0_char = jdutil.jd_to_datetime((tab1_tim[0] / 3600. / 24. + 2400000.5) * 86400. / 3600. / 24.)
tim0_char = tim0_char.strftime('%Y-%b-%d %H:%M:%S') + '.{}'.format(round(tim0_char.microsecond / 1e3) * 1e3)[0:4]
tab1_p_dspec.axis.visible = True
tab1_p_dspec.title.text = "Dynamic spectrum"
tab1_p_dspec.xaxis.axis_label = 'Seconds since ' + tim0_char
tab1_p_dspec.yaxis.axis_label = 'Frequency [GHz]'
tab1_p_dspec.border_fill_color = "whitesmoke"
tab1_p_dspec.axis.major_tick_out = 0
tab1_p_dspec.axis.major_tick_in = 5
tab1_p_dspec.axis.minor_tick_out = 0
tab1_p_dspec.axis.minor_tick_in = 3
tab1_p_dspec.axis.major_tick_line_color = "white"
tab1_p_dspec.axis.minor_tick_line_color = "white"

tab1_SRC_dspec = ColumnDataSource(data={'data': [tab1_spec_plt], 'xx': [tab1_dtim], 'yy': [tab1_freq]})
tab1_r_dspec = tab1_p_dspec.image(image="data", x=tab1_dtim[0], y=tab1_freq[0], dw=tab1_dtim[-1] - tab1_dtim[0],
Example #8
#tau = data['tau']
#secz = data['secz']
#plt.clf()
#plt.ion()
#plt.figure()
#plt.plot(secz,tau,'o')
#plt.ylabel('tau')
#plt.xlabel('secz')
#plt.xlim(1.85,1.95)
#plt.ylim(0.64,0.67)


# make datetime vector from JD

dtime = [jdutil.jd_to_datetime(jd) for jd in data['jd']]

#for i in range(len(data['jd'])):
#    dt = np.append(dtime,jdutil.jd_to_datetime(data['jd'][i]))
dates = mpl.dates.date2num(dtime)
    
xpos = data['xpos']
ypos = data['ypos']
mx = np.median(xpos)
my = np.median(ypos)

plt.ion()
fig = plt.figure(1,figsize=(8.5,11))
gs = gridspec.GridSpec(5, 1,wspace=0)

ax1 = plt.subplot(gs[0, 0])    
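
Example #8 maps a vector of Julian dates to Python datetimes with jdutil.jd_to_datetime and then to matplotlib date numbers for time-axis plotting. A self-contained sketch of that pipeline with made-up JD values (matplotlib.dates is imported explicitly here rather than relying on the mpl alias used above):

import numpy as np
import matplotlib.dates as mdates
import jdutil

jd_values = np.array([2456962.5, 2456962.75, 2456963.0])  # illustrative Julian dates, 6 h apart
dtime = [jdutil.jd_to_datetime(jd) for jd in jd_values]
dates = mdates.date2num(dtime)                            # matplotlib's float day numbers
print(dates[1] - dates[0])                                # 0.25 (a quarter of a day)
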
Example #9
def jdate(day):
    datet = jd.jd_to_datetime(day)
    return datet.strftime('%Y-%m-%d')
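
A quick usage check for jdate, assuming jd is the jdutil module imported under that alias (as the call to jd.jd_to_datetime suggests):

print(jdate(2456962.5))  # '2014-11-01' (JD 2456962.5 is 2014-Nov-01 00:00 UTC)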