Code Example #1
File: coordUtils.py  Project: doby123/davitpy
def coordConv(lon, lat, altitude, start, end, dateTime=None):
    """coordConv has been renamed coord_conv and dateTime has been 
        renamed date_time for PEP 8 compliance.  Please use those 
        from now on.  Also altitude is now optional.
    """
    from davitpy.utils.coordUtils import coord_conv
    print "coordConv has been renamed coord_conv and dateTime has"
    print "been renamed date_time for PEP 8 compliance.  Please use"
    print "those from now on.  Also altitude is now optional."
    return coord_conv(lon, lat, start, end, altitude=altitude, 
                      date_time=dateTime)
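The wrapper above only forwards to the renamed function. Below is a minimal sketch of calling coord_conv directly, as the deprecation message recommends; the coordinate values, the target system ("mag"), and the date are illustrative assumptions, not values taken from the example.

import datetime as dt
from davitpy.utils.coordUtils import coord_conv

# Hypothetical call: convert a geographic point to magnetic coordinates
# at 300 km altitude (values chosen purely for illustration).
lon, lat = coord_conv(-75.0, 42.0, "geo", "mag",
                      altitude=300., date_time=dt.datetime(2013, 3, 16, 16, 30))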
Code Example #2
def coordConv(lon, lat, altitude, start, end, dateTime=None):
    """coordConv has been renamed coord_conv and dateTime has been 
        renamed date_time for PEP 8 compliance.  Please use those 
        from now on.  Also altitude is now optional.
    """
    from davitpy.utils.coordUtils import coord_conv
    print "coordConv has been renamed coord_conv and dateTime has"
    print "been renamed date_time for PEP 8 compliance.  Please use"
    print "those from now on.  Also altitude is now optional."
    return coord_conv(lon,
                      lat,
                      start,
                      end,
                      altitude=altitude,
                      date_time=dateTime)
Code Example #3
File: coordUtils.py  Project: BChrisler/davitpy_bcc
def coordConv(lon, lat, altitude, start, end, dateTime=None):
    """deprecated function, please use coord_conv


    Notes
    -----
    coordConv has been renamed coord_conv and dateTime has been
    renamed date_time for PEP 8 compliance.  Please use those
    from now on.  Also altitude is now optional.

    """
    from davitpy.utils.coordUtils import coord_conv
    logging.warning("coordConv has been renamed coord_conv and dateTime has")
    logging.warning("been renamed date_time for PEP 8 compliance.  Please use")
    logging.warning("those from now on.  Also altitude is now optional.")
    return coord_conv(lon, lat, start, end, altitude=altitude,
                      date_time=dateTime)
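A possible alternative to the three separate logging calls above is a single DeprecationWarning. This is only a sketch of such a shim, not code from davitpy.

import warnings
from davitpy.utils.coordUtils import coord_conv

def coordConv(lon, lat, altitude, start, end, dateTime=None):
    """Deprecated wrapper around coord_conv (see the Notes above)."""
    warnings.warn("coordConv has been renamed coord_conv and dateTime has been "
                  "renamed date_time for PEP 8 compliance; altitude is now "
                  "optional.", DeprecationWarning, stacklevel=2)
    return coord_conv(lon, lat, start, end, altitude=altitude,
                      date_time=dateTime)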
Code Example #4
                                                            15.)
                        ltm = local_dt.time()

                        # convert local time to degrees, e.g. 0 (or 360) degrees is midnight,
                        # 180 degrees is noon.
                        lonc_ltm.append(
                            (ltm.hour + ltm.minute / 60. + ltm.second / 3600.)
                            * 15.)
                    lonc = lonc_ltm

                else:

                    # convert from geo to mlt degrees
                    lonc, latc = coord_conv(lonc,
                                            latc,
                                            "geo",
                                            "mlt",
                                            altitude=t_c_alt,
                                            date_time=date_time)

                lonc = [(round(x, 2)) % 360 for x in lonc]
                latc = [round(x, 2) for x in latc]

                # convert to string
                latc = json.dumps(latc)
                lonc = json.dumps(lonc)

                # update into the db
                if stay_in_geo:
                    command = "UPDATE {tb} SET geo_ltc='{lonc}', geo_azmc='{azm_txt}'\
                               WHERE datetime = '{dtm}'"
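The snippet above maps a local time to a longitude-like angle: one hour corresponds to 15 degrees, so midnight is 0 (or 360) degrees and noon is 180 degrees. A self-contained sketch of that conversion (the helper name is hypothetical):

import datetime as dt

def local_time_to_degrees(ltm):
    """Map a local time to an angle: 1 hour = 15 degrees, noon = 180 degrees."""
    return (ltm.hour + ltm.minute / 60. + ltm.second / 3600.) * 15.

print(local_time_to_degrees(dt.time(18, 30)))  # 277.5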
Code Example #5
File: fan.py  Project: doby123/davitpy
def plotFan(sTime, rad, interval=60, fileType='fitex', param='velocity',
            filtered=False, scale=[], channel=None, coords='geo',
            colors='lasse', gsct=False, fov=True, edgeColors='face',
            lowGray=False, fill=True, velscl=1000., legend=True,
            overlayPoes=False, poesparam='ted', poesMin=-3., poesMax=0.5,
            poesLabel=r"Total Log Energy Flux [ergs cm$^{-2}$ s$^{-1}$]",
            overlayBnd=False, show=True, png=False, pdf=False, dpi=500,
            tFreqBands=[]):

    """A function to make a fan plot
    
    **Args**:
        * **sTime** (`datetime <http://tinyurl.com/bl352yx>`_): the start time you want to plot
        * **rad** (list): a list of 3 letter radar codes, e.g. ['bks'], e.g. ['bks','wal','gbr']
        * **[interval]** (int): the time period to be plotted, in seconds.  default = 60
        * **[fileType]** (str): the file type to plot, valid inputs are 'fitex','fitacf', 'lmfit'.  default = 'fitex'
        * **[param]** (str): the parameter to be plotted, valid inputs are 'velocity', 'power', 'width', 'elevation', 'phi0'.  default = 'velocity'
        * **[filtered]** (boolean): a flag indicating whether the data should be boxcar filtered.  default = False
        * **[scale]** (list): the min and max values of the color scale, i.e. [min,max].  If this is set to [], then default values will be used
        * **[channel]** (char): the channel for which to plot data.  default = 'a'
        * **[coords]** (str): the coordinate system to use; valid 
            inputs are anything handled by coord_conv (see 
            davitpy.utils.get_coord_dict).  Default:  geo
        * **[colors]** (str): the color map to use, valid inputs are 'lasse', 'aj'.  default = 'lasse'
        * **[gsct]** (boolean): a flag indicating whether to plot ground scatter as gray.  default = False
        * **[fov]**  (boolean): a flag indicating whether to overplot the radar fields of view.  default = True
        * **[edgeColors]** (str): edge colors of the polygons, default = 'face'
        * **[lowGray]** (boolean): a flag indicating whether to plot low velocities in gray.  default = False
        * **[fill]** (boolean): a flag indicating whether to plot filled or point RB cells.  default = True
        * **[velscl]** (float): the velocity to use as baseline for velocity vector length, only applicable if fill = 0.  default = 1000
        * **[legend]** (boolean): a flag indicating whether to plot the legend, only applicable if fill = 0.  default = True
        * **[overlayPoes]** (boolean): a flag indicating whether to overlay poes data.  default = False
        * **[poesparam]** (str): the poes parameter to plot.  default = 'ted'.  available params can be found in :class:`gme.sat.poes.poesRec`
        * **[poesMin]** (float): the min value for the poes data color scale.  default = -3.
        * **[poesMax]**  (float): the max value for the poes data color scale.  default = 0.5
        * **[poesLabel]** (str): the label for the poes color bar.  default = r"Total Log Energy Flux [ergs cm$^{-2}$ s$^{-1}$]"
        * **[overlayBnd]** (boolean): a flag indicating whether to plot an auroral boundary determined from fitting poes data.  default = False
        * **[show]** (boolean): a flag indicating whether to display the figure on the screen.  This can cause problems over ssh.  default = True
        * **[pdf]** (boolean): a flag indicating whether to output to a pdf file.  default = False.  WARNING: saving as pdf is slow
        * **[png]** (boolean): a flag indicating whether to output to a png file.  default = False
        * **[dpi]** (int): dots per inch if saving as png.  default = 500
        * **[tFreqBands]** (list): upper and lower bounds of frequency in kHz to be used.  Must be unset (or set to []) or have a pair for each radar, and for any band set to [] the default will be used.  default = [[8000,20000]], [[8000,20000],[8000,20000]], etc.
    **Returns**:
        * Nothing

    **Example**:
        ::
        
            import datetime as dt
            pydarn.plotting.fan.plotFan(dt.datetime(2013,3,16,16,30),['fhe','fhw'],param='power',gsct=True)
            pydarn.plotting.fan.plotFan(dt.datetime(2013,3,16,16,30),['fhe','fhw'],param='power',gsct=True,tFreqBands=[[10000,11000],[]])

    Written by AJ 20121004
    Modified by Matt W. 20130717
    """
    from davitpy import pydarn
    from davitpy import gme
    import datetime as dt, pickle
    from matplotlib.backends.backend_pdf import PdfPages
    
    import davitpy.models.aacgm as aacgm
    import os, copy
    from davitpy.utils.coordUtils import coord_conv

    tt = dt.datetime.now()
    
    #check the inputs
    assert(isinstance(sTime,dt.datetime)),'error, sTime must be a datetime object'
    assert(isinstance(rad,list)),"error, rad must be a list, eg ['bks'] or ['bks','fhe']"
    for r in rad:
        assert(isinstance(r,str) and len(r) == 3),'error, elements of rad list must be 3 letter strings'
    assert(param == 'velocity' or param == 'power' or param == 'width' or \
        param == 'elevation' or param == 'phi0'), \
        "error, allowable params are 'velocity','power','width','elevation','phi0'"
    assert(scale == [] or len(scale)==2), \
    'error, if present, scales must have 2 elements'
    assert(colors == 'lasse' or colors == 'aj'),"error, valid inputs for color are 'lasse' and 'aj'"
    
    #check freq band and set to default if needed
    assert(tFreqBands == [] or len(tFreqBands) == len(rad)),'error, if present, tFreqBands must have same number of elements as rad'
    tbands = []
    for i in range(len(rad)):
        if tFreqBands == [] or tFreqBands[i] == []: tbands.append([8000,20000])
        else: tbands.append(tFreqBands[i])

    for i in range(len(tbands)):
        assert(tbands[i][1] > tbands[i][0]),'error, frequency upper bound must be > lower bound'

    if(scale == []):
        if(param == 'velocity'): scale=[-200,200]
        elif(param == 'power'): scale=[0,30]
        elif(param == 'width'): scale=[0,150]
        elif(param == 'elevation'): scale=[0,50]
        elif(param == 'phi0'): scale=[-numpy.pi,numpy.pi]

        
    fbase = sTime.strftime("%Y%m%d")
        
    cmap,norm,bounds = utils.plotUtils.genCmap(param,scale,colors=colors,lowGray=lowGray)
    
    #open the data files
    myFiles = []
    myBands = []
    for i in range(len(rad)):
        f = radDataOpen(sTime,rad[i],sTime+dt.timedelta(seconds=interval),fileType=fileType,filtered=filtered,channel=channel)
        if(f is not None): 
            myFiles.append(f)
            myBands.append(tbands[i])


    assert(myFiles != []),'error, no data available for this period'

    xmin,ymin,xmax,ymax = 1e16,1e16,-1e16,-1e16

    allBeams = [''] * len(myFiles)
    sites,fovs,oldCpids,lonFull,latFull=[],[],[],[],[]
    lonC,latC = [],[]

    #go through all open files
    for i in range(len(myFiles)):
        #read until we reach start time
        allBeams[i] = radDataReadRec(myFiles[i])
        while (allBeams[i] is not None and allBeams[i].time < sTime):
            allBeams[i] = radDataReadRec(myFiles[i])

        #check that the file has data in the target interval
        if(allBeams[i] is None): 
            myFiles[i].close()
            myFiles[i] = None
            continue

    
        #get to field of view coords in order to determine map limits
        t=allBeams[i].time
        site = pydarn.radar.site(radId=allBeams[i].stid,dt=t)
        sites.append(site)
        # Make lists of site lats and lons.  latC and lonC are used
        # for finding the map centre.
        xlon, xlat = coord_conv(site.geolon, site.geolat, "geo", coords, 
                                altitude=0., date_time=t)
        latFull.append(xlat)
        lonFull.append(xlon)
        latC.append(xlat)
        lonC.append(xlon)
        myFov = pydarn.radar.radFov.fov(site=site, rsep=allBeams[i].prm.rsep,\
                                        ngates=allBeams[i].prm.nrang+1,
                                        nbeams=site.maxbeam, coords=coords,
                                        date_time=t)
        fovs.append(myFov)
        for b in range(0,site.maxbeam+1):
            for k in range(0,allBeams[i].prm.nrang+1):
                lonFull.append(myFov.lonFull[b][k])
                latFull.append(myFov.latFull[b][k])
        oldCpids.append(allBeams[i].cp)
        
        k=allBeams[i].prm.nrang
        b=0
        latC.append(myFov.latFull[b][k])
        lonC.append(myFov.lonFull[b][k])
        b=site.maxbeam
        latC.append(myFov.latFull[b][k])
        lonC.append(myFov.lonFull[b][k])

    #Now that we have 3 points from the FOVs of the radars, calculate the lat,lon pair
    #to center the map on. We can simply do this by converting from Spherical coords
    #to Cartesian, taking the mean of each coordinate and then converting back
    #to get lat_0 and lon_0
    lonC,latC = (numpy.array(lonC)+360.)%360.0,numpy.array(latC)
    xs=numpy.cos(numpy.deg2rad(latC))*numpy.cos(numpy.deg2rad(lonC))
    ys=numpy.cos(numpy.deg2rad(latC))*numpy.sin(numpy.deg2rad(lonC))
    zs=numpy.sin(numpy.deg2rad(latC))
    xc=numpy.mean(xs)
    yc=numpy.mean(ys)
    zc=numpy.mean(zs)
    lon_0=numpy.rad2deg(numpy.arctan2(yc,xc))
    lat_0=numpy.rad2deg(numpy.arctan2(zc,numpy.sqrt(xc*xc+yc*yc)))

    #Now do some stuff in map projection coords to get necessary width and height of map
    #and also figure out the corners of the map
    t1=dt.datetime.now()
    lonFull,latFull = (numpy.array(lonFull)+360.)%360.0,numpy.array(latFull)

    tmpmap = utils.mapObj(coords=coords,projection='stere', width=10.0**3, 
                          height=10.0**3, lat_0=lat_0, lon_0=lon_0,
                          datetime = sTime)
    x,y = tmpmap(lonFull,latFull)
    minx = x.min()*1.05     #since we don't want the map to cut off labels or
    miny = y.min()*1.05     #FOVs of the radars we should alter the extrema a bit.
    maxx = x.max()*1.05
    maxy = y.max()*1.05
    width = (maxx-minx)
    height = (maxy-miny)
    llcrnrlon,llcrnrlat = tmpmap(minx,miny,inverse=True)
    urcrnrlon,urcrnrlat = tmpmap(maxx,maxy,inverse=True)

    dist = width/50.
    cTime = sTime

    #Clear temporary figure from memory.
    fig = plot.gcf()
    fig.clf()

    myFig = plot.figure(figsize=(12,8))
    
    #draw the actual map we want
    myMap = utils.mapObj(coords=coords, projection='stere', lat_0=lat_0, 
                         lon_0=lon_0, llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
                         urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
                         coastLineWidth=0.5, coastLineColor='k',
                         fillOceans='w',fillContinents='w', fillLakes='w',
                         datetime = sTime)
    #overlay fields of view, if desired
    if(fov == 1):
        for i,r in enumerate(rad):
            pydarn.plotting.overlayRadar(myMap, codes=r, dateTime=sTime)
            #this was missing fovObj! We need to plot the fov for this particular sTime.
            pydarn.plotting.overlayFov(myMap, codes=r, dateTime=sTime, fovObj=fovs[i]) 
    
    print dt.datetime.now()-t1
    #manually draw the legend
    if((not fill) and legend):
        #draw the box
        y = [myMap.urcrnry*.82,myMap.urcrnry*.99]
        x = [myMap.urcrnrx*.86,myMap.urcrnrx*.99]
        verts = [x[0],y[0]],[x[0],y[1]],[x[1],y[1]],[x[1],y[0]]
        poly = patches.Polygon(verts,fc='w',ec='k',zorder=11)
        myFig.gca().add_patch(poly)
        labs = ['5 dB','15 dB','25 dB','35 dB','gs','1000 m/s']
        pts = [5,15,25,35]
        #plot the icons and labels
        for w in range(6):
            myFig.gca().text(x[0]+.35*(x[1]-x[0]),y[1]*(.98-w*.025),labs[w],zorder=15,color='k',size=8,va='center')
            xctr = x[0]+.175*(x[1]-x[0])
            if(w < 4):
                myFig.scatter(xctr,y[1]*(.98-w*.025),s=.1*pts[w],zorder=15,marker='o',linewidths=.5,\
                edgecolor='face',facecolor='k')
            elif(w == 4):
                myFig.scatter(xctr,y[1]*(.98-w*.025),s=.1*35.,zorder=15,marker='o',\
                linewidths=.5,edgecolor='k',facecolor='w')
            elif(w == 5):
                y=LineCollection(numpy.array([((xctr-dist/2.,y[1]*(.98-w*.025)),(xctr+dist/2.,y[1]*(.98-w*.025)))]),linewidths=.5,zorder=15,color='k')
                myFig.gca().add_collection(y)
                
    bbox = myFig.gca().get_axes().get_position()
    #now, loop through desired time interval

    tz = dt.datetime.now()
    cols = []
    bndTime = sTime + dt.timedelta(seconds=interval)
    
    ft = 'None'
    #go though all files
    pcoll = None
    for i in range(len(myFiles)):
        scans = []
        #check that we have good data at this time
        if(myFiles[i] is None or allBeams[i] is None): continue
        ft = allBeams[i].fType
        #until we reach the end of the time window
        while(allBeams[i] is not None and allBeams[i].time < bndTime):
            #filter on frequency
            if allBeams[i].prm.tfreq >= myBands[i][0] and allBeams[i].prm.tfreq <= myBands[i][1]: 
                scans.append(allBeams[i])
            #read the next record
            allBeams[i] = radDataReadRec(myFiles[i])
        #if there is no data in scans, overlayFan will object
        if scans == []: continue
        intensities, pcoll = overlayFan(scans,myMap,myFig,param,coords,gsct=gsct,site=sites[i],fov=fovs[i], fill=fill,velscl=velscl,dist=dist,cmap=cmap,norm=norm)

                                                                            
    #if no data has been found pcoll will not have been set, and the following code will object                                   
    if pcoll: 
        cbar = myFig.colorbar(pcoll,orientation='vertical',shrink=.65,fraction=.1,drawedges=True)
        
        l = []
        #define the colorbar labels
        for i in range(0,len(bounds)):
            if(param == 'phi0'):
                ln = 4
                if(bounds[i] == 0): ln = 3
                elif(bounds[i] < 0): ln = 5
                l.append(str(bounds[i])[:ln])
                continue
            if((i == 0 and param == 'velocity') or i == len(bounds)-1):
                l.append(' ')
                continue
            l.append(str(int(bounds[i])))
        cbar.ax.set_yticklabels(l)
        cbar.ax.tick_params(axis='y',direction='out')
        #set colorbar ticklabel size
        for ti in cbar.ax.get_yticklabels():
            ti.set_fontsize(12)
        if(param == 'velocity'): 
            cbar.set_label('Velocity [m/s]',size=14)
            cbar.extend='max'
            
        if(param == 'grid'): cbar.set_label('Velocity [m/s]',size=14)
        if(param == 'power'): cbar.set_label('Power [dB]',size=14)
        if(param == 'width'): cbar.set_label('Spec Wid [m/s]',size=14)
        if(param == 'elevation'): cbar.set_label('Elev [deg]',size=14)
        if(param == 'phi0'): cbar.set_label('Phi0 [rad]',size=14)
    
    #myFig.gca().set_rasterized(True)
    #label the plot
    tx1 = myFig.text((bbox.x0+bbox.x1)/2.,bbox.y1+.02,cTime.strftime('%Y/%m/%d'),ha='center',size=14,weight=550)
    tx2 = myFig.text(bbox.x1+.02,bbox.y1+.02,cTime.strftime('%H:%M - ')+\
                bndTime.strftime('%H:%M      '),ha='right',size=13,weight=550)
    tx3 = myFig.text(bbox.x0,bbox.y1+.02,'['+ft+']',ha='left',size=13,weight=550)
    #label with frequency bands
    tx4 = myFig.text(bbox.x1+.02,bbox.y1,'Frequency filters:',ha='right',size=8,weight=550)
    for i in range(len(rad)):
        myFig.text(bbox.x1+.02,bbox.y1-((i+1)*.015),rad[i]+': '+\
                str(tbands[i][0]/1e3)+' - '+str(tbands[i][1]/1e3)+\
                ' MHz',ha='right',size=8,weight=550)
    
    if(overlayPoes):
        pcols = gme.sat.poes.overlayPoesTed(myMap, myFig.gca(), cTime, param=poesparam, scMin=poesMin, scMax=poesMax)
        if(pcols is not None):
            cols.append(pcols)
            pTicks = numpy.linspace(poesMin,poesMax,8)
            cbar = myFig.colorbar(pcols,ticks=pTicks,orientation='vertical',shrink=0.65,fraction=.1)
            cbar.ax.set_yticklabels(pTicks)
            cbar.set_label(poesLabel,size=14)
            cbar.ax.tick_params(axis='y',direction='out')
            #set colorbar ticklabel size
            for ti in cbar.ax.get_yticklabels():
                ti.set_fontsize(12)
            
    if(overlayBnd):
        gme.sat.poes.overlayPoesBnd(myMap, myFig.gca(), cTime)

    #handle the outputs
    if png:
        # if not show:
        #   canvas = FigureCanvasAgg(myFig)
        myFig.savefig(sTime.strftime("%Y%m%d.%H%M.")+str(interval)+'.fan.png',dpi=dpi)
    if pdf:
        # if not show:
        #   canvas = FigureCanvasAgg(myFig)
        myFig.savefig(sTime.strftime("%Y%m%d.%H%M.")+str(interval)+'.fan.pdf')
    if show:
        myFig.show()
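plotFan centres its map by converting the collected corner longitudes and latitudes to unit vectors, averaging them, and converting the mean vector back to a longitude/latitude pair (the lon_0/lat_0 computation above). A standalone sketch of that spherical-mean step, assuming only numpy:

import numpy

def spherical_mean(lons_deg, lats_deg):
    """Return (lon_0, lat_0), the lon/lat of the mean unit vector of the inputs."""
    lons = numpy.deg2rad(numpy.asarray(lons_deg))
    lats = numpy.deg2rad(numpy.asarray(lats_deg))
    xs = numpy.cos(lats) * numpy.cos(lons)
    ys = numpy.cos(lats) * numpy.sin(lons)
    zs = numpy.sin(lats)
    xc, yc, zc = xs.mean(), ys.mean(), zs.mean()
    lon_0 = numpy.rad2deg(numpy.arctan2(yc, xc))
    lat_0 = numpy.rad2deg(numpy.arctan2(zc, numpy.sqrt(xc * xc + yc * yc)))
    return lon_0, lat_0

# Two points straddling the dateline average sensibly:
print(spherical_mean([170., -170.], [60., 60.]))  # approximately (180.0, 60.4)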
Code Example #6
File: radFov.py  Project: BChrisler/davitpy_bcc
    def __init__(self,
                 frang=180.0,
                 rsep=45.0,
                 site=None,
                 nbeams=None,
                 ngates=None,
                 bmsep=None,
                 recrise=None,
                 siteLat=None,
                 siteLon=None,
                 siteBore=None,
                 siteAlt=None,
                 siteYear=None,
                 elevation=None,
                 altitude=300.,
                 hop=None,
                 model='IS',
                 coords='geo',
                 date_time=None,
                 coord_alt=0.,
                 fov_dir='front'):
        # Import necessary functions and classes
        from davitpy.utils.coordUtils import coord_conv

        # Define class constants
        rn = 'fov'

        # Test that we have enough input arguments to work with
        if (not site and None in [
                nbeams, ngates, bmsep, recrise, siteLat, siteLon, siteBore,
                siteAlt, siteYear
        ]):
            estr = '{:s}: must provide either a site object or '.format(rn)
            estr = '{:s}[nbeams, ngates, bmsep, recrise, siteLat,'.format(estr)
            estr = '{:s} siteLon, siteBore, siteAlt, siteYear].'.format(estr)
            logging.error(estr)
            return

        # date_time checking is handled by coord_conv, and it already
        # knows all of the possible coord systems, so no need to do it
        # here.

        # Then assign variables from the site object if necessary
        if site:
            if not nbeams:
                nbeams = site.maxbeam
            if not ngates:
                ngates = site.maxgate
            if not bmsep:
                bmsep = site.bmsep
            if not recrise:
                recrise = site.recrise
            if not siteLat:
                siteLat = site.geolat
            if not siteLon:
                siteLon = site.geolon
            if not siteAlt:
                siteAlt = site.alt
            if not siteBore:
                siteBore = site.boresite
            if not siteYear:
                siteYear = site.tval.year

        # Some type checking is necessary. If frang, rsep or recrise are
        # arrays, then they should be of shape (nbeams,).  Set a flag if any of
        # frang, rsep or recrise is an array
        is_param_array = False
        if isinstance(frang, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(frang) != nbeams:
                estr = "{:s}: frang must be a scalar or numpy ".format(rn)
                estr = "{:s}ndarray of size (nbeams). Using first".format(estr)
                estr = "{:s} element: {}".format(estr, frang[0])
                logging.error(estr)
                frang = frang[0] * np.ones(nbeams + 1)
            else:
                frang = np.append(frang, frang[-1])
        else:
            frang = np.array([frang])
        if isinstance(rsep, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(rsep) != nbeams:
                estr = "{:s}: rsep must be a scalar or numpy ndarray".format(
                    rn)
                estr = "{:s} of size (nbeams). Using first element".format(
                    estr)
                estr = "{:s}: {}".format(estr, rsep[0])
                logging.error(estr)
                rsep = rsep[0] * np.ones(nbeams + 1)
            else:
                rsep = np.append(rsep, rsep[-1])
        else:
            rsep = np.array([rsep])
        if isinstance(recrise, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(recrise) != nbeams:
                estr = "{:s}: recrise must be a scalar or numpy ".format(rn)
                estr = "{:s}ndarray of size (nbeams). Using first ".format(
                    estr)
                estr = "{:s}element: {}".format(estr, recrise[0])
                logging.error(estr)
                recrise = recrise[0] * np.ones(nbeams + 1)
            else:
                recrise = np.append(recrise, recrise[-1])
        else:
            recrise = np.array([recrise])

        # If altitude, elevation, or hop are arrays, then they should be of
        # shape (nbeams, ngates)
        if isinstance(altitude, np.ndarray):
            if altitude.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if altitude.size != ngates:
                    estr = '{:s}: altitude must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, altitude[0])
                    logging.error(estr)
                    altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    altitude = np.resize(np.append(altitude, altitude[-1]),
                                         (nbeams + 1, ngates + 1))
            elif altitude.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if altitude.shape != (nbeams, ngates):
                    estr = '{:s}: altitude must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, altitude[0])
                    logging.error(estr)
                    altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    altitude = np.append(altitude,
                                         altitude[-1, :].reshape(1, ngates),
                                         axis=0)
                    altitude = np.append(altitude,
                                         altitude[:, -1].reshape(nbeams, 1),
                                         axis=1)
            else:
                estr = '{:s}: altitude must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element: '.format(
                    estr)
                estr = '{:s}{}'.format(estr, altitude[0])
                logging.error(estr)
                altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
        if isinstance(elevation, np.ndarray):
            if elevation.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if elevation.size != ngates:
                    estr = '{:s}: elevation must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, elevation[0])
                    logging.error(estr)
                    elevation = elevation[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    elevation = np.resize(np.append(elevation, elevation[-1]),
                                          (nbeams + 1, ngates + 1))
            elif elevation.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if elevation.shape != (nbeams, ngates):
                    estr = '{:s}: elevation must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, elevation[0])
                    logging.error(estr)
                    elevation = elevation[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    elevation = np.append(elevation,
                                          elevation[-1, :].reshape(1, ngates),
                                          axis=0)
                    elevation = np.append(elevation,
                                          elevation[:, -1].reshape(nbeams, 1),
                                          axis=1)
            else:
                estr = '{:s}: elevation must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element'.format(estr)
                estr = '{:s}: {}'.format(estr, elevation[0])
                logging.error(estr)
                elevation = elevation[0] * np.ones((nbeams + 1, ngates + 1))

        if isinstance(hop, np.ndarray):
            if hop.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if hop.size != ngates:
                    estr = '{:s}: hop must be a scalar or numpy '.format(rn)
                    estr = '{:s}ndarray of size (ngates) or '.format(estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, hop[0])
                    logging.error(estr)
                    hop = hop[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    hop = np.resize(np.append(hop, hop[-1]),
                                    (nbeams + 1, ngates + 1))
            elif hop.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if hop.shape != (nbeams, ngates):
                    estr = '{:s}: hop must be a scalar or numpy '.format(rn)
                    estr = '{:s}ndarray of size (ngates) or '.format(estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, hop[0])
                    logging.error(estr)
                    hop = hop[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    hop = np.append(hop, hop[-1, :].reshape(1, ngates), axis=0)
                    hop = np.append(hop, hop[:, -1].reshape(nbeams, 1), axis=1)
            else:
                estr = '{:s}: hop must be a scalar or numpy ndarray'.format(rn)
                estr = '{:s} of size (ngates) or (nbeams,ngates).'.format(estr)
                estr = '{:s} Using first element: {}'.format(estr, hop[0])
                logging.error(estr)
                hop = hop[0] * np.ones((nbeams + 1, ngates + 1))

        # Do for coord_alt what we just did for altitude.
        if isinstance(coord_alt, np.ndarray):
            if coord_alt.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if coord_alt.size != ngates:
                    estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, coord_alt[0])
                    logging.error(estr)
                    coord_alt = coord_alt[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    coord_alt = np.resize(np.append(coord_alt, coord_alt[-1]),
                                          (nbeams + 1, ngates + 1))
            elif coord_alt.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if coord_alt.shape != (nbeams, ngates):
                    estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, coord_alt[0])
                    logging.error(estr)
                    coord_alt = coord_alt[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    coord_alt = np.append(coord_alt,
                                          coord_alt[-1, :].reshape(1, ngates),
                                          axis=0)
                    coord_alt = np.append(coord_alt,
                                          coord_alt[:, -1].reshape(nbeams, 1),
                                          axis=1)
            else:
                estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element'.format(estr)
                estr = '{:s}: {}'.format(estr, coord_alt[0])
                logging.error(estr)
                coord_alt = coord_alt[0] * np.ones((nbeams + 1, ngates + 1))

        # Generate beam/gate arrays
        beams = np.arange(nbeams + 1)
        gates = np.arange(ngates + 1)

        # Create output arrays
        slant_range_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lat_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lon_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        slant_range_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lat_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lon_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')

        # Calculate deviation from boresight for center of beam
        boff_center = bmsep * (beams - (nbeams - 1) / 2.0)
        # Calculate deviation from boresight for edge of beam
        boff_edge = bmsep * (beams - (nbeams - 1) / 2.0 - 0.5)

        # Iterates through beams
        for ib in beams:
            # if none of frang, rsep or recrise are arrays, then only execute
            # this for the first loop, otherwise, repeat for every beam
            if (not is_param_array and ib == 0) or is_param_array:
                # Calculate center slant range
                srang_center = slantRange(frang[ib],
                                          rsep[ib],
                                          recrise[ib],
                                          gates,
                                          center=True)
                # Calculate edges slant range
                srang_edge = slantRange(frang[ib],
                                        rsep[ib],
                                        recrise[ib],
                                        gates,
                                        center=False)
            # Save into output arrays
            slant_range_center[ib, :-1] = srang_center[:-1]
            slant_range_full[ib, :] = srang_edge

            # Calculate coordinates for Edge and Center of the current beam
            for ig in gates:
                # Handle array-or-not question.
                talt = altitude[ib, ig] if isinstance(altitude, np.ndarray) \
                    else altitude
                telv = elevation[ib, ig] if isinstance(elevation, np.ndarray) \
                    else elevation
                t_c_alt = coord_alt[ib, ig] \
                    if isinstance(coord_alt, np.ndarray) else coord_alt
                thop = hop[ib, ig] if isinstance(hop, np.ndarray) else hop

                if model == 'GS':
                    if (not is_param_array and ib == 0) or is_param_array:
                        slant_range_center[ib, ig] = \
                            gsMapSlantRange(srang_center[ig], altitude=None,
                                            elevation=None)
                        slant_range_full[ib, ig] = \
                            gsMapSlantRange(srang_edge[ig], altitude=None,
                                            elevation=None)
                        srang_center[ig] = slant_range_center[ib, ig]
                        srang_edge[ig] = slant_range_full[ib, ig]

                if (srang_center[ig] != -1) and (srang_edge[ig] != -1):
                    # Then calculate projections
                    latc, lonc = calcFieldPnt(siteLat,
                                              siteLon,
                                              siteAlt * 1e-3,
                                              siteBore,
                                              boff_center[ib],
                                              srang_center[ig],
                                              elevation=telv,
                                              altitude=talt,
                                              hop=thop,
                                              model=model,
                                              fov_dir=fov_dir)
                    late, lone = calcFieldPnt(siteLat,
                                              siteLon,
                                              siteAlt * 1e-3,
                                              siteBore,
                                              boff_edge[ib],
                                              srang_edge[ig],
                                              elevation=telv,
                                              altitude=talt,
                                              hop=thop,
                                              model=model,
                                              fov_dir=fov_dir)
                    if (coords != 'geo'):
                        lonc, latc = coord_conv(lonc,
                                                latc,
                                                "geo",
                                                coords,
                                                altitude=t_c_alt,
                                                date_time=date_time)
                        lone, late = coord_conv(lone,
                                                late,
                                                "geo",
                                                coords,
                                                altitude=t_c_alt,
                                                date_time=date_time)
                else:
                    latc, lonc = np.nan, np.nan
                    late, lone = np.nan, np.nan

                # Save into output arrays
                lat_center[ib, ig] = latc
                lon_center[ib, ig] = lonc
                lat_full[ib, ig] = late
                lon_full[ib, ig] = lone

        # Output is...
        self.latCenter = lat_center[:-1, :-1]
        self.lonCenter = lon_center[:-1, :-1]
        self.slantRCenter = slant_range_center[:-1, :-1]
        self.latFull = lat_full
        self.lonFull = lon_full
        self.slantRFull = slant_range_full
        self.beams = beams[:-1]
        self.gates = gates[:-1]
        self.coords = coords
        self.fov_dir = fov_dir
        self.model = model
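The 1-D branches above pad a per-gate array with its last element and replicate it for every beam edge, producing an (nbeams + 1, ngates + 1) array. A small numpy sketch of that padding, outside the class and with made-up sizes:

import numpy as np

nbeams, ngates = 3, 4
altitude = np.array([300., 310., 320., 330.])  # one value per gate

# Append the last element for the extra gate edge, then tile across beam edges.
padded = np.resize(np.append(altitude, altitude[-1]), (nbeams + 1, ngates + 1))
print(padded.shape)  # (4, 5)
print(padded[0])     # [300. 310. 320. 330. 330.]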
Code Example #7
File: radFov.py  Project: vtsuperdarn/davitpy
    def __init__(self, frang=180.0, rsep=45.0, site=None, nbeams=None,
                 ngates=None, bmsep=None, recrise=None, siteLat=None,
                 siteLon=None, siteBore=None, siteAlt=None, siteYear=None,
                 elevation=None, altitude=300., hop=None, model='IS',
                 coords='geo', date_time=None, coord_alt=0., fov_dir='front'):
        # Import necessary functions and classes
        from davitpy.utils.coordUtils import coord_conv

        # Define class constants
        rn = 'fov'

        # Test that we have enough input arguments to work with
        if(not site and None in [nbeams, ngates, bmsep, recrise, siteLat,
                                 siteLon, siteBore, siteAlt, siteYear]):
            estr = '{:s}: must provide either a site object or '.format(rn)
            estr = '{:s}[nbeams, ngates, bmsep, recrise, siteLat,'.format(estr)
            estr = '{:s} siteLon, siteBore, siteAlt, siteYear].'.format(estr)
            logging.error(estr)
            return

        # date_time checking is handled by coord_conv, and it already
        # knows all of the possible coord systems, so no need to do it
        # here.

        # Then assign variables from the site object if necessary
        if site:
            if not nbeams:
                nbeams = site.maxbeam
            if not ngates:
                ngates = site.maxgate
            if not bmsep:
                bmsep = site.bmsep
            if not recrise:
                recrise = site.recrise
            if not siteLat:
                siteLat = site.geolat
            if not siteLon:
                siteLon = site.geolon
            if not siteAlt:
                siteAlt = site.alt
            if not siteBore:
                siteBore = site.boresite
            if not siteYear:
                siteYear = site.tval.year

        # Some type checking is necessary. If frang, rsep or recrise are
        # arrays, then they should be of shape (nbeams,).  Set a flag if any of
        # frang, rsep or recrise is an array
        is_param_array = False
        if isinstance(frang, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(frang) != nbeams:
                estr = "{:s}: frang must be a scalar or numpy ".format(rn)
                estr = "{:s}ndarray of size (nbeams). Using first".format(estr)
                estr = "{:s} element: {}".format(estr, frang[0])
                logging.error(estr)
                frang = frang[0] * np.ones(nbeams + 1)
            else:
                frang = np.append(frang, frang[-1])
        else:
            frang = np.array([frang])
        if isinstance(rsep, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(rsep) != nbeams:
                estr = "{:s}: rsep must be a scalar or numpy ndarray".format(
                    rn)
                estr = "{:s} of size (nbeams). Using first element".format(
                    estr)
                estr = "{:s}: {}".format(estr, rsep[0])
                logging.error(estr)
                rsep = rsep[0] * np.ones(nbeams + 1)
            else:
                rsep = np.append(rsep, rsep[-1])
        else:
            rsep = np.array([rsep])
        if isinstance(recrise, np.ndarray):
            is_param_array = True
            # Array is adjusted to add on extra beam edge by copying the last
            # element
            if len(recrise) != nbeams:
                estr = "{:s}: recrise must be a scalar or numpy ".format(rn)
                estr = "{:s}ndarray of size (nbeams). Using first ".format(
                    estr)
                estr = "{:s}element: {}".format(estr, recrise[0])
                logging.error(estr)
                recrise = recrise[0] * np.ones(nbeams + 1)
            else:
                recrise = np.append(recrise, recrise[-1])
        else:
            recrise = np.array([recrise])

        # If altitude, elevation, or hop are arrays, then they should be of
        # shape (nbeams, ngates)
        if isinstance(altitude, np.ndarray):
            if altitude.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if altitude.size != ngates:
                    estr = '{:s}: altitude must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, altitude[0])
                    logging.error(estr)
                    altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    altitude = np.resize(np.append(altitude, altitude[-1]),
                                         (nbeams + 1, ngates + 1))
            elif altitude.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if altitude.shape != (nbeams, ngates):
                    estr = '{:s}: altitude must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, altitude[0])
                    logging.error(estr)
                    altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    altitude = np.append(altitude,
                                         altitude[-1, :].reshape(1, ngates),
                                         axis=0)
                    altitude = np.append(altitude,
                                         altitude[:, -1].reshape(nbeams, 1),
                                         axis=1)
            else:
                estr = '{:s}: altitude must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element: '.format(
                    estr)
                estr = '{:s}{}'.format(estr, altitude[0])
                logging.error(estr)
                altitude = altitude[0] * np.ones((nbeams + 1, ngates + 1))
        if isinstance(elevation, np.ndarray):
            if elevation.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if elevation.size != ngates:
                    estr = '{:s}: elevation must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, elevation[0])
                    logging.error(estr)
                    elevation = elevation[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    elevation = np.resize(np.append(elevation, elevation[-1]),
                                          (nbeams + 1, ngates + 1))
            elif elevation.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if elevation.shape != (nbeams, ngates):
                    estr = '{:s}: elevation must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, elevation[0])
                    logging.error(estr)
                    elevation = elevation[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    elevation = np.append(elevation,
                                          elevation[-1, :].reshape(1, ngates),
                                          axis=0)
                    elevation = np.append(elevation,
                                          elevation[:, -1].reshape(nbeams, 1),
                                          axis=1)
            else:
                estr = '{:s}: elevation must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element'.format(estr)
                estr = '{:s}: {}'.format(estr, elevation[0])
                logging.error(estr)
                elevation = elevation[0] * np.ones((nbeams + 1, ngates + 1))

        if isinstance(hop, np.ndarray):
            if hop.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if hop.size != ngates:
                    estr = '{:s}: hop must be a scalar or numpy '.format(rn)
                    estr = '{:s}ndarray of size (ngates) or '.format(estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, hop[0])
                    logging.error(estr)
                    hop = hop[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    hop = np.resize(np.append(hop, hop[-1]),
                                    (nbeams + 1, ngates + 1))
            elif hop.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if hop.shape != (nbeams, ngates):
                    estr = '{:s}: hop must be a scalar or numpy '.format(rn)
                    estr = '{:s}ndarray of size (ngates) or '.format(estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, hop[0])
                    logging.error(estr)
                    hop = hop[0] * np.ones((nbeams + 1, ngates + 1))
                else:
                    hop = np.append(hop, hop[-1, :].reshape(1, ngates), axis=0)
                    hop = np.append(hop, hop[:, -1].reshape(nbeams, 1), axis=1)
            else:
                estr = '{:s}: hop must be a scalar or numpy ndarray'.format(rn)
                estr = '{:s} of size (ngates) or (nbeams,ngates).'.format(estr)
                estr = '{:s} Using first element: {}'.format(estr, hop[0])
                logging.error(estr)
                hop = hop[0] * np.ones((nbeams + 1, ngates + 1))

        # Do for coord_alt what we just did for altitude.
        if isinstance(coord_alt, np.ndarray):
            if coord_alt.ndim == 1:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last element and replicating the whole array as many
                # times as beams
                if coord_alt.size != ngates:
                    estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, coord_alt[0])
                    logging.error(estr)
                    coord_alt = coord_alt[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    coord_alt = np.resize(np.append(coord_alt, coord_alt[-1]),
                                          (nbeams + 1, ngates + 1))
            elif coord_alt.ndim == 2:
                # Array is adjusted to add on extra beam/gate edge by copying
                # the last row and column
                if coord_alt.shape != (nbeams, ngates):
                    estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                    estr = '{:s}numpy ndarray of size (ngates) or '.format(
                        estr)
                    estr = '{:s}(nbeams,ngates). Using first '.format(estr)
                    estr = '{:s}element: {}'.format(estr, coord_alt[0])
                    logging.error(estr)
                    coord_alt = coord_alt[0] * \
                        np.ones((nbeams + 1, ngates + 1))
                else:
                    coord_alt = np.append(coord_alt,
                                          coord_alt[-1, :].reshape(1, ngates),
                                          axis=0)
                    coord_alt = np.append(coord_alt,
                                          coord_alt[:, -1].reshape(nbeams, 1),
                                          axis=1)
            else:
                estr = '{:s}: coord_alt must be a scalar or '.format(rn)
                estr = '{:s}numpy ndarray of size (ngates) or '.format(estr)
                estr = '{:s}(nbeams,ngates). Using first element'.format(estr)
                estr = '{:s}: {}'.format(estr, coord_alt[0])
                logging.error(estr)
                coord_alt = coord_alt[0] * np.ones((nbeams + 1, ngates + 1))

        # Generate beam/gate arrays
        beams = np.arange(nbeams + 1)
        gates = np.arange(ngates + 1)

        # Create output arrays
        slant_range_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lat_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lon_full = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        slant_range_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lat_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')
        lon_center = np.zeros((nbeams + 1, ngates + 1), dtype='float')

        # Calculate deviation from boresight for center of beam
        boff_center = bmsep * (beams - (nbeams - 1) / 2.0)
        # Calculate deviation from boresight for edge of beam
        boff_edge = bmsep * (beams - (nbeams - 1) / 2.0 - 0.5)

        # Iterates through beams
        for ib in beams:
            # if none of frang, rsep or recrise are arrays, then only execute
            # this for the first loop, otherwise, repeat for every beam
            if (not is_param_array and ib == 0) or is_param_array:
                # Calculate center slant range
                srang_center = slantRange(frang[ib], rsep[ib], recrise[ib],
                                          gates, center=True)
                # Calculate edges slant range
                srang_edge = slantRange(frang[ib], rsep[ib], recrise[ib],
                                        gates, center=False)
            # Save into output arrays
            slant_range_center[ib, :-1] = srang_center[:-1]
            slant_range_full[ib, :] = srang_edge

            # Calculate coordinates for Edge and Center of the current beam
            for ig in gates:
                # Handle array-or-not question.
                talt = altitude[ib, ig] if isinstance(altitude, np.ndarray) \
                    else altitude
                telv = elevation[ib, ig] if isinstance(elevation, np.ndarray) \
                    else elevation
                t_c_alt = coord_alt[ib, ig] \
                    if isinstance(coord_alt, np.ndarray) else coord_alt
                thop = hop[ib, ig] if isinstance(hop, np.ndarray) else hop

                if model == 'GS':
                    if (not is_param_array and ib == 0) or is_param_array:
                        slant_range_center[ib, ig] = \
                            gsMapSlantRange(srang_center[ig], altitude=None,
                                            elevation=None)
                        slant_range_full[ib, ig] = \
                            gsMapSlantRange(srang_edge[ig], altitude=None,
                                            elevation=None)
                        srang_center[ig] = slant_range_center[ib, ig]
                        srang_edge[ig] = slant_range_full[ib, ig]

                if (srang_center[ig] != -1) and (srang_edge[ig] != -1):
                    # Then calculate projections
                    latc, lonc = calcFieldPnt(siteLat, siteLon, siteAlt * 1e-3,
                                              siteBore, boff_center[ib],
                                              srang_center[ig], elevation=telv,
                                              altitude=talt, hop=thop,
                                              model=model, fov_dir=fov_dir)
                    late, lone = calcFieldPnt(siteLat, siteLon, siteAlt * 1e-3,
                                              siteBore, boff_edge[ib],
                                              srang_edge[ig], elevation=telv,
                                              altitude=talt, hop=thop,
                                              model=model, fov_dir=fov_dir)
                    if(coords != 'geo'):
                        lonc, latc = coord_conv(lonc, latc, "geo", coords,
                                                altitude=t_c_alt,
                                                date_time=date_time)
                        lone, late = coord_conv(lone, late, "geo", coords,
                                                altitude=t_c_alt,
                                                date_time=date_time)
                else:
                    latc, lonc = np.nan, np.nan
                    late, lone = np.nan, np.nan

                # Save into output arrays
                lat_center[ib, ig] = latc
                lon_center[ib, ig] = lonc
                lat_full[ib, ig] = late
                lon_full[ib, ig] = lone

        # Output is...
        self.latCenter = lat_center[:-1, :-1]
        self.lonCenter = lon_center[:-1, :-1]
        self.slantRCenter = slant_range_center[:-1, :-1]
        self.latFull = lat_full
        self.lonFull = lon_full
        self.slantRFull = slant_range_full
        self.beams = beams[:-1]
        self.gates = gates[:-1]
        self.coords = coords
        self.fov_dir = fov_dir
        self.model = model
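
The class code above fills latFull/lonFull with cell-corner coordinates and
latCenter/lonCenter with cell-centre coordinates in whatever system coord_conv
was asked for.  A minimal usage sketch follows; the 'bks' radar code and the
rsep/ngates values are illustrative only, and the site(code=...) keyword is an
assumption (the next example constructs the site from radId instead):

import datetime as dt
from davitpy import pydarn

t = dt.datetime(2013, 3, 16, 16, 30)
# radar hardware description for the chosen time
site = pydarn.radar.site(code='bks', dt=t)
# build the field of view in magnetic coordinates
fov = pydarn.radar.radFov.fov(site=site, rsep=45, ngates=75,
                              nbeams=site.maxbeam, coords='mag',
                              date_time=t)
# corner arrays are (nbeams + 1, ngates + 1); centre arrays are (nbeams, ngates)
print fov.latFull.shape, fov.latCenter.shape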
Code example #8
def plotFan(sTime, rad, interval=60, fileType='fitex', param='velocity',
            filtered=False, scale=[], channel=None, coords='geo',
            colors='lasse', gsct=False, fov=True, edgeColors='face',
            lowGray=False, fill=True, velscl=1000., legend=True,
            overlayPoes=False, poesparam='ted', poesMin=-3., poesMax=0.5,
            poesLabel=r"Total Log Energy Flux [ergs cm$^{-2}$ s$^{-1}$]",
            overlayBnd=False, show=True, png=False, pdf=False, dpi=500,
            tFreqBands=[]):
    """A function to make a fan plot

    Parameters
    ----------
    sTime : datetime
        The start time you want to plot
    rad : list
        A list of 3-letter radar codes, e.g. ['bks'] or ['bks','wal','gbr']
    interval : Optional[int]
        The time period to be plotted, in seconds.  default = 60
    fileType : Optional[str]
        The file type to plot, valid inputs are 'fitex','fitacf', 'lmfit',
        'fitacf3'.  default = 'fitex'
    param : Optional[str]
        The parameter to be plotted, valid inputs are 'velocity', 'power',
        'width', 'elevation', 'phi0'.  default = 'velocity'
    filtered : Optional[boolean]
        A flag indicating whether the data should be boxcar filtered.
        default = False
    scale : Optional[list]
        The min and max values of the color scale, i.e. [min,max].  If this is
        set to [], then default values will be used
    channel : Optional[char]
        The channel for which to plot data.  default = 'a'
    coords : Optional[str]
        The coordinate system to use; valid inputs are anything handled by
        coord_conv (see davitpy.utils.get_coord_dict).  Default:  geo
    colors : Optional[str]
        The color map to use, valid inputs are 'lasse', 'aj'.
        default = 'lasse'
    gsct : Optional[boolean]
        A flag indicating whether to plot ground scatter as gray.
        default = False
    fov : Optional[boolean]
        A flag indicating whether to overplot the radar fields of view.
        default = True
    edgeColors : Optional[str]
        Edge colors of the polygons, default = 'face'
    lowGray : Optional[boolean]
        A flag indicating whether to plot low velocities in gray.
        default = False
    fill : Optional[boolean]
        A flag indicating whether to plot filled or point RB cells.
        default = True
    velscl : Optional[float]
        The velocity to use as baseline for velocity vector length, only
        applicable if fill = False.  default = 1000
    legend : Optional[boolean]
        A flag indicating whether to plot the legend, only applicable if
        fill = False.  default = True
    overlayPoes : Optional[boolean]
        A flag indicating whether to overlay poes data.  default = False
    poesparam : Optional[str]
        The poes parameter to plot.  default = 'ted'.  available params can be
        found in :class:`gme.sat.poes.poesRec`
    poesMin : Optional[float]
        The min value for the poes data color scale.  default = -3.
    poesMax : Optional[float]
        The max value for the poes data color scale.  default = 0.5
    poesLabel : Optional[str]
        The label for the poes color bar.  default = r"Total Log Energy Flux
        [ergs cm$^{-2}$ s$^{-1}$]"
    overlayBnd : Optional[boolean]
        A flag indicating whether to plot an auroral boundary determined from
        fitting poes data.  default = False
    show : Optional[boolean]
        A flag indicating whether to display the figure on the screen.  This
        can cause problems over ssh.  default = True
    pdf : Optional[boolean]
        A flag indicating whether to output to a pdf file.  default = False.
        WARNING: saving as pdf is slow
    png : Optional[boolean]
        A flag indicating whether to output to a png file.  default = False
    dpi : Optional[int]
        Dots per inch if saving as png.  default = 500
    tFreqBands : Optional[list]
        Upper and lower bounds of frequency in kHz to be used.  Must be unset
        (or set to []) or have a [lower, upper] pair for each radar; any band
        set to [] falls back to the default of [8000, 20000] kHz, e.g.
        [[8000,20000]] for one radar or [[8000,20000],[8000,20000]] for two.

    Returns
    -------
    Nothing

    Examples
    --------
        import datetime as dt
        pydarn.plotting.fan.plotFan(dt.datetime(2013,3,16,16,30),['fhe','fhw'],param='power',gsct=True)
        pydarn.plotting.fan.plotFan(dt.datetime(2013,3,16,16,30),['fhe','fhw'],param='power',gsct=True,tFreqBands=[[10000,11000],[]])

    """
    from davitpy import pydarn
    from davitpy import gme
    import datetime as dt
    import pickle
    from matplotlib.backends.backend_pdf import PdfPages

    from davitpy.utils.coordUtils import coord_conv

    tt = dt.datetime.now()

    # check the inputs
    assert(isinstance(sTime, dt.datetime)), 'error, sTime must be a datetime \
           object'
    assert(isinstance(rad, list)), "error, rad must be a list, eg ['bks'] or \
           ['bks','fhe']"
    for r in rad:
        assert(isinstance(r, str) and len(r) == 3), 'error, elements of rad \
               list must be 3 letter strings'
    assert(param == 'velocity' or param == 'power' or param == 'width' or
           param == 'elevation' or param == 'phi0'), ("error, allowable params \
           are 'velocity','power','width','elevation','phi0'")
    assert(scale == [] or len(scale) == 2), (
        'error, if present, scales must have 2 elements')
    assert(colors == 'lasse' or colors == 'aj'), "error, valid inputs for color \
        are 'lasse' and 'aj'"

    # check freq band and set to default if needed
    assert(tFreqBands == [] or len(tFreqBands) == len(rad)), 'error, if \
        present, tFreqBands must have same number of elements as rad'
    tbands = []
    for i in range(len(rad)):
        if tFreqBands == [] or tFreqBands[i] == []:
            tbands.append([8000, 20000])
        else:
            tbands.append(tFreqBands[i])

    for i in range(len(tbands)):
        assert(tbands[i][1] > tbands[i][0]), 'error, frequency upper bound must \
            be > lower bound'

    if(scale == []):
        if(param == 'velocity'): scale = [-200, 200]
        elif(param == 'power'): scale = [0, 30]
        elif(param == 'width'): scale = [0, 150]
        elif(param == 'elevation'): scale = [0, 50]
        elif(param == 'phi0'): scale = [-numpy.pi, numpy.pi]

    fbase = sTime.strftime("%Y%m%d")

    cmap, norm, bounds = utils.plotUtils.genCmap(param, scale, colors=colors,
                                                 lowGray=lowGray)

    # open the data files
    myFiles = []
    myBands = []
    for i in range(len(rad)):
        f = radDataOpen(sTime, rad[i], sTime + dt.timedelta(seconds=interval),
                        fileType=fileType, filtered=filtered, channel=channel)
        if(f is not None):
            myFiles.append(f)
            myBands.append(tbands[i])

    assert(myFiles != []), 'error, no data available for this period'

    xmin, ymin, xmax, ymax = 1e16, 1e16, -1e16, -1e16

    allBeams = [''] * len(myFiles)
    sites, fovs, oldCpids, lonFull, latFull = [], [], [], [], []
    lonC, latC = [], []

    # go through all open files
    for i in range(len(myFiles)):
        # read until we reach start time
        allBeams[i] = radDataReadRec(myFiles[i])
        while (allBeams[i] is not None and allBeams[i].time < sTime):
            allBeams[i] = radDataReadRec(myFiles[i])

        # check that the file has data in the target interval
        if(allBeams[i] is None):
            myFiles[i].close()
            myFiles[i] = None
            continue

        # get to field of view coords in order to determine map limits
        t = allBeams[i].time
        site = pydarn.radar.site(radId=allBeams[i].stid, dt=t)
        sites.append(site)
        # Make lists of site lats and lons.  latC and lonC are used
        # for finding the map centre.
        xlon, xlat = coord_conv(site.geolon, site.geolat, "geo", coords,
                                altitude=0., date_time=t)
        latFull.append(xlat)
        lonFull.append(xlon)
        latC.append(xlat)
        lonC.append(xlon)
        myFov = pydarn.radar.radFov.fov(site=site, rsep=allBeams[i].prm.rsep,
                                        ngates=allBeams[i].prm.nrang + 1,
                                        nbeams=site.maxbeam, coords=coords,
                                        date_time=t)
        fovs.append(myFov)
        for b in range(0, site.maxbeam + 1):
            for k in range(0, allBeams[i].prm.nrang + 1):
                lonFull.append(myFov.lonFull[b][k])
                latFull.append(myFov.latFull[b][k])
        oldCpids.append(allBeams[i].cp)

        k = allBeams[i].prm.nrang
        b = 0
        latC.append(myFov.latFull[b][k])
        lonC.append(myFov.lonFull[b][k])
        b = site.maxbeam
        latC.append(myFov.latFull[b][k])
        lonC.append(myFov.lonFull[b][k])

    # Now that we have 3 points from the FOVs of the radars, calculate the
    # lat,lon pair to center the map on. We can simply do this by converting
    # from Spherical coords to Cartesian, taking the mean of each coordinate
    # and then converting back to get lat_0 and lon_0
    lonC, latC = (numpy.array(lonC) + 360.) % 360.0, numpy.array(latC)
    xs = numpy.cos(numpy.deg2rad(latC)) * numpy.cos(numpy.deg2rad(lonC))
    ys = numpy.cos(numpy.deg2rad(latC)) * numpy.sin(numpy.deg2rad(lonC))
    zs = numpy.sin(numpy.deg2rad(latC))
    xc = numpy.mean(xs)
    yc = numpy.mean(ys)
    zc = numpy.mean(zs)
    lon_0 = numpy.rad2deg(numpy.arctan2(yc, xc))
    lat_0 = numpy.rad2deg(numpy.arctan2(zc, numpy.sqrt(xc * xc + yc * yc)))

    # Now do some stuff in map projection coords to get necessary width and
    # height of map and also figure out the corners of the map
    t1 = dt.datetime.now()
    lonFull, latFull = (numpy.array(lonFull) + 360.) % 360.0, \
        numpy.array(latFull)

    tmpmap = utils.mapObj(coords=coords, projection='stere', width=10.0**3,
                          height=10.0**3, lat_0=lat_0, lon_0=lon_0,
                          datetime=sTime)
    x, y = tmpmap(lonFull, latFull)
    minx = x.min() * 1.05     # since we don't want the map to cut off labels
    miny = y.min() * 1.05     # or FOVs of the radars we should alter the
    maxx = x.max() * 1.05     # extrema a bit.
    maxy = y.max() * 1.05
    width = (maxx - minx)
    height = (maxy - miny)
    llcrnrlon, llcrnrlat = tmpmap(minx, miny, inverse=True)
    urcrnrlon, urcrnrlat = tmpmap(maxx, maxy, inverse=True)

    dist = width / 50.
    cTime = sTime

    # Clear temporary figure from memory.
    fig = plot.gcf()
    fig.clf()

    myFig = plot.figure(figsize=(12, 8))

    # draw the actual map we want
    myMap = utils.mapObj(coords=coords, projection='stere', lat_0=lat_0,
                         lon_0=lon_0, llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
                         urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
                         coastLineWidth=0.5, coastLineColor='k',
                         fillOceans='w', fillContinents='w', fillLakes='w',
                         datetime=sTime)
    # overlay fields of view, if desired
    if(fov == 1):
        for i, r in enumerate(rad):
            pydarn.plotting.overlayRadar(myMap, codes=r, dateTime=sTime)
            # this was missing fovObj! We need to plot the fov for this
            # particular sTime.
            pydarn.plotting.overlayFov(myMap, codes=r, dateTime=sTime,
                                       fovObj=fovs[i])

    logging.debug(dt.datetime.now() - t1)
    # manually draw the legend
    if((not fill) and legend):
        # draw the box
        y = [myMap.urcrnry * .82, myMap.urcrnry * .99]
        x = [myMap.urcrnrx * .86, myMap.urcrnrx * .99]
        verts = [x[0], y[0]], [x[0], y[1]], [x[1], y[1]], [x[1], y[0]]
        poly = patches.Polygon(verts, fc='w', ec='k', zorder=11)
        myFig.gca().add_patch(poly)
        labs = ['5 dB', '15 dB', '25 dB', '35 dB', 'gs', '1000 m/s']
        pts = [5, 15, 25, 35]
        # plot the icons and labels
        for w in range(6):
            myFig.gca().text(x[0] + .35 * (x[1] - x[0]), y[1] * (.98 - w *
                             .025), labs[w], zorder=15, color='k', size=8,
                             va='center')
            xctr = x[0] + .175 * (x[1] - x[0])
            if(w < 4):
                myFig.gca().scatter(xctr, y[1] * (.98 - w * .025), s=.1 * pts[w],
                              zorder=15, marker='o', linewidths=.5,
                              edgecolor='face', facecolor='k')
            elif(w == 4):
                myFig.gca().scatter(xctr, y[1] * (.98 - w * .025), s=.1 * 35.,
                              zorder=15, marker='o', linewidths=.5,
                              edgecolor='k', facecolor='w')
            elif(w == 5):
                y = LineCollection(numpy.array([((xctr - dist / 2., y[1] *
                                   (.98 - w * .025)), (xctr + dist / 2., y[1] *
                                                       (.98 - w * .025)))]),
                                   linewidths=.5, zorder=15, color='k')
                myFig.gca().add_collection(y)

    bbox = myFig.gca().get_axes().get_position()
    # now, loop through desired time interval

    tz = dt.datetime.now()
    cols = []
    bndTime = sTime + dt.timedelta(seconds=interval)

    ft = 'None'
    # go though all files
    pcoll = None
    for i in range(len(myFiles)):
        scans = []
        # check that we have good data at this time
        if(myFiles[i] is None or allBeams[i] is None): continue
        ft = allBeams[i].fType
        # until we reach the end of the time window
        while(allBeams[i] is not None and allBeams[i].time < bndTime):
            # filter on frequency
            if (allBeams[i].prm.tfreq >= myBands[i][0] and
                    allBeams[i].prm.tfreq <= myBands[i][1]):
                scans.append(allBeams[i])
            # read the next record
            allBeams[i] = radDataReadRec(myFiles[i])
        # if there is no data in scans, overlayFan will object
        if scans == []: continue
        intensities, pcoll = overlayFan(scans, myMap, myFig, param, coords,
                                        gsct=gsct, site=sites[i], fov=fovs[i],
                                        fill=fill, velscl=velscl, dist=dist,
                                        cmap=cmap, norm=norm)

    # if no data has been found pcoll will still be None, and the following
    # code will object
    if pcoll is not None:
        cbar = myFig.colorbar(pcoll, orientation='vertical', shrink=.65,
                              fraction=.1, drawedges=True)

        l = []
        # define the colorbar labels
        for i in range(0, len(bounds)):
            if(param == 'phi0'):
                ln = 4
                if(bounds[i] == 0): ln = 3
                elif(bounds[i] < 0): ln = 5
                l.append(str(bounds[i])[:ln])
                continue
            if((i == 0 and param == 'velocity') or i == len(bounds) - 1):
                l.append(' ')
                continue
            l.append(str(int(bounds[i])))
        cbar.ax.set_yticklabels(l)
        cbar.ax.tick_params(axis='y', direction='out')
        # set colorbar ticklabel size
        for ti in cbar.ax.get_yticklabels():
            ti.set_fontsize(12)
        if(param == 'velocity'):
            cbar.set_label('Velocity [m/s]', size=14)
            cbar.extend = 'max'

        if(param == 'grid'): cbar.set_label('Velocity [m/s]', size=14)
        if(param == 'power'): cbar.set_label('Power [dB]', size=14)
        if(param == 'width'): cbar.set_label('Spec Wid [m/s]', size=14)
        if(param == 'elevation'): cbar.set_label('Elev [deg]', size=14)
        if(param == 'phi0'): cbar.set_label('Phi0 [rad]', size=14)

    # myFig.gca().set_rasterized(True)
    # label the plot
    tx1 = myFig.text((bbox.x0 + bbox.x1) / 2.,
                     bbox.y1 + .02, cTime.strftime('%Y/%m/%d'), ha='center',
                     size=14, weight=550)
    tx2 = myFig.text(bbox.x1 + .02, bbox.y1 + .02, cTime.strftime('%H:%M - ') +
                     bndTime.strftime('%H:%M      '), ha='right', size=13,
                     weight=550)
    tx3 = myFig.text(bbox.x0, bbox.y1 + .02, '[' + ft + ']', ha='left',
                     size=13, weight=550)
    # label with frequency bands
    tx4 = myFig.text(bbox.x1 + .02, bbox.y1, 'Frequency filters:', ha='right',
                     size=8, weight=550)
    for i in range(len(rad)):
        myFig.text(bbox.x1 + .02, bbox.y1 - ((i + 1) * .015), rad[i] + ': ' +
                   str(tbands[i][0] / 1e3) + ' - ' + str(tbands[i][1] / 1e3) +
                   ' MHz', ha='right', size=8, weight=550)

    if(overlayPoes):
        pcols = gme.sat.poes.overlayPoesTed(myMap, myFig.gca(), cTime,
                                            param=poesparam, scMin=poesMin,
                                            scMax=poesMax)
        if(pcols is not None):
            cols.append(pcols)
            pTicks = numpy.linspace(poesMin, poesMax, 8)
            cbar = myFig.colorbar(pcols, ticks=pTicks, orientation='vertical',
                                  shrink=0.65, fraction=.1)
            cbar.ax.set_yticklabels(pTicks)
            cbar.set_label(poesLabel, size=14)
            cbar.ax.tick_params(axis='y', direction='out')
            # set colorbar ticklabel size
            for ti in cbar.ax.get_yticklabels():
                ti.set_fontsize(12)

    if(overlayBnd):
        gme.sat.poes.overlayPoesBnd(myMap, myFig.gca(), cTime)

    # handle the outputs
    if png is True:
        # if not show:
        #   canvas = FigureCanvasAgg(myFig)
        myFig.savefig(sTime.strftime("%Y%m%d.%H%M.") + str(interval) +
                      '.fan.png', dpi=dpi)
    if pdf:
        # if not show:
        #   canvas = FigureCanvasAgg(myFig)
        logging.info('Saving as pdf...this may take a moment...')
        myFig.savefig(sTime.strftime("%Y%m%d.%H%M.") + str(interval) +
                      '.fan.pdf')
    if show:
        myFig.show()
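
Inside plotFan the map centre is found by turning the collected corner points
into Cartesian unit vectors, averaging them, and converting the mean vector
back to a latitude/longitude pair.  A self-contained sketch of that step
(numpy only; the function name is ours, not davitpy's):

import numpy

def centre_of_points(lons, lats):
    """Return (lon_0, lat_0) of the mean position of lon/lat points in degrees."""
    lons = (numpy.asarray(lons, dtype=float) + 360.) % 360.
    lats = numpy.asarray(lats, dtype=float)
    # unit vectors on the sphere
    xs = numpy.cos(numpy.deg2rad(lats)) * numpy.cos(numpy.deg2rad(lons))
    ys = numpy.cos(numpy.deg2rad(lats)) * numpy.sin(numpy.deg2rad(lons))
    zs = numpy.sin(numpy.deg2rad(lats))
    # mean vector, then back to spherical angles
    xc, yc, zc = xs.mean(), ys.mean(), zs.mean()
    lon_0 = numpy.rad2deg(numpy.arctan2(yc, xc))
    lat_0 = numpy.rad2deg(numpy.arctan2(zc, numpy.sqrt(xc * xc + yc * yc)))
    return lon_0, lat_0

# e.g. centre_of_points([350., 10.], [60., 62.]) is roughly (-0.3, 61.4)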
Code example #9
def plot_tec(ax,
             dtm,
             mag_latc_range=[53, 62],
             mltc_range=[-6, 6],
             t_c_alt=0.,
             cmap="gist_gray_r",
             scatter_plot=False,
             norm=None,
             db_name=None,
             dbdir="../data/sqlite3/"):
    """ Makes a TEC plot for a given dtm

    Parameters
    ----------

    """

    # Round the minute down to a multiple of 5
    dtm = dtm.replace(minute=5 * int(dtm.minute / 5))

    if db_name is None:
        db_name = "med_filt_tec.sqlite"

    # make a connection
    conn = sqlite3.connect(dbdir + db_name,
                           detect_types=sqlite3.PARSE_DECLTYPES)

    # load data to a dataframe
    table_name = "med_filt_tec"
    command = "SELECT mlat, mlon, med_tec FROM {tb} " +\
       "WHERE datetime = '{dtm}'"
    command = command.format(tb=table_name, dtm=dtm)
    df = pd.read_sql(command, conn)

    # Filter the data by MLAT
    df = df.loc[(df.mlat >= mag_latc_range[0]) &
                (df.mlat <= mag_latc_range[1]), :]

    # Plot the data
    ccoll = None
    if not df.empty:
        # convert from mag to mlt coords
        lats = df.mlat.as_matrix()
        lons = df.mlon.as_matrix()
        lts, lats = coord_conv(lons,
                               lats,
                               "mag",
                               "mlt",
                               altitude=t_c_alt,
                               date_time=dtm)
        lts = [(round(x, 1)) % 360 for x in lts]
        lats = [round(x, 1) for x in lats]

        # Wrap MLT (still in degrees) into the range -180 to 180
        lts = [x if x <= 180 else x - 360 for x in lts]
        df["mlt"] = lts
        df["mlat"] = lats

        # Filter the data by MLT
        df = df.loc[(df.mlt >= mltc_range[0] * 15.) &
                    (df.mlt <= mltc_range[1] * 15.), :]
        df = df.sort_values("mlt")

        # Construct arrays
        if scatter_plot:
            xs = df.mlt.as_matrix()
            ys = df.mlat.as_matrix()
            cs = df.med_tec.as_matrix()
        else:
            xs = np.arange(df.mlt.min(), df.mlt.max() + 2, 2)  # in degrees
            ys = np.arange(mag_latc_range[0], mag_latc_range[1] + 1)
            cs = np.ones((len(xs), len(ys))) * np.nan
            for i, x in enumerate(xs):
                for j, y in enumerate(ys):
                    df_tmp = df.loc[(np.isclose(df.mlt, x))
                                    & (np.isclose(df.mlat, y))]
                    if not df_tmp.empty:
                        cs[i, j] = df_tmp.med_tec.as_matrix()[0]

        # Convert MLT from degrees to hours
        xs = xs / 15.

        # Plot the data
        if scatter_plot:
            ccoll = ax.scatter(xs,
                               ys,
                               s=30.0,
                               zorder=1,
                               marker="s",
                               c=cs,
                               linewidths=.5,
                               edgecolors='face',
                               cmap=cmap,
                               norm=norm)
        else:
            X, Y = np.meshgrid(xs, ys)
            Z = np.ma.masked_where(np.isnan(cs.T), cs.T)
            ccoll = ax.pcolormesh(X,
                                  Y,
                                  Z,
                                  edgecolor=None,
                                  cmap=cmap,
                                  norm=norm)

        # Annotate the starting MLT location of the radar


#        rad_mlt_loc = round(df.rad_mlt.as_matrix()[0]/15.,1)
#        lbl = rad + ",b" + str(bmnum) + "\nMLT=" + str(rad_mlt_loc)
#        ax.annotate(lbl, xy=(0.90, 0.1), xycoords="axes fraction", fontsize=8)
    ax.set_ylabel("MLAT", fontsize=10)
    ax.set_ylim([mag_latc_range[0], mag_latc_range[1]])
    ax.set_xlim([mltc_range[0], mltc_range[1]])
    ax.axhline(y=65., color="r", linestyle="--", linewidth=1.)

    # Close conn
    conn.close()

    return ccoll
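
A minimal call sketch for plot_tec.  The date, the 0-15 colour range and the
colorbar label are placeholders, and it assumes the med_filt_tec SQLite
database exists under the default dbdir:

import datetime as dt
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize

fig, ax = plt.subplots()
ccoll = plot_tec(ax, dt.datetime(2013, 3, 16, 16, 30),
                 mag_latc_range=[53, 62], mltc_range=[-6, 6],
                 cmap="gist_gray_r", scatter_plot=False,
                 norm=Normalize(0, 15))
# plot_tec returns None when no data fell inside the requested ranges
if ccoll is not None:
    fig.colorbar(ccoll, ax=ax, label="median TEC")
ax.set_xlabel("MLT")
plt.show()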
Code example #10
    try:
        cur.execute(command)
    except Exception, e:
        logging.error(e, exc_info=True)

    rows = cur.fetchall()
    # do the conversion row by row
    if rows:
        for row in rows:
            if row:
                lat, lon, date_time = row

                # convert from mag to mlt coords
                lt, lat = coord_conv(lon,
                                     lat,
                                     "mag",
                                     "mlt",
                                     altitude=t_c_alt,
                                     date_time=date_time)
                lt = (round(lt, 1)) % 360
                lat = round(lat, 1)

                # Add to db
                command = "UPDATE {tb} SET mlt={lt} " +\
                          "WHERE mlat={lat} AND mlon={lon} AND datetime = '{dtm}'"
                command = command.format(tb=table_name,
                                         lat=lat,
                                         lon=lon,
                                         lt=lt,
                                         dtm=date_time)
                print command
                # do the update
Code example #11
def read_point_sdvel(stime, etime, hemi="north", ftype="grdex",
                     coord="mlt", lon_range=[11, 13], lat_point=80.5,
                     lon_del=0.5):
    """Read gridded SuperDARN velocity data between stime and etime and
    return the velocity magnitude and direction found at lat_point for a
    range of longitudes, as two time-indexed pandas DataFrames."""

    import numpy as np
    import pandas as pd
    from davitpy.pydarn.sdio.sdDataRead import sdDataOpen, sdDataReadAll
    from davitpy.utils.coordUtils import coord_conv

    my_ptr = sdDataOpen(stime, hemi=hemi, eTime=etime, fileType=ftype)
    my_list = sdDataReadAll(my_ptr)

    # convert mlt_lon to mlon
    lon_range_tmp = np.arange(15 * lon_range[0], 180, lon_del)
    # np.append returns a new array, so the result must be assigned
    lon_range_tmp = np.append(lon_range_tmp,
                              np.arange(-180, (15 * lon_range[1] - 360), lon_del))
    lon_range = lon_range_tmp
    tms = []
    df_vel_mag = pd.DataFrame(index=range(len(my_list)), columns=lon_range) 
    df_vel_angle = pd.DataFrame(index=range(len(my_list)), columns=lon_range) 
    for k, sdrec in enumerate(my_list):
        # convert mag to mlt
        if ftype in ["grd", "grdex"]:
            xlon, xlat = coord_conv(sdrec.vector.mlon, sdrec.vector.mlat, "mag", coord, 
                                    altitude=100., date_time=sdrec.eTime)
        elif ftype in ["map", "mapex"]:
            xlon, xlat = coord_conv(sdrec.grid.vector.mlon, sdrec.grid.vector.mlat, "mag", coord, 
                                    altitude=100., date_time=sdrec.eTime)
        #lons_tmp = []
        vel_mag, vel_angle = [], []
        for i in range(len(lon_range)):
            diffs = np.array(xlon) - lon_range[i]
            if np.min(abs(diffs)) <= lon_del:
                min_indx = np.argmin(abs(diffs))
                if (xlat[min_indx] - lat_point) == 0:
                    #lons_tmp.append(xlon[min_indx])
                    if ftype in ["grd", "grdex"]:
                        vel_mag.append(sdrec.vector.velmedian[min_indx])
                        kvect = sdrec.vector.kvect[min_indx]
                    elif ftype in ["map", "mapex"]:
                        vel_mag.append(sdrec.grid.vector.velmedian[min_indx])
                        kvect = sdrec.grid.vector.kvect[min_indx]
                    # set the velocity direction parameter, kvect, such that 0 deg is sunward, 90 is dawnward,
                    # 180 is antisunward and -90 is duskward
                    if kvect < 0:
                        kvect = kvect + 180
                    else:
                        kvect = kvect - 180
                    vel_angle.append(kvect)
                else:
                    vel_mag.append(np.nan)
                    vel_angle.append(np.nan)
            else:
                vel_mag.append(np.nan)
                vel_angle.append(np.nan)


        # populate the empty dataframe
        vel_mag_dict = dict(zip(lon_range, vel_mag))
        vel_angle_dict = dict(zip(lon_range, vel_angle))
        df_vel_mag.loc[k] = vel_mag_dict 
        df_vel_angle.loc[k] = vel_angle_dict 

        tms.append(sdrec.sTime)

    # change the dataframe index into datetime index
    df_vel_mag.index = tms
    df_vel_angle.index = tms

    return df_vel_mag, df_vel_angle
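
A minimal call sketch for read_point_sdvel.  The dates are placeholders and it
assumes gridded (grdex) files for that interval are reachable through
davitpy's sdDataOpen:

import datetime as dt

stime = dt.datetime(2013, 3, 16)
etime = dt.datetime(2013, 3, 17)
df_vel_mag, df_vel_angle = read_point_sdvel(stime, etime, hemi="north",
                                            ftype="grdex", coord="mlt",
                                            lon_range=[11, 13], lat_point=80.5)
# rows are record times, columns are MLT longitudes in degrees; cells are NaN
# where no gridded vector fell within lon_del of the requested point
print df_vel_mag.head()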
Code example #12
File: coordUtils.py  Project: BChrisler/davitpy_bcc
    print
    print "All of these results may have varying sigfigs."
    print "The expected values were found on a 32-bit system."
    print
    print "Test of redirection function coordConv"
    print coordConv(50.7, 34.5, 300., "geo", "geo", 
                    dateTime=datetime(2012, 1, 1, 0, 2))
    print
    print "Single coord pair tests"
    print
    print "Test of list -> list"
    print "Expected for 32-bit system:  ([50.700000000000003], [34.5])"
    print "Expected for 64-bit system:  ([50.700000000000003], [34.5])"
    print "Result:                      " + \
str(coord_conv([50.7], [34.5], 'geo', 'geo'))
    print
    print "Test of float -> float"
    print "Expected for 32-bit system:  (50.700000000000003, 34.5)"
    print "Expected for 64-bit system:  (50.700000000000003, 34.5)" 
    print "Result:                      " + \
str(coord_conv(50.7, 34.5, 'geo', 'geo'))
    print
    print "Test of int -> float"
    print "Expected for 32-bit system:  (50.0, 34.0)"
    print "Expected for 64-bit system:  (50.0, 34.0)"
    print "Result:                      " + \
str(coord_conv(50, 34, 'geo', 'geo'))
    print
    print "Tests of numpy array -> numpy array"
    print "Expected for 32-bit system:  (array([ 50.7]), array([ 34.5]))"
Code example #13
        logging.error(e, exc_info=True)
    rows = cur.fetchall()

    # do the conversion row by row
    if rows:
        for row in rows:
            latc, lonc, date_time = row
            if latc:
                # Load json string
                latc = json.loads(latc)
                lonc = json.loads(lonc)

                # convert from geo to mag coords
                lonc, latc = coord_conv(lonc,
                                        latc,
                                        "geo",
                                        "mag",
                                        altitude=t_c_alt,
                                        date_time=date_time)

                lonc = [(round(x, 1)) % 360 for x in lonc]
                latc = [round(x, 1) for x in latc]

                # convert to string
                latc = json.dumps(latc)
                lonc = json.dumps(lonc)

                # Add to db
                command = "UPDATE {tb} SET " +\
                          "mag_latc='{latc}', mag_lonc='{lonc}' " +\
                          "WHERE datetime = '{dtm}'"
                command = command.format(tb=table_name,