Example #1
    def testOpenDap(self):

        print('testOpenDap NASA EARTHDATA')

        # Get authentication from the .netrc file
        netrc_file = '/home/jli/.netrc'

        if os.path.isfile(netrc_file):
            logins = netrc.netrc()
            accounts = logins.hosts
            for host, info in accounts.items():
                self.login, self.account, self.password = info

        #dataset_url = 'https://goldsmr5.gesdisc.eosdis.nasa.gov:443/opendap/MERRA2/M2I3NPASM.5.12.4/1986/12/MERRA2_100.inst3_3d_asm_Np.19861201.nc4'
        #dataset_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods/M2T1NXSLV.dods?t2m[1:2][1:5][7:14]'
        dataset_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods/M2T1NXSLV.dods?t2m'
        session = setup_session(self.login,
                                self.password,
                                check_url=dataset_url)
        #dataset = open_url(dataset_url,session=session)
        #dataset_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods/M2T1NXSLV.dods?t2m[1:2][1:5][7:14]'
        dataset = open_dods(dataset_url, session=session)
        #store = xarray.backends.PydapDataStore(dataset)
        #ds = xarray.open_dataset(store)

        #print ds.keys()
        print(type(dataset))
        print(dataset.keys())
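The loop over every .netrc host above works, but netrc can also return the Earthdata entry directly; a minimal sketch, assuming the standard URS host name:

import netrc

# Look up the Earthdata URS entry directly instead of looping over all
# hosts; authenticators() returns None if the host has no entry.
auth = netrc.netrc().authenticators('urs.earthdata.nasa.gov')
if auth is not None:
    login, account, password = auth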
Example #2
def get_trmm_data(start_date, end_date, dty):
    lon_lb = (-77.979315 - 0.2489797462 / 2)
    lon_ub = (-76.649286 + 0.455314383 / 2)
    lat_lb = 36.321159
    lat_ub = 37.203955
    st_date = dt.datetime.strptime(start_date, "%Y-%m-%d")
    ed_date = dt.datetime.strptime(end_date, "%Y-%m-%d")
    date_range = pd.date_range(st_date, ed_date, freq='3H')
    precip_list = []
    for d in date_range:
        print('getting trmm data for {}'.format(d))
        doy = d.timetuple().tm_yday
        if d.hour == 0:
            doy -= 1
        url = 'https://disc2.gesdisc.eosdis.nasa.gov:443/opendap/TRMM_RT/TRMM_3B42RT.7/{yr}/{doy}/3B42RT.{yr}{mth}{dy}{hr}.7R2.nc4'.format(
            yr=d.year, mth=zero_pad(d.month), dy=zero_pad(d.day), hr=zero_pad(d.hour), doy=doy)
        session = setup_session('******', '******', check_url=url)  # Earthdata credentials masked
        dataset = open_url(url, session=session)
        var = dataset['precipitation']
        precip = var[:, :]
        lats = np.array(dataset['lat'][:])
        lons = np.array(dataset['lon'][:])
        lon_mask = (lons > lon_lb) & (lons < lon_ub)
        lon_filt = lons[lon_mask]
        lat_mask = (lats > lat_lb) & (lats < lat_ub)
        lat_filt = lats[lat_mask]
        precip_place = precip[lat_mask]
        precip_m = precip_place.T[lon_mask]
        p = precip_m.T

        x, y, precip_proj = get_projected_array(lat_filt, lon_filt, p,
                                                '{}{}'.format(d.day, d.hour), dty)
        precip_list.append(precip_proj)
    return x, y, precip_list
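get_trmm_data calls a zero_pad helper that is not shown above; a minimal sketch consistent with how it is used (two-digit month, day, and hour fields in the URL):

def zero_pad(n):
    # Format an integer as a two-digit zero-padded string, e.g. 3 -> '03'.
    return '{:02d}'.format(n)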
Example #3
def get_data_from_url(url, date, settings):
    """
    Worker function that
        - accesses the url
        - subsets the data
        - downloads the data
        - writes compressed output to disk
    """
    # Create session
    with setup_session(settings["eosdis_username"],
                       settings["eosdis_password"],
                       check_url=url) as session:
        # Open remote dataset
        store = xr.backends.PydapDataStore.open(url, session=session)
        ds_in = xr.open_dataset(store)

        # Subset dataset
        ds_subset = subset_dataset(ds_in, settings["vars_of_interest"],
                                   settings["sat_region"])

        # Add time dimension
        ds_subset = add_time_dimension(ds_subset, date)

        # Compress dataset
        ds_subset = compress_dataset(ds_subset)

        # Add metainformation
        ds_subset = add_metadata(ds_subset, url)

        # Write dataset to disk
        ds_subset.to_netcdf(
            "AIRS__{time}.nc".format(time=date.strftime('%Y%m%d')),
            unlimited_dims=['time'])
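subset_dataset, add_time_dimension, compress_dataset, and add_metadata are project helpers that are not shown. As one illustration, a hedged sketch of what subset_dataset might look like, assuming settings["sat_region"] is a dict of lat/lon bounds (all names here are hypothetical):

def subset_dataset(ds, vars_of_interest, region):
    # Keep only the requested variables, then crop to the region's
    # bounding box; assumes the lat coordinate is stored ascending.
    ds = ds[vars_of_interest]
    return ds.sel(lat=slice(region['lat_min'], region['lat_max']),
                  lon=slice(region['lon_min'], region['lon_max']))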
Example #4
File: fetch_pwv.py Project: talister/pwv
def extract_opendap_data(opendap_url, DATAFIELD_NAME):
    username, password = get_netrc_credentials()
    if username and password:
        session = setup_session(username, password, check_url=opendap_url)
        dataset = open_url(opendap_url, session=session)
        data3D = dataset[DATAFIELD_NAME]
        data = data3D.data[:, :]

        # Read geolocation dataset.
        # For...reasons... the Latitude and Longitude arrays come back as 1D
        # via pydap rather than the correct 2D ones, via pyhdf. So we need
        # to grow them back to the right shape
        lat = dataset['Latitude'].data
        lon = dataset['Longitude'].data
        latitude = np.array([
            lat[:, :],
        ] * lon.shape[0]).transpose()
        longitude = np.array([
            lon[:, :],
        ] * lat.shape[0])

        # Handle fill value.
        attrs = data3D.attributes
        fillvalue = attrs["_FillValue"]
        data[data == fillvalue] = np.nan
        data = np.ma.masked_array(data, np.isnan(data))

    return data, latitude, longitude
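get_netrc_credentials is not shown; a plausible sketch using the standard netrc module, assuming the credentials are stored under the URS host as in the other examples on this page:

import netrc

def get_netrc_credentials():
    # Return (username, password) for Earthdata URS from ~/.netrc,
    # or (None, None) when no usable entry exists.
    try:
        auth = netrc.netrc().authenticators('urs.earthdata.nasa.gov')
    except (FileNotFoundError, netrc.NetrcParseError):
        return None, None
    if auth is None:
        return None, None
    return auth[0], auth[2]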
Example #5
 def start_session(self):
     self.session = setup_session(
         self.username,
         self.password,
         check_url=
         "https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/MERRA2_MONTHLY/M2C0NXASM.5.12.4/1980/MERRA2_101.const_2d_asm_Nx.00000000.nc4"
     )
Example #6
def test_basic_urs_auth():
    """
    Set up PyDAP to use the URS request() function.

    The intent here is to ensure that pydap.net is able to
    open a url if and only if requests is able to
    open the same url.
    """
    session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                os.environ.get('PASSWORD_URS'),
                                check_url=url)

    # Check that the requests library can access the link:
    res = requests.get(test_url, cookies=session.cookies)
    assert(res.status_code == 200)
    res.close()

    # Check that the pydap library can access the link:
    res = pydap.net.follow_redirect(test_url, session=session)
    assert(res.status_code == 200)

    # Check that the pydap library can access another link:
    res = pydap.net.follow_redirect(test_url_2, session=session)
    assert(res.status_code == 200)
    session.close()
Example #7
    def test_basic_urs_auth(self):
        """
        Set up PyDAP to use the URS request() function.

        The intent here is to ensure that pydap.net is able to
        open a url if and only if requests is able to
        open the same url.
        """
        assert(os.environ.get('USERNAME_URS'))
        assert(os.environ.get('PASSWORD_URS'))
        session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                    os.environ.get('PASSWORD_URS'),
                                    check_url=self.url)

        # Check that the requests library can access the link:
        res = requests.get(self.test_url, cookies=session.cookies)
        assert(res.status_code == 200)
        res.close()

        # Check that the pydap library can access the link:
        res = pydap.net.follow_redirect(self.test_url, session=session)
        assert(res.status_code == 200)

        # Check that the pydap library can access another link:
        res = pydap.net.follow_redirect(self.test_url_2, session=session)
        assert(res.status_code == 200)
        session.close()
Example #8
def getInsolation(earthLoginUser, earthLoginPass, tile, year=None, doy=None):
    if year is None:
        dd = datetime.date.today() + datetime.timedelta(days=-1)
        year = dd.year
    if doy is None:
        doy = (datetime.date.today() - datetime.date(year, 1, 1)).days
        dd = datetime.date.today() + datetime.timedelta(days=-1)
        # month and day are derived only in this branch; they are needed to
        # build the filename below
        month = dd.month
        day = dd.day
    llLat, llLon = tile2latlon(tile)
    ulx = llLon
    uly = llLat + 15.
    lrx = llLon + 15.
    lry = llLat
    MERRA2_ulLat = 90.0
    MERRA2_ulLon = -180.0
    MERRA2LatRes = 0.5
    MERRA2LonRes = 0.625
    nrow = 3750
    ncol = 3750
    date = '%d%03d' % (year, doy)
    inProj4 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    #========get MERRA2 Insolation data at overpass time======================
    inRes = [MERRA2LonRes, MERRA2LatRes]
    inUL = [MERRA2_ulLon, MERRA2_ulLat]

    if year < 1992:
        fileType = 100
    elif year > 1991 and year < 2001:
        fileType = 200
    elif year > 2000 and year < 2011:
        fileType = 300
    else:
        fileType = 400

    opendap_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/'
    product = 'M2T1NXRAD.5.12.4'

    filename = 'MERRA2_%d.tavg1_2d_rad_Nx.%04d%02d%02d.nc4' % (fileType, year,
                                                               month, day)
    fullUrl = os.path.join(opendap_url, product, '%04d' % year, '%02d' % month,
                           filename)
    session = urs.setup_session(username=earthLoginUser,
                                password=earthLoginPass,
                                check_url=fullUrl)
    d = open_url(fullUrl, session=session)
    Insol = d.SWGDNCLR

    #====get daily insolation=========================================
    outFN = os.path.join(static_path, 'INSOL24', 'T%03d' % tile,
                         'RS24_%s_T%03d.tif' % (date, tile))
    if not os.path.exists(outFN):
        dataset2 = np.flipud(np.sum(np.squeeze(Insol[:, :, :]), axis=0))
        outfile = os.path.join(os.getcwd(), 'insol24')
        outFormat = gdal.GDT_Float32
        writeArray2Tiff(dataset2, inRes, inUL, inProj4, outfile, outFormat)
        optionList = ['-overwrite', '-s_srs', '%s' % inProj4,'-t_srs','%s' % inProj4,\
        '-te', '%f' % ulx, '%f' % lry,'%f' % lrx,'%f' % uly,'-r', 'bilinear',\
        '-ts', '%f' % nrow, '%f' % ncol,'-multi','-of','GTiff','%s' % outfile, '%s' % outFN]
        warp(optionList)
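tile2latlon, writeArray2Tiff, and warp are project helpers that are not shown. Given the gdalwarp-style option list built above, warp plausibly shells out to GDAL; a sketch under that assumption:

import subprocess

def warp(option_list):
    # Run the GDAL gdalwarp command-line tool with the assembled options.
    subprocess.check_call(['gdalwarp'] + option_list)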
Example #9
File: nmtools.py Project: usnair/georad
 def initSession(self):
     username = self.pwdDict['NASA Earth Data']['user']
     password = self.pwdDict['NASA Earth Data']['password']
     self.session = setup_session(username,
                                  password,
                                  check_url=self.baseURL)
     firstDay = datetime(1980, 1, 1)
     dataset = open_url(self.getUrlMERRA(firstDay), session=self.session)
     self.lon = dataset['lon'][:]
     self.lat = dataset['lat'][:]
Example #10
File: merra2.py Project: mhdella/MetReLoad
    def open(collection, username, password, base_url=DODS_URL):
        """Open a MERRA-2 data collection

        Args:
            collection (str): Earth Science Data Types Name of the collection (9 characters)
            username (str)
            password (str)
            base_url (str, optional): Base url for requests,
                                      default https://goldsmr4.gesdisc.eosdis.nasa.gov/dods
        """

        # Initialize session and open dataset
        url = '/'.join((base_url, collection.upper()))
        logger.debug("Setting up session to %s as %s", url, username)
        try:
            session = setup_session(username, password, check_url=url)
        except Exception as err:
            err_str = "Unable to set up session to {} with username {}.".format(
                url, username)
            raise RuntimeError(err_str) from err

        logger.debug("Opening Pydap data store")
        store = None
        try:
            store = PydapDataStore.open(url, session=session)
        except ModuleNotFoundError:  # pylint: disable=W0706
            raise  # Special case for detecting missing packages
        except Exception as err:
            raise RuntimeError("Invalid url '{}'".format(url)) from err
        finally:
            if not store:  # Clean up if store was not opened
                session.close()

        logger.debug("Opening dataset")
        try:
            dataset = xa.open_dataset(store, chunks=TIME_CHUNKS)
        except HTTPError:
            raise RuntimeError("""Authentication failed!\n
                                  Hint: check that 'NASA GESDISC DATA ARCHIVE'
                                  app is authorized for your account at
                                  https://urs.earthdata.nasa.gov""")

        finally:
            store.close()
            session.close()

        # Fix for mysterious `area` variable error
        if collection.upper() == 'M2C0NXASM':
            try:
                dataset = dataset.drop('area')
            except KeyError:
                pass

        return MERRA2Dataset(dataset)
Example #11
def get_dataset(url):

    from pydap.client import open_url
    from pydap.cas.urs import setup_session

    user = '******'
    pswd = '******'

    session = setup_session(user, pswd, check_url=url)
    dataset = open_url(url, session=session)

    return dataset
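A usage sketch for get_dataset; the URL is one of the GES DISC DODS endpoints used elsewhere on this page:

dataset = get_dataset('https://goldsmr4.gesdisc.eosdis.nasa.gov/dods/M2T1NXSLV')
print(dataset.keys())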
Example #12
    def load_dataset(self, dataset_name):

        #Force the dataset name into lowercase and attach to object
        self.dataset_name = dataset_name.lower()

        #Start session
        session = setup_session(self.username, self.password,
            check_url=self.base_url+self.available_datasets[self.dataset_name][0])

        #Now load the appropriate dataset
        self.dataset = []
        #Loop over dates
        for d in self.dates:

            #Loop over file versions (there were changes over the years; start with most recent)
            for i in [4, 3, 2, 1]:
                #Construct url
                url = ("{0}{1}/{2}/{3:02d}/MERRA2_{4}00.tavg1_2d_{5}_Nx.{2}{3:02d}{6:02d}.nc4"
                    .format(self.base_url, self.available_datasets[self.dataset_name][0],
                    d.year, d.month, i, self.available_datasets[self.dataset_name][1], d.day))
                
                #Load data
                try:
                    self.dataset.append(open_url(url, session=session))
                except Exception:
                    continue

                #Break to next date on successful load
                break

        #Store variables
        self.var_list = sorted(list(self.dataset[0].keys()))

        #Now pull grid info
        lons = numpy.array(self.dataset[0]["lon"][:])
        lats = numpy.array(self.dataset[0]["lat"][:])
        self.lons, self.lats = numpy.meshgrid(lons, lats)
        self.full_extent = [numpy.min(self.lons), numpy.max(self.lons),
            numpy.min(self.lats), numpy.max(self.lats)]
        self.nx = len(lons)
        self.ny = len(lats)
        self.dx = float(self.dataset[0].attributes["HDF5_GLOBAL"]["LongitudeResolution"])
        self.dy = float(self.dataset[0].attributes["HDF5_GLOBAL"]["LatitudeResolution"])

        #Now create projection information using first file in the dataset
        self.proj_name = "Global Latitude/Longitude"
        self.center_lon = (self.full_extent[0]+self.full_extent[1])/2
        self.proj = ccrs.PlateCarree(central_longitude=self.center_lon)
        self.pcp = ccrs.PlateCarree() #This is for transformations within the object
        self.states = cfeature.NaturalEarthFeature(category="cultural",
            name="admin_1_states_provinces_lines",scale="110m", facecolor="none")
Example #13
File: get_data.py Project: rocheseb/tccon
def download_merradap(urllistpath, outpath, subdict={}):
    """
    NOT working: there is an issue with the last line "to_netcdf"; there seems to be a problem with the coordinates getting written to netcdf.

    outpath: full path to the folder where the files will be written
    urllistpath: path to the list of MERRA2 opendap urls
    subdict: dictionary for subsetting, e.g. {'lat':range(first_lat_id,last_lat_id+1),'lon':range(first_lon_id,last_lon_id+1)}
    """

    # fetch earthdata credentials from netrc file
    print('Fetch earthdata credentials from netrc file')
    username, account, password = netrc.netrc().authenticators(
        'urs.earthdata.nasa.gov')

    # read the list of urls
    print('Read the list of urls')
    with open(urllistpath, 'r') as f:
        url_list = f.read().splitlines()

    # setup earthdata session
    print('Setup earthdata session')
    session = setup_session(username, password, check_url=url_list[0])

    # connect to each file
    print('Connect to each file')
    store_list = [
        xarray.backends.PydapDataStore.open(url, session) for url in url_list
    ]

    # open datasets
    print('Open datasets')
    dataset_list = [xarray.open_dataset(store) for store in store_list]

    # subset datasets
    print('Subset datasets')
    if subdict:
        subset_dataset_list = [dataset[subdict] for dataset in dataset_list]
    else:
        subset_dataset_list = dataset_list

    return subset_dataset_list

    # to_netcdf does not work directly on the datasets, so the code below is
    # unreachable; names of files that will be saved in outpath
    filename_list = [i.split('/')[-1] for i in url_list]
    print('Downloading files:')
    for i, dataset in enumerate(subset_dataset_list):
        print(filename_list[i])
        dataset = dataset.fillna(1e15)  # replace nans with a fill value
        dataset.to_netcdf(os.path.join(outpath, filename_list[i]))
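A possible workaround for the to_netcdf failure noted in the docstring (an assumption, not something verified by the original author) is to pull the remote, pydap-backed arrays into memory before writing, replacing the unreachable loop above:

for i, dataset in enumerate(subset_dataset_list):
    # .load() forces the lazy remote arrays into memory before writing
    local_ds = dataset.fillna(1e15).load()
    local_ds.to_netcdf(os.path.join(outpath, filename_list[i]))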
Example #14
 def test_basic_urs_query(self):
     session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                 os.environ.get('PASSWORD_URS'),
                                 check_url=self.url)
     assert session.auth
     dataset = open_url(self.url, session=session)
     expected_data = [
         [[99066.15625, 99066.15625, 99066.15625, 99066.15625, 99066.15625],
          [98868.15625, 98870.15625, 98872.15625, 98874.15625, 98874.15625],
          [98798.15625, 98810.15625, 98820.15625, 98832.15625, 98844.15625],
          [98856.15625, 98828.15625, 98756.15625, 98710.15625, 98776.15625],
          [99070.15625, 99098.15625, 99048.15625, 98984.15625, 99032.15625]]
     ]
     assert ((dataset['SLP'][0, :5, :5] == expected_data).all())
Example #15
def download(file_address, output_dir):
    """Downloads OCO2 level 1 files from the OPenDAP NASA server"""
    print "Initializing session for user", user
    session = setup_session(user, psswrd)

    print "Finding dataset at", file_address
    dataset = open_url(file_address, session=session)
    filename = os.path.split(file_address)[1]

    # file path for where to save the file
    local_path = os.path.join(output_dir, filename)

    print "Creating local file", local_path
    output = h5py.File(local_path, 'w')

    try:
        print "Writing datasets to local file..."

        # to download all keys, use
        # for key in dataset.keys():
        for key in download_keys:
            data_arr = dataset[key][:]

            # problem with h5py arrays: can't store dtype='U8' arrays; see
            #  https://github.com/h5py/h5py/issues/624
            if data_arr.dtype == np.dtype('U8'):
                data_arr = data_arr.astype('S')

            group, dset = split_dataset(key)
            print "making dataset", '/'.join([group, dset])

            try:
                if group in output.keys():
                    output[group].create_dataset(dset, data=data_arr)
                else:
                    output.create_group(group)
                    output[group].create_dataset(dset, data=data_arr)
            except Exception:
                print('Unable to read dataset', key)
                raise

        print "Done writing local file. Saved as", local_path
    except:
        raise
    finally:
        output.close()
Example #16
 def test_basic_urs_query(self):
     assert (os.environ.get('USERNAME_URS'))
     assert (os.environ.get('PASSWORD_URS'))
     session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                 os.environ.get('PASSWORD_URS'),
                                 check_url=self.url)
     # Ensure authentication:
     res = pydap.net.follow_redirect(self.test_url, session=session)
     assert (res.status_code == 200)
     dataset = open_url(self.url, session=session)
     expected_data = [
         [[99066.15625, 99066.15625, 99066.15625, 99066.15625, 99066.15625],
          [98868.15625, 98870.15625, 98872.15625, 98874.15625, 98874.15625],
          [98798.15625, 98810.15625, 98820.15625, 98832.15625, 98844.15625],
          [98856.15625, 98828.15625, 98756.15625, 98710.15625, 98776.15625],
          [99070.15625, 99098.15625, 99048.15625, 98984.15625, 99032.15625]]
     ]
     assert ((dataset['SLP'][0, :5, :5] == expected_data).all())
Example #17
    def test_basic_urs_auth(self):
        """
        Set up PyDAP to use the URS request() function.

        The intent here is to ensure that pydap.net is able to
        open a url if and only if requests is able to
        open the same url.
        """
        session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                    os.environ.get('PASSWORD_URS'),
                                    check_url=self.url)

        test_url = self.url + '.dods?SLP[0:1:0][0:1:10][0:1:10]'
        res = requests.get(test_url, cookies=session.cookies)
        assert (res.status_code == 200)
        res.close()

        res = pydap.net.follow_redirect(test_url, session=session)
        assert (res.status_code == 200)
Example #18
def connect_to_urs(urs_user,
                   urs_pass,
                   lat_chunk=224,
                   lon_chunk=464,
                   time_chunk=480):
    """
    make a connection to the urs server 
    :param urs_user: [str] the urs username
    :param urs_pass: [str] the urs password
    :param lat_chunk: [int] the zarr chunk size for the lat dimension
    :param lon_chunk: [int] the zarr chunk size for the lon dimension
    :param time_chunk: [int] the zarr chunk size for the time dimension
    :return: [xarray dataset] dataset representing server data
    """
    session = setup_session(urs_pass, urs_user, check_url=base_url)

    store = xr.backends.PydapDataStore.open(base_url, session=session)
    chunks = {'lat': lat_chunk, 'lon': lon_chunk, 'time': time_chunk}
    ds = xr.open_dataset(store).chunk(chunks)
    return ds
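A usage sketch, assuming base_url is a module-level GES DISC endpoint defined elsewhere and that the credentials live in hypothetical URS_USER / URS_PASS environment variables:

import os

ds = connect_to_urs(os.environ['URS_USER'], os.environ['URS_PASS'])
print(ds.chunks)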
Example #19
def test_basic_urs_query():
    session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                os.environ.get('PASSWORD_URS'),
                                check_url=url)
    # Ensure authentication:
    res = pydap.net.follow_redirect(test_url, session=session)
    assert(res.status_code == 200)
    dataset = open_url(url, session=session)
    expected_data = [[[99066.15625, 99066.15625, 99066.15625,
                       99066.15625, 99066.15625],
                      [98868.15625, 98870.15625, 98872.15625,
                       98874.15625, 98874.15625],
                      [98798.15625, 98810.15625, 98820.15625,
                       98832.15625, 98844.15625],
                      [98856.15625, 98828.15625, 98756.15625,
                       98710.15625, 98776.15625],
                      [99070.15625, 99098.15625, 99048.15625,
                       98984.15625, 99032.15625]]]
    assert((dataset['SLP'][0, :5, :5] == expected_data).all())
    session.close()
Example #20
    def test_basic_urs_auth(self):
        """
        Set up PyDAP to use the URS request() function.

        The intent here is to ensure that pydap.net is able to
        open a url if and only if requests is able to
        open the same url.
        """
        assert(os.environ.get('USERNAME_URS'))
        assert(os.environ.get('PASSWORD_URS'))
        session = urs.setup_session(os.environ.get('USERNAME_URS'),
                                    os.environ.get('PASSWORD_URS'),
                                    check_url=self.url)

        res = requests.get(self.test_url, cookies=session.cookies)
        assert(res.status_code == 200)
        res.close()

        res = pydap.net.follow_redirect(self.test_url, session=session)
        assert(res.status_code == 200)
Example #21
File: core.py Project: mullenkamp/nasadap
    def session(self, username, password, mission, cache_dir=None):
        """
        Function to initiate a dap session.

        Parameters
        ----------
        username : str
            The username for the login.
        password : str
            The password for the login.
        mission : str
            Mission name.
        cache_dir : str or None
            A path to cache the netcdf files for future reading. If None, the current working directory is used.

        Returns
        -------
        Nasa object
        """
        if mission in mission_product_dict:
            self.mission_dict = mission_product_dict[mission]
        else:
            raise ValueError('mission should be one of: ' +
                             ', '.join(mission_product_dict.keys()))

        self.mission = mission

        if isinstance(cache_dir, str):
            if not os.path.exists(cache_dir):
                os.makedirs(cache_dir)
            self.cache_dir = cache_dir
        else:
            self.cache_dir = os.getcwd()

        self.session = setup_session(username,
                                     password,
                                     check_url='/'.join([
                                         self.mission_dict['base_url'],
                                         'opendap',
                                         self.mission_dict['process_level']
                                     ]))
Example #22
File: _ts.py Project: rileyhales/grids
 def _open_data(self, path):
     if self.engine == 'xarray':
         return xr.open_dataset(path, backend_kwargs=self.xr_kwargs)
     elif self.engine == 'opendap':
         try:
             if self.session:
                 return xr.open_dataset(
                     xr.backends.PydapDataStore.open(path,
                                                     session=self.session))
             else:
                 return xr.open_dataset(path)
         except ConnectionRefusedError:
             raise
         except Exception:
             print('Unexpected Error')
             raise
     elif self.engine == 'auth-opendap':
         return xr.open_dataset(
             xr.backends.PydapDataStore.open(path,
                                             session=setup_session(
                                                 self.user,
                                                 self.pswd,
                                                 check_url=path)))
     elif self.engine == 'netcdf4':
         return nc.Dataset(path, 'r')
     elif self.engine == 'cfgrib':
         return xr.open_dataset(path,
                                engine='cfgrib',
                                backend_kwargs=self.xr_kwargs)
     elif self.engine == 'pygrib':
         a = pygrib.open(path)
         return a.read()
     elif self.engine == 'h5py':
         return h5py.File(path, 'r')
     elif self.engine == 'rasterio':
         return xr.open_rasterio(path)
     else:
         raise ValueError(
             f'Unable to open file, unsupported engine: {self.engine}')
Example #23
 def getInsolation(self):
     MERRA2_ulLat = 90.0
     MERRA2_ulLon = -180.0
     MERRA2LatRes = 0.5
     MERRA2LonRes = 0.625
     inProj4 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
     #========get MERRA2 Insolation data at overpass time====================== 
     inRes = [MERRA2LonRes,MERRA2LatRes]
     inUL = [MERRA2_ulLon,MERRA2_ulLat]
 
     if self.year < 1992:
         fileType = 100
     elif self.year > 1991 and self.year < 2001:
         fileType = 200
     elif self.year > 2000 and self.year < 2011:
         fileType = 300
     else:
         fileType = 400
 
     opendap_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/'
     product = 'M2T1NXRAD.5.12.4'
     
     filename = 'MERRA2_%d.tavg1_2d_rad_Nx.%04d%02d%02d.nc4' % (fileType,self.year,self.month,self.day)
     fullUrl =os.path.join(opendap_url,product,'%04d'% self.year,'%02d'% self.month,filename)
 
     dailyPath = os.path.join(self.metBase,'%s' % self.scene)
     if not os.path.exists(dailyPath):
         os.makedirs(dailyPath)
     #====get overpass hour insolation=========================================
     outFN = os.path.join(dailyPath,'%s_Insol1Sub.tiff' % self.sceneID) 
     print('processing : %s...' % outFN)
     session = urs.setup_session(username = self.earthLoginUser, 
                 password = self.earthLoginPass,
                 check_url=fullUrl)
     d = open_url(fullUrl,session=session)
     Insol = d.SWGDNCLR
     if not os.path.exists(outFN):
         # wv_mmr = 1.e-6 * wv_ppmv_layer * (Rair / Rwater)
         # wv_mmr in kg/kg, Rair = 287.0, Rwater = 461.5
         dataset = np.squeeze(Insol[self.hr, :, :, :]) * 1.
         
         outfile = os.path.join(dailyPath,'%s_%s.tiff' % (self.sceneID,'Insol1'))
         subsetFile = outfile[:-5]+'Sub.tiff'
         outFormat = gdal.GDT_Float32
         writeArray2Tiff(dataset,inRes,inUL,inProj4,outfile,outFormat)
         optionList = ['-overwrite', '-s_srs', '%s' % inProj4,'-t_srs','%s' % self.proj4,\
         '-te', '%f' % self.ulx, '%f' % self.lry,'%f' % self.lrx,'%f' % self.uly,'-r', 'bilinear',\
         '-tr', '%f' % self.delx, '%f' % self.dely,'-multi','-of','GTiff','%s' % outfile, '%s' % subsetFile]
         warp(optionList)
         
             #====get daily insolation=========================================
     outFN = os.path.join(dailyPath,'%s_Insol24Sub.tiff' % self.sceneID)
     if not os.path.exists(outFN):
         dataset2 = np.flipud(np.sum(np.squeeze(Insol[:,:,:]),axis=0))
         outfile = os.path.join(dailyPath,'%s_%s.tiff' % (self.sceneID,'Insol24'))
         subsetFile = outfile[:-5]+'Sub.tiff'
         outFormat = gdal.GDT_Float32
         writeArray2Tiff(dataset2,inRes,inUL,inProj4,outfile,outFormat)
         optionList = ['-overwrite', '-s_srs', '%s' % inProj4,'-t_srs','%s' % self.proj4,\
         '-te', '%f' % self.ulx, '%f' % self.lry,'%f' % self.lrx,'%f' % self.uly,'-r', 'bilinear',\
         '-tr', '%f' % self.delx, '%f' % self.dely,'-multi','-of','GTiff','%s' % outfile, '%s' % subsetFile]
         warp(optionList)
Example #24
File: GPM.py Project: mortezaomidi/uganda
 def start_session(self, url):
     from pydap.cas.urs import setup_session
     self.session = setup_session(self.username,
                                  self.password,
                                  check_url=url)
     return self.session
Example #25
from collections import OrderedDict
from functools import partial

import requests
from six.moves.urllib.parse import urlparse
from six.moves import range, input
from lxml import etree, html
from ipywidgets import widgets, Layout
from IPython.display import display, Javascript

import os
import getpass

PYCURL = True

if not PYCURL:
    from pydap.cas.urs import setup_session
    session = setup_session(
        os.environ.get('NLDAS_USERNAME') or input('NLDAS Username: '),
        os.environ.get('NLDAS_PASSWORD') or getpass.getpass('Password: '))
else:
    import pycurl
    c = pycurl.Curl()
    c.setopt(c.COOKIEJAR, '/tmp/cookie.jar')
    c.setopt(c.NETRC, True)
Example #27
from pydap.client import open_url
from pydap.cas.urs import setup_session
#You may choose your own URL to connect to NASA Hyrax services
url = 'https://airsl2.gesdisc.eosdis.nasa.gov/opendap/Aqua_AIRS_Level2/AIRG2SSD.006/2002/243/AIRS.2002.08.31.L2G.Precip_Est.v1.0.3.0.G13208041617.hdf'
#You may also keep the credentials in environment variables exported from your .cshrc or .bashrc.
session = setup_session('your earthdata URS username',
                        'your password',
                        check_url=url)
dataset = open_url(url, session=session)
print(dataset)
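As the comment above suggests, the credentials can come from environment variables exported in your shell startup files instead of being typed into the script; a minimal sketch with illustrative variable names:

import os
from pydap.cas.urs import setup_session

session = setup_session(os.environ['EARTHDATA_USER'],
                        os.environ['EARTHDATA_PASS'],
                        check_url=url)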
Example #28
def load_merra(files, set_flight):

    if not files.met_file:

        #Load libraries needed to access server
        import urllib.request as urllib2
        import http.cookiejar
        from pydap.client import open_url
        from pydap.cas.urs import setup_session

        #Must have correct authentication
        #NASA Earthdata authentication should be stored in the ~/.netrc file
        import netrc
        authData = netrc.netrc().hosts['urs.earthdata.nasa.gov']
        myLogin = authData[0]
        myPassword = authData[2]

        #Query MERRA data
        print('Querying MERRA data........')

        #date_str = str(set_flight.date_flight)[0:4] + str(set_flight.date_flight)[4:6] + str(set_flight.date_flight)[6:8]
        date_str = str(int(str(set_flight.date_flight)[0:4]) - 1) + str(
            set_flight.date_flight)[4:6] + str(
                set_flight.date_flight)[6:8]  #dcusworth hack
        #Query OpenDAP
        base_url = 'https://goldsmr5.gesdisc.eosdis.nasa.gov:443/opendap/MERRA2/M2I3NVASM.5.12.4/%YR%/%MON%/MERRA2_400.inst3_3d_asm_Nv.%DATE%.nc4'
        #iurl = base_url.replace('%YR%', str(set_flight.date_flight)[0:4])
        iurl = base_url.replace('%YR%',
                                str(int(str(set_flight.date_flight)[0:4]) -
                                    1))  #dcusworth hack
        iurl = iurl.replace('%MON%', str(set_flight.date_flight)[4:6])
        iurl = iurl.replace('%DATE%', date_str)
        session = setup_session(myLogin, myPassword, check_url=iurl)
        dataset = open_url(iurl, session=session)

        #Select metadata
        dlon = np.array(dataset['lon'][:])
        dlat = np.array(dataset['lat'][:])
        dtime = np.array(dataset['time'][:])

        #Select right dimensions
        sel_time = np.argmin(np.abs(set_flight.hour_utc - (dtime / 60)))
        sel_lon = np.argmin(np.abs(dlon - set_flight.longitude))
        sel_lat = np.argmin(np.abs(dlat - set_flight.latitude))

        #Get needed variables
        print('Downloading surface pressure........')
        PS = np.array(dataset['PS'][:][sel_time, sel_lat, sel_lon])
        print('Downloading pressure levels........')
        PL = np.array(dataset['PL'][:][sel_time, :, sel_lat, sel_lon])
        print('Downloading specific humidity........')
        QV = np.array(dataset['QV'][:][sel_time, :, sel_lat, sel_lon])
        print('Downloading temperature profile........')
        T = np.array(dataset['T'][:][sel_time, :, sel_lat, sel_lon])
        print('Downloading pressure thickness........')
        DELP = np.array(dataset['DELP'][:][sel_time, :, sel_lat, sel_lon])
        print('Finished querying opendap server')

        #Save output
        met_dict = {
            'PS': PS / 100,
            'PL': PL / 100,
            'QV': QV,
            'T': T,
            'DELP': DELP / 100
        }
        pickle.dump(met_dict,
                    open('met/merra_met' + files.rad_name + '.p', 'wb'))

    else:
        metname = files.met_file
        met_dict = pickle.load(open(metname, 'rb'))
        PS = met_dict['PS']
        PL = met_dict['PL']
        QV = met_dict['QV']
        T = met_dict['T']
        DELP = met_dict['DELP']

    return PS, PL, QV, T, DELP
Example #29
    def prepare_profile_data(self):

        ul = [self.ulLon - 1.5, self.ulLat + 1.5]
        lr = [self.lrLon + 1.5, self.lrLat - 1.5]
        # The data is lat/lon and upside down so [0,0] = [-90.0,-180.0]
        max_x = int((lr[0] - (-180)) / 0.625)
        min_x = int((ul[0] - (-180)) / 0.625)
        min_y = int((lr[1] - (-90)) / 0.5)
        max_y = int((ul[1] - (-90)) / 0.5)

        if self.year < 1992:
            file_type = 100
        elif 1991 < self.year < 2001:
            file_type = 200
        elif 2000 < self.year < 2011:
            file_type = 300
        else:
            file_type = 400

        # Instantaneous Two-Dimensional Collections
        # inst1_2d_asm_Nx (M2I1NXASM): Single-Level Diagnostics
        # =============================================================================
        # 'https://goldsmr4.sci.gsfc.nasa.gov/opendap/hyrax/MERRA2/'
        opendap_url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/'
        product = 'M2I1NXASM.5.12.4'
        filename = 'MERRA2_%d.inst1_2d_asm_Nx.%04d%02d%02d.nc4' % (
            file_type, self.year, self.month, self.day)
        full_url = os.path.join(opendap_url, product, '%04d' % self.year,
                                '%02d' % self.month, filename)
        # d=open_dods(full_url+'?PS[1:1:23][0:1:360][0:1:575]')
        session = urs.setup_session(username=self.earthLoginUser,
                                    password=self.earthLoginPass,
                                    check_url=full_url)
        d = client.open_url(full_url, session=session)
        #    d.keys()
        # surface presure [Pa]
        surface_pressure = d.PS
        sp = np.squeeze(surface_pressure[self.hr, min_y:max_y, min_x:max_x] /
                        100)  # Pa to kPa
        sprshp = np.reshape(sp, sp.shape[0] * sp.shape[1])

        # 2m air Temp (K)
        temp_2m = d.T2M
        # temp_2m=open_dods(full_url+'?T2M[1:1:23][0:1:360][0:1:575]')
        t2 = np.squeeze(temp_2m[self.hr, min_y:max_y, min_x:max_x])
        t2rshp = np.reshape(t2, t2.shape[0] * t2.shape[1])

        # 2m specific humidity [kg kg -1] -> 2 m water vapor [ppmv]
        spc_hum = d.QV2M
        # spc_hum=open_dods(full_url+'?QV2M[1:1:23][0:1:360][0:1:575]')
        # wv_mmr = 1.e-6 * wv_ppmv_layer * (Rair / Rwater)
        # wv_mmr in kg/kg, Rair = 287.0, Rwater = 461.5
        q = np.squeeze(spc_hum[self.hr, min_y:max_y, min_x:max_x])
        q2 = q / (1e-6 * (287.0 / 461.5))
        q2_reshape = np.reshape(q2, q2.shape[0] * q2.shape[1])

        # skin temp [K]
        sktIn = d.TS
        # sktIn=open_dods(full_url+'?TS[1:1:23][0:1:360][0:1:575]')
        skt = np.squeeze(sktIn[self.hr, min_y:max_y, min_x:max_x])
        sktrshp = np.reshape(skt, skt.shape[0] * skt.shape[1])

        # U10M 10-meter_eastward_wind [m s-1]
        u10In = d.U10M
        # u10In=open_dods(full_url+'?U10[1:1:23][0:1:360][0:1:575]')
        u10 = np.squeeze(u10In[self.hr, min_y:max_y, min_x:max_x])
        u10rshp = np.reshape(u10, u10.shape[0] * u10.shape[1])

        # V10M 10-meter_northward_wind [m s-1]
        v10In = d.V10M
        # v10In=open_dods(full_url+'?V10M[1:1:23][0:1:360][0:1:575]')
        v10 = np.squeeze(v10In[self.hr, min_y:max_y, min_x:max_x])
        v10rshp = np.reshape(v10, v10.shape[0] * v10.shape[1])

        opendap_url = 'https://goldsmr5.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/'
        product = 'M2I3NVASM.5.12.4'
        filename = 'MERRA2_%d.inst3_3d_asm_Nv.%04d%02d%02d.nc4' % (
            file_type, self.year, self.month, self.day)
        full_url = os.path.join(opendap_url, product, '%04d' % self.year,
                                '%02d' % self.month, filename)
        session = urs.setup_session(username=self.earthLoginUser,
                                    password=self.earthLoginPass,
                                    check_url=full_url)
        d = client.open_url(full_url, session=session)
        hr = int(np.round(self.hr / 3.))  # convert from 1 hr to 3 hr dataset

        # layers specific humidity [kg kg -1] -> 2 m water vapor [ppmv]
        qvIn = d.QV
        # qvIn=open_dods(full_url+'?QV[0:1:7][0,:1:71][0:1:360][0:1:575]')
        # wv_mmr = 1.e-6 * wv_ppmv_layer * (Rair / Rwater)
        # wv_mmr in kg/kg, Rair = 287.0, Rwater = 461.5
        qv = np.squeeze(qvIn[hr, :, min_y:max_y, min_x:max_x])
        qv = qv / (1e-6 * (287.0 / 461.5))
        qvrshp = np.reshape(qv, [qv.shape[0], qv.shape[1] * qv.shape[2]]).T

        # layers air temperature [K]
        tIn = d.T
        # tIn=open_dods(full_url+'?T[0:1:7][0,:1:71][0:1:360][0:1:575]')
        # wv_mmr = 1.e-6 * wv_ppmv_layer * (Rair / Rwater)
        # wv_mmr in kg/kg, Rair = 287.0, Rwater = 461.5
        t = np.squeeze(tIn[hr, :, min_y:max_y, min_x:max_x])
        trshp = np.reshape(t, [t.shape[0], t.shape[1] * t.shape[2]]).T

        # mid_level_pressure [Pa]

        plIn = d.PL
        # plIn=open_dods(full_url+'?PL[0:1:7][0,:1:71][0:1:360][0:1:575]')
        pl = np.squeeze(plIn[hr, :, min_y:max_y, min_x:max_x] /
                        100)  # Pa to kPa
        plrshp = np.reshape(pl, [pl.shape[0], pl.shape[1] * pl.shape[2]]).T
        # qrshp =np.reshape(q,q.shape[0]*q.shape[1])

        LAT = d.lat
        LON = d.lon
        lats = LAT[:]
        lons = LON[:]
        lat = np.tile(lats, (len(lons), 1)).T
        latIn = np.squeeze(lat[min_y:max_y, min_x:max_x])
        latrshp = np.reshape(latIn, latIn.shape[0] * latIn.shape[1])
        lon = np.tile(lons, (len(lats), 1))
        lonIn = np.squeeze(lon[min_y:max_y, min_x:max_x])
        lonrshp = np.reshape(lonIn, lonIn.shape[0] * lonIn.shape[1])
        el = np.repeat(0.0, v10.shape[0] * v10.shape[1])  # NEED DEM
        # check surface pressure

        sunzen = np.repeat(self.solZen, v10.shape[0] * v10.shape[1])
        sunazi = np.repeat(self.solAzi, v10.shape[0] * v10.shape[1])
        fetch = np.repeat(100000, v10.shape[0] * v10.shape[1])
        satzen = np.repeat(0.0, v10.shape[0] * v10.shape[1])
        satazi = np.repeat(0.0, v10.shape[0] * v10.shape[1])

        # Units for gas profiles
        gas_units = 2  # ppmv over moist air

        # datetimes[6][nprofiles]: yy, mm, dd, hh, mm, ss
        datetimes = np.tile([self.year, self.month, self.day, hr, 0, 0],
                            (v10.shape[0] * v10.shape[1], 1))

        # angles[4][nprofiles]: satzen, satazi, sunzen, sunazi
        # get from landsat MTL
        angles = np.vstack((satzen, satazi, sunzen, sunazi)).T

        # surftype[2][nprofiles]: surftype, watertype
        surftype = np.zeros([angles.shape[0], 2])  # NEED LAND/WATER mask

        # surfgeom[3][nprofiles]: lat, lon, elev
        surfgeom = np.vstack((latrshp, lonrshp, el)).T

        # s2m[6][nprofiles]: 2m p, 2m t, 2m q, 10m wind u, v, wind fetch
        s2m = np.vstack(
            (sprshp, t2rshp, q2_reshape, u10rshp, v10rshp, fetch)).T

        # skin[9][nprofiles]: skin T, salinity, snow_frac, foam_frac, fastem_coefsx5
        sal = np.repeat(35.0, v10.shape[0] * v10.shape[1])
        snow_frac = np.repeat(0.0, v10.shape[0] * v10.shape[1])
        foam_frac = np.repeat(0.0, v10.shape[0] * v10.shape[1])
        fastem_coef1 = np.repeat(3.0, v10.shape[0] * v10.shape[1])
        fastem_coef2 = np.repeat(5.0, v10.shape[0] * v10.shape[1])
        fastem_coef3 = np.repeat(15.0, v10.shape[0] * v10.shape[1])
        fastem_coef4 = np.repeat(0.1, v10.shape[0] * v10.shape[1])
        fastem_coef5 = np.repeat(0.3, v10.shape[0] * v10.shape[1])

        skin = np.vstack(
            (sktrshp, sal, snow_frac, foam_frac, fastem_coef1, fastem_coef2,
             fastem_coef3, fastem_coef4, fastem_coef5)).T

        outDict = {'P': plrshp, 'T': trshp, 'Q': qvrshp, 'Angles': angles, 'S2m': s2m, \
                   'Skin': skin, 'SurfType': surftype, 'SurfGeom': surfgeom, 'Datetimes': datetimes, \
                   'origShape': t2.shape}

        return outDict
Example #30
import os
import glob
import subprocess
import landsatTools
import pyDisALEXI_utils as dA
import h5py
import urllib
from pydap.client import open_url
from pydap.client import open_dods
from landsat.downloader import Downloader
from landsat.search import Search
import datetime
import shutil
import tarfile
import pycurl
from pydap.cas.urs import setup_session
session = setup_session('******', '******')  # Earthdata credentials masked

base = os.path.join(os.sep, 'data', 'smcd4', 'mschull')
base = os.path.join(os.sep, 'Users', 'mschull', 'umdGD')


def setFolders(base):
    dataBase = os.path.join(base, 'pyDisALEXI', 'data')
    landsatDataBase = os.path.join(dataBase, 'Landsat-8')
    lstBase = os.path.join(landsatDataBase, 'LST')
    asterDataBase = os.path.join(dataBase, 'ASTER')
    if not os.path.exists(lstBase):
        os.makedirs(lstBase)
    landsatDN = os.path.join(landsatDataBase, 'DN')
    if not os.path.exists(landsatDN):
        os.makedirs(landsatDN)
Example #31
from netrc import netrc
from pydap.cas.urs import setup_session


def start_session(url):
    """Initiates a session"""
    user, _, pswd = netrc().authenticators('urs.earthdata.nasa.gov')
    session = setup_session(user, pswd, check_url=url)
    return session
Example #32
import os

def get_session():
    u, p = os.environ['NLDAS_USER'], os.environ['NLDAS_PASS']
    return setup_session(u, p)
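Unlike the other examples, get_session omits the check_url keyword, so the credentials are only exercised on the first data request. A usage sketch combining it with xarray; url is assumed to point at an NLDAS OPeNDAP endpoint the account is authorized for:

import xarray as xr

session = get_session()
store = xr.backends.PydapDataStore.open(url, session=session)  # url is assumed
ds = xr.open_dataset(store)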
Example #33
File: mod_maker.py Project: rocheseb/tccon
def read_merradap(username, password, mode, site_lon_180, gravity_at_lat, date,
                  end_date, time_step, varlist, surf_varlist):
    """
	Read MERRA2 data via opendap.

	This has to connect to the daily netcdf files, and then concatenate the subsetted datasets.

	This is EXTREMELY slow, should probably make that use separate to generate local files, and then use to files in mod_maker
	"""
    DATA = {}
    SURF_DATA = {}

    if '42' in mode:
        letter = 'P'
    elif '72' in mode:
        letter = 'V'
        varlist += ['PL']

    old_UTC_date = ''
    urllist = []
    surface_urllist = []
    print('\n\t-Making lists of URLs')
    while date < end_date:
        UTC_date = date + timedelta(
            hours=-site_lon_180 / 15.0
        )  # merra times are in UTC, so the date may differ from the local date; make sure to use the UTC date to query the file
        if (UTC_date.strftime('%Y%m%d') != old_UTC_date):
            print('\t\t' + UTC_date.strftime('%Y-%m-%d'))
            urllist += [
                'https://goldsmr5.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/M2I3N{}ASM.5.12.4/{:0>4}/{:0>2}/MERRA2_400.inst3_3d_asm_N{}.{:0>4}{:0>2}{:0>2}.nc4'
                .format(letter, UTC_date.year, UTC_date.month, letter.lower(),
                        UTC_date.year, UTC_date.month, UTC_date.day)
            ]
            surface_urllist += [
                'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/hyrax/MERRA2/M2I1NXASM.5.12.4/{:0>4}/{:0>2}/MERRA2_400.inst1_2d_asm_Nx.{:0>4}{:0>2}{:0>2}.nc4'
                .format(UTC_date.year, UTC_date.month, UTC_date.year,
                        UTC_date.month, UTC_date.day)
            ]
            if old_UTC_date == '':
                session = setup_session(
                    username, password, check_url=urllist[0]
                )  # just need to setup the authentication session once
        old_UTC_date = UTC_date.strftime('%Y%m%d')
        date = date + time_step

    # multi-level data
    print('\nNow doing multi-level data')
    print('\t-Connecting to datasets ...')
    store_list = [
        xarray.backends.PydapDataStore.open(url, session) for url in urllist
    ]
    dataset_list = [xarray.open_dataset(store) for store in store_list]
    print('\t-Datasets opened')
    min_lat_ID, max_lat_ID, min_lon_ID, max_lon_ID = querry_indices(
        dataset_list[0], site_lat, site_lon_180, 2.5,
        2.5)  # just need to get the lat/lon box once
    subset_dataset_list = [
        dataset[{
            'lat': range(min_lat_ID, max_lat_ID + 1),
            'lon': range(min_lon_ID, max_lon_ID + 1)
        }] for dataset in dataset_list
    ]
    print('\t-Datasets subsetted')
    print('\t-Merging datasets (time consuming)')
    merged_dataset = xarray.concat(subset_dataset_list, 'time')
    merged_dataset = merged_dataset.fillna(1e15)

    # single-level data
    print('\nNow doing single-level data')
    print('\t-Connecting to datasets ...')
    surface_store_list = [
        xarray.backends.PydapDataStore.open(url, session)
        for url in surface_urllist
    ]
    surface_dataset_list = [
        xarray.open_dataset(store) for store in surface_store_list
    ]
    print('\t-Datasets opened')
    subset_surface_dataset_list = [
        dataset[{
            'lat': range(min_lat_ID, max_lat_ID + 1),
            'lon': range(min_lon_ID, max_lon_ID + 1)
        }] for dataset in surface_dataset_list
    ]
    print('\t-Datasets subsetted')
    print('\t-Merging datasets (time consuming)')
    merged_surface_dataset = xarray.concat(subset_surface_dataset_list,
                                           'time')
    merged_surface_dataset = merged_surface_dataset.fillna(1e15)

    for varname in varlist:
        DATA[varname] = merged_dataset[varname].data
        DATA['add_offset_' + varname] = 0.0
        DATA['scale_factor_' + varname] = 1.0
    for varname in surf_varlist:
        SURF_DATA[varname] = merged_surface_dataset[varname].data
        SURF_DATA['add_offset_' + varname] = 0.0
        SURF_DATA['scale_factor_' + varname] = 1.0

    for varname in ['time', 'lat', 'lon']:
        DATA[varname] = merged_dataset[varname].data
        SURF_DATA[varname] = merged_surface_dataset[varname].data
    DATA['lev'] = merged_dataset['lev'].data

    DATA['PHIS'] = DATA['PHIS'] / gravity_at_lat  # convert from m2 s-2 to m

    delta_time = [(i - DATA['time'][0]).astype('timedelta64[h]') /
                  np.timedelta64(1, 'h')
                  for i in DATA['time']]  # hours since base time
    surf_delta_time = [(i - SURF_DATA['time'][0]).astype('timedelta64[h]') /
                       np.timedelta64(1, 'h')
                       for i in SURF_DATA['time']]  # hours since base time

    DATA['julday0'] = Time(str(DATA['time'][0]), format="isot").jd
    SURF_DATA['julday0'] = Time(str(SURF_DATA['time'][0]), format="isot").jd

    DATA['time'] = delta_time
    SURF_DATA['time'] = surf_delta_time

    # get longitudes as 0 -> 360 instead of -180 -> 180, needed for trilinear_interp
    for i, elem in enumerate(DATA['lon']):
        if elem < 0:
            DATA['lon'][i] = elem + 360.0
    for i, elem in enumerate(SURF_DATA['lon']):
        if elem < 0:
            SURF_DATA['lon'][i] = elem + 360.0

    return DATA, SURF_DATA
Example #34
from datetime import date, timedelta

import xarray as xa

# `url` is assumed to be the dataset URL prefix defined earlier in the notebook
files = []
d = date.fromisoformat('1984-11-01')
while True:

    files.append(f'{url}{str(d.day).zfill(2)}.nc4')
    d = d + timedelta(days=1)

    if d.month == 12:
        break

files

import getpass

username = input("URS Username: ")
password = getpass.getpass("URS Password: ")

from pydap.client import open_url
from pydap.cas.urs import setup_session

ds_url = files[0]
session = setup_session(username, password, check_url=ds_url)
gesdisc_data = xa.open_mfdataset(files,
                                 engine='pydap',
                                 parallel=True,
                                 combine='by_coords',
                                 backend_kwargs={'session': session})
gesdisc_data
hflux = gesdisc_data.HFLUX.sel(lat=slice(-53.99, -14), lon=slice(140, 170))
hflux_mean = hflux.mean(dim=['lat', 'lon'])
hflux_mean.plot.line()