Example #1
        furi, upd = geturi.download(self.cache, True)
        if upd:
            with ZipFile(furi.url, 'r') as zp:
                zp.extractall(self.cache)
            self.updateInvent(False)


# Factory method to dynamically create classes
def GSHHGClassFactory(clsName):
    splt = clsName.split("_")
    if len(splt) == 3:
        rgx = splt[1]
    else:
        rgx = None
    path = splt[0] + "_shp/" + splt[-1]
    return type(clsName, (GSHHGBase, ), {"path": path, "layerregex": rgx})


def getGSHHGDsets(conf):
    """Automatically create all classes contained within the GSHHG database"""
    out = []
    for nm in ["GSHHS", "WDBII_river", "WDBII_border"]:
        for res in ['c', 'l', 'i', 'h', 'f']:
            clsName = nm + "_" + res
            out.append(GSHHGClassFactory(clsName))
    return out


geoslurpCatalogue.addDatasetFactory(getGSHHGDsets)
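
All of these factories lean on Python's three-argument type(name, bases, namespace) call, which builds a class object at runtime. A minimal, self-contained sketch of the pattern (GSHHGBase is stubbed out here so the snippet runs on its own):

# stand-in for the real geoslurp base class, for illustration only
class GSHHGBase:
    path = None
    layerregex = None

def demoFactory(clsName):
    # same splitting logic as GSHHGClassFactory above
    splt = clsName.split("_")
    rgx = splt[1] if len(splt) == 3 else None
    return type(clsName, (GSHHGBase,),
                {"path": splt[0] + "_shp/" + splt[-1], "layerregex": rgx})

cls = demoFactory("WDBII_river_f")
print(cls.__name__, cls.path, cls.layerregex)
# -> WDBII_river_f WDBII_shp/f river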
Example #2
                    mv=[]
                    tv=[]
                    cnmv=[]
                    sigcnmv=[]

            self.updateInvent()
        
def GRACEGeocFac(conf):
    out=[]
    for center in ["CSR", "GFZ", "JPL"]:
        out.append(type("geocenter_"+center+"RL06_TN13",(geocenter_GRCRL06_TN13,),{"fout":"TN-13_GEOC_"+center+"_RL06.txt"}))
    return out



geoslurpCatalogue.addDatasetFactory(GRACEGeocFac)


class geocenter_RIES_CFCM(DataSet):
    fout30="GCN_L1_L2_30d_CF-CM.txt"
    fout60="GCN_L1_L2_60d_CF-CM.txt"
    #note: the mm-to-m conversion (factor 1e3) is also folded into this constant
    sqrt3timesRE=11047256.23312e3
    scheme=scheme
    table=type("geocenter_ries_cfcmTable", (GravitySHTBase,), {})
    def __init__(self,dbconn):
        super().__init__(dbconn)
        # super().__init__(direc=direc,uri='https://wobbly.earth/data/Geocenter_dec2017.tgz',order=['c10','c11','s11'],lastupdate=datetime(2018,10,16))
    
    def pull(self):
        """Pulls the geocenter ascii files in the cache"""
Example #3
def getRGIDsets(conf):
    """Automatically create all classes contained within the GSHHG database"""
    out=[]
    regionnames=['01_rgi60_Alaska', '02_rgi60_WesternCanadaUS', '03_rgi60_ArcticCanadaNorth',
               '04_rgi60_ArcticCanadaSouth', '05_rgi60_GreenlandPeriphery', '06_rgi60_Iceland',
               '07_rgi60_Svalbard', '08_rgi60_Scandinavia', '09_rgi60_RussianArctic',
               '10_rgi60_NorthAsia', '11_rgi60_CentralEurope', '12_rgi60_CaucasusMiddleEast',
               '13_rgi60_CentralAsia', '14_rgi60_SouthAsiaWest', '15_rgi60_SouthAsiaEast',
               '16_rgi60_LowLatitudes', '17_rgi60_SouthernAndes', '18_rgi60_NewZealand',
               '19_rgi60_AntarcticSubantarctic']

    
    out.append(RGISHPClassFactory("00_rgi60_O1Regions.shp"))
    out.append(RGISHPClassFactory("00_rgi60_O2Regions.shp"))
    out.append(RGICSVClassFactory("00_rgi60_summary.csv", 1))
    out.append(RGICSVClassFactory("00_rgi60_links.csv", 2))

    for nm in regionnames:
        out.append(RGISHPClassFactory(nm+".shp"))


    #also add important csv files
    for nm in regionnames:
        out.append(RGICSVClassFactory(nm+"_hypso.csv"))

    return out



geoslurpCatalogue.addDatasetFactory(getRGIDsets)
Example #4
    # fragment resumes inside SWOTSIMClassFactory: return the dynamically built class
    return type(
        splt, (SWOTSIMBase, ), {
            "ogrfile": ogrfile,
            "zipf": zipfileName,
            "swapxy": True,
            "gtype": "GEOMETRY"
        })


def getSWOTSIMDsets(conf):
    """Automatically create all classes for the swot simulation tracks"""
    out = []
    swotversions = [
        ('swot_calval_orbit_june2015-v2_nadir.shp', 'shp_calval_nadir.zip'),
        ('swot_calval_orbit_june2015-v2_swath.shp', 'sph_calval_swath.zip'),
        ('swot_science_orbit_sept2015-v2_10s_nadir.shp',
         'swot_science_orbit_sept2015-v2_10s_nadir.zip'),
        ('swot_science_orbit_sept2015-v2_10s_swath.shp',
         'swot_science_orbit_sept2015-v2_10s_swath.zip'),
        ('swot_science_hr_2.0s_4.0s_June2019-v3_nadir.shp',
         'swot_science_hr_2.0s_4.0s_June2019-v3_nadir.zip'),
        ('swot_science_hr_2.0s_4.0s_June2019-v3_swath.shp',
         'swot_science_hr_2.0s_4.0s_June2019-v3_swath.zip')
    ]
    for ogr, zipf in swotversions:
        out.append(SWOTSIMClassFactory(ogr, zipf))

    return out


geoslurpCatalogue.addDatasetFactory(getSWOTSIMDsets)
Example #5
# Factory method to dynamically create classes
def GRDCGISClassFactory(fileName):
    splt = fileName.split(".")
    return type(splt[0], (grdc_gis_base, ), {
        "filename": fileName,
        "gtype": "GEOMETRY",
        "swapxy": True
    })


def getGRDCDsets(conf):
    """Automatically create all classes contained within the GRDC tables"""
    GISshapes = [
        'GRDC_405_basins_from_mouth.shp', 'GRDC_687_rivers.shp',
        'GRDC_687_rivers_class.shp', 'GRDC_lakes_join_rivers.shp',
        'grdc_basins_smoothed.shp'
    ]

    out = [GRDCGISClassFactory(name) for name in GISshapes]

    #also add the monthly and daily datasets
    for name in ["grdc_monthly", "grdc_daily"]:
        out.append(GRDCClassFactory(name))

    return out


geoslurpCatalogue.addDatasetFactory(getGRDCDsets)
geoslurpCatalogue.addDataset(grdc_catalogue)
Example #6
    cachedir = conf.getCacheDir("githubcache")
    try:
        cred = conf.authCred("github", ['oauthtoken'])
        token = cred.oauthtoken
    except Exception:
        # no GitHub token configured; fall back to anonymous access
        token = None
    catalog = cachedGithubCatalogue(reponame,
                                    cachedir=cachedir,
                                    commitsha=commitsha,
                                    gfilter=ghfilter({
                                        "type": "blob",
                                        "path": "\.geojson"
                                    }),
                                    gfollowfilter=ghfilter({
                                        "type": "tree",
                                        "path": "envelopes"
                                    }),
                                    ghtoken=token)
    out = []

    #create a list of datasets
    for entry in catalog["datasets"]:
        clsname = os.path.basename(entry["path"]).split(".")[0]
        out.append(GeoshapeClassFactory(clsname, entry))

    return out


geoslurpCatalogue.addDatasetFactory(getGeoshapesDsets)
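
The "path" values handed to ghfilter are evidently regular expressions (hence the raw string r"\.geojson"): the escaped dot matches a literal '.' rather than any character. ghfilter itself is geoslurp-internal, but the pattern semantics can be checked with the standard re module (the file names below are made up for illustration):

import re
print(bool(re.search(r"\.geojson", "envelopes/basin.geojson")))  # True
print(bool(re.search(r"\.geojson", "readme_geojson_notes.md")))  # False: dot is literal
print(bool(re.search(".geojson", "readme_geojson_notes.md")))    # True: bare dot matches '_'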
Example #7
        else:
            files=[UriFile(file) for file in findFiles(self._dbinvent.datadir,'.*gfc.gz',since=self._dbinvent.lastupdate)]

        newfiles=self.retainnewUris(files)
        #loop over files
        for uri in newfiles:
            slurplogger().info("extracting meta info from %s"%(uri.url))
            meta=icgemMetaExtractor(uri)
            meta=enhanceMeta(meta)
            self.addEntry(meta)

        self.updateInvent()


def TUGRAZGRACEL2ClassFactory(release, subdirs):
    """Dynamically construct GRACE Level 2 dataset classes for TU Graz"""
    base, gtype = subdirs.split('/')
    clsName = "_".join([release, gtype])
    table = type(clsName.replace('-', "_") + "Table", (GravitySHTBase,), {})
    return type(clsName, (TUGRAZGRACEL2Base,), {"release": release, "table": table, "subdirs": subdirs})


# set up the GRACE datasets
def TUGRAZGRACEDsets(conf):
    out = []
    release = 'ITSG-Grace2018'
    for subdirs in ["daily_kalman/daily_background", "daily_kalman/daily_n40",
                    "monthly/monthly_background", "monthly/monthly_n60",
                    "monthly/monthly_n96", "monthly/monthly_n120"]:
        out.append(TUGRAZGRACEL2ClassFactory(release, subdirs))
    return out

geoslurpCatalogue.addDatasetFactory(TUGRAZGRACEDsets)
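
Tracing one entry through TUGRAZGRACEL2ClassFactory shows how the class and table names come about; the '-' in the release string is only swapped for '_' in the table name, presumably because that string doubles as a database table name:

release = 'ITSG-Grace2018'
base, gtype = "monthly/monthly_n96".split('/')
clsName = "_".join([release, gtype])
print(clsName)                              # ITSG-Grace2018_monthly_n96
print(clsName.replace('-', "_") + "Table")  # ITSG_Grace2018_monthly_n96Table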
Example #8

def hydroriverClassFactory(hytype):
    fname = "%s.shp" % (hytype)
    return type(hytype, (HydroshedBase, ), {
        "hytype": hytype,
        "filename": fname,
        "gtype": "GEOMETRY",
        "swapxy": True
    })


def getHyRivers(conf):
    out = []
    for hytype in ["af_riv_30s", "eu_riv_30s"]:
        out.append(hydroriverClassFactory(hytype))
    return out


def getHyBasins(conf):
    out = []
    for hytype in ["hybas_af", "hybas_eu"]:
        for lev in range(1, 7):
            out.append(hydrobasinsClassFactory(hytype, lev))

    return out


geoslurpCatalogue.addDatasetFactory(getHyRivers)
geoslurpCatalogue.addDatasetFactory(getHyBasins)
Example #9
                #open documentation files
                with open(os.path.join(zipdir,'docu',"%d.txt"%(id))) as docid:
                    data["doc"]=docid.readlines()
                #open auth file
                with open(os.path.join(zipdir,'docu',"%d_auth.txt"%(id))) as docid:
                    data["auth"]=docid.readlines()

                meta['tstart']=tmin
                meta["tend"]=tmax
                meta["data"]=data

                self.addEntry(meta)
            self.updateInvent()

def PSMSLClassFactory(clsName):
    dum, typ, freq = clsName.lower().split("_")
    url = ("http://www.psmsl.org/data/obtaining/" + typ + "." + freq
           + ".data/" + typ + "_" + freq + ".zip")
    table = type(clsName + "Table", (PSMSLTBase,), {})
    return type(clsName, (PSMSLBase,), {"url": url, "table": table, "typ": typ, "freq": freq})


def getPSMSLDsets(conf):
    out = []
    for clsName in ["psmsl_rlr_monthly", "psmsl_rlr_annual", "psmsl_met_monthly"]:
        out.append(PSMSLClassFactory(clsName))
    return out


geoslurpCatalogue.addDatasetFactory(getPSMSLDsets)
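
Tracing one class name through PSMSLClassFactory gives the download URL the class will carry, derived purely from the string splitting above:

clsName = "psmsl_rlr_monthly"
dum, typ, freq = clsName.lower().split("_")
print("http://www.psmsl.org/data/obtaining/"
      + typ + "." + freq + ".data/" + typ + "_" + freq + ".zip")
# http://www.psmsl.org/data/obtaining/rlr.monthly.data/rlr_monthly.zip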
Example #10
                "A directory/regex with output data needs to be supplied when registering this dataset"
            )

        newfiles = self.retainnewUris(
            [UriFile(file) for file in findFiles(rundir, pattern)])

        for uri in newfiles:
            meta = FESOMMetaExtractor(uri)
            if not meta:
                #don't register empty entries
                continue

            self.addEntry(meta)

        self._dbinvent.data["Description"] = "FESOM output data table"
        self.setDataDir(os.path.abspath(rundir))
        self._dbinvent.data["grid"] = self.grid
        self.updateInvent()


def getFESOMDsets(conf):
    """Create dummy tables for displaying"""
    out = []
    out.append(type("vertices_TEMPLATE", (FESOMverticesBase, ), {}))
    out.append(type("triangles_TEMPLATE", (FESOMtinBase, ), {}))
    out.append(type("run_TEMPLATE_g_TEMPLATE", (FESOMRunBase, ), {}))
    return out


geoslurpCatalogue.addDatasetFactory(getFESOMDsets)
Example #11
        self._dbinvent.data["Description"] = "ArcticDEM raster table"

    def pull(self, intersect=None):
        # download the entire mosaic domain in one tif
        if self.res in ['1km', '500m', '100m']:
            rasteruri = http(
                "http://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/mosaic/v3.0/"
                + self.res + "/" + self.rasterfile,
                lastmod=datetime(2018, 9, 26))
            rasterfileuri, upd = rasteruri.download(self.srcdir, check=False)

        #download only those tiles which are needed


def getArcticDems(conf):
    out = []
    for res in ['1km', '500m', '100m']:
        out.append(
            type("arcticdem_mosaic_" + res + "_v3", (ArcticDemRasterBase, ), {
                "res": res,
                "tiles": [100, 100]
            }))
        # out.append(type("arcticdem_mosaic_"+res+"_v3", (ArcticDemRasterBase,), {"res":res}))

    return out


#register datasets
geoslurpCatalogue.addDataset(Arcticdemindex)
geoslurpCatalogue.addDatasetFactory(getArcticDems)
Example #12
        newfiles=self.retainnewUris([UriFile(file) for file in findFiles(rundir,pattern)])

        for uri in newfiles:
            meta=orasMetaExtractor(uri)
            if not meta:
                #don't register empty entries
                continue

            self.addEntry(meta)



        self._dbinvent.data["Description"]="ORAS5 output data table"
        self.setDataDir(os.path.abspath(rundir))
        self._dbinvent.data["grid"]="025"
        self.updateInvent()
        
#%% =============================================================================
# dataset factory
# =============================================================================

def getOrasDsets(conf):
    """Create dummy tables for displaying"""
    out=[]
    out.append(type("vertices_TEMPLATE_TEMPLATE", (orasVerticesBase,), {})) # 'vertices_oras5_025'
    out.append(type("run_TEMPLATE_TEMPLATE_TEMPLATE_TEMPLATE", (orasRunBase,), {})) # 'run_oras5_temp_opa0_025'
    return out

geoslurpCatalogue.addDatasetFactory(getOrasDsets)

Example #13
    cachedir = conf.getCacheDir("githubcache")
    try:
        cred = conf.authCred("github", ['oauthtoken'])
        token = cred.oauthtoken
    except Exception:
        # no GitHub token configured; fall back to anonymous access
        token = None
    catalog = cachedGithubCatalogue(reponame,
                                    cachedir=cachedir,
                                    commitsha=commitsha,
                                    gfilter=ghfilter({
                                        "type": "blob",
                                        "path": "\.geojson"
                                    }),
                                    gfollowfilter=ghfilter({
                                        "type": "tree",
                                        "path": "geojson"
                                    }),
                                    ghtoken=token)

    out = []
    #create a list of datasets
    for entry in catalog["datasets"]:
        clsname = os.path.basename(entry["path"]).split(".")[0]
        out.append(NaturalEarthClassFactory(clsname, entry))

    return out


geoslurpCatalogue.addDatasetFactory(getNaturalEarthDsets)
Example #14
        """ Register the drainage divides"""
        slurplogger().info("Registering %s" % self.name)
        #empty the table first, in case it already holds entries
        self.truncateTable()
        fname = os.path.join(self.cacheDir(), self.fbase + ".gz")
        #loop over polygon entries
        for dicentry in IceSatPolygons(fname):
            self.addEntry(dicentry)

        self.updateInvent()


def DrainDivClassFactory(clsName, fbase):
    table = type(clsName + "Table", (draindivTBase, ), {})
    return type(clsName, (IceSatDDivBase, ), {"fbase": fbase, "table": table})


def getDrainDivDsets(conf):
    clslookup = {
        "antarc_ddiv_icesat": "Ant_Full_DrainageSystem_Polygons.txt",
        "green_ddiv_icesat": "GrnDrainageSystems_Ekholm.txt",
        "antarc_ddiv_icesat_grnd": "Ant_Grounded_DrainageSystem_Polygons.txt"
    }
    out = []
    for clsName, fbase in clslookup.items():
        out.append(DrainDivClassFactory(clsName, fbase))
    return out


geoslurpCatalogue.addDatasetFactory(getDrainDivDsets)
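
Taken together, the examples exercise a two-method registration contract: addDataset accepts a ready-made class, while addDatasetFactory accepts a callable that receives a configuration object and returns a list of classes, deferring class creation until the configuration is known. A minimal stand-in sketch of that interface (not geoslurp's actual implementation; materialize is a hypothetical helper):

class MiniCatalogue:
    """Sketch of the catalogue interface as used in the examples above."""

    def __init__(self):
        self._datasets = []
        self._factories = []

    def addDataset(self, cls):
        # register a single, already constructed dataset class
        self._datasets.append(cls)

    def addDatasetFactory(self, factory):
        # register a callable conf -> [dataset classes]
        self._factories.append(factory)

    def materialize(self, conf):
        # hypothetical helper: expand every factory with the runtime config
        return self._datasets + [cls for fac in self._factories
                                 for cls in fac(conf)]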