# Crop area loader: irrigated, rainfed, known, and total crop areas by county,
# each read from its own CSV and combined under a per-file prefix.
def load():
    dbs = []
    prefixes = []
    metainfo = database.StoredMetainfo({
        'FIPS': dict(unit="name"),
        'Alfalfa': dict(unit="acre"),
        'Otherhay': dict(unit="acre"),
        'Barley': dict(unit="acre"),
        'Barley.Winter': dict(unit="acre"),
        'Maize': dict(unit="acre"),
        'Sorghum': dict(unit="acre"),
        'Soybean': dict(unit="acre"),
        'Wheat': dict(unit="acre"),
        'Wheat.Winter': dict(unit="acre"),
        'fips': dict(unit="name"),
        'known': dict(unit="acre"),
        'total': dict(unit="acre"),
        'barley': dict(unit="acre"),
        'corn': dict(unit="acre"),
        'sorghum': dict(unit="acre"),
        'soybeans': dict(unit="acre"),
        'wheat': dict(unit="acre"),
        'hay': dict(unit="acre")
    })

    db = database.StaticCSVDatabase(os.path.join(pathhere, "irrigatedareas.csv"),
                                    'FIPS', year=2010)
    db.set_metainfo(metainfo)
    dbs.append(db)
    prefixes.append('irrigatedareas')

    db = database.StaticCSVDatabase(os.path.join(pathhere, "rainfedareas.csv"),
                                    'FIPS', year=2010)
    db.set_metainfo(metainfo)
    dbs.append(db)
    prefixes.append('rainfedareas')

    db = database.StaticCSVDatabase(os.path.join(pathhere, "knownareas.csv"),
                                    'fips', year=2010)
    db.set_metainfo(metainfo)
    dbs.append(db)
    prefixes.append('knownareas')

    db = database.StaticCSVDatabase(os.path.join(pathhere, "totalareas.csv"),
                                    'FIPS', year=2010)
    db.set_metainfo(metainfo)
    dbs.append(db)
    prefixes.append('totalareas')

    return database.CombinedDatabase(dbs, prefixes, '.')
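# Renewable energy potential loader: county solar and wind statistics from
# energy/repotential.csv, keyed by FIPS codes derived from the NHGIS state and
# county columns.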
def load():
    # Derive the 5-digit county FIPS from the NHGIS codes (NHGIS state and
    # county codes are the FIPS codes times 10).
    get_fips = lambda df: np.array(df['NHGISST']) * 100 + np.array(df['NHGISCTY']) / 10
    variable_filter = lambda cols: filter(
        lambda col: 'NHGIS' in col or 'mean' in col or 'sum' in col or col == 'STATENAM',
        cols)

    metainfo = database.StoredMetainfo({
        'NHGISNAM': dict(unit="name"),
        'NHGISST': dict(unit="code"),
        'NHGISCTY': dict(unit="code"),
        'STATENAM': dict(unit="name"),
        'solarsum': dict(unit="W"),
        'solarmean': dict(unit="W/m^2"),
        'windsum': dict(unit="m^3/s"),
        'windmean': dict(unit="m/s"),
        'windpowerm': dict(unit="W"),
        'windpowers': dict(unit="W/m^2")
    })

    db = database.StaticCSVDatabase(
        database.localpath("energy/repotential.csv"), get_fips, variable_filter)
    db.set_metainfo(metainfo)
    return db
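# Groundwater loader: one OrderedVectorDatabase per text file under groundwater/
# (depth to the water table, piezometric head, drawdown, storativity, county
# area and elevation), ordered against the AWASH county FIPS list.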
def load():
    masterpath = database.localpath("awash/counties.csv")
    fipsdb = database.StaticCSVDatabase(masterpath, 'fips')

    dbs = []
    for filepath in glob.glob(database.localpath("groundwater/*.txt")):
        if os.path.basename(filepath) == 'notes.txt':
            continue

        name = os.path.basename(filepath)[:-4]
        db = database.OrderedVectorDatabase.read_text(filepath, name, 2010, fipsdb)

        if name == 'aquifer_depth':
            db.set_metainfo(database.UniformMetainfo("Depth to groundwater table", "m"))
        if name == 'piezohead0':
            db.set_metainfo(database.UniformMetainfo("piezohead", "m"))
        if name == 'county_area':
            db.set_metainfo(database.UniformMetainfo("county area", "m^2"))
        if name == 'county_elevation':
            db.set_metainfo(database.UniformMetainfo("county elevation", "m"))
        if name == 'drawdown0':
            db.set_metainfo(database.UniformMetainfo("draw down", "m"))
        if name == 'vector_storativity':
            db.set_metainfo(database.UniformMetainfo(" ", "None"))

        dbs.append(db)

    return database.ConcatenatedDatabase(dbs)
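# Bioclimatic variables loader: current bioclims plus 2050 projections (one CSV
# per model run), with temperature variables in dC and precipitation in mm,
# combined under per-file prefixes.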
def load():
    metainfo = database.StoredMetainfo({
        'NHGISNAM': dict(unit="name"),
        'NHGISST': dict(unit="code"),
        'NHGISCTY': dict(unit="code"),
        'STATENAM': dict(unit="name"),
        'bio1_mean': dict(unit='dC'),
        'bio2_mean': dict(unit='dC'),
        'bio5_mean': dict(unit='dC'),
        'bio6_mean': dict(unit='dC'),
        'bio7_mean': dict(unit='dC'),
        'bio8_mean': dict(unit='dC'),
        'bio9_mean': dict(unit='dC'),
        'bio10_mean': dict(unit='dC'),
        'bio11_mean': dict(unit='dC'),
        'bio12_mean': dict(unit='mm'),
        'bio13_mean': dict(unit='mm'),
        'bio14_mean': dict(unit='mm'),
        'bio16_mean': dict(unit='mm'),
        'bio17_mean': dict(unit='mm'),
        'bio18_mean': dict(unit='mm'),
        'bio19_mean': dict(unit='mm')
    })

    # Derive the 5-digit county FIPS from the NHGIS codes (NHGIS state and
    # county codes are the FIPS codes times 10).
    get_fips = lambda df: np.array(df['NHGISST']) * 100 + np.array(df['NHGISCTY']) / 10
    variable_filter = lambda cols: filter(
        lambda col: 'NHGIS' in col or '_mean' in col or col == 'STATENAM', cols)

    current = database.StaticCSVDatabase(
        database.localpath("climate/bioclims-current.csv"), get_fips, variable_filter)
    current.set_metainfo(metainfo)

    dbs = [current]
    prefixes = ['current']
    for filepath in glob.glob(database.localpath("climate/bioclims-2050/*.csv")):
        db = database.StaticCSVDatabase(filepath, get_fips, variable_filter, year=2050)
        db.set_metainfo(metainfo)
        dbs.append(db)
        # Prefix each 2050 projection by the first two characters of its filename.
        prefixes.append(filepath[filepath.rindex('/')+1:filepath.rindex('/')+3])

    return database.CombinedDatabase(dbs, prefixes, '.')
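# Degree-day loader (edds/): one single-variable CSV per file, each reordered to
# the master county FIPS list; values are reported in C day.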
def load():
    fipsdb = database.StaticCSVDatabase(masterpath, 'fips')

    dbs = []
    for filename in glob.glob(os.path.join(pathhere, "edds/*.csv")):
        filename = os.path.basename(filename)
        filepath = os.path.join(pathhere, "edds", filename)
        db = database.OrderedDatabase.use_fips(
            fipsdb, SingleVariableDatabase(filepath, filename[:-4]))
        db.set_metainfo(database.UniformMetainfo(None, 'C day'))
        dbs.append(db)

    return database.ConcatenatedDatabase(dbs)
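# Mortality loader: county mortality data for 1999-2010, tab-separated and keyed
# by 'County Code', both for all ages and broken out by age group, centered on
# 2004.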
def load():
    allage = database.StaticCSVDatabase(
        database.localpath('mortality/cmf-1999-2010.txt'),
        'County Code', year=2004, sep='\t')
    allage.set_metainfo(metainfo.StoredMetainfo(infos))

    byage = database.InterlevedCSVDatabase(
        database.localpath("mortality/cmf-age-1999-2010.txt"),
        'County Code', 'Age Group', 2004, sep='\t')
    byage.set_metainfo(metainfo.StoredMetainfo(infos))

    return database.CombinedDatabase([allage, byage], ['all', 'age'], '.')
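# Multi-sheet loader: one spreadsheet sheet per five-year snapshot from 1985 to
# 2010, all keyed by 'FIPS' and merged into a CombinedYearsDatabase; `filepath`,
# `get_description`, and `get_unit` are assumed to be defined at module level.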
def load():
    dbs = []
    for year in range(1985, 2015, 5):
        dbs.append(database.StaticCSVDatabase(filepath, 'FIPS',
                                              sheetname=str(year), year=year))

    for db in dbs:
        db.set_metainfo(database.FunctionalMetainfo(get_description, get_unit))

    return database.CombinedYearsDatabase(dbs, dbs[-1].get_fips())
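# Labor loader: county labor summary for 2000-2005 (nominal year 2002), with
# variable descriptions and units read from labor/info.csv.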
def load(): filepath = database.localpath("labor/lab_cty_00_05_sum.csv") db = database.StaticCSVDatabase(filepath, 'fips', year=2002) db.set_metainfo(metainfo.StoredMetainfo.load_csv(database.localpath("labor/info.csv"), 'variable', 'description', 'unit')) return db
def load(): filepath = database.localpath("ccimpacts/county_damage_mapping_data.csv") db = database.StaticCSVDatabase(filepath, 'fips', year=2090) db.set_metainfo(metainfo.StoredMetainfo(metas)) return db