from datetime import date

from boundaryservice import utils

# Definitions for every shapefile this boundary service instance should load.
SHAPEFILES = {
    # This key should be the plural name of the boundaries in this set
    'Counties': {
        # Path to a shapefile, relative to /data/shapefiles
        'file': 'co/counties.shp',
        # Generic singular name for a boundary from this set
        'singular': 'County',
        # Should the singular name come first when creating canonical identifiers for this set?
        'kind_first': False,
        # Function which each feature will be passed to in order to extract its "external_id" property
        # The utils module contains several generic functions for doing this
        'ider': utils.simple_namer(['FIPS']),
        # Function which each feature will be passed to in order to extract its "name" property
        'namer': utils.simple_namer(['COUNTY']),
        # Authority that is responsible for the accuracy of this data
        'authority': 'US Census Bureau',
        # Geographic extents which the boundary set encompasses
        'domain': 'Colorado',
        # Last time the source was checked for new data
        'last_updated': date(2013, 11, 1),
        # A url to the source of the data
        'href': 'http://www.nationalatlas.gov/atlasftp-1m.html',
        # Notes identifying any peculiarities about the data, such as columns
        # that were deleted or files which were merged
        'notes': '',
        # Encoding of the text fields in the shapefile, i.e. 'utf-8'.
        # If this is left empty 'ascii' is assumed
        'encoding': '',
        # SRID of the geometry data in the shapefile if it can not be inferred
        # from an accompanying .prj file
        # NOTE(review): the source text was truncated after the SRID comment;
        # the empty 'srid' value and closing braces below are reconstructed
        # from the standard boundaryservice definitions layout — confirm.
        'srid': '',
    },
}
from datetime import date

from boundaryservice import utils

# Definitions for every shapefile this boundary service instance should load.
SHAPEFILES = {
    # This key should be the plural name of the boundaries in this set
    'New Jersey Municipalities': {
        # Path to a shapefile, relative to /data/shapefiles
        'file': 'nj_boundaries/nj_munis.shp',
        # Generic singular name for a boundary from this set
        'singular': 'Municipality',
        # Should the singular name come first when creating canonical identifiers for this set?
        'kind_first': True,
        # Function which each feature will be passed to in order to extract its "external_id" property
        # The utils module contains several generic functions for doing this
        'ider': utils.simple_namer(['KEY']),
        # Function which each feature will be passed to in order to extract its "name" property
        'namer': utils.simple_namer(['NAME']),
        # Authority that is responsible for the accuracy of this data
        'authority': 'NJ Geographic Information Network',
        # Geographic extents which the boundary set encompasses
        'domain': 'New Jersey',
        # Last time the source was checked for new data
        'last_updated': date(2012, 12, 27),
        # A url to the source of the data
        'href': 'https://njgin.state.nj.us/NJ_NJGINExplorer/jviewer.jsp?pg=DataDownloads',
        # Notes identifying any peculiarities about the data, such as columns
        # that were deleted or files which were merged
        'notes': '',
        # Encoding of the text fields in the shapefile, i.e. 'utf-8'.
        # If this is left empty 'ascii' is assumed
        'encoding': '',
        # SRID of the geometry data in the shapefile if it can not be inferred
        # from an accompanying .prj file
        # NOTE(review): the source text was truncated after the SRID comment;
        # the empty 'srid' value and closing braces below are reconstructed
        # from the standard boundaryservice definitions layout — confirm.
        'srid': '',
    },
}
from datetime import date

from boundaryservice import utils

# Shapefile definitions for nationwide US Census TIGER/Line boundary sets.
SHAPEFILES = {
    'Counties': {
        # Path to a shapefile, relative to /data/shapefiles
        'file': 'counties.zip',
        # Generic singular name for a boundary from this set
        'singular': 'County',
        # Should the singular name come first in canonical identifiers?
        'kind_first': False,
        # Extracts each feature's "external_id" property
        'ider': utils.simple_namer(['GEOID']),
        # Extracts each feature's "name" property
        'namer': utils.simple_namer(['NAME']),
        'authority': 'United States Census Bureau',
        'domain': 'United States of America',
        'last_updated': date(2013, 8, 6),
        'href': (
            'ftp://ftp2.census.gov/geo/tiger/TIGER2013/'
            'COUNTY/tl_2013_us_county.zip'),
        'notes': 'Resaved as UTF-8 encoding',
        'encoding': '',
        'srid': '',
        'simplification': 0.0001,
    },
    'States': {
        'file': 'tl_2013_us_state.zip',
        'singular': 'State',
        'kind_first': False,
        'ider': utils.simple_namer(['GEOID']),
        'namer': utils.simple_namer(['NAME']),
        'authority': 'United States Census Bureau',
        'domain': 'United States of America',
        'last_updated': date(2013, 8, 6),
        # NOTE(review): the source text was truncated after 'last_updated';
        # the remaining keys below are reconstructed from the parallel
        # 'Counties' entry above (href per the TIGER2013 STATE layer) — confirm.
        'href': (
            'ftp://ftp2.census.gov/geo/tiger/TIGER2013/'
            'STATE/tl_2013_us_state.zip'),
        'notes': '',
        'encoding': '',
        'srid': '',
        'simplification': 0.0001,
    },
}
from datetime import date

from boundaryservice import utils

# Shapefile definitions for nationwide US Census TIGER/Line boundary sets.
SHAPEFILES = {
    "Counties": {
        # Path to a shapefile, relative to /data/shapefiles
        "file": "counties.zip",
        # Generic singular name for a boundary from this set
        "singular": "County",
        # Should the singular name come first in canonical identifiers?
        "kind_first": False,
        # Extracts each feature's "external_id" property
        "ider": utils.simple_namer(["GEOID"]),
        # Extracts each feature's "name" property
        "namer": utils.simple_namer(["NAME"]),
        "authority": "United States Census Bureau",
        "domain": "United States of America",
        "last_updated": date(2013, 8, 6),
        "href": ("ftp://ftp2.census.gov/geo/tiger/TIGER2013/"
                 "COUNTY/tl_2013_us_county.zip"),
        "notes": "Resaved as UTF-8 encoding",
        "encoding": "",
        "srid": "",
        "simplification": 0.0001,
    },
    "States": {
        "file": "tl_2013_us_state.zip",
        "singular": "State",
        "kind_first": False,
        "ider": utils.simple_namer(["GEOID"]),
        "namer": utils.simple_namer(["NAME"]),
        "authority": "United States Census Bureau",
        "domain": "United States of America",
        "last_updated": date(2013, 8, 6),
        "href": ("ftp://ftp2.census.gov/geo/tiger/TIGER2013/"
                 "STATE/tl_2013_us_state.zip"),
        "notes": "",
        # NOTE(review): the source text was truncated after "notes"; the keys
        # below are reconstructed from the parallel "Counties" entry — confirm.
        "encoding": "",
        "srid": "",
        "simplification": 0.0001,
    },
}
from datetime import date

from boundaryservice import utils

import processing

# Definitions for every shapefile this boundary service instance should load.
SHAPEFILES = {
    # This key should be the plural name of the boundaries in this set
    'State House districts (2002)': {
        # Path to a shapefile, relative to /data/shapefiles
        'file': 'state-house-districts/2002-census/tl_2010_27_sldl10/tl_2010_27_sldl10.shp',
        # Generic singular name for a boundary from this set
        'singular': 'State House district (2002)',
        # Should the singular name come first when creating canonical identifiers for this set?
        'kind_first': False,
        # Function which each feature will be passed to in order to extract its "external_id" property
        # The utils module contains several generic functions for doing this
        'ider': utils.simple_namer(['geoid10']),
        # Function which each feature will be passed to in order to extract its "name" property
        #
        # This ends up being the slug: 63a-state-house-district-2002
        'namer': utils.simple_namer(['sldlst10'],
                                    normalizer=lambda x: x.lstrip('0')),
        # Authority that is responsible for the accuracy of this data
        'authority': 'U.S. Census Bureau TIGER lines',
        # Geographic extents which the boundary set encompasses
        'domain': 'Minnesota',
        # Last time the source was checked for new data
        'last_updated': date(2012, 5, 3),
        # A url to the source of the data
        'href': 'http://www.census.gov/cgi-bin/geo/shapefiles2010/main',
        # Notes identifying any peculiarities about the data, such as columns
        # that were deleted or files which were merged
        'notes': 'These districts were defined in 2002.',
        # Encoding of the text fields in the shapefile, i.e. 'utf-8'.
        # If this is left empty 'ascii' is assumed
        # NOTE(review): the source text was truncated after the encoding
        # comment; the empty 'encoding' value and closing braces below are
        # reconstructed from the standard boundaryservice layout — confirm.
        'encoding': '',
    },
}