def __init__(self, edges_shp, featnames_dbf, faces_dbf, place_shp,
             filter_city=None, verbose=False, encoding='utf8'):
    """Load the TIGER/Line relational data needed for block import.

    Parameters:
        edges_shp -- path to the edges shapefile (one feature per edge).
        featnames_dbf -- path to the feature-names DBF (street names,
            keyed by TLID).
        faces_dbf -- path to the faces DBF (face attributes, keyed by TFID).
        place_shp -- path to the places shapefile (keyed by FIPS place code).
        filter_city -- optional city name; normalized to upper case.
        verbose -- passed through to BlockImporter for logging.
        encoding -- attribute-data encoding, passed to BlockImporter.
    """
    BlockImporter.__init__(self, shapefile=edges_shp, layer_id=0,
                           verbose=verbose, encoding=encoding)
    self.featnames_db = featnames_db = {}
    for tlid, row in self._load_rel_db(featnames_dbf, 'TLID').iteritems():
        # TLID is Tiger/Line ID, unique per edge.
        # We use TLID instead of LINEARID as the key because
        # LINEARID is only unique per 'linear feature', which is
        # an implicit union of some edges. So if we used LINEARID,
        # we'd clobber a lot of keys in the call to
        # _load_rel_db().
        # Fixes #14 ("missing blocks").
        if row['MTFCC'] not in VALID_MTFCC:
            continue
        if not row.get('FULLNAME'):
            self.log("skipping tlid %r, no fullname" % tlid)
            continue
        # setdefault returns the list, so we can append in one step.
        featnames_db.setdefault(tlid, []).append(row)
    self.faces_db = self._load_rel_db(faces_dbf, 'TFID')
    # Load places keyed by FIPS code.
    # Fix: iterate the layer we already opened instead of calling
    # DataSource(place_shp)[0] a second time, which re-opened the
    # shapefile just to walk its features.
    places_layer = DataSource(place_shp)[0]
    fields = places_layer.fields
    self.places = places = {}
    for feature in places_layer:
        fips = feature.get('PLACEFP')
        values = dict(zip(fields, map(feature.get, fields)))
        places[fips] = values
    # Upper-case for case-insensitive matching; None when no filter given.
    self.filter_city = filter_city.upper() if filter_city else None
    self.tlids_with_blocks = set()
def __init__(self, edges_shp, featnames_dbf, faces_dbf, place_shp,
             filter_city=None, filter_bounds=None, filter_locations=(),
             verbose=False, encoding='utf8', fix_cities=False,
             reset=False):
    """Load the TIGER/Line relational data needed for block import.

    Parameters:
        edges_shp -- path to the edges shapefile (one feature per edge).
        featnames_dbf -- path to the feature-names DBF (street names).
        faces_dbf -- path to the faces DBF (face attributes, keyed by TFID).
        place_shp -- path to the places shapefile (keyed by FIPS place code).
        filter_city -- optional city name; normalized to upper case.
        filter_bounds -- optional geometry; unwrapped to its OGR form
            when it exposes an ``ogr`` attribute.
        filter_locations -- accepted for the caller's use; not stored here.
        verbose, encoding, reset -- passed through to BlockImporter.
        fix_cities -- stored on the instance as ``self.fix_cities``.
    """
    BlockImporter.__init__(self, shapefile=edges_shp, layer_id=0,
                           verbose=verbose, encoding=encoding,
                           reset=reset)
    self.fix_cities = fix_cities
    self.featnames_db = self._clean_featnames(featnames_dbf)
    self.faces_db = self._load_rel_db(faces_dbf, 'TFID')
    # Build self.places keyed by FIPS place code.
    layer = DataSource(place_shp)[0]
    field_names = layer.fields
    self.places = places = {}
    for place in layer:
        try:
            # 2010 Census files.
            fips = place.get('PLACEFP10') or place.get('PLACEFP00')
        except OGRIndexError:
            fips = place.get('PLACEFP')
        places[fips] = {name: place.get(name) for name in field_names}
    self.filter_city = filter_city.upper() if filter_city else None
    # Unwrap a GEOS-style geometry to its OGR counterpart when possible.
    self.filter_bounds = getattr(filter_bounds, 'ogr', filter_bounds)
    self.tlids_with_blocks = set()
def __init__(self, edges_shp, featnames_dbf, faces_dbf, place_shp,
             filter_city=None, filter_bounds=None, filter_locations=(),
             verbose=False, encoding='utf8', fix_cities=False):
    """Load the TIGER/Line relational data needed for block import.

    Parameters:
        edges_shp -- path to the edges shapefile (one feature per edge).
        featnames_dbf -- path to the feature-names DBF (street names,
            keyed by TLID).
        faces_dbf -- path to the faces DBF (face attributes, keyed by TFID).
        place_shp -- path to the places shapefile (keyed by FIPS place code).
        filter_city -- optional city name; normalized to upper case.
        filter_bounds -- optional geometry; unwrapped to its OGR form
            when it exposes an ``ogr`` attribute.
        filter_locations -- accepted for the caller's use; not stored here.
        verbose, encoding -- passed through to BlockImporter.
        fix_cities -- stored on the instance as ``self.fix_cities``.
    """
    BlockImporter.__init__(self, shapefile=edges_shp, layer_id=0,
                           verbose=verbose, encoding=encoding)
    self.fix_cities = fix_cities
    self.featnames_db = names_by_tlid = {}
    for tlid, record in self._load_rel_db(featnames_dbf, 'TLID').iteritems():
        # TLID is Tiger/Line ID, unique per edge.
        # We use TLID instead of LINEARID as the key because
        # LINEARID is only unique per 'linear feature', which is
        # an implicit union of some edges. So if we used LINEARID,
        # we'd clobber a lot of keys in the call to
        # _load_rel_db().
        # Fixes #14 ("missing blocks").
        if record['MTFCC'] not in VALID_MTFCC:
            continue
        if not record.get('FULLNAME'):
            self.log("skipping tlid %r, no fullname" % tlid)
            continue
        names_by_tlid.setdefault(tlid, []).append(record)
    self.faces_db = self._load_rel_db(faces_dbf, 'TFID')
    # Build self.places keyed by FIPS place code.
    layer = DataSource(place_shp)[0]
    field_names = layer.fields
    self.places = places = {}
    for place in layer:
        try:
            # 2010 Census files.
            fips = place.get('PLACEFP10') or place.get('PLACEFP00')
        except OGRIndexError:
            fips = place.get('PLACEFP')
        places[fips] = {name: place.get(name) for name in field_names}
    self.filter_city = filter_city.upper() if filter_city else None
    # Unwrap a GEOS-style geometry to its OGR counterpart when possible.
    self.filter_bounds = getattr(filter_bounds, 'ogr', filter_bounds)
    self.tlids_with_blocks = set()
def __init__(self, edges_shp, featnames_dbf, faces_dbf, place_shp,
             filter_city=None, filter_bounds=None, filter_locations=(),
             verbose=False, encoding='utf8', fix_cities=False,
             reset=False):
    """Load the TIGER/Line relational data needed for block import.

    Parameters:
        edges_shp -- path to the edges shapefile (one feature per edge).
        featnames_dbf -- path to the feature-names DBF (street names).
        faces_dbf -- path to the faces DBF (face attributes, keyed by TFID).
        place_shp -- path to the places shapefile (keyed by FIPS place code).
        filter_city -- optional city name; normalized to upper case.
        filter_bounds -- optional geometry; replaced by its ``ogr``
            attribute when it has one.
        filter_locations -- accepted for the caller's use; not stored here.
        verbose, encoding, reset -- passed through to BlockImporter.
        fix_cities -- stored on the instance as ``self.fix_cities``.
    """
    BlockImporter.__init__(self, shapefile=edges_shp, layer_id=0,
                           verbose=verbose, encoding=encoding,
                           reset=reset)
    self.fix_cities = fix_cities
    self.featnames_db = self._clean_featnames(featnames_dbf)
    self.faces_db = self._load_rel_db(faces_dbf, 'TFID')
    # Map FIPS place code -> dict of all place attributes.
    src_layer = DataSource(place_shp)[0]
    attr_names = src_layer.fields
    self.places = fips_to_place = {}
    for feat in src_layer:
        try:
            # 2010 Census files.
            code = feat.get('PLACEFP10') or feat.get('PLACEFP00')
        except OGRIndexError:
            code = feat.get('PLACEFP')
        row = dict((attr, feat.get(attr)) for attr in attr_names)
        fips_to_place[code] = row
    if filter_city:
        self.filter_city = filter_city.upper()
    else:
        self.filter_city = None
    # A GEOS geometry carries its OGR counterpart on .ogr; unwrap it.
    if hasattr(filter_bounds, 'ogr'):
        filter_bounds = filter_bounds.ogr
    self.filter_bounds = filter_bounds
    self.tlids_with_blocks = set()