def get_shapefile_directory_list(state=None, geo_type=None):
    """Return the directories under EXTRACT_DIR that contain shapefiles,
    optionally filtered by state and/or geo_type.
    """
    # Keep only top-level directories that actually contain a .shp file.
    _, directories, _ = next(os.walk(EXTRACT_DIR))
    shapefile_directory_list = [
        os.path.join(EXTRACT_DIR, directory) for directory in directories
        if glob.glob(os.path.join(EXTRACT_DIR, directory, '*.shp'))
    ]

    if geo_type:
        # Congressional district and ZCTA archives are national ('_us_');
        # the substring match also covers cd112, cd113, etc.
        if geo_type in ['cd', 'zcta5']:
            geo_type_check = '_us_%s' % geo_type
            shapefile_directory_list = [
                directory for directory in shapefile_directory_list
                if geo_type_check in directory
            ]
        else:
            geo_type_check = '_%s' % geo_type.lower()
            shapefile_directory_list = [
                directory for directory in shapefile_directory_list
                if directory.endswith(geo_type_check)
            ]

    if state:
        # Keep state-specific directories plus national ('_us_') archives,
        # except the national ZCTA archive.
        state_check = '_%s_' % get_fips_code_for_state(state)
        shapefile_directory_list = [
            directory for directory in shapefile_directory_list
            if state_check in directory or
            ('_us_' in directory and '_us_zcta5' not in directory)
        ]

    return shapefile_directory_list
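
# A minimal usage sketch (hypothetical, not part of the original module): list
# the extracted census-tract directories for one state. It assumes
# get_fips_code_for_state() accepts a value like 'NY' and that EXTRACT_DIR
# holds unpacked TIGER/Line archives with names such as "tl_2013_36_tract".
def _demo_list_tract_directories(state='NY'):
    for directory in get_shapefile_directory_list(state=state, geo_type='tract'):
        print(directory)
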
def get_filename_list_from_ftp(target, state):
    """Return full download URLs from an FTP directory listing,
    optionally filtered by state.
    """
    target_files = urllib2.urlopen(target).read().splitlines()
    filename_list = []

    # The file name is the last whitespace-separated token on each
    # listing line; join it back onto the directory URL.
    for line in target_files:
        filename = '%s%s' % (target, line.decode().split()[-1])
        filename_list.append(filename)

    if state:
        # Keep state-specific archives plus national ('_us_') archives,
        # except the national ZCTA archive.
        state_check = '_%s_' % get_fips_code_for_state(state)
        filename_list = [
            filename for filename in filename_list
            if state_check in filename or
            ('_us_' in filename and '_us_zcta5' not in filename)
        ]

    return filename_list
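
# A minimal download sketch (hypothetical, not part of the original module):
# fetch every archive in the listing that matches one state. It assumes
# `target` is a directory URL ending in '/', as the URL join above implies.
def _demo_download_state_archives(target, state, destination_dir):
    for url in get_filename_list_from_ftp(target, state):
        local_path = os.path.join(destination_dir, url.split('/')[-1])
        with open(local_path, 'wb') as handle:
            handle.write(urllib2.urlopen(url).read())
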
def build_dict_list(filename,
                    state=None,
                    geo_type=None,
                    include_polygon=False):
    """Read one shapefile with OGR and return a list of attribute dicts,
    one per feature.
    """
    shapefile = ogr.Open(filename)
    layer = shapefile.GetLayer()
    state_check = get_fips_code_for_state(state) if state else None
    dict_list = []

    feature = layer.GetNextFeature()
    while feature:
        item = {}
        if geo_type == 'zcta5':
            # ZCTA shapefiles have different attribute names
            _item_options = {
                'include_polygon': include_polygon,
            }
            item = csv_helpers.make_zcta5_row(feature, item, geo_type,
                                              _item_options)
        else:
            # All other geo_types share attribute names
            # Filter rows by state if -s arg is passed
            _statefp = feature.GetField("STATEFP")
            if not state_check or (state_check == _statefp):
                _item_options = {
                    'statefp': _statefp,
                    'include_polygon': include_polygon,
                    'geoid': feature.GetField("GEOID"),
                    'state_dict': STATE_FIPS_DICT[str(_statefp)],
                }

                item = csv_helpers.make_basic_row(feature, item, geo_type,
                                                  _item_options)
                if geo_type:
                    row_builder = getattr(csv_helpers,
                                          'make_%s_row' % geo_type)
                    item = row_builder(feature, item, _item_options)

        if item:
            dict_list.append(item)
        feature.Destroy()
        feature = layer.GetNextFeature()

    shapefile.Destroy()

    return dict_list
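
# A minimal end-to-end sketch (hypothetical, not part of the original module):
# turn every matching extracted shapefile into row dicts for one state and
# geo_type. It assumes csv_helpers exposes a make_county_row() helper, as the
# getattr() dispatch in build_dict_list() implies for each supported geo_type.
def _demo_build_rows(state='NY', geo_type='county'):
    rows = []
    for directory in get_shapefile_directory_list(state=state, geo_type=geo_type):
        for shapefile_path in glob.glob(os.path.join(directory, '*.shp')):
            rows.extend(build_dict_list(shapefile_path,
                                        state=state,
                                        geo_type=geo_type))
    return rows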