Code Example #1
File: download_BBBike.py  Project: manesioz/pydriosm
def collect_bbbike_subregion_catalogue(confirmation_required=True,
                                       verbose=False):
    """
    :param confirmation_required: [bool] (default: True)
    :param verbose: [bool] (default: False)

    Example:
        confirmation_required = True
        verbose               = True
        collect_bbbike_subregion_catalogue(confirmation_required, verbose)
    """
    if confirmed("To collect BBBike subregion catalogue? ",
                 confirmation_required=confirmation_required):
        try:
            home_url = 'http://download.bbbike.org/osm/bbbike/'
            bbbike_subregion_catalogue = pd.read_html(
                home_url, header=0, parse_dates=['Last Modified'])[0].drop(0)
            bbbike_subregion_catalogue.Name = bbbike_subregion_catalogue.Name.map(
                lambda x: x.strip('/'))

            save_pickle(bbbike_subregion_catalogue,
                        cd_dat("BBBike-subregion-catalogue.pickle"),
                        verbose=verbose)

            bbbike_subregion_names = bbbike_subregion_catalogue.Name.tolist()
            save_pickle(bbbike_subregion_names,
                        cd_dat("BBBike-subregion-name-list.pickle"),
                        verbose=verbose)

        except Exception as e:
            print("Failed to get the required information ... {}.".format(e))
    else:
        print(
            "The information collection process was not activated. The existing local copy will be loaded instead."
        )
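The snippets on this page call a handful of helpers that are never shown: confirmed, cd_dat, save_pickle, load_pickle, and their JSON counterparts. In pydriosm these appear to come from the project's utilities and the pyhelpers package. The following is a minimal stand-in sketch, with signatures inferred purely from the call sites above, sufficient to run the examples locally:

import os
import pickle


def cd_dat(*sub):
    # Assumed layout: a "dat" directory under the current working directory.
    path = os.path.join(os.getcwd(), "dat", *sub)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path


def confirmed(prompt, confirmation_required=True):
    # Yes/no prompt; skipped entirely when confirmation is not required.
    if not confirmation_required:
        return True
    return input(prompt + "[y/n] ").strip().lower() in ("y", "yes")


def save_pickle(data, path_to_pickle, verbose=False):
    if verbose:
        print("Saving \"{}\" ... ".format(os.path.basename(path_to_pickle)), end="")
    with open(path_to_pickle, "wb") as f:
        pickle.dump(data, f)
    if verbose:
        print("Done.")


def load_pickle(path_to_pickle, verbose=False):
    if verbose:
        print("Loading \"{}\" ... ".format(os.path.basename(path_to_pickle)))
    with open(path_to_pickle, "rb") as f:
        return pickle.load(f)

save_json and load_json (used in Examples #12-#14) would follow the same pattern with the json module.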
Code Example #2
File: download_BBBike.py  Project: Mariosmsk/pydriosm
def collect_bbbike_download_catalogue(confirmation_required=True):
    """
    :param confirmation_required: [bool]
    """
    if confirmed("To collect BBBike download dictionary? ", confirmation_required=confirmation_required):
        try:
            bbbike_subregion_names = fetch_bbbike_subregion_catalogue("BBBike-subregion-name-list", update=True)
            download_catalogue = [
                fetch_bbbike_subregion_download_catalogue(subregion_name, update=True, confirmation_required=False)
                for subregion_name in bbbike_subregion_names]

            subregion_name, subregion_download_catalogue = bbbike_subregion_names[0], download_catalogue[0]

            # Available file formats
            file_fmt = [re.sub('{}|CHECKSUM'.format(subregion_name), '', f)
                        for f in subregion_download_catalogue.Filename]
            save_pickle(file_fmt[:-2], cd_dat("BBBike-osm-file-formats.pickle"))

            # Available data types
            data_typ = subregion_download_catalogue.DataType.tolist()
            save_pickle(data_typ[:-2], cd_dat("BBBike-osm-data-types.pickle"))

            # available_file_formats = dict(zip(file_fmt, file_ext))

            downloads_dictionary = dict(zip(bbbike_subregion_names, download_catalogue))
            save_pickle(downloads_dictionary, cd_dat("BBBike-download-catalogue.pickle"))
        except Exception as e:
            print("Failed to collect BBBike download dictionary. {}".format(e))
    else:
        print("The information collection process was not activated. The existing local copy will be loaded instead.")
Code Example #3
def fetch_bbbike_subregion_catalogue(catalogue_name,
                                     update=False,
                                     verbose=False):
    """
    :param catalogue_name: [str] "BBBike-subregion-catalogue"; "BBBike-subregion-name-list"
    :param update: [bool] (default: False)
    :param verbose: [bool] (default: False)
    :return: [pd.DataFrame; list]

    Examples:
        update         = False
        verbose        = True
        catalogue_name = "BBBike-subregion-catalogue"
        fetch_bbbike_subregion_catalogue(catalogue_name, update, verbose)
        catalogue_name = "BBBike-subregion-name-list"
        fetch_bbbike_subregion_catalogue(catalogue_name, update, verbose)
    """
    available_catalogue = ("BBBike-subregion-catalogue",
                           "BBBike-subregion-name-list")
    assert catalogue_name in available_catalogue, \
        "'catalogue_name' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_catalogue))

    path_to_pickle = cd_dat(catalogue_name + ".pickle")
    if not os.path.isfile(path_to_pickle) or update:
        collect_bbbike_subregion_catalogue(confirmation_required=False,
                                           verbose=verbose)

    try:
        bbbike_subregion_catalogue = load_pickle(path_to_pickle,
                                                 verbose=verbose)
        return bbbike_subregion_catalogue
    except Exception as e:
        print(e)
Code Example #4
def fetch_bbbike_download_catalogue(catalogue_name,
                                    update=False,
                                    verbose=False):
    """
    :param catalogue_name: [str] "BBBike-download-catalogue"; "BBBike-osm-file-formats"; "BBBike-osm-data-types"
    :param update: [bool] (default: False)
    :param verbose: [bool] (default: False)
    :return: [dict; list]

    Examples:
        update  = False
        verbose = True
        catalogue_name = "BBBike-download-catalogue"
        fetch_bbbike_download_catalogue(catalogue_name, update, verbose)  # dict
        catalogue_name = "BBBike-osm-file-formats"
        fetch_bbbike_download_catalogue(catalogue_name, update, verbose)  # list
        catalogue_name = "BBBike-osm-data-types"
        fetch_bbbike_download_catalogue(catalogue_name, update, verbose)  # list
    """
    available_catalogue = ("BBBike-download-catalogue",
                           "BBBike-osm-file-formats", "BBBike-osm-data-types")
    assert catalogue_name in available_catalogue, \
        "'catalogue_name' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_catalogue))

    path_to_file = cd_dat(catalogue_name + ".pickle")
    if not os.path.isfile(path_to_file) or update:
        collect_bbbike_download_catalogue(confirmation_required=True,
                                          verbose=verbose)
    try:
        bbbike_downloads_dictionary = load_pickle(path_to_file,
                                                  verbose=verbose)
        return bbbike_downloads_dictionary
    except Exception as e:
        print(e)
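Examples #3 and #4 share a collect-then-load pattern: if the local pickle is missing (or an update is requested), the corresponding collect_*() routine runs first, then the cached copy is loaded. A hypothetical usage sketch, assuming the functions defined above:

# First call scrapes and caches; later calls read the local pickles.
subregion_names = fetch_bbbike_subregion_catalogue("BBBike-subregion-name-list")
file_formats = fetch_bbbike_download_catalogue("BBBike-osm-file-formats")
data_types = fetch_bbbike_download_catalogue("BBBike-osm-data-types")
print(subregion_names[:3], file_formats, data_types)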
Code Example #5
def fetch_region_subregion_tier(catalogue_name,
                                file_format=".pickle",
                                update=False):
    """
    :param catalogue_name: [str] e.g. "GeoFabrik-region-subregion-tier"
    :param file_format: [str] ".pickle"(default), or ".json"
    :param update: [bool] whether to update (i.e. re-collect) all subregion tables for each continent
    :return: [dict or list] or None
    """
    available_catalogue = ("GeoFabrik-region-subregion-tier",
                           "GeoFabrik-non-subregion-list")
    assert catalogue_name in available_catalogue, \
        "'catalogue_name' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_catalogue))

    available_fmt = (".pickle", ".json")
    assert file_format in available_fmt, \
        "'file_format' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_fmt))

    path_to_file = cd_dat(catalogue_name + file_format)
    if not os.path.isfile(path_to_file) or update:
        collect_region_subregion_tier(confirmation_required=True)
    try:
        index_file = load_pickle(
            path_to_file) if file_format == ".pickle" else load_json(
                path_to_file)
        return index_file
    except Exception as e:
        print(e)
Code Example #6
def collect_continents_subregion_tables(confirmation_required=True):
    """
    :param confirmation_required: [bool] whether to ask for a confirmation before starting to collect the information
    """
    if confirmed("To collect information about subregions of each continent? ",
                 confirmation_required=confirmation_required):
        try:
            home_link = 'https://download.geofabrik.de/'
            source = requests.get(home_link)
            soup = bs4.BeautifulSoup(source.text,
                                     'lxml').find_all('td',
                                                      {'class': 'subregion'})
            source.close()
            continent_names = [td.a.text for td in soup]
            continent_links = [
                urllib.parse.urljoin(home_link, td.a['href']) for td in soup
            ]
            subregion_tables = dict(
                zip(continent_names,
                    [get_subregion_table(url) for url in continent_links]))
            save_pickle(subregion_tables,
                        cd_dat("GeoFabrik-continents-subregion-tables.pickle"))
        except Exception as e:
            print(
                "Failed to collect the required information ... {}.".format(e))
    else:
        print(
            "The information collection process was not activated. The existing local copy will be loaded instead."
        )
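Example #6 (and the GeoFabrik examples below) depends on get_subregion_table(), which is not shown on this page. A minimal, hypothetical sketch follows; the real pydriosm function also extracts per-format download URLs and sizes (e.g. the '.osm.pbf_Size' column used in Example #14), whereas this version keeps only the two columns the recursive examples rely on:

import urllib.parse

import bs4
import pandas as pd
import requests


def get_subregion_table(url, verbose=False):
    # Scrape the subregion listing from a GeoFabrik index page.
    try:
        source = requests.get(url)
        soup = bs4.BeautifulSoup(source.text, 'lxml')
        source.close()
        cells = soup.find_all('td', {'class': 'subregion'})
        if not cells:
            return None  # this page lists no further subregions
        names = [td.a.text for td in cells]
        links = [urllib.parse.urljoin(url, td.a['href']) for td in cells]
        return pd.DataFrame({'Subregion': names, 'SubregionURL': links})
    except Exception as e:
        if verbose:
            print("Failed to parse \"{}\". {}".format(url, e))
        return None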
Code Example #7
def fetch_subregion_info_catalogue(catalogue_name,
                                   file_format=".pickle",
                                   update=False):
    """
    :param catalogue_name: [str] e.g. "GeoFabrik-subregion-name-list"
    :param file_format: [str] ".pickle"(default), or ".json"
    :param update: [bool] whether to update (re-collect) the catalogues of subregion information
    :return: [list, dict, pandas.DataFrame] or None
    """
    available_catalogue = ("GeoFabrik-subregion-name-list",
                           "GeoFabrik-subregion-name-url-dictionary",
                           "GeoFabrik-subregion-downloads-catalogue")
    assert catalogue_name in available_catalogue, \
        "'catalogue_name' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_catalogue))

    available_fmt = (".pickle", ".json")
    assert file_format in available_fmt, \
        "'file_format' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_fmt))

    path_to_catalogue = cd_dat(catalogue_name + file_format)
    if not os.path.isfile(path_to_catalogue) or update:
        collect_subregion_info_catalogue(confirmation_required=True)
    try:
        index_file = load_pickle(
            path_to_catalogue) if file_format == ".pickle" else load_json(
                path_to_catalogue)
        return index_file
    except Exception as e:
        print(e)
Code Example #8
def fetch_continents_subregion_tables(update=False):
    """
    :param update: [bool] whether to update (i.e. re-collect) all subregion tables for each continent
    :return: [pandas.DataFrame] or None
    """
    path_to_pickle = cd_dat("GeoFabrik-continents-subregion-tables.pickle")
    if not os.path.isfile(path_to_pickle) or update:
        collect_continents_subregion_tables(confirmation_required=True)
    try:
        subregion_tables = load_pickle(path_to_pickle)
        return subregion_tables
    except Exception as e:
        print(e)
Code Example #9
File: download_BBBike.py  Project: Mariosmsk/pydriosm
def fetch_bbbike_subregion_catalogue(catalogue_name, update=False):
    """
    :param catalogue_name: [str]
    :param update: [bool]
    :return: [pandas.DataFrame]
    """
    available_catalogue = ("BBBike-subregion-catalogue", "BBBike-subregion-name-list")
    assert catalogue_name in available_catalogue, \
        "'catalogue_name' must be one of the following: \n  \"{}\".".format("\",\n  \"".join(available_catalogue))

    path_to_pickle = cd_dat(catalogue_name + ".pickle")
    if not os.path.isfile(path_to_pickle) or update:
        collect_bbbike_subregion_catalogue(confirmation_required=False)
    try:
        bbbike_subregion_catalogue = load_pickle(path_to_pickle)
        return bbbike_subregion_catalogue
    except Exception as e:
        print(e)
Code Example #10
def fetch_continents_subregion_tables(update=False, verbose=False):
    """
    :param update: [bool] (default: False) whether to update (i.e. re-collect) all subregion tables for each continent
    :param verbose: [bool] (default: False)
    :return: [pd.DataFrame]

    Example:
        update  = False
        verbose = True
        fetch_continents_subregion_tables(update, verbose)
    """
    path_to_pickle = cd_dat("GeoFabrik-continents-subregion-tables.pickle")
    if not os.path.isfile(path_to_pickle) or update:
        collect_continents_subregion_tables(confirmation_required=True,
                                            verbose=verbose)
    try:
        subregion_tables = load_pickle(path_to_pickle, verbose=verbose)
        return subregion_tables
    except Exception as e:
        print(e)
Code Example #11
def collect_bbbike_download_catalogue(confirmation_required=True,
                                      verbose=False):
    """
    :param confirmation_required: [bool] (default: True)
    :param verbose: [bool] (default: False)

    Example:
        confirmation_required = True
        verbose               = True
        collect_bbbike_download_catalogue(confirmation_required, verbose)
    """

    # Nested helper: scrape the download catalogue for a single subregion.
    def collect_bbbike_subregion_download_catalogue(subregion_name):
        """
        :param subregion_name: [str]

        Example:
            subregion_name = 'leeds'
            collect_bbbike_subregion_download_catalogue(subregion_name)
        """
        def parse_dlc(dlc):
            dlc_href = dlc.get('href')  # URL
            filename = dlc_href.strip('./')
            download_url = urllib.parse.urljoin(url, dlc_href)
            if not dlc.has_attr('title'):
                file_format, file_size, last_update = 'Poly', None, None
            else:
                if len(dlc.contents) < 3:
                    file_format, file_size = 'Txt', None
                else:
                    file_format, file_size, _ = dlc.contents  # File type and size
                    file_format, file_size = file_format.strip(), file_size.text
                last_update = pd.to_datetime(dlc.get('title'))  # Date and time
            parsed_dat = [
                filename, download_url, file_format, file_size, last_update
            ]
            return parsed_dat

        subregion_name_ = regulate_bbbike_input_subregion_name(subregion_name)
        try:
            print("  \"{}\" ... ".format(subregion_name_),
                  end="") if verbose else ""
            url = 'https://download.bbbike.org/osm/bbbike/{}/'.format(
                subregion_name_)

            source = urllib.request.urlopen(url)

            import bs4
            source_soup = bs4.BeautifulSoup(source, 'lxml')
            download_links_class = source_soup.find_all(
                name='a', attrs={'class': ['download_link', 'small']})

            subregion_downloads_catalogue = pd.DataFrame(
                parse_dlc(x) for x in download_links_class)
            subregion_downloads_catalogue.columns = [
                'Filename', 'URL', 'DataType', 'Size', 'LastUpdate'
            ]

            # path_to_file = cd_dat_bbbike(subregion_name_, subregion_name_ + "-download-catalogue.pickle")
            # save_pickle(subregion_downloads_catalogue, path_to_file, verbose=verbose)
            print("Done. ") if verbose else ""

        except Exception as e_:
            subregion_downloads_catalogue = None
            if verbose:
                print("Failed. {}".format(e_))

        return subregion_downloads_catalogue

    if confirmed("To collect BBBike download dictionary? ",
                 confirmation_required=confirmation_required):
        try:
            bbbike_subregion_names = fetch_bbbike_subregion_catalogue(
                "BBBike-subregion-name-list", verbose=verbose)
            print("Collecting BBBike download catalogue for: "
                  ) if verbose else ""
            download_catalogue = [
                collect_bbbike_subregion_download_catalogue(subregion_name)
                for subregion_name in bbbike_subregion_names
            ]

            sr_name = bbbike_subregion_names[0]
            sr_download_catalogue = download_catalogue[0]

            # Available file formats
            file_fmt = [
                re.sub('{}|CHECKSUM'.format(sr_name), '', f)
                for f in sr_download_catalogue.Filename
            ]
            save_pickle(file_fmt[:-2],
                        cd_dat("BBBike-osm-file-formats.pickle"),
                        verbose=verbose)

            # Available data types
            data_typ = sr_download_catalogue.DataType.tolist()
            save_pickle(data_typ[:-2],
                        cd_dat("BBBike-osm-data-types.pickle"),
                        verbose=verbose)

            # available_file_formats = dict(zip(file_fmt, file_ext))

            downloads_dictionary = dict(
                zip(bbbike_subregion_names, download_catalogue))
            save_pickle(downloads_dictionary,
                        cd_dat("BBBike-download-catalogue.pickle"),
                        verbose=verbose)

        except Exception as e:
            if verbose:
                print("Failed to collect BBBike download dictionary. {}".format(e))
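Example #11 also calls regulate_bbbike_input_subregion_name(), which is not defined here. In pydriosm it resolves a free-form input to the closest official BBBike subregion name; the following is a hypothetical stand-in using difflib (the real helper may use a different fuzzy matcher):

import difflib


def regulate_bbbike_input_subregion_name(subregion_name):
    # Match the input against the scraped name list (see Examples #3/#9).
    names = fetch_bbbike_subregion_catalogue("BBBike-subregion-name-list")
    matches = difflib.get_close_matches(subregion_name, names, n=1, cutoff=0.1)
    assert matches, "No BBBike subregion matches \"{}\".".format(subregion_name)
    return matches[0]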
Code Example #12
def collect_region_subregion_tier(confirmation_required=True, verbose=False):
    """
    :param confirmation_required: [bool] (default: True) whether to confirm before collecting region-subregion tier
    :param verbose: [bool] (default: False)

    Example:
        confirmation_required = True
        verbose               = True
        collect_region_subregion_tier(confirmation_required, verbose)
    """

    # Find out all the regions and their subregions
    def compile_region_subregion_tier(sub_reg_tbls):
        """
        :param sub_reg_tbls: [pd.DataFrame] obtained from fetch_continents_subregion_tables()
        :return: ([dict], [list]) a dictionary of region-subregion, and a list of (sub)regions without subregions
        """
        having_subregions = copy.deepcopy(sub_reg_tbls)
        region_subregion_tiers = copy.deepcopy(sub_reg_tbls)

        non_subregions_list = []
        for k, v in sub_reg_tbls.items():
            if v is not None and isinstance(v, pd.DataFrame):
                region_subregion_tiers = update_nested_dict(
                    sub_reg_tbls, {k: set(v.Subregion)})
            else:
                non_subregions_list.append(k)

        for x in non_subregions_list:
            having_subregions.pop(x)

        having_subregions_temp = copy.deepcopy(having_subregions)

        while having_subregions_temp:

            for region_name, subregion_table in having_subregions.items():
                # Scrape the next level of subregion tables under this region
                subregion_names, subregion_links = subregion_table.Subregion, subregion_table.SubregionURL
                sub_subregion_tables = dict(
                    zip(subregion_names, [
                        get_subregion_table(link) for link in subregion_links
                    ]))

                subregion_index, without_subregion_ = compile_region_subregion_tier(
                    sub_subregion_tables)
                non_subregions_list += without_subregion_

                region_subregion_tiers.update({region_name: subregion_index})

                having_subregions_temp.pop(region_name)

        # 'Russian Federation' appears on both the Asia and Europe pages, so non_subregions_list may contain duplicates
        non_subregions_list = list(
            more_itertools.unique_everseen(non_subregions_list))
        return region_subregion_tiers, non_subregions_list

    if confirmed(
            "To compile a region-subregion tier? (Note that it may take a few minutes.) ",
            confirmation_required=confirmation_required):
        try:
            subregion_tables = fetch_continents_subregion_tables(update=True)
            region_subregion_tier, non_subregions = compile_region_subregion_tier(
                subregion_tables)
            save_pickle(region_subregion_tier,
                        cd_dat("GeoFabrik-region-subregion-tier.pickle"),
                        verbose=verbose)
            save_json(region_subregion_tier,
                      cd_dat("GeoFabrik-region-subregion-tier.json"),
                      verbose=verbose)
            save_pickle(non_subregions,
                        cd_dat("GeoFabrik-non-subregion-list.pickle"),
                        verbose=verbose)
        except Exception as e:
            print("Failed to get the required information ... {}.".format(e))
    else:
        print("The information collection process was not activated.")
Code Example #13
def collect_subregion_info_catalogue(confirmation_required=True,
                                     verbose=False):
    """
    :param confirmation_required: [bool] (default: True) whether to confirm before starting to collect information
    :param verbose: [bool] (default: False)

    Example:
        confirmation_required = True
        verbose               = True
        collect_subregion_info_catalogue(confirmation_required, verbose)
    """
    if confirmed(
            "To collect all available subregion links? (Note that it may take a few minutes.) ",
            confirmation_required=confirmation_required):

        home_url = 'http://download.geofabrik.de/'

        try:
            source = requests.get(home_url)
            soup = bs4.BeautifulSoup(source.text, 'lxml')
            source.close()
            avail_subregions = [
                td.a.text for td in soup.find_all('td', {'class': 'subregion'})
            ]
            avail_subregion_urls = [
                urllib.parse.urljoin(home_url, td.a['href'])
                for td in soup.find_all('td', {'class': 'subregion'})
            ]
            avail_subregion_url_tables = [
                get_subregion_table(sub_url, verbose)
                for sub_url in avail_subregion_urls
            ]
            avail_subregion_url_tables = [
                tbl for tbl in avail_subregion_url_tables if tbl is not None
            ]

            subregion_url_tables = list(avail_subregion_url_tables)

            while subregion_url_tables:

                subregion_url_tables_ = []

                for subregion_url_table in subregion_url_tables:
                    subregions = list(subregion_url_table.Subregion)
                    subregion_urls = list(subregion_url_table.SubregionURL)
                    subregion_url_tables_0 = [
                        get_subregion_table(subregion_url, verbose)
                        for subregion_url in subregion_urls
                    ]
                    subregion_url_tables_ += [
                        tbl for tbl in subregion_url_tables_0
                        if tbl is not None
                    ]

                    # (Note that 'Russian Federation' data is available in both 'Asia' and 'Europe')
                    avail_subregions += subregions
                    avail_subregion_urls += subregion_urls
                    avail_subregion_url_tables += subregion_url_tables_

                subregion_url_tables = list(subregion_url_tables_)

            # Save a list of available subregions locally
            save_pickle(avail_subregions,
                        cd_dat("GeoFabrik-subregion-name-list.pickle"),
                        verbose=verbose)

            # Subregion index - {Subregion: URL}
            subregion_url_index = dict(
                zip(avail_subregions, avail_subregion_urls))
            # Save subregion_index to local disk
            save_pickle(
                subregion_url_index,
                cd_dat("GeoFabrik-subregion-name-url-dictionary.pickle"),
                verbose=verbose)
            save_json(subregion_url_index,
                      cd_dat("GeoFabrik-subregion-name-url-dictionary.json"),
                      verbose=verbose)

            # All available URLs for downloading
            home_subregion_url_table = get_subregion_table(home_url)
            avail_subregion_url_tables.append(home_subregion_url_table)
            subregion_downloads_index = pd.DataFrame(
                pd.concat(avail_subregion_url_tables, ignore_index=True))
            subregion_downloads_index.drop_duplicates(inplace=True)
            subregion_downloads_index_json = subregion_downloads_index.set_index(
                'Subregion').to_json()

            # Save subregion_index_downloads to local disk
            save_pickle(
                subregion_downloads_index,
                cd_dat("GeoFabrik-subregion-downloads-catalogue.pickle"),
                verbose=verbose)
            save_json(subregion_downloads_index_json,
                      cd_dat("GeoFabrik-subregion-downloads-catalogue.json"),
                      verbose=verbose)

        except Exception as e:
            print("Failed to get the required information ... {}.".format(e))

    else:
        print("The information collection process was not activated.")
Code Example #14
def collect_subregion_info_catalogue(confirmation_required=True,
                                     verbose=False):
    """
    :param confirmation_required: [bool] (default: True) whether to confirm before starting to collect information
    :param verbose: [bool] (default: False)

    Example:
        confirmation_required = True
        verbose               = True
        collect_subregion_info_catalogue(confirmation_required, verbose)
    """
    if confirmed(
            "To collect all available subregion links? (Note that it may take a few minutes.) ",
            confirmation_required=confirmation_required):

        home_url = 'http://download.geofabrik.de/'

        try:
            source = requests.get(home_url)
            soup = bs4.BeautifulSoup(source.text, 'lxml')
            source.close()
            # avail_subregions = [td.a.text for td in soup.find_all('td', {'class': 'subregion'})]
            subregion_href = soup.find_all('td', {'class': 'subregion'})
            avail_subregion_urls = (
                urllib.parse.urljoin(home_url, td.a['href'])
                for td in subregion_href)
            avail_subregion_url_tables_0 = (
                get_subregion_table(sub_url, verbose)
                for sub_url in avail_subregion_urls)
            avail_subregion_url_tables = [
                tbl for tbl in avail_subregion_url_tables_0 if tbl is not None
            ]

            subregion_url_tables = list(avail_subregion_url_tables)

            while subregion_url_tables:

                subregion_url_tables_ = []

                for subregion_url_table in subregion_url_tables:
                    # subregions = list(subregion_url_table.Subregion)
                    subregion_urls = list(subregion_url_table.SubregionURL)
                    subregion_url_tables_0 = [
                        get_subregion_table(sr_url, verbose)
                        for sr_url in subregion_urls
                    ]
                    subregion_url_tables_ += [
                        tbl for tbl in subregion_url_tables_0
                        if tbl is not None
                    ]

                    # (Note that 'Russian Federation' data is available in both 'Asia' and 'Europe')
                    # avail_subregions += subregions
                    # avail_subregion_urls += subregion_urls
                    avail_subregion_url_tables += subregion_url_tables_

                subregion_url_tables = list(subregion_url_tables_)

            # All available URLs for downloading
            home_subregion_url_table = get_subregion_table(home_url)
            avail_subregion_url_tables.append(home_subregion_url_table)
            subregion_downloads_index = pd.DataFrame(
                pd.concat(avail_subregion_url_tables, ignore_index=True))
            subregion_downloads_index.drop_duplicates(inplace=True)

            duplicated = subregion_downloads_index[
                subregion_downloads_index.Subregion.duplicated(keep=False)]
            if not duplicated.empty:
                import humanfriendly
                # Compare duplicates pairwise and drop the smaller .osm.pbf of each pair
                for i in range(0, len(duplicated), 2):
                    temp = duplicated.iloc[i:i + 2]
                    size = temp['.osm.pbf_Size'].map(
                        lambda x: humanfriendly.parse_size(
                            x.strip('(').strip(')').replace('\xa0', ' ')))
                    idx = size[size == size.min()].index
                    subregion_downloads_index.drop(idx, inplace=True)
                subregion_downloads_index.index = range(
                    len(subregion_downloads_index))

            subregion_downloads_index_json = subregion_downloads_index.set_index(
                'Subregion').to_json()

            # Save subregion_index_downloads to local disk
            save_pickle(
                subregion_downloads_index,
                cd_dat("GeoFabrik-subregion-downloads-catalogue.pickle"),
                verbose=verbose)
            save_json(subregion_downloads_index_json,
                      cd_dat("GeoFabrik-subregion-downloads-catalogue.json"),
                      verbose=verbose)

            avail_subregions = list(subregion_downloads_index.Subregion)
            avail_subregion_urls = list(subregion_downloads_index.SubregionURL)

            # Subregion index - {Subregion: URL}
            subregion_url_index = dict(
                zip(avail_subregions, avail_subregion_urls))

            # Save a list of available subregions locally
            save_pickle(avail_subregions,
                        cd_dat("GeoFabrik-subregion-name-list.pickle"),
                        verbose=verbose)
            # Save subregion_index to local disk
            save_pickle(
                subregion_url_index,
                cd_dat("GeoFabrik-subregion-name-url-dictionary.pickle"),
                verbose=verbose)
            save_json(subregion_url_index,
                      cd_dat("GeoFabrik-subregion-name-url-dictionary.json"),
                      verbose=verbose)

        except Exception as e:
            print("Failed to get the required information ... {}.".format(e))

    else:
        print("The information collection process was not activated.")