def get_detail_xml_files(url, logger):
    """Download the detail XML file for every participating county.

    Args:
        url: Clarity elections results URL for the election summary page.
        logger: Configured ``logging.Logger`` used for progress/error output.

    Side effects: one file downloaded per county (via
    ``helpers.download_detail_xml_file``); counties with no detail XML URL
    are logged as errors and skipped.
    """
    logger.info(f'Starting to process URL: {url}')
    participating_county_urls = helpers.get_participating_counties(url, 10, logger) # noqa
    num_counties = len(participating_county_urls)
    for counter, county_url in enumerate(participating_county_urls, start=1):
        logger.info(f'Extracting {counter} of {num_counties} counties')
        logger.info(county_url)
        # County slug is the 4th path segment of the county results URL.
        # NOTE(review): assumes the path always has >= 4 segments — confirm
        # against helpers.get_participating_counties output.
        county = urlparse(county_url).path.split('/')[3]
        logger.info(f'Looking for detail xml for {county} county')
        # Keep the detail URL in its own variable instead of rebinding the
        # `url` parameter (the original shadowed it, which was confusing).
        detail_url = helpers.get_county_detail_xml_urls(county_url, 10, logger)
        if not detail_url:
            logger.error(f'No detail XML URL found for {county} county')
        else:
            helpers.download_detail_xml_file(detail_url, county, logger)


if __name__ == '__main__':
    # Run relative to the directory containing this script so the
    # downloaded files land next to it regardless of the caller's cwd.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    os.chdir(script_dir)

    logger = helpers.setup_logger_stdout('get_detail_xml_files')

    url = 'https://results.enr.clarityelections.com/GA/105369/web.264614/#/access-to-races' # noqa

    get_detail_xml_files(url, logger)
                            'votes': votes
                        }
                    )
    write_results(results)


def write_results(results):
    """Bulk-insert *results* into the database and commit.

    Args:
        results: Sequence of parameter tuples/dicts matching ``INSERT_SQL``.

    The connection is closed in a ``finally`` block so it is not leaked
    when ``executemany`` or ``commit`` raises (the original leaked it on
    any exception).
    """
    conn = create_connection()
    try:
        conn.cursor().executemany(INSERT_SQL, results)
        conn.commit()
    finally:
        conn.close()


if __name__ == '__main__':
    # Run relative to the directory containing this script so the glob
    # below finds the previously downloaded detail XML files.
    abspath = os.path.abspath(__file__)
    dname = os.path.dirname(abspath)
    os.chdir(dname)

    logger = helpers.setup_logger_stdout('process_detail_xml_file')

    files = glob.glob('detail_*.xml')

    num_files = len(files)
    # Fixed log wording: original read 'Found N to process' (missing noun).
    logger.info(f'Found {num_files} files to process')
    # enumerate replaces the manual counter; xml_file avoids shadowing the
    # legacy `file` name.
    for counter, xml_file in enumerate(files, start=1):
        logger.info(f'Processing file {counter} of {num_files} files')
        process_detail_xml_file(xml_file, logger)