Example #1
def parse_regions(database: SqliteUtil, regions_file: str, src_epsg: int,
                  prj_epsg: int):

    log.info('Allocating tables for regions.')
    create_tables(database)

    transformer = Transformer.from_crs(f'epsg:{src_epsg}',
                                       f'epsg:{prj_epsg}',
                                       always_xy=True,
                                       skip_equivalent=True)
    project = transformer.transform

    log.info('Parsing regions from shapefile.')
    parser = shapefile.Reader(regions_file)
    iter_regions = counter(iter(parser), 'Parsing region %s.')
    regions = []
    for item in iter_regions:
        points = (project(*point) for point in item.shape.points)
        polygon = Polygon(points)

        regions.append(
            (item.record.MAZ_ID_10, item.record.TAZ_2015, item.record.Sq_miles,
             dumps(polygon.centroid), dumps(polygon)))

    parser.close()

    log.info('Writing parsed regions to database.')
    database.insert_values('regions', regions, 5)
    database.connection.commit()

    log.info('Creating indexes on new tables.')
    create_indexes(database)
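
A minimal invocation sketch for parse_regions above; the shapefile path and EPSG codes below are illustrative assumptions, not values taken from the source.

# Hypothetical usage of parse_regions (Example #1); the path and EPSG codes
# are assumptions for illustration only.
database = SqliteUtil('database.db')
parse_regions(database, 'regions/maz_regions.shp', src_epsg=4326, prj_epsg=2223)
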
Example #2
def create_tables(database: SqliteUtil):
    database.drop_table('nodes', 'links')
    query = '''
        CREATE TABLE nodes(
            node_id VARCHAR(255),
            maz SMALLINT UNSIGNED,
            point VARCHAR(255)
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE links(
            link_id VARCHAR(255),
            source_node VARCHAR(255),
            terminal_node VARCHAR(255),
            length FLOAT,
            freespeed FLOAT,
            capacity FLOAT,
            permlanes FLOAT,
            oneway TINYINT UNSIGNED,
            modes VARCHAR(255),
            air_temperature INT UNSIGNED,
            mrt_temperature INT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #3
def create_tables(database: SqliteUtil):
    database.drop_table('air_temperatures', 'temp_links', 'temp_parcels',
                        'temp_links_merged', 'temp_parcels_merged')
    query = '''
        CREATE TABLE air_temperatures(
            temperature_id MEDIUMINT UNSIGNED,
            temperature_idx SMALLINT UNSIGNED,
            time MEDIUMINT UNSIGNED,
            temperature FLOAT
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_links(
            link_id VARCHAR(255),
            air_temperature MEDIUMINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_parcels(
            apn VARCHAR(255),
            air_temperature MEDIUMINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #4
def create_tables(database: SqliteUtil):
    database.drop_table('regions')
    query = '''
        CREATE TABLE regions(
            maz SMALLINT UNSIGNED,
            taz SMALLINT UNSIGNED,
            area FLOAT,
            center VARCHAR(255),
            region TEXT
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #5
def main():
    parser = ArgumentParser('mrt temperature visualizer')
    
    parser.add_argument('--dir', type=str, dest='dir', default='.',
        help='path to directory containing Icarus run data')
    parser.add_argument('--log', type=str, dest='log', default=None,
        help='path to file to save the process log; not saved by default')
    parser.add_argument('--level', type=str, dest='level', default='info',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'),
        help='verbosity of the process log')

    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers
    )

    database = SqliteUtil('database.db', readonly=True)
    kind = 'mrt'

    map_mrt_temperature(database, kind)
Example #6
def load_links(database: SqliteUtil):
    query = '''
        SELECT
            links.link_id,
            links.mrt_temperature,
            nodes1.point AS source_point,
            nodes2.point AS terminal_point
        FROM links
        INNER JOIN nodes AS nodes1
        ON links.source_node = nodes1.node_id
        INNER JOIN nodes AS nodes2
        ON links.terminal_node = nodes2.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    # keep only links whose coordinates fall entirely within a fixed
    # bounding box (values are in projected map units)
    bounds = lambda x, y: min(x) > 0.5e6 and max(x) < 0.85e6 and \
        min(y) > 0.8e6 and max(y) < 1.0e6

    links = []
    for link_id, profile, src_pt, term_pt in rows:
        line = LineString((xy(src_pt), xy(term_pt)))
        x, y = line.coords.xy
        if bounds(x, y):
            link = Link(link_id, line, profile)
            links.append(link)
    
    return links
Example #7
def load_temperatures(database: SqliteUtil, kind: str, 
            max_idx: int, min_idx: int):
    query = f'''
        SELECT
            mrt_temperatures.temperature_id,
            mrt_temperatures.temperature_idx,
            mrt_temperatures.{kind},
            COUNT(*) AS util
        FROM mrt_temperatures
        INNER JOIN links
        ON links.mrt_temperature = mrt_temperatures.temperature_id
        INNER JOIN output_events
        ON output_events.link_id = links.link_id
        -- match each event to the 15-minute (900 s) interval given by temperature_idx
        WHERE output_events.start >= mrt_temperatures.temperature_idx * 900
        AND output_events.end < mrt_temperatures.temperature_idx * 900 + 900
        GROUP BY temperature_id, temperature_idx;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading temperature profile %s.')

    temps = defaultdict(lambda: [None] * (max_idx - min_idx + 1))
    for uuid, idx, temp, util in rows:
        if util > 0:
            temps[uuid][idx - min_idx] = temp

    return temps
Example #8
def create_tables(database: SqliteUtil):
    database.drop_table('parcels')
    query = '''
        CREATE TABLE parcels(
            apn VARCHAR(255),
            maz SMALLINT UNSIGNED,
            type VARCHAR(255),
            cooling TINYINT UNSIGNED,
            air_temperature INT UNSIGNED,
            mrt_temperature INT UNSIGNED,
            center VARCHAR(255),
            region TEXT
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #9
def complete(database: SqliteUtil):
    tables = ('nodes', 'links')
    exists = database.table_exists(*tables)
    if len(exists):
        present = ', '.join(exists)
        log.info(f'Found tables {present} already in database.')
    return len(exists) > 0
Example #10
def complete(database: SqliteUtil):
    done = False
    exists = database.table_exists('parcels')
    if len(exists):
        log.warning('Database already has table parcels.')
        done = True

    return done
Example #11
def main():
    parser = ArgumentParser('mag abm parser')
    parser.add_argument('--folder', type=str, dest='folder', default='.')
    parser.add_argument('--log', type=str, dest='log', default=None)
    parser.add_argument('--level',
                        type=str,
                        dest='level',
                        default='info',
                        choices=('notset', 'debug', 'info', 'warning', 'error',
                                 'critical'))
    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

    path = lambda x: os.path.abspath(os.path.join(args.folder, x))
    home = path('')

    log.info('Running MAG ABM parsing tool.')
    log.info(f'Loading run data from {home}.')

    database = SqliteUtil(path('database.db'))
    config = ConfigUtil.load_config(path('config.json'))

    trips_file = config['population']['trips_file']
    persons_file = config['population']['persons_file']
    households_file = config['population']['households_file']

    if not ready(trips_file, households_file, persons_file):
        log.warning('Dependent data not parsed or generated.')
        exit(1)
    elif complete(database):
        log.warning(
            'Population data already parsed. Would you like to replace it? [Y/n]'
        )
        if input().lower() not in ('y', 'yes', 'yeet'):
            log.info(
                'User chose to keep existing population data; exiting parsing tool.'
            )
            exit()

    try:
        log.info('Starting population parsing.')
        parse_abm(database, trips_file, households_file, persons_file)
    except:
        log.exception('Critical error while parsing population; '
                      'terminating process and exiting.')
        exit(1)
Example #12
def parse_abm(database: SqliteUtil, trips_file: str, households_file: str,
              persons_file: str):
    log.info('Allocating tables for households, persons and trips.')
    create_tables(database)

    log.info('Parsing households.')
    households = load_households(households_file)
    database.insert_values('households', households, 18)
    database.connection.commit()
    del households

    log.info('Parsing persons.')
    persons = load_persons(persons_file)
    database.insert_values('persons', persons, 39)
    database.connection.commit()
    del persons

    log.info('Parsing trips.')
    trips = load_trips(trips_file)
    database.insert_values('trips', trips, 18)
    database.connection.commit()
    del trips

    log.info('Creating indexes on new tables.')
    create_indexes(database)
Example #13
def load_extrema(database: SqliteUtil, kind: str):
    query = f'''
        SELECT
            max({kind}),
            min({kind}),
            max(temperature_idx),
            min(temperature_idx)
        FROM mrt_temperatures;
    '''
    database.cursor.execute(query)
    max_temp, min_temp, max_idx, min_idx = next(database.fetch_rows())

    return max_temp, min_temp, max_idx, min_idx
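
A hedged sketch of how load_extrema pairs with load_temperatures from Example #7; using 'mrt' as the column/kind name follows Example #5 and is an assumption here.

# Hypothetical pairing of load_extrema (Example #13) with load_temperatures
# (Example #7); 'mrt' as the column name is an assumption.
kind = 'mrt'
max_temp, min_temp, max_idx, min_idx = load_extrema(database, kind)
temps = load_temperatures(database, kind, max_idx, min_idx)
# temps maps each temperature_id to a list of length (max_idx - min_idx + 1)
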
Example #14
def main():
    parser = ArgumentParser('mrt temperature parser', add_help=False)
    
    parser.add_argument('--help', action='help', default=SUPPRESS,
        help='show this help menu and exit process')
    parser.add_argument('--dir', type=str, dest='dir', default='.',
        help='path to directory containing Icarus run data')
    parser.add_argument('--log', type=str, dest='log', default=None,
        help='path to file to save the process log; not saved by default')
    parser.add_argument('--level', type=str, dest='level', default='info',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'),
        help='verbosity of the process log')

    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers
    )

    path = lambda x: os.path.abspath(os.path.join(args.dir, x))
    home = path('')

    log.info('Running mrt temperature parsing tool.')
    log.info(f'Loading run data from {home}.')

    config = ConfigUtil.load_config(path('config.json'))
    database = SqliteUtil(path('database.db'))

    mrt_path = config['network']['exposure']['mrt_dir']

    try:
        log.info('Starting mrt temperature parsing.')
        parse_mrt(
            database,
            mrt_path,
            src_epsg=4326,
            prj_epsg=2223,
            bounds=50,
            steps=96
        )
    except:
        log.exception('Critical error while running mrt temperature '
            'parsing; cleaning up and terminating.')
Example #15
def ready(database: SqliteUtil, residence_file: str, commerce_file: str,
          parcel_file: str):
    ready = True
    exists = database.table_exists('regions')
    if not len(exists):
        log.warning('Database is missing table regions.')
        ready = False
    files = (residence_file, commerce_file, parcel_file)
    for f in files:
        exists = os.path.exists(f)
        if not exists:
            log.warning(f'Could not open file {f}.')
            ready = False

    return ready
Example #16
def ready(database: SqliteUtil, tmin_files: List[str], tmax_files: List[str]):
    ready = True
    for t_file in tmin_files + tmax_files:
        exists = os.path.exists(t_file)
        if not exists:
            log.warning(f'Could not open file {t_file}.')
            ready = False
    tables = ('parcels', 'links', 'nodes')
    exists = database.table_exists(*tables)
    missing = set(tables) - set(exists)
    for table in missing:
        log.warning(f'Database is missing table {table}.')
        ready = False

    return ready
Example #17
def main(database: SqliteUtil):

    query = '''
        SELECT
            households.hhIncomeDollar,
            output_agents.exposure
        FROM households
        INNER JOIN agents
        ON households.hhid = agents.household_id
        INNER JOIN output_agents
        ON agents.agent_id = output_agents.agent_id
        WHERE households.hhIncomeDollar < 500000;
    '''

    database.cursor.execute(query)
    persons = database.fetch_rows()
    total = {}
    data = []

    for income, exposure in persons:
        # bucket household income into $10,000 bins
        bucket = (income // 10000) * 10000
        if bucket in total:
            total[bucket][0] += exposure
            total[bucket][1] += 1
        else:
            total[bucket] = [exposure, 1]

    for income, (exposure_sum, count) in total.items():
        data.append((income, exposure_sum / count))

    data = pd.DataFrame(data,
                        columns=('household income (USD)',
                                 'average exposure (°C·sec)'))
    axes = sns.barplot(x=data['household income (USD)'],
                       y=data['average exposure (°C·sec)'],
                       color='royalblue')
    axes.set_title('Exposure By Income')

    for ind, label in enumerate(axes.get_xticklabels()):
        if ind % 10 == 0:
            label.set_visible(True)
        else:
            label.set_visible(False)

    plot = axes.get_figure()

    plot.savefig('result/income_exposure.png', bbox_inches='tight')
    plot.clf()
Example #18
def load_regions(database: SqliteUtil):
    query = '''
        SELECT
            maz,
            region
        FROM regions;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading region %s.')

    regions = []
    for maz, polygon in rows:
        region = Region(maz, polygon)
        regions.append(region)

    return regions
Example #19
def load_nodes(database: SqliteUtil) -> Dict[str,Node]:
    query = '''
        SELECT
            node_id,
            point
        FROM nodes;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading node %s.')

    nodes: Dict[str,Node] = {}
    for uuid, point in rows:
        x, y = xy(point)
        node = Node(uuid, x, y)
        nodes[uuid] = node
    
    return nodes
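
A hedged sketch showing how the dictionary returned by load_nodes feeds load_links from Example #24; the call order is an assumption based on the signatures shown.

# Hypothetical chaining of load_nodes (Example #19) and load_links (Example #24).
nodes = load_nodes(database)
links = load_links(database, nodes)
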
Example #20
def complete(database: SqliteUtil):
    done = False
    exists = database.table_exists('air_temperatures')
    if len(exists):
        log.warning('Database already has table air_temperatures.')
        done = True
    null, nnull = null_count(database, 'links', 'air_temperature')
    if nnull > 0:
        log.warning(f'Found {nnull}/{null} links with/without air '
                    'temperature profiles.')
        done = True
    null, nnull = null_count(database, 'parcels', 'air_temperature')
    if nnull > 0:
        log.warning(f'Found {nnull}/{null} parcels with/without air '
                    'temperature profiles.')
        done = True

    return done
Example #21
def load_parcels(database: SqliteUtil):
    query = '''
        SELECT
            apn,
            type,
            cooling,
            center
        FROM parcels;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading parcel %s.')

    parcels = []
    for apn, kind, cooling, center in rows:
        x, y = xy(center)
        parcel = Parcel(apn, kind, bool(cooling), x, y)
        parcels.append(parcel)

    return parcels
Example #22
def load_links(database: SqliteUtil):
    query = '''
        SELECT
            links.link_id,
            nodes.point
        FROM links
        INNER JOIN nodes
        ON links.source_node = nodes.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    links = []
    for link_id, point in rows:
        x, y = xy(point)
        link = Link(link_id, x, y)
        links.append(link)

    return links
Example #23
def main(database: SqliteUtil):
    query = '''
        SELECT
            agents.agent_id,
            persons.age,
            output_agents.exposure
        FROM persons
        INNER JOIN agents
        ON persons.hhid = agents.household_id
        AND persons.pnum = agents.household_idx
        INNER JOIN output_agents
        ON agents.agent_id = output_agents.agent_id;
    '''

    database.cursor.execute(query)
    persons = database.fetch_rows()
    total = {}
    data = []

    for _, age, exposure in persons:
        # bucket age into five-year bins
        bucket = (age // 5) * 5
        if bucket in total:
            total[bucket][0] += exposure
            total[bucket][1] += 1
        else:
            total[bucket] = [exposure, 1]

    for age, (exposure_sum, count) in total.items():
        data.append((age, exposure_sum / count))

    data = pd.DataFrame(data,
                        columns=('age (years)', 'average exposure (°C·sec)'))
    axes = sns.barplot(x=data['age (years)'],
                       y=data['average exposure (°C·sec)'],
                       color='royalblue')
    axes.set_title('Exposure By Age')
    plot = axes.get_figure()

    plot.savefig('result/age_exposure.png', bbox_inches='tight')
    plot.clf()
Example #24
def load_links(database: SqliteUtil, nodes: Dict[str,Node]) -> Dict[str,Link]:
    query = '''
        SELECT
            link_id,
            source_node,
            terminal_node,
            freespeed,
            permlanes
        FROM links;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading link %s.')

    links: Dict[str,Link] = {}
    for uuid, src, term, speed, lanes in rows:
        source_node = nodes[src]
        terminal_node = nodes[term]
        link = Link(uuid, source_node, terminal_node, lanes, speed)
        links[uuid] = link

    return links
Example #25
def null_count(database: SqliteUtil, table: str, col: str):
    query = f'''
        SELECT
            CASE 
                WHEN {col} IS NULL 
                THEN 0 ELSE 1 
                END AS valid,
            COUNT(*) AS freq
        FROM {table}
        GROUP BY valid
        ORDER BY valid ASC;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()

    null, nnull = 0, 0
    for value, freq in rows:
        if value == 0:
            null = freq
        elif value == 1:
            nnull = freq

    return null, nnull
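
A brief usage sketch for null_count, mirroring how Example #20 consumes it; the table and column names below are taken from that example.

# null counts rows where the column is NULL; nnull counts the non-NULL rows.
null, nnull = null_count(database, 'links', 'air_temperature')
if nnull > 0:
    log.warning(f'Found {nnull}/{null} links with/without air '
                'temperature profiles.')
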
Example #26
handlers.append(log.StreamHandler())
if args.log is not None:
    handlers.append(log.FileHandler(args.log, 'w'))
log.basicConfig(
    format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
    level=getattr(log, args.level.upper()),
    handlers=handlers)

path = lambda x: os.path.abspath(os.path.join(args.folder, x))
home = path('')
config = ConfigUtil.load_config(path('config.json'))

log.info('Running daymet exposure analysis tool.')
log.info(f'Loading run data from {home}.')

database = SqliteUtil(path('database.db'))
exposure = Exposure(database)

if not exposure.ready():
    log.error('Dependent data not parsed or generated; see warnings.')
    exit(1)
elif exposure.complete():
    log.warning(
        'Exposure analysis already run. Would you like to run it again? [Y/n]')
    if input().lower() not in ('y', 'yes', 'yeet'):
        log.info(
            'User chose to keep existing exposure analysis; exiting analysis tool.'
        )
        exit()

try:
Example #27
    ax = plt.subplot(111)
    ax.set_title('Maricopa County', fontsize=14)

    graph = nx.Graph()

    print('Adding nodes to the graph.')

    for node_id, point in nodes:
        x, y = xy(point)
        graph.add_node(node_id, pos=(x, y))

    print('Adding links to the graph.')

    for _, source_node, terminal_node in links:
        graph.add_edge(source_node, terminal_node)

    print('Drawing the graph.')

    pos = nx.get_node_attributes(graph, 'pos')
    nx.draw_networkx(graph, pos=pos, ax=ax, with_labels=False, node_size=0)

    print('Saving the graph.')

    plt.tight_layout()
    plt.savefig('result/network_usage.png', dpi=600)


if __name__ == '__main__':
    database = SqliteUtil('database.db')
    map_network_usage(database)
Example #28
def export_links(database: SqliteUtil, filepath: str, src_epsg: int,
                 prj_epsg: int):

    transformer = Transformer.from_crs(f'epsg:{src_epsg}',
                                       f'epsg:{prj_epsg}',
                                       always_xy=True,
                                       skip_equivalent=True)
    project = transformer.transform

    prjpath = os.path.splitext(filepath)[0] + '.prj'
    with open(prjpath, 'w') as prjfile:
        info = get_wkt_string(prj_epsg)
        prjfile.write(info)

    query = '''
        SELECT
            links.link_id,
            links.source_node,
            links.terminal_node,
            links.length,
            links.freespeed,
            links.capacity,
            links.permlanes,
            links.oneway,
            links.modes,
            links.air_temperature,
            links.mrt_temperature,
            nodes1.point,
            nodes2.point
        FROM links
        INNER JOIN nodes AS nodes1
        ON links.source_node = nodes1.node_id
        INNER JOIN nodes AS nodes2
        ON links.terminal_node = nodes2.node_id;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Exporting link %s.')

    links = shapefile.Writer(filepath)
    links.field('link_id', 'C')
    links.field('source_node', 'C')
    links.field('terminal_node', 'C')
    links.field('length', 'N')
    links.field('freespeed', 'N')
    links.field('capacity', 'N')
    links.field('permlanes', 'N')
    links.field('oneway', 'N')
    links.field('modes', 'C')
    links.field('air_temperature', 'N')
    links.field('mrt_temperature', 'N')

    for row in rows:
        props = row[:-2]
        pt1, pt2 = row[-2:]
        x1, y1 = project(*xy(pt1))
        x2, y2 = project(*xy(pt2))

        try:
            links.record(*props)
        except Exception:
            log.exception(f'Failed to write record for link {props[0]}.')
            raise
        links.line([((x1, y1), (x2, y2))])

    if links.recNum != links.shpNum:
        log.error('Record/shape misalignment; shapefile exporting failure.')
        raise RuntimeError('record/shape count mismatch during shapefile export')

    links.close()
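
A hedged sketch that reads back the shapefile written by export_links using pyshp, just to confirm the fields and record count; the output path is an illustrative assumption.

# Hypothetical verification of the exported shapefile; 'links.shp' is assumed.
reader = shapefile.Reader('links.shp')
print([field[0] for field in reader.fields[1:]])  # field names (skip DeletionFlag)
print(reader.numRecords)                          # number of exported links
reader.close()
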
Example #29
def main():
    parser = ArgumentParser()
    main = parser.add_argument_group('main')
    main.add_argument('file',
                      type=str,
                      help='file path to save the exported routes to')
    main.add_argument('--epsg',
                      dest='epsg',
                      type=int,
                      default=2223,
                      help='epsg system to convert routes to; default is 2223')

    common = parser.add_argument_group('common')
    common.add_argument(
        '--folder',
        type=str,
        dest='folder',
        default='.',
        help='file path to the directory containing Icarus run data'
        '; default is the working directory')
    common.add_argument(
        '--log',
        type=str,
        dest='log',
        default=None,
        help=
        'file path to save the process log; by default the log is not saved')
    common.add_argument(
        '--level',
        type=str,
        dest='level',
        default='info',
        help='verbosity level of the process log; default is "info"',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'))
    common.add_argument(
        '--replace',
        dest='replace',
        action='store_true',
        default=False,
        help='automatically replace existing data; do not prompt the user')
    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

    path = lambda x: os.path.abspath(os.path.join(args.folder, x))
    home = path('')

    log.info('Running link export tool.')
    log.info(f'Loading run data from {home}.')

    database = SqliteUtil(path('database.db'), readonly=True)

    try:
        export_links(database, args.file, 2223, args.epsg)
    except:
        log.exception('Critical error while exporting links:')
        exit(1)

    database.close()
Example #30
    level=getattr(log, args.level.upper()),
    handlers=handlers)

path = lambda x: os.path.abspath(os.path.join(args.folder, x))
home = path('')

config = ConfigUtil.load_config(path('config.json'))
modes = config['simulation']['modes']
conditions = config['simulation']['sample']
planspath = path('input/plans.xml.gz')
vehiclespath = path('input/vehicles.xml.gz')

log.info('Running population generation tool.')
log.info(f'Loading run data from {home}.')

database = SqliteUtil(path('database.db'))
plans = Plans(database)

if not plans.ready():
    log.warning('Dependent data not parsed or generated.')
    log.warning('Input plans depend on population generation.')
    exit(1)
elif plans.complete(planspath, vehiclespath):
    log.warning(
        'Input plans already generated. Would you like to replace it? [Y/n]')
    if input().lower() not in ('y', 'yes', 'yeet'):
        log.info('User chose to keep existing plan; exiting generation tool.')
        exit()

try:
    log.info('Starting population generation.')