Example #1
    def validate_config(self, configpath, specspath):
        config = ConfigUtil.load_config(configpath)
        specs = ConfigUtil.load_specs(specspath)
        config = ConfigUtil.verify_config(specs, config)

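        # 86400 is the number of seconds in one day; the step count must
        # divide the day into a whole number of steps.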
        steps = config['run']['steps']
        if 86400 % steps:
            raise ValueError(
                'Parameter "run.steps" must divide 86400 but '
                f'found {steps} (86400 % {steps} = {86400 % steps}).')

        if len(config['run']['tmin']) != len(config['run']['tmax']):
            raise RuntimeError(
                'The number of "tmin" and "tmax" files must match.')

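        # Pair each tmax file with its tmin file; the gridded variables in
        # each pair must have matching shapes.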
        files = zip(config['run']['tmax'], config['run']['tmin'])
        for tmax_file, tmin_file in files:
            with Dataset(tmax_file, 'r') as tmaxnc, \
                    Dataset(tmin_file, 'r') as tminnc:
                if tmaxnc.variables['tmax'].shape != \
                        tminnc.variables['tmin'].shape:
                    raise RuntimeError(
                        f'Tmax file "{tmax_file}" dimension does not '
                        f'match with that of tmin file "{tmin_file}".')

        return config
Example #2
def main():
    parser = ArgumentParser('mag abm parser')
    parser.add_argument('--folder', type=str, dest='folder', default='.')
    parser.add_argument('--log', type=str, dest='log', default=None)
    parser.add_argument('--level',
                        type=str,
                        dest='level',
                        default='info',
                        choices=('notset', 'debug', 'info', 'warning', 'error',
                                 'critical'))
    args = parser.parse_args()

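    # Always log to the console; additionally log to a file when --log is set.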
    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

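    # Build absolute paths relative to the --folder argument.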
    path = lambda x: os.path.abspath(os.path.join(args.folder, x))
    home = path('')

    log.info('Running MAG ABM parsing tool.')
    log.info(f'Loading run data from {home}.')

    database = SqliteUtil(path('database.db'))
    config = ConfigUtil.load_config(path('config.json'))

    trips_file = config['population']['trips_file']
    persons_file = config['population']['persons_file']
    households_file = config['population']['households_file']

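    # Refuse to run until the upstream ABM files are available; if population
    # data was already parsed, ask the user before replacing it.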
    if not ready(trips_file, households_file, persons_file):
        log.warning('Dependent data not parsed or generated.')
        exit(1)
    elif complete(database):
        log.warning(
            'Population data already parsed. Would you like to replace it? [Y/n]'
        )
        if input().lower() not in ('y', 'yes', 'yeet'):
            log.info(
                'User chose to keep existing population data; exiting parsing tool.'
            )
            exit()

    try:
        log.info('Starting population parsing.')
        parse_abm(database, trips_file, households_file, persons_file)
    except Exception:
        log.exception('Critical error while parsing population; '
                      'terminating process and exiting.')
        exit(1)
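
The `ready` and `complete` helpers used in this example are not part of the
excerpt. The sketch below shows one plausible shape for them, assuming that
`ready` only needs to confirm the input files exist and that `SqliteUtil`
exposes a `table_exists` method; both assumptions are illustrative, not taken
from the source.

import os
import logging as log


def ready(*files):
    # Hypothetical: parsing is ready once every required input file exists.
    missing = [f for f in files if not os.path.exists(f)]
    for f in missing:
        log.warning(f'Could not find input file "{f}".')
    return len(missing) == 0


def complete(database):
    # Hypothetical: population data counts as parsed when any of the
    # expected tables already exists; "table_exists" is an assumed helper
    # on SqliteUtil, not a confirmed part of its API.
    tables = ('households', 'persons', 'trips')
    return any(database.table_exists(table) for table in tables)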
Example #3
def main():
    parser = ArgumentParser('mrt temperature parser', add_help=False)
    
    parser.add_argument('--help', action='help', default=SUPPRESS,
        help='show this help menu and exit process')
    parser.add_argument('--dir', type=str, dest='dir', default='.',
        help='path to directory containing Icarus run data')
    parser.add_argument('--log', type=str, dest='log', default=None,
        help='path to file to save the process log; not saved by default')
    parser.add_argument('--level', type=str, dest='level', default='info',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'),
        help='verbosity of the process log')

    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers
    )

    path = lambda x: os.path.abspath(os.path.join(args.dir, x))
    home = path('')

    log.info('Running mrt temperature parsing tool.')
    log.info(f'Loading run data from {home}.')

    config = ConfigUtil.load_config(path('config.json'))
    database = SqliteUtil(path('database.db'))

    mrt_dir = config['network']['exposure']['mrt_dir']

    try:
        log.info('Starting mrt temperature parsing.')
        parse_mrt(
            database,
            mrt_dir,
            src_epsg=4326,  # source coordinates are WGS 84
            prj_epsg=2223,  # projected to NAD 83 / Arizona Central (ft)
            bounds=50,
            steps=96
        )
    except Exception:
        log.exception('Critical error while running mrt temperature '
            'parsing; cleaning up and terminating.')
        exit(1)
Example #4
                    choices=('notset', 'debug', 'info', 'warning', 'error',
                             'critical'))
args = parser.parse_args()

handlers = []
handlers.append(log.StreamHandler())
if args.log is not None:
    handlers.append(log.FileHandler(args.log, 'w'))
log.basicConfig(
    format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
    level=getattr(log, args.level.upper()),
    handlers=handlers)

path = lambda x: os.path.abspath(os.path.join(args.folder, x))
home = path('')
config = ConfigUtil.load_config(path('config.json'))

log.info('Running daymet exposure analysis tool.')
log.info(f'Loading run data from {home}.')

database = SqliteUtil(path('database.db'))
exposure = Exposure(database)

if not exposure.ready():
    log.error('Dependent data not parsed or generated; see warnings.')
    exit(1)
elif exposure.complete():
    log.warning(
        'Exposure analysis already run. Would you like to run it again? [Y/n]')
    if input().lower() not in ('y', 'yes', 'yeet'):
        log.info(
Example #5
    def validate_config(self, configpath, specspath):
        config = ConfigUtil.load_config(configpath)
        specs = ConfigUtil.load_specs(specspath)
        config = ConfigUtil.verify_config(specs, config)

        return config
Example #6
def main():
    parser = ArgumentParser('daymet air temperature parser')

    parser.add_argument('--dir',
                        type=str,
                        dest='dir',
                        default='.',
                        help='path to directory containing Icarus run data')
    parser.add_argument(
        '--log',
        type=str,
        dest='log',
        default=None,
        help='path to file to save the process log; not saved by default')
    parser.add_argument('--level',
                        type=str,
                        dest='level',
                        default='info',
                        choices=('notset', 'debug', 'info', 'warning', 'error',
                                 'critical'),
                        help='verbosity of the process log')

    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

    path = lambda x: os.path.abspath(os.path.join(args.dir, x))
    home = path('')

    config = ConfigUtil.load_config(path('config.json'))
    database = SqliteUtil(path('database.db'))

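    # Exposure inputs: daymet tmin/tmax files plus the day and number of
    # steps to parse temperatures for.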
    tmin_files = config['network']['exposure']['tmin_files']
    tmax_files = config['network']['exposure']['tmax_files']
    day = config['network']['exposure']['day']
    steps = config['network']['exposure']['steps']

    log.info('Running daymet air temperature parsing tool.')
    log.info(f'Loading run data from {home}.')

    if not ready(database, tmin_files, tmax_files):
        log.error('Process dependencies not met; see warnings and '
                  'documentation for more details.')
        exit(1)
    if complete(database):
        log.info('All or some of this process is already complete. '
                 'Would you like to proceed? [Y/n]')
        valid = ('y', 'n', 'yes', 'no', 'yee', 'naw')
        response = input().lower()
        while response not in valid:
            print('Try again; would you like to proceed? [Y/n]')
            response = input().lower()
        if response in ('n', 'no', 'naw'):
            log.info('User chose to terminate process.')
            exit()

    try:
        log.info('Starting daymet temperature parsing.')
        # EPSG 4326 is WGS 84 (lat/lon); EPSG 2223 is NAD 83 / Arizona
        # Central (ft).
        parse_temperatures(database, tmin_files, tmax_files, steps, day, 4326,
                           2223)
    except Exception:
        log.exception('Critical error while parsing temperatures; '
                      'terminating process and exiting.')
        exit(1)
Example #7
def main():
    parser = ArgumentParser()
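    # Tool-specific options go in the "main" group; general run options
    # shared with the other tools go in the "common" group.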
    main = parser.add_argument_group('main')
    main.add_argument('file',
                      type=str,
                      help='file path to save the exported routes to')
    main.add_argument(
        '--modes',
        type=str,
        nargs='+',
        dest='modes',
        help='list of modes to export routes for; default is all modes',
        default=('walk', 'pt', 'car', 'bike'),
        choices=('walk', 'pt', 'car', 'bike'))
    main.add_argument('--skip-empty',
                      dest='skip',
                      action='store_true',
                      default=False,
                      help='skip all legs that do not have routes')
    main.add_argument('--epsg',
                      dest='epsg',
                      type=int,
                      default=2223,
                      help='epsg system to convert routes to; default is 2223')

    common = parser.add_argument_group('common')
    common.add_argument(
        '--folder',
        type=str,
        dest='folder',
        default='.',
        help='file path to the directory containing Icarus run data'
        '; default is the working directory')
    common.add_argument(
        '--log',
        type=str,
        dest='log',
        default=None,
        help=
        'file path to save the process log; by default the log is not saved')
    common.add_argument(
        '--level',
        type=str,
        dest='level',
        default='info',
        help='verbosity level of the process log; default is "info"',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'))
    common.add_argument(
        '--replace',
        dest='replace',
        action='store_true',
        default=False,
        help='automatically replace existing data; do not prompt the user')
    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

    path = lambda x: os.path.abspath(os.path.join(args.folder, x))
    home = path('')

    log.info('Running route export tool.')
    log.info(f'Loading run data from {home}.')

    database = SqliteUtil(path('database.db'), readonly=True)
    config = ConfigUtil.load_config(path('config.json'))

    try:
        export_routes(database, args.modes, args.file, args.skip, args.epsg)
    except Exception:
        log.exception('Critical error while exporting routes:')
        exit(1)
    finally:
        database.close()