Example #1
def main(args):
    """The main entry point of the application

    The script follows a simple workflow in order to parse and persist
    the test run information to a database. It runs the main logic under a
    TestRun/PerfTestRun object designed to encapsulate information for a
    specific test run.

    The parser expects at least two arguments, an xml and a log file, in order
    to parse minimum information regarding the tests that have been run and
    the test environment.
    """
    # Parse arguments and check if they exist
    arg_parser = config.init_arg_parser()
    parsed_arguments = arg_parser.parse_args(args)

    path_validation = config.validate_input(parsed_arguments)
    if isinstance(path_validation, list):
        print("\n%s \n" % path_validation[1])
        print(arg_parser.parse_args(['-h']))
        sys.exit(0)

    config.setup_logging(default_level=int(parsed_arguments.loglevel))

    logger.debug('Parsing env variables')
    env.read_envfile(parsed_arguments.config)

    logger.info('Initializing TestRun object')
    if parsed_arguments.perf:
        test_run = PerfTestRun(parsed_arguments.perf, parsed_arguments.skipkvp)
    else:
        test_run = TestRun(skip_vm_check=parsed_arguments.skipkvp)

    logger.info('Parsing XML file - %s', parsed_arguments.xml_file_path)
    test_run.update_from_xml(parsed_arguments.xml_file_path)

    logger.info('Parsing log file - %s', parsed_arguments.log_file_path)
    test_run.update_from_ica(parsed_arguments.log_file_path)

    if not parsed_arguments.skipkvp:
        logger.info('Getting KVP values from VM')
        test_run.update_from_vm(['OSBuildNumber', 'OSName', 'OSMajorVersion'],
                                stop_vm=True)

    # Parse values to be inserted
    logger.info('Parsing test run for database insertion')
    insert_values = test_run.parse_for_db_insertion()
    # Connect to db and insert values in the table
    logger.info('Initializing database connection')
    db_connection, db_cursor = sql_utils.init_connection()

    logger.info('Executing insertion commands')
    for table_line in insert_values:
        sql_utils.insert_values(db_cursor, table_line)

    logger.info('Committing changes to the database')
    db_connection.commit()

    logger.info("Checking insert validity")
    sql_utils.check_insert(db_cursor, insert_values)
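Note: the `config.setup_logging(default_level=...)` helper called throughout these examples is not shown on this page. A minimal sketch of one plausible implementation, assuming the common pattern of a dictConfig-style JSON file with a `basicConfig` fallback (the file name, parameter names and defaults here are illustrative, not the project's actual code):

import json
import logging
import logging.config
import os


def setup_logging(default_path='logging.json', default_level=logging.INFO):
    # Hypothetical sketch: load a dictConfig-style JSON file if one exists,
    # otherwise fall back to a plain basicConfig at the requested level.
    if os.path.exists(default_path):
        with open(default_path, 'rt') as config_file:
            logging.config.dictConfig(json.load(config_file))
    else:
        logging.basicConfig(level=default_level)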
Example #2
def test(motor_record='XF:31IDA-OP{Tbl-Ax:X1}Mtr'):
    config.setup_logging([__name__, 'pypvserver.motor'])
    server = config.get_server()
    mrec = EpicsMotor(motor_record)

    # give the motor time to connect
    time.sleep(1.0)

    logger.info('--> PV Positioner, using put completion and a DONE pv')
    # PV positioner, put completion, done pv
    pos = PVPositioner(mrec.field_pv('VAL'),
                       readback=mrec.field_pv('RBV'),
                       done=mrec.field_pv('MOVN'), done_val=0,
                       stop=mrec.field_pv('STOP'), stop_val=1,
                       put_complete=True,
                       limits=(-2, 2),
                       )

    ppv_motor = PypvMotor('m1', pos, server=server)
    print(ppv_motor.severity)
    record_name = ppv_motor.full_pvname
    for i in range(2):
        epics.caput(record_name, i, wait=True)
        print(pos.position)
    return ppv_motor
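In the pypvserver examples, `config.setup_logging` is passed a list of logger names instead of a level. A minimal sketch of what such a helper might do, assuming it simply turns on verbose output for the named loggers (the handler, format and level are assumptions):

import logging
import sys


def setup_logging(logger_names, level=logging.DEBUG):
    # Hypothetical sketch: attach a stdout handler and raise the level
    # only for the loggers that were explicitly named.
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
    for name in logger_names:
        named_logger = logging.getLogger(name)
        named_logger.setLevel(level)
        named_logger.addHandler(handler)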
Example #3
def main(args):
    """The main entry point of the application

    The script follows a simple workflow in order to parse and persist
    the test run information to a database. It runs the main logic under a
    TestRun/PerfTestRun object designed to encapsulate information for a
    specific test run.

    The parser expects at least two arguments, an xml and a log file, in order
    to parse minimum information regarding the tests that have been run and
    the test environment.
    """
    # Parse arguments and check if they exist
    arg_parser = config.init_arg_parser()
    parsed_arguments = arg_parser.parse_args(args)

    if not config.validate_input(parsed_arguments):
        print('Invalid command line arguments')
        print(arg_parser.parse_args(['-h']))
        sys.exit(0)

    config.setup_logging(
        default_level=int(parsed_arguments.loglevel)
    )

    logger.debug('Parsing env variables')
    env.read_envfile(parsed_arguments.config)

    logger.info('Initializing TestRun object')
    if parsed_arguments.perf:
        test_run = PerfTestRun(parsed_arguments.perf,
                               parsed_arguments.skipkvp)
    else:
        test_run = TestRun(skip_vm_check=parsed_arguments.skipkvp)

    logger.info('Parsing XML file - %s', parsed_arguments.xml_file_path)
    test_run.update_from_xml(parsed_arguments.xml_file_path)

    logger.info('Parsing log file - %s', parsed_arguments.log_file_path)
    test_run.update_from_ica(parsed_arguments.log_file_path)

    if not parsed_arguments.skipkvp:
        logger.info('Getting KVP values from VM')
        test_run.update_from_vm([
            'OSBuildNumber', 'OSName', 'OSMajorVersion'
        ], stop_vm=True)

    # Parse values to be inserted
    logger.info('Parsing test run for database insertion')
    insert_values = test_run.parse_for_db_insertion()
    # Connect to db and insert values in the table
    logger.info('Initializing database connection')
    db_connection, db_cursor = sql_utils.init_connection()

    logger.info('Executing insertion commands')
    for table_line in insert_values:
        sql_utils.insert_values(db_cursor, table_line)

    logger.info('Committing changes to the database')
    db_connection.commit()
Example #4
def test(motor_record='XF:31IDA-OP{Tbl-Ax:X1}Mtr'):
    config.setup_logging([__name__, 'pypvserver.motor'])
    server = config.get_server()
    mrec = EpicsMotor(motor_record)

    # give the motor time to connect
    time.sleep(1.0)

    logger.info('--> PV Positioner, using put completion and a DONE pv')
    # PV positioner, put completion, done pv
    pos = PVPositioner(
        mrec.field_pv('VAL'),
        readback=mrec.field_pv('RBV'),
        done=mrec.field_pv('MOVN'),
        done_val=0,
        stop=mrec.field_pv('STOP'),
        stop_val=1,
        put_complete=True,
        limits=(-2, 2),
    )

    ppv_motor = PypvMotor('m1', pos, server=server)
    print(ppv_motor.severity)
    record_name = ppv_motor.full_pvname
    for i in range(2):
        epics.caput(record_name, i, wait=True)
        print(pos.position)
    return ppv_motor
Example #5
def main():
    """Entrypoint for Cho on the CLI."""

    parser = argparse.ArgumentParser(description="Start a Cho Trivia worker.")
    parser.add_argument("--debug",
                        action='store_true',
                        default=False,
                        help="Enable debug logging.")
    parser.add_argument("--log", help="Specify a log file path to log to.")
    parser.add_argument("--shard", help="Discord shard (unused for now).")
    args = parser.parse_args()

    config.setup_logging(debug=args.debug, logpath=args.log)

    LOGGER.info("Starting Lorewalker Cho worker (shard ?).")
    LOGGER.debug("Debug logging activated.")

    # Connect to the postgres database and setup connection pools.
    sqlalchemy_url = config.get_postgres_url()
    engine = sa.create_engine(sqlalchemy_url,
                              pool_size=SQLALCHEMY_POOL_SIZE,
                              max_overflow=SQLALCHEMY_POOL_MAX)
    engine.connect()
    LOGGER.info("Started connection pool with size: %d", SQLALCHEMY_POOL_SIZE)

    discord_client = ChoClient(engine)
    discord_client.run(DISCORD_TOKEN)
    LOGGER.info("Shutting down... good bye!")
Example #6
File: poezio.py Project: krackou/poezio
def main():
    """
    Enter point
    """
    sys.stdout.write("\x1b]0;poezio\x07")
    sys.stdout.flush()
    import config
    config_path = config.check_create_config_dir()
    config.run_cmdline_args(config_path)
    config.create_global_config()
    config.check_create_log_dir()
    config.check_create_cache_dir()
    config.setup_logging()
    config.post_logging_setup()

    from config import options

    if options.check_config:
        config.check_config()
        sys.exit(0)

    import theming
    theming.update_themes_dir()

    import logger
    logger.create_logger()

    import roster
    roster.create_roster()

    import core

    log = logging.getLogger('')

    signal.signal(signal.SIGINT, signal.SIG_IGN) # ignore ctrl-c
    cocore = singleton.Singleton(core.Core)
    signal.signal(signal.SIGUSR1, cocore.sigusr_handler) # reload the config
    signal.signal(signal.SIGHUP, cocore.exit_from_signal)
    signal.signal(signal.SIGTERM, cocore.exit_from_signal)
    if options.debug:
        cocore.debug = True
    cocore.start()

    # Warning: asyncio must always be imported after the config. Otherwise
    # the asyncio logger will not follow our configuration and won't write
    # the tracebacks in the correct file, etc
    import asyncio
    loop = asyncio.get_event_loop()

    loop.add_reader(sys.stdin, cocore.on_input_readable)
    loop.add_signal_handler(signal.SIGWINCH, cocore.sigwinch_handler)
    cocore.xmpp.start()
    loop.run_forever()
    # We reach this point only when loop.stop() is called
    try:
        cocore.reset_curses()
    except:
        pass
Example #7
def main():
    setup_logging()
    logger = logging.getLogger(__name__)
    # logger.setLevel(logging.DEBUG)
    logger.info('This program is about to start, but first, let me introduce...')
    hello()
    for x in xrange(100):
        y = x * random()
        logger.debug("My important variable: {0}".format(y))
    logger.error('A fake error!')
    logger.info('The program has ended.')
Example #8
File: poezio.py Project: Perdu/poezio
def main():
    """
    Enter point
    """
    sys.stdout.write("\x1b]0;poezio\x07")
    sys.stdout.flush()
    import config
    config_path = config.check_create_config_dir()
    config.run_cmdline_args(config_path)
    config.create_global_config()
    config.check_create_log_dir()
    config.check_create_cache_dir()
    config.setup_logging()
    config.post_logging_setup()

    from config import options

    import theming
    theming.update_themes_dir()

    import logger
    logger.create_logger()

    import roster
    roster.create_roster()

    import core

    log = logging.getLogger('')

    signal.signal(signal.SIGINT, signal.SIG_IGN)  # ignore ctrl-c
    cocore = singleton.Singleton(core.Core)
    signal.signal(signal.SIGUSR1, cocore.sigusr_handler)  # reload the config
    signal.signal(signal.SIGHUP, cocore.exit_from_signal)
    signal.signal(signal.SIGTERM, cocore.exit_from_signal)
    if options.debug:
        cocore.debug = True
    cocore.start()

    # Warning: asyncio must always be imported after the config. Otherwise
    # the asyncio logger will not follow our configuration and won't write
    # the tracebacks in the correct file, etc
    import asyncio
    loop = asyncio.get_event_loop()

    loop.add_reader(sys.stdin, cocore.on_input_readable)
    loop.add_signal_handler(signal.SIGWINCH, cocore.sigwinch_handler)
    cocore.xmpp.start()
    loop.run_forever()
    # We reach this point only when loop.stop() is called
    try:
        cocore.reset_curses()
    except:
        pass
Example #9
def main(args):
    """The main entry point of the application

    The script follows a simple workflow in order to parse and persist
    the test run information to a database. It runs the main logic under a
    TestRun/PerfTestRun object designed to encapsulate information for a
    specific test run.

    The parser expects at least two arguments, an xml and a log file, in order
    to parse minimum information regarding the tests that have been run and
    the test environment.
    """
    # Parse arguments and check if they exist
    arg_parser = config.init_arg_parser()
    parsed_arguments = arg_parser.parse_args(args)
    config.setup_logging(default_level=int(parsed_arguments.loglevel))

    print(parsed_arguments)
    path_validation = config.validate_input(parsed_arguments)
    if isinstance(path_validation, list):
        print("\n%s \n" % path_validation[1])
        print(arg_parser.parse_args(['-h']))
        sys.exit(0)

    # Connect to db
    env.read_envfile(parsed_arguments.config)
    logger.info('Initializing database connection')
    db_connection, db_cursor = sql_utils.init_connection()
    # Parse results
    test_run = parse_results(parsed_arguments.xml_file_path,
                             parsed_arguments.log_file_path,
                             parsed_arguments.perf,
                             parsed_arguments.skipkvp,
                             parsed_arguments.snapshot,
                             db_cursor)

    insert_list = test_run.parse_for_db_insertion()
    if not parsed_arguments.nodbcommit:
        if test_run:
            commit_results(db_connection, db_cursor, insert_list)
        else:
            logger.warning('Results need to be parsed first.')
    else:
        logger.info('Skipping db insertion.') 

    if parsed_arguments.report:
        MonitorRuns.write_json(parsed_arguments.report, MonitorRuns.get_test_summary(insert_list))
    if parsed_arguments.summary:
        MonitorRuns(parsed_arguments.summary)()
Example #10
def main():
    app_config_name = os.getenv('APP_SETTINGS')
    app_config = import_string(app_config_name)

    setup_logging()

    RedisStorage.initialize(**app_config.REDIS_SETTINGS)

    moltin_api_session = MoltinApiSession(app_config.MOLTIN_API_URL,
                                          app_config.MOLTIN_CLIENT_ID,
                                          app_config.MOLTIN_CLIENT_SECRET)
    moltin_api = MoltinApi(moltin_api_session)

    telegram_bot = TelegramBot(app_config.TELEGRAM_BOT_TOKEN,
                               moltin_api=moltin_api)
    telegram_bot.start()
Example #11
def main(args):
    """The main entry point of the application

    """
    # Parse arguments and check if they exist
    parsed_arguments = config.parse_arguments(args)

    if not config.validate_input(parsed_arguments):
        print('Invalid command line arguments')
        sys.exit(0)

    config.setup_logging(
        default_level=int(parsed_arguments['level'])
    )

    logger.debug('Parsing env variables')
    env.read_envfile(parsed_arguments['env'])

    logger.info('Initializing TestRun object')
    test_run = TestRun()

    logger.info('Parsing XML file - %s', parsed_arguments['xml'])
    test_run.update_from_xml(parsed_arguments['xml'])

    logger.info('Parsing log file - %s', parsed_arguments['log'])
    test_run.update_from_ica(parsed_arguments['log'])

    if parsed_arguments['kvp']:
        logger.info('Getting KVP values from VM')
        test_run.update_from_vm([
            'OSBuildNumber', 'OSName', 'OSMajorVersion'
        ], stop_vm=True)

    # Parse values to be inserted
    logger.info('Parsing test run for database insertion')
    insert_values = test_run.parse_for_db_insertion()

    # Connect to db and insert values in the table
    logger.info('Initializing database connection')
    db_connection, db_cursor = sql_utils.init_connection()

    logger.info('Executing insertion commands')
    for table_line in insert_values:
        sql_utils.insert_values(db_cursor, table_line)

    logger.info('Committing changes to the database')
    db_connection.commit()
Example #12
def main():
    application_environment = os.getenv('APPLICATION_ENV')

    if application_environment == 'development':
        application_config = DevelopmentConfig
    elif application_environment == 'production':
        application_config = ProductionConfig
    else:
        sys.stdout.write(
            'Application environment setup required: env APPLICATION_ENV should be '
            'development or production')
        sys.exit(1)

    try:
        validate_config(application_config)
    except ConfigError as e:
        sys.stdout.write(str(e))
        sys.exit(1)

    setup_logging()

    RedisStorage.initialize(**application_config.REDIS_SETTINGS)

    arg_parser = create_parser()
    args = arg_parser.parse_args()

    if args.command == 'populate_db':
        populate_db.run_command(
            application_config.QUIZ_QUESTIONS_DIRECTORY,
            application_config.DEFAULT_ENCODING,
            application_config.QUIZ_QUESTIONS_FILEPARSING_LIMIT,
        )
    elif args.command == 'run':
        if args.platform == 'telegram':
            run_telegram_bot.run_command(application_config.TELEGRAM_BOT_TOKEN)
        elif args.platform == 'vk':
            run_vk_bot.run_command(application_config.VK_GROUP_TOKEN)
        else:
            sys.stdout.write('Unknown command. Please refer for help.')
            sys.exit(1)

    else:
        sys.stdout.write('Unknown command. Please refer for help.')
        sys.exit(1)
Example #13
    def __init__(self, *args):
        config.setup_logging()
        args = args[0]
        arg_parser = config.LT_arg_parser()
        parsed_arguments = arg_parser.parse_args(args)
        env.read_envfile(parsed_arguments.config)

        self.url = parsed_arguments.build
        self.functions = {}
        self.regexes = {}

        self.content = urlopen(self.url + "consoleText").read()
        self.suite_tests = self.compute_tests(parsed_arguments.tests)
        self.parse_regexes(parsed_arguments.regex)
        self.suite = re.search(r'(?<=job/)\D+/', self.url).group(0)[:-1]

        for function_name, regex in self.regexes.items():
            function = add_get_function(regex, self.content)
            setattr(self, "get_" + function_name, function)
            self.functions[function_name] = function
Example #14
File: pv.py Project: NSLS-II/pypvserver
def test():
    config.setup_logging([__name__, 'pypvserver.pv'])
    server = config.get_server()
    logger.info('Creating PV "pv1", a floating-point type')
    python_pv = PyPV('pv1', 123.0, server=server)

    # full_pvname includes the server prefix
    pvname = python_pv.full_pvname
    logger.info('... which is %s including the server prefix', pvname)

    signal = epics.PV(pvname)
    signal.add_callback(updated)

    time.sleep(0.1)

    for value in range(10):
        logger.info('Updating the value on the server-side to: %s', value)
        python_pv.value = value
        time.sleep(0.05)

    logger.info('Done')
Example #15
def main(args):
    """The main entry point of the application

    The script follows a simple workflow in order to parse and persist
    the test run information to a database. It runs the main logic under a
    TestRun/PerfTestRun object designed to encapsulate information for a
    specific test run.

    The parser expects at least two arguments, an xml and a log file, in order
    to parse minimum information regarding the tests that have been run and
    the test environment.
    """
    # Parse arguments and check if they exist
    arg_parser = config.init_arg_parser()
    parsed_arguments = arg_parser.parse_args(args)
    config.setup_logging(default_level=int(parsed_arguments.loglevel))

    print(parsed_arguments)
    path_validation = config.validate_input(parsed_arguments)
    if isinstance(path_validation, list):
        print("\n%s \n" % path_validation[1])
        print(arg_parser.parse_args(['-h']))
        sys.exit(0)
    test_run = parse_results(parsed_arguments.xml_file_path,
                             parsed_arguments.log_file_path,
                             parsed_arguments.perf, parsed_arguments.skipkvp,
                             parsed_arguments.snapshot)

    insert_list = test_run.parse_for_db_insertion()
    if not parsed_arguments.nodbcommit:
        if test_run:
            commit_results(insert_list, parsed_arguments.config)
        else:
            logger.warning('Results need to be parsed first.')
    else:
        logger.info('Skipping db insertion.')

    if parsed_arguments.report:
        MonitorRuns.write_json(parsed_arguments.report,
                               MonitorRuns.get_test_summary(insert_list))
    if parsed_arguments.summary:
        MonitorRuns(parsed_arguments.summary)()
Example #16
def main():
    """
    Main entry into the application.
    
    TODO: factor this out to a class, like VelouriaController
    """
    logger = logging.getLogger("velouria")

    parser = argparse.ArgumentParser(
        description="Velouria: a plugable, configurable, resource-conservative" "information kiosk application"
    )

    common_args(parser)

    options = parser.parse_args()
    try:
        config = VelouriaConfig(options.config_file)

        log_level = options.log_level
        log_file = options.log_file

        if not options.log_level:
            log_level = config.main.log_level
        if not log_file:
            log_file = config.main.log_file

        logger = setup_logging(log_level, log_file)

        logger.info("Starting Velouria v. %s...", VERSION)

        app = Velouria(config)
        # app.window.show_all()

        controller = VelouriaServer(app)

        logger.debug("Registering signal handlers")
        # wire up the signal handlers
        signal.signal(signal.SIGINT, controller.shutdown)
        signal.signal(signal.SIGINT, app.shutdown)

        # main loops
        logger.debug("Running asyncore.poll, attaching timeout")
        poll()
        logger.debug("Starting main GTK loop")
        Gtk.main()
    except RuntimeError:
        logger.error("X not running or does not allow connections. Check $DISPLAY variable")
    except exceptions.ConfigError as e:
        logger.error("Configuration problem: '%s'", e)
Example #17
    analysis_results_dictionary = run_analysis(output_db)

    # ----- render_layout -----
    # Render the map including analysis features, correct colours, subtitle, and addresses at risk count.

    # analysis_results_dictionary below is included for debugging so don't have to run_analysis:
    # analysis_results_dictionary = {'map_subtitle': 'debug subtitle', 'addresses_at_risk_count': 123}

    map_features = [('final_analysis', [255, 0, 0, 100]),
                    ('avoid_points_buf', [115, 178, 255, 100]),
                    ('Target_Addresses', [102, 119, 205, 100])]
    map_subtitle = analysis_results_dictionary['map_subtitle']
    map_spatial_reference = pcs
    address_count = analysis_results_dictionary['addresses_at_risk_count']
    render_layout(map_subtitle, map_features, map_spatial_reference,
                  address_count, output_db)

    # ----- generate_report -----
    # Generate a csv report in the WestNileOutbreak directory with the Target Addresses that require spraying.
    target_addresses_fc = set_path(output_db, 'Target_Addresses')
    generate_target_addresses_csv(target_addresses_fc)


if __name__ == '__main__':
    # Set up the logger that writes the log file; use logger.debug(msg) and logger.info(msg)
    setup_logging(level='DEBUG',
                  fn=f'{config_dict["proj_dir"]}/{config_dict["log_fn"]}')

    logger.info('Starting West Nile Virus Simulation')
    main(flush_output_db=True)
Example #18
def main(args=None):
    setup_logging()
    parser = argparse.ArgumentParser(
        description='Google Earth Engine Batch Asset Manager with Addons')

    subparsers = parser.add_subparsers()
    parser_ee_user = subparsers.add_parser(
        'ee_user', help='Allows you to associate/change GEE account to system')
    parser_ee_user.set_defaults(func=ee_user_from_parser)

    parser_create = subparsers.add_parser(
        'create',
        help=
        'Allows the user to create an asset collection or folder in Google Earth Engine'
    )
    parser_create.add_argument('--typ',
                               help='Specify type: collection or folder',
                               required=True)
    parser_create.add_argument(
        '--path',
        help=
        'This is the path for the earth engine asset to be created full path is needed eg: users/johndoe/collection',
        required=True)
    parser_create.set_defaults(func=create_from_parser)

    parser_upload = subparsers.add_parser('upload',
                                          help='Batch Asset Uploader.')
    required_named = parser_upload.add_argument_group(
        'Required named arguments.')
    required_named.add_argument(
        '--source',
        help='Path to the directory with images for upload.',
        required=True)
    required_named.add_argument(
        '--dest',
        help=
        'Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection',
        required=True)
    optional_named = parser_upload.add_argument_group(
        'Optional named arguments')
    optional_named.add_argument('-m',
                                '--metadata',
                                help='Path to CSV with metadata.')
    optional_named.add_argument(
        '-mf',
        '--manifest',
        help='Manifest type to be used,for planetscope use "planetscope"')
    optional_named.add_argument(
        '--large',
        action='store_true',
        help='(Advanced) Use multipart upload. Might help if upload of large '
        'files is failing on some systems. Might cause other issues.')
    optional_named.add_argument(
        '--nodata',
        type=int,
        help='The value to burn into the raster as NoData (missing data)')

    required_named.add_argument('-u',
                                '--user',
                                help='Google account name (gmail address).')
    optional_named.add_argument('-s',
                                '--service-account',
                                help='Google Earth Engine service account.')
    optional_named.add_argument('-k',
                                '--private-key',
                                help='Google Earth Engine private key file.')
    optional_named.add_argument('-b',
                                '--bucket',
                                help='Google Cloud Storage bucket name.')
    parser_upload.set_defaults(func=upload_from_parser)

    parser_lst = subparsers.add_parser(
        'lst', help='List assets in a folder/collection or write as text file')
    required_named = parser_lst.add_argument_group('Required named arguments.')
    required_named.add_argument(
        '--location',
        help='This is the location of your folder/collection',
        required=True)
    required_named.add_argument(
        '--typ',
        help=
        'Whether you want the list to be printed or output as text[print/report]',
        required=True)
    optional_named = parser_lst.add_argument_group('Optional named arguments')
    optional_named.add_argument('--items', help="Number of items to list")
    optional_named.add_argument(
        '--output', help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_ee_report = subparsers.add_parser(
        'ee_report',
        help=
        'Prints a detailed report of all Earth Engine Assets includes Asset Type, Path,Number of Assets,size(MB),unit,owner,readers,writers'
    )
    parser_ee_report.add_argument(
        '--outfile',
        help='This is the location of your report csv file',
        required=True)
    parser_ee_report.set_defaults(func=ee_report_from_parser)

    parser_assetsize = subparsers.add_parser(
        'assetsize',
        help='Prints collection size in Human Readable form & Number of assets'
    )
    parser_assetsize.add_argument(
        '--asset',
        help='Earth Engine Asset for which to get size properties',
        required=True)
    parser_assetsize.set_defaults(func=assetsize_from_parser)

    parser_tasks = subparsers.add_parser(
        'tasks',
        help=
        'Queries current task status [completed,running,ready,failed,cancelled]'
    )
    parser_tasks.set_defaults(func=tasks_from_parser)

    parser_genreport = subparsers.add_parser(
        'taskreport',
        help='Create a report of all tasks and exports to a CSV file')
    parser_genreport.add_argument(
        '--r', help='Folder Path where the reports will be saved')
    parser_genreport.set_defaults(func=genreport_from_parser)

    parser_delete = subparsers.add_parser(
        'delete',
        help=
        'Deletes collection and all items inside. Supports Unix-like wildcards.'
    )
    parser_delete.add_argument(
        'id',
        help=
        'Full path to asset for deletion. Recursively removes all folders, collections and images.'
    )
    parser_delete.set_defaults(func=delete_collection_from_parser)

    parser_mover = subparsers.add_parser(
        'mover', help='Moves all assets from one collection to another')
    parser_mover.add_argument('--assetpath', help='Existing path of assets')
    parser_mover.add_argument('--finalpath', help='New path for assets')
    parser_mover.set_defaults(func=mover_from_parser)

    parser_copy = subparsers.add_parser(
        'copy',
        help=
        'Copies all assets from one collection to another: Including copying from other users if you have read permission to their assets'
    )
    parser_copy.add_argument('--initial', help='Existing path of assets')
    parser_copy.add_argument('--final', help='New path for assets')
    parser_copy.set_defaults(func=copy_from_parser)

    parser_access = subparsers.add_parser(
        'access',
        help=
        'Sets Permissions for Images, Collection or all assets in EE Folder Example: python ee_permissions.py --mode "folder" --asset "users/john/doe" --user "[email protected]:R"'
    )
    parser_access.add_argument(
        '--mode',
        help=
        'This lets you select if you want to change permission or folder/collection/image',
        required=True)
    parser_access.add_argument(
        '--asset',
        help=
        'This is the path to the earth engine asset whose permission you are changing folder/collection/image',
        required=True)
    parser_access.add_argument(
        '--user',
        help=
        """This is the email address to whom you want to give read or write permission Usage: "[email protected]:R" or "[email protected]:W" R/W refers to read or write permission""",
        required=True,
        default=False)
    parser_access.set_defaults(func=access_from_parser)

    parser_collprop = subparsers.add_parser(
        'collprop', help='Sets Overall Properties for Image Collection')
    parser_collprop.add_argument('--coll', help='Path of Image Collection')
    parser_collprop.add_argument(
        '--p',
        help=
        '"system:description=Description"/"system:provider_url=url"/"system:tags=tags"/"system:title=title'
    )
    parser_collprop.set_defaults(func=collprop_from_parser)

    parser_cancel = subparsers.add_parser('cancel',
                                          help='Cancel all running tasks')
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)

    args = parser.parse_args()

    ee.Initialize()
    args.func(args)
Example #19
import json
import requests
import re
import datetime
from pymongo.errors import DuplicateKeyError, BulkWriteError
from pytz import timezone

from config import setup_logging, setup_db

logger = setup_logging()

pattern_suc = re.compile(r"(\d+) Supercharger", flags=re.DOTALL | re.IGNORECASE)
pattern_dc = re.compile(r"(\d+) Tesla Connector", flags=re.DOTALL | re.IGNORECASE)

tz_zurich = timezone('Europe/Zurich')
tz_utc = timezone('UTC')


def chargers(s, location_id):
    m = pattern_suc.findall(s)
    if len(m) > 0:
        return int(m[0])

    m = pattern_dc.findall(s)
    if len(m):
        return int(m[0])

    logger.warning("No chargers found for %s from '%s'" % (location_id, s))
    return None

Example #20
def main(args=None):
    setup_logging()
    parser = GooeyParser(description='Planet and EE Pipeline')
    subparsers = parser.add_subparsers()
    ##Planet Assets Tools
    parser_planet_key = subparsers.add_parser('planet_key', help='Enter your planet API Key')
    parser_planet_key.set_defaults(func=planet_key_from_parser)
    
    parser_aoijson=subparsers.add_parser('aoijson',help='Convert KML/SHP/WKT/GeoJSON file to aoi.json file with structured query for use with Planet API 1.0')
    parser_aoijson.add_argument('--start', default='Start date in YYYY-MM-DD',help='Start date in YYYY-MM-DD?',widget='DateChooser')
    parser_aoijson.add_argument('--end', default='End date in YYYY-MM-DD',help='End date in YYYY-MM-DD?',widget='DateChooser')
    parser_aoijson.add_argument('--cloud', default='Maximum Cloud Cover(0-1)',help='Maximum Cloud Cover(0-1) representing 0-100')
    parser_aoijson.add_argument('--inputfile',default='Choose a KML/SHP/geojson/WKT file or Landsat WRS',choices=['KML', 'SHP','GJSON','WKT','WRS'],help='Choose a KML/SHP/geojson/WKT file or Landsat WRS')
    parser_aoijson.add_argument('--geo', default='map.geojson/aoi.kml/aoi.shp/aoi.wkt file or 6 digit WRS PathRow',help='map.geojson/aoi.kml/aoi.shp/aoi.wkt file',widget="MultiFileChooser")
    parser_aoijson.add_argument('--loc', help='Location where aoi.json file is to be stored',widget="MultiDirChooser")
    parser_aoijson.set_defaults(func=aoijson_from_parser)

    parser_activatepl=subparsers.add_parser('activatepl',description='Tool to query and/or activate Planet Assets')
    parser_activatepl.add_argument('--aoi',default='Choose JSON file to be used with Planet API/Created Earlier',help='Choose JSON file created earlier',widget="MultiFileChooser")
    parser_activatepl.add_argument('--action',choices=['check', 'activate'],help='Check/activate')
    parser_activatepl.add_argument('--asst',choices=['PSOrthoTile analytic','PSOrthoTile analytic_dn','PSOrthoTile visual','PSScene4Band analytic','PSScene4Band analytic_dn','PSScene3Band analytic','PSScene3Band analytic_dn','PSScene3Band visual','REOrthoTile analytic','REOrthoTile visual'],help='PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual')
    parser_activatepl.set_defaults(func=activatepl_from_parser)

    parser_downloadpl=subparsers.add_parser('downloadpl',help='Tool to download Planet Assets')
    parser_downloadpl.add_argument('--aoi', default='Choose JSON file to be used with Planet API/Created Earlier',help='Choose JSON file created earlier',widget="MultiFileChooser")
    parser_downloadpl.add_argument('--action', default='download',help='choose download')
    parser_downloadpl.add_argument('--asst',choices=['PSOrthoTile analytic','PSOrthoTile analytic_dn','PSOrthoTile visual','PSScene4Band analytic','PSScene4Band analytic_dn','PSScene3Band analytic','PSScene3Band analytic_dn','PSScene3Band visual','REOrthoTile analytic','REOrthoTile visual','PSOrthoTile analytic_xml','PSOrthoTile analytic_dn_xml','PSOrthoTile visual_xml','PSScene4Band analytic_xml','PSScene4Band analytic_dn_xml','PSScene3Band analytic_xml','PSScene3Band analytic_dn_xml','PSScene3Band visual_xml','REOrthoTile analytic_xml','REOrthoTile visual_xml'],help='PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual')
    parser_downloadpl.add_argument('--pathway',default='Folder where you want to save assets',help='Folder Path where PlanetAssets are saved example ./PlanetScope ./RapidEye',widget="MultiDirChooser")
    parser_downloadpl.set_defaults(func=downloadpl_from_parser)

    parser_metadata=subparsers.add_parser('metadata',help='Tool to tabulate and convert all metadata files from Planet or Digital Globe Assets')
    parser_metadata.add_argument('--asset', default='PS',choices=['PSO','PSO_DN','PSO_V','PS4B','PS4B_DN','PS3B','PS3B_DN','PS3B_V','REO','REO_V','DGMS','DGP'],help='RapidEye/PlantScope/DigitalGlobe MS/DigitalGlobe Pan(RE/PS/DGMS/DGP)?')
    parser_metadata.add_argument('--mf', default='Metadata folder',help='Metadata folder',widget="MultiDirChooser")
    parser_metadata.add_argument('--mfile',default='Metadata filename browse and create file and click open',help='Metadata filename to be exported with Path.csv',widget="MultiFileChooser")
    parser_metadata.add_argument('--errorlog',default='Error log browse and create file and click open',help='Errorlog to be exported along with Path.csv',widget="MultiFileChooser")
    parser_metadata.set_defaults(func=metadata_from_parser)

    ##Earth Engine Tools
    parser_ee_user = subparsers.add_parser('ee_user', help='Get Earth Engine API Key & Paste it back to Command line/shell to change user')
    parser_ee_user.set_defaults(func=ee_user_from_parser)

    parser_create = subparsers.add_parser('create',help='Allows the user to create an asset collection or folder in Google Earth Engine')
    parser_create.add_argument('--typ', help='Specify type: collection or folder', required=True)
    parser_create.add_argument('--path', help='This is the path for the earth engine asset to be created full path is needed eg: users/johndoe/collection', required=True)
    parser_create.set_defaults(func=create_from_parser)
    
    parser_upload = subparsers.add_parser('upload', help='Batch Asset Uploader to Earth Engine.')
    required_named = parser_upload.add_argument_group('Required named arguments.')
    required_named.add_argument('-u', '--user', help='Google account name (gmail address).', required=True)
    required_named.add_argument('--source', help='Path to the directory with images for upload.', required=True)
    required_named.add_argument('--dest', help='Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection', required=True)
    optional_named = parser_upload.add_argument_group('Optional named arguments')
    optional_named.add_argument('-m', '--metadata', help='Path to CSV with metadata.')
    optional_named.add_argument('--nodata', type=int, help='The value to burn into the raster as NoData (missing data)')
    parser_upload.set_defaults(func=upload_from_parser)

    parser_lst = subparsers.add_parser('lst',help='List assets in a folder/collection or write as text file')
    parser_lst.add_argument('--location', help='This is the location of your folder/collection', required=True)
    parser_lst.add_argument('--type', help='Whether you want the list to be printed or output as text', required=True)
    parser_lst.add_argument('--items', help="Number of items to list")
    parser_lst.add_argument('--folder',help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_tasks=subparsers.add_parser('tasks',help='Queries currently running, enqued,failed')
    parser_tasks.set_defaults(func=tasks_from_parser)
    
    parser_taskquery=subparsers.add_parser('taskquery',help='Queries currently running, enqued,failed ingestions and uploaded assets')
    parser_taskquery.add_argument('--destination',default='users/folder/collection',help='Full path to asset where you are uploading files')
    parser_taskquery.set_defaults(func=taskquery_from_parser)

    parser_genreport=subparsers.add_parser('report',help='Create a report of all tasks and exports to a CSV file')
    parser_genreport.add_argument('--r',default='Folder Path where the reports will be saved',help='Folder Path where the reports will be saved',widget="MultiDirChooser")
    parser_genreport.set_defaults(func=genreport_from_parser)

    parser_cancel = subparsers.add_parser('cancel', help='Cancel all running tasks')
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)
    
    parser_mover=subparsers.add_parser('mover',help='Moves all assets from one collection to another')
    parser_mover.add_argument('--assetpath',default='users/folder/collection1',help='Existing path of assets')
    parser_mover.add_argument('--finalpath',default='users/folder/collection2',help='New path for assets')
    parser_mover.set_defaults(func=mover_from_parser)

    parser_copy=subparsers.add_parser('copy',help='Copies all assets from one collection to another: Including copying from other users if you have read permission to their assets')
    parser_copy.add_argument('--initial',default='users/folder/collection1',help='Existing path of assets')
    parser_copy.add_argument('--final',default='users/folder/collection2',help='New path for assets')
    parser_copy.set_defaults(func=copy_from_parser)

    parser_collprop=subparsers.add_parser('collprop',help='Sets Overall Properties for Image Collection')
    parser_collprop.add_argument('--coll',default='users/folder/collection',help='Path of Image Collection')
    parser_collprop.add_argument('--p',default='system:description=Description',help='system:description=Description|system:title=title')
    parser_collprop.set_defaults(func=collprop_from_parser)
    
    parser_ft = subparsers.add_parser('access',help='Sets Permissions for Images, Collection or all assets in EE Folder Example: python ee_permissions.py --mode "folder" --asset "users/john/doe" --user "[email protected]:R"')
    parser_ft.add_argument('--mode', default='folder|collection|image',choices=['folder','collection','image'],help='This lets you select if you want to change permission or folder/collection/image', required=True)
    parser_ft.add_argument('--asset', default='users/folder/collection',help='This is the path to the earth engine asset whose permission you are changing folder/collection/image', required=True)
    parser_ft.add_argument('--user', default='[email protected]:R',help="""This is the email address to whom you want to give read or write permission Usage: "[email protected]:R" or "[email protected]:W" R/W refers to read or write permission""", required=True)
    parser_ft.set_defaults(func=access_from_parser)

    parser_delete = subparsers.add_parser('delete', help='Deletes collection and all items inside. Supports Unix-like wildcards.')
    parser_delete.add_argument('id', default='users/folder/collection',help='Full path to asset for deletion. Recursively removes all folders, collections and images.')
    parser_delete.set_defaults(func=delete_collection_from_parser)
    
    parser_ft = subparsers.add_parser('convert2ft',help='Uploads a given feature collection to Google Fusion Table.')
    parser_ft.add_argument('--i', help='input feature source (KML, SHP, SpatiLite, etc.)', required=True,widget="MultiFileChooser",default='input feature source (KML, SHP, SpatiLite, etc.)')
    parser_ft.add_argument('--o', help='output Fusion Table name', required=True)
    parser_ft.add_argument('--add_missing', help='add missing features from the last inserted feature index', action='store_true', required=False, default=False)
    parser_ft.set_defaults(func=ft_from_parser)

    parser_cleanout=subparsers.add_parser('cleanout',help='Clear folders with datasets from earlier downloaded')
    parser_cleanout.add_argument('--dirpath',help='Folder you want to delete after all processes have been completed',widget="MultiDirChooser")
    parser_cleanout.set_defaults(func=cleanout_from_parser)

    args = parser.parse_args()

    ee.Initialize()
    args.func(args)
    display_message()
Example #21

if __name__ == '__main__':
    parser = config.default_parser(defaults)
    parser.add_option("--profile", default="",
        dest="profile", help="unit to profile doctest [default: %default]")
    parser.add_option('--psyco', dest='psyco', default='',
            help="specialized python compiler for speed without debugging")
    
    import sys
    (options, args) = config.parse_args(parser, sys.argv)
    configuration.set(options.__dict__)
    configuration.subprocess_gateway = eval(configuration.subprocess_gateway)
    configuration.setup_client = eval(configuration.setup_client)
    configuration.globe_class = eval(configuration.globe_class)
    config.setup_logging(configuration.verbose)

    if setup_flash_master == configuration.setup_client:
        # TODO:  Master client class
        set_property = slave_set_property
        dispatch_event = slave_dispatch_event
        mouse_down_and_sleep = slave_mouse_down_and_sleep
        mouse_down_and_news = slave_mouse_down_and_news

    #from optparse import OptionParser
    #parser = OptionParser()
    #parser.add_option("--unit", default="",
    #    dest="unit", help="unit to doctest [default: %default]")
    #parser.add_option("--debug", default="",
    #    dest="debug", help="unit to debug in doctest [default: %default]")
    #parser.add_option('-v', '--verbose', dest='verbose', default='warning',
Example #22
# from psycopg2.pool import SimpleConnectionPool

from Modules.rat_cache import RatCache

# Set argv to keep cli arguments meant for pytest from polluting our things

sys.argv = ["test",
            "--config-file", "testing.json",
            "--clean-log",
            "--verbose",
            ]

# This import statement is where the config gets read
from config import setup_logging

setup_logging("logs/unit_tests.log")

from Modules.permissions import Permission
from tests.mock_bot import MockBot
from Modules.rat_board import RatBoard
from Modules.rat_rescue import Rescue
from Modules.rat import Rat
from utils.ratlib import Platforms
from Modules.context import Context
from Modules.epic import Epic
from Modules.user import User
from Modules.mark_for_deletion import MarkForDeletion
from tests.mock_callables import CallableMock, AsyncCallableMock
from database import DatabaseManager
from Modules.fact import Fact
Example #23
from flask import Flask

import config
from resizer.routes import resizer
from utils.routes import utils

app = Flask(__name__)

config.load_config(app)
config.setup_logging(app)
app.register_blueprint(resizer)
app.register_blueprint(utils)

if __name__ == '__main__':
    app.run(host='0.0.0.0')  # pragma: no cover
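In the Flask example above, `config.setup_logging(app)` presumably attaches handlers to `app.logger`. A minimal sketch under that assumption (the handler type, format and log file name are illustrative):

import logging
from logging.handlers import RotatingFileHandler


def setup_logging(app):
    # Hypothetical sketch: send INFO and above from the Flask application
    # logger to a rotating log file.
    handler = RotatingFileHandler('app.log', maxBytes=1024 * 1024, backupCount=3)
    handler.setLevel(logging.INFO)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s in %(module)s: %(message)s'))
    app.logger.addHandler(handler)
    app.logger.setLevel(logging.INFO)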
Example #24
def main(args=None):
    setup_logging()
    parser = argparse.ArgumentParser(description='Google Earth Engine Batch Asset Manager with Addons')
    subparsers = parser.add_subparsers()
    parser_ee_user=subparsers.add_parser('ee_user',help='Allows you to associate/change GEE account to system')
    parser_ee_user.set_defaults(func=ee_user_from_parser)

    parser_quota = subparsers.add_parser('quota', help='Print Earth Engine total quota and used quota')
    parser_quota.set_defaults(func=quota_from_parser)

    parser_create = subparsers.add_parser('create',help='Allows the user to create an asset collection or folder in Google Earth Engine')
    parser_create.add_argument('--typ', help='Specify type: collection or folder', required=True)
    parser_create.add_argument('--path', help='This is the path for the earth engine asset to be created full path is needed eg: users/johndoe/collection', required=True)
    parser_create.set_defaults(func=create_from_parser)

    parser_lst = subparsers.add_parser('lst',help='List assets in a folder/collection or write as text file')
    required_named = parser_lst.add_argument_group('Required named arguments.')
    required_named.add_argument('--location', help='This is the location of your folder/collection', required=True)
    required_named.add_argument('--typ', help='Whether you want the list to be printed or output as text[print/report]', required=True)
    optional_named = parser_lst.add_argument_group('Optional named arguments')
    optional_named.add_argument('--items', help="Number of items to list")
    optional_named.add_argument('--output',help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_ee_report = subparsers.add_parser('ee_report',help='Prints a detailed report of all Earth Engine Assets includes Asset Type, Path,Number of Assets,size(MB),unit,owner,readers,writers')
    parser_ee_report.add_argument('--outfile', help='This is the location of your report csv file', required=True)
    parser_ee_report.set_defaults(func=ee_report_from_parser)

    parser_assetsize = subparsers.add_parser('assetsize',help='Prints collection size in Human Readable form & Number of assets')
    parser_assetsize.add_argument('--asset', help='Earth Engine Asset for which to get size properties', required=True)
    parser_assetsize.set_defaults(func=assetsize_from_parser)

    parser_tasks=subparsers.add_parser('tasks',help='Queries current task status [completed,running,ready,failed,cancelled]')
    parser_tasks.set_defaults(func=tasks_from_parser)

    parser_genreport=subparsers.add_parser('taskreport',help='Create a report of all tasks and exports to a CSV file')
    parser_genreport.add_argument('--r',help='Path to csv report file')
    parser_genreport.set_defaults(func=genreport_from_parser)


    parser_delete = subparsers.add_parser('delete', help='Deletes collection and all items inside. Supports Unix-like wildcards.')
    parser_delete.add_argument('id', help='Full path to asset for deletion. Recursively removes all folders, collections and images.')
    parser_delete.set_defaults(func=delete_collection_from_parser)

    parser_mover=subparsers.add_parser('mover',help='Moves all assets from one collection to another')
    parser_mover.add_argument('--assetpath',help='Existing path of assets')
    parser_mover.add_argument('--finalpath',help='New path for assets')
    parser_mover.set_defaults(func=mover_from_parser)

    parser_copy=subparsers.add_parser('copy',help='Copies all assets from one collection to another: Including copying from other users if you have read permission to their assets')
    parser_copy.add_argument('--initial',help='Existing path of assets')
    parser_copy.add_argument('--final',help='New path for assets')
    parser_copy.set_defaults(func=copy_from_parser)

    parser_access = subparsers.add_parser('access',help='Sets Permissions for items in folder')
    parser_access.add_argument('--asset', help='This is the path to the earth engine asset whose permission you are changing folder/collection/image', required=True)
    parser_access.add_argument('--user', help='Full email address of the user, try using "AllUsers" to make it public', required=True, default=False)
    parser_access.add_argument('--role', help='Choose between reader, writer or delete', required=True)
    parser_access.set_defaults(func=access_from_parser)

    parser_delete_metadata = subparsers.add_parser('delete_metadata',help='Use with caution: delete any metadata from collection or image')
    parser_delete_metadata.add_argument('--asset', help='This is the path to the earth engine asset whose permission you are changing collection/image', required=True)
    parser_delete_metadata.add_argument('--property', help='Metadata name that you want to delete', required=True, default=False)
    parser_delete_metadata.set_defaults(func=delete_metadata_from_parser)

    parser_cancel = subparsers.add_parser('cancel', help='Cancel all running tasks')
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)

    args = parser.parse_args()

    #ee.Initialize()
    args.func(args)
Example #25
File: run.py Project: PeerBay/PeerStream
import io
import logging

from config import HOST
from config import PORT
from config import setup_logging
from config import torrent_session

from libtorrent import save_resume_data_alert, bencode
from signal import signal, SIGPIPE, SIG_IGN
from omxplayer import gallery

signal(SIGPIPE, SIG_IGN)

Log = logging.getLogger('simpleHttpServer.run')


if __name__ == '__main__':
    setup_logging()

    try:
        # gallery()
        run(host=HOST, port=PORT)
    except KeyboardInterrupt:
        Log.info('simpleHttpServer stopped')
        torrents = torrent_session.get_torrents()
        torrents_len = len(torrents)
        for h in torrents:
            h.pause()
            h.save_resume_data()
        received = []
        while received != torrents_len * [True]:
            torrent_session.wait_for_alert(1000)
            a = torrent_session.pop_alert()
Example #26
from app import Velouria
import exceptions
from controller import VelouriaServer, VelouriaController
import signal
import asyncore
from gi.repository import Gtk, GLib
from config import VelouriaConfig, setup_logging, common_args

import sys

import argparse

import logging

# set egg-wide default of INFO level, with output going to STDOUT
logger = setup_logging("info", "STDOUT")


def ctl():
    """
    Velouria-control entry point - sends signals to the main application
    to control it
    """
    VelouriaController()


def poll():
    """
    Wiring the poll function from asyncore to the main GTK loop
    """
    asyncore.poll(timeout=0.0)
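Example #26 calls `setup_logging("info", "STDOUT")` at import time, and Example #16 later re-calls it with a level name and a log file, expecting a logger back. A minimal sketch consistent with both call sites (the logger name, format and "STDOUT" convention are read off the snippets; everything else is an assumption):

import logging
import sys


def setup_logging(level, destination):
    # Hypothetical sketch: map the level name to a logging constant, log to
    # stdout when destination is "STDOUT" and to a file otherwise, and return
    # the configured logger as the snippets expect.
    numeric_level = getattr(logging, str(level).upper(), logging.INFO)
    if destination == 'STDOUT':
        handler = logging.StreamHandler(sys.stdout)
    else:
        handler = logging.FileHandler(destination)
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    velouria_logger = logging.getLogger('velouria')
    velouria_logger.setLevel(numeric_level)
    velouria_logger.addHandler(handler)
    return velouria_logger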
Example #27
def main(args=None):
    setup_logging()
    parser = argparse.ArgumentParser(description='Planet Pipeline with Google Earth Engine Batch Addons')

    subparsers = parser.add_subparsers()
    parser_pp1 = subparsers.add_parser(' ', help='---------------------------------------')
    parser_P = subparsers.add_parser(' ', help='-----Choose from Planet Tools Below-----')
    parser_pp2 = subparsers.add_parser(' ', help='---------------------------------------')

    parser_planet_key = subparsers.add_parser('planetkey', help='Enter your planet API Key')
    optional_named = parser_planet_key.add_argument_group('Optional named arguments')
    optional_named.add_argument('--type', help='For direct key entry type --type quiet')
    optional_named.add_argument('--key', help='Your Planet API Key')
    parser_planet_key.set_defaults(func=planet_key_entry)

    parser_planet_quota = subparsers.add_parser('pquota', help='Prints your Planet Quota Details')
    parser_planet_quota.set_defaults(func=planet_quota_from_parser)

    parser_dasync=subparsers.add_parser('dasync',help='Uses the Planet Client Async Downloader to download Planet Assets: Does not require activation')
    parser_dasync.add_argument('--infile',help='Choose a geojson from geojson.io or the aoi-json you created earlier using ppipe aoijson', required=True)
    parser_dasync.add_argument('--item',help='Choose from Planet Item types Example: PSScene4Band, PSOrthoTile, REOrthoTile etc', required=True)
    parser_dasync.add_argument('--asset',help='Choose an asset type example: anlaytic, analytic_dn,analytic_sr,analytic_xml etc', required=True)
    parser_dasync.add_argument('--local',help='Local Path where Planet Item and asset types are saved', required=True)
    parser_dasync.add_argument('--start', help='Start date filter format YYYY-MM-DD', required=True)
    parser_dasync.add_argument('--end', help='End date filter format YYYY-MM-DD', required=True)
    optional_named = parser_dasync.add_argument_group('Optional named arguments')
    optional_named.add_argument('--cmin', help='Cloud cover minimum between 0-1',default=None)
    optional_named.add_argument('--cmax', help='Cloud cover maximum between 0-1',default=None)
    parser_dasync.set_defaults(func=dasync_from_parser)

    parser_savedsearch=subparsers.add_parser('savedsearch',help='Tool to download saved searches from Planet Explorer')
    parser_savedsearch.add_argument('--name',help='Name of your saved search(It is case sensitive)')
    parser_savedsearch.add_argument('--asset',help='Choose asset type analytic, analytic_xml, analytic_sr, analytic_dn etc')
    parser_savedsearch.add_argument('--local',help='Local Path (full path address) where PlanetAssets are saved')
    optional_named = parser_savedsearch.add_argument_group('Optional named arguments')
    optional_named.add_argument('--limit', help='Choose number of assets you want to download')
    parser_savedsearch.set_defaults(func=savedsearch_from_parser)

    parser_metadata=subparsers.add_parser('metadata',help='Tool to tabulate and convert all metadata files from Planet or Digital Globe Assets')
    parser_metadata.add_argument('--asset', help='Choose PS OrthoTile(PSO)|PS OrthoTile DN(PSO_DN)|PS OrthoTile Visual(PSO_V)|PS4Band Analytic(PS4B)|PS4Band DN(PS4B_DN)|PS4Band SR(PS4B_SR)|PS3Band Analytic(PS3B)|PS3Band DN(PS3B_DN)|PS3Band Visual(PS3B_V)|RE OrthoTile (REO)|RE OrthoTile Visual(REO_V)|DigitalGlobe MultiSpectral(DGMS)|DigitalGlobe Panchromatic(DGP)|PolarGeospatial CenterDEM Strip(PGCDEM)?')
    parser_metadata.add_argument('--mf', help='Metadata folder?')
    parser_metadata.add_argument('--mfile',help='Metadata filename to be exported along with Path.csv')
    parser_metadata.add_argument('--errorlog',default='./errorlog.csv',help='Errorlog to be exported along with Path.csv')
    optional_named = parser_metadata.add_argument_group('Optional named arguments')
    optional_named.add_argument('--dir', help='Path to Image Directory to be used to get ImageTags with metadata. use only with PS4B_SR')
    parser_metadata.set_defaults(func=metadata_from_parser)

    parser_EE1 = subparsers.add_parser(' ', help='-------------------------------------------')
    parser_EE = subparsers.add_parser(' ', help='----Choose from Earth Engine Tools Below----')
    parser_EE2 = subparsers.add_parser(' ', help='-------------------------------------------')

    parser_update=subparsers.add_parser('update',help='Updates Selenium drivers for firefox [windows or linux systems]')
    parser_update.set_defaults(func=update_from_parser)

    parser_ee_user = subparsers.add_parser('ee_user', help='Get Earth Engine API Key & Paste it back to Command line/shell to change user')
    parser_ee_user.set_defaults(func=ee_user_from_parser)

    parser_quota = subparsers.add_parser('quota', help='Print Earth Engine total quota and used quota')
    parser_quota.set_defaults(func=quota_from_parser)

    parser_create = subparsers.add_parser('create',help='Allows the user to create an asset collection or folder in Google Earth Engine')
    parser_create.add_argument('--typ', help='Specify type: collection or folder', required=True)
    parser_create.add_argument('--path', help='This is the path for the earth engine asset to be created; the full path is needed, e.g. users/johndoe/collection', required=True)
    parser_create.set_defaults(func=create_from_parser)

    parser_selupload = subparsers.add_parser('selupload', help='Batch Asset Uploader for Planet Items & Assets using Selenium')
    required_named = parser_selupload.add_argument_group('Required named arguments.')
    required_named.add_argument('--source', help='Path to the directory with images for upload.', required=True)
    required_named.add_argument('--dest', help='Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection', required=True)
    required_named.add_argument('-m', '--metadata', help='Path to CSV with metadata.')
    required_named.add_argument('-mf','--manifest',help='Manifest type to be used,Choose PS OrthoTile(PSO)|PS OrthoTile DN(PSO_DN)|PS OrthoTile Visual(PSO_V)|PS4Band Analytic(PS4B)|PS4Band DN(PS4B_DN)|PS4Band SR(PS4B_SR)|PS3Band Analytic(PS3B)|PS3Band DN(PS3B_DN)|PS3Band Visual(PS3B_V)|RE OrthoTile (REO)|RE OrthoTile Visual(REO_V)')
    optional_named = parser_selupload.add_argument_group('Optional named arguments')
    optional_named.add_argument('--nodata', type=int, help='The value to burn into the raster as NoData (missing data)')
    required_named.add_argument('-u', '--user', help='Google account name (gmail address).')
    optional_named.add_argument('-b', '--bucket', help='Google Cloud Storage bucket name.')

    parser_selupload.set_defaults(func=selupload_from_parser)

    parser_lst = subparsers.add_parser('lst',help='List assets in a folder/collection or write as text file')
    required_named = parser_lst.add_argument_group('Required named arguments.')
    required_named.add_argument('--location', help='This is the location of your folder/collection', required=True)
    required_named.add_argument('--typ', help='Whether you want the list to be printed or output as text [print/report]', required=True)
    optional_named = parser_lst.add_argument_group('Optional named arguments')
    optional_named.add_argument('--items', help="Number of items to list")
    optional_named.add_argument('--output',help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_assetsize = subparsers.add_parser('assetsize',help='Prints collection size in Human Readable form & Number of assets')
    parser_assetsize.add_argument('--asset', help='Earth Engine Asset for which to get size properties', required=True)
    parser_assetsize.set_defaults(func=assetsize_from_parser)

    parser_tasks=subparsers.add_parser('tasks',help='Queries current task status [completed,running,ready,failed,cancelled]')
    parser_tasks.set_defaults(func=tasks_from_parser)

    parser_access = subparsers.add_parser('access',help='Sets Permissions for items in folder')
    parser_access.add_argument('--asset', help='This is the path to the earth engine asset whose permission you are changing folder/collection/image', required=True)
    parser_access.add_argument('--user', help='Full email address of the user, try using "AllUsers" to make it public', required=True, default=False)
    parser_access.add_argument('--role', help='Choose between reader, writer or delete', required=True)
    parser_access.set_defaults(func=access_from_parser)

    parser_cancel = subparsers.add_parser('cancel', help='Cancel all running tasks')
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)

    args = parser.parse_args()

    #ee.Initialize()
    args.func(args)
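
A note on the space-named subparsers registered above (the ones whose help strings are rows of dashes): they appear to exist only so their help text renders as divider lines in the --help listing and are not meant to be dispatched. A minimal standalone sketch of the same trick, not taken from any of the projects in these examples:

import argparse

# Sketch only: a subparser registered under ' ' is not meant to be invoked;
# its help string simply shows up as a divider line when `demo --help` runs.
parser = argparse.ArgumentParser(prog='demo')
subparsers = parser.add_subparsers()
subparsers.add_parser(' ', help='----Choose from tools below----')

parser_run = subparsers.add_parser('run', help='Do the real work')
parser_run.set_defaults(func=lambda args: print('running'))

if __name__ == '__main__':
    args = parser.parse_args()
    if hasattr(args, 'func'):
        args.func(args)
    else:
        parser.print_help()
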
Example #28
0
def check(params):
    # Pre-call hook passed as check_params below: ensure the terminal handle
    # is open, try to reopen it once, and return -0xFF if the reader fails.
    terminal = params[0]
    if not terminal.is_open():
        try:
            terminal.reopen()
        except ReaderError:
            import time
            time.sleep(1)
            return -0xFF

terminal_set_state = load('terminal_set_state', (Terminal, c_uint8, P(TerminalState)), check_params=check)
terminal_reset_entries = load('terminal_reset_entries', (Terminal, c_uint8,), check_params=check)
terminal_get_entries = load('terminal_get_entries', (Terminal, c_uint8, P(TerminalEntries)), check_params=check)
terminal_set_counters = load('terminal_set_counters', (Terminal, c_uint8, P(TerminalCounters)), check_params=check)
terminal_get_readers = load('terminal_get_readers', (Terminal, c_uint8, P(TerminalReaders)), check_params=check)
terminal_ack_readers = load('terminal_ack_readers', (Terminal, c_uint8,), check_params=check)
terminal_get_barcode = load('terminal_get_barcode', (Terminal, c_uint8, P(TerminalBarcode)), check_params=check)
terminal_ack_barcode = load('terminal_ack_barcode', (Terminal, c_uint8,), check_params=check)
terminal_set_strings = load('terminal_set_strings', (Terminal, c_uint8, P(TerminalStrings)), check_params=check)
terminal_set_time = load('terminal_set_time', (Terminal, c_uint8, P(TerminalTime)), check_params=check)
terminal_show_message = load('terminal_show_message', (Terminal, c_uint8, c_char_p, c_uint8, c_uint8), check_params=check)

if __name__ == '__main__':
    import config
    from db import DB
    d = DB()

    config.setup_logging()

    t = Terminal()

    TerminalStrings(d).set(t, 2)
Example #29
0
def main(args=None):
    setup_logging()
    parser = argparse.ArgumentParser(description='Planet Pipeline with Google Earth Engine Batch Addons')

    subparsers = parser.add_subparsers()
    parser_pp1 = subparsers.add_parser(' ', help='---------------------------------------')
    parser_P = subparsers.add_parser(' ', help='-----Choose from Planet Tools Below-----')
    parser_pp2 = subparsers.add_parser(' ', help='---------------------------------------')
    
    parser_planet_key = subparsers.add_parser('planetkey', help='Enter your planet API Key')
    parser_planet_key.set_defaults(func=planet_key_from_parser)
    
    parser_aoijson=subparsers.add_parser('aoijson',help='Tool to convert a KML, Shapefile, WKT, GeoJSON or Landsat WRS PathRow file to an AreaOfInterest.JSON file with a structured query for use with Planet API 1.0')
    parser_aoijson.add_argument('--start', help='Start date in YYYY-MM-DD?')
    parser_aoijson.add_argument('--end', help='End date in YYYY-MM-DD?')
    parser_aoijson.add_argument('--cloud', help='Maximum cloud cover (0-1), representing 0-100')
    parser_aoijson.add_argument('--inputfile',help='Choose a kml/shapefile/geojson or WKT file for AOI(KML/SHP/GJSON/WKT) or WRS (6 digit RowPath Example: 023042)')
    parser_aoijson.add_argument('--geo', default='./map.geojson',help='map.geojson/aoi.kml/aoi.shp/aoi.wkt file')
    parser_aoijson.add_argument('--loc', help='Location where aoi.json file is to be stored')
    parser_aoijson.set_defaults(func=aoijson_from_parser)

    parser_activatepl=subparsers.add_parser('activatepl',help='Tool to query and/or activate Planet Assets')
    parser_activatepl.add_argument('--aoi', help='Choose aoi.json file created earlier')
    parser_activatepl.add_argument('--action', help='choose between check/activate')
    parser_activatepl.add_argument('--asst',help='Choose between planet asset types (PSOrthoTile analytic/PSOrthoTile analytic_dn/PSOrthoTile visual/PSScene4Band analytic/PSScene4Band analytic_dn/PSScene3Band analytic/PSScene3Band analytic_dn/PSScene3Band visual/REOrthoTile analytic/REOrthoTile visual)')
    parser_activatepl.set_defaults(func=activatepl_from_parser)

    parser_space=subparsers.add_parser('space',help='Tool to query total download size of activated assets & local space left for download')
    parser_space.add_argument('--aoi', help='Choose aoi.json file created earlier')
    parser_space.add_argument('--local', help='local path where you are downloading assets')
    parser_space.add_argument('--asset',help='Choose between planet asset types (PSOrthoTile analytic/PSOrthoTile analytic_dn/PSOrthoTile visual/PSScene4Band analytic/PSScene4Band analytic_dn/PSScene3Band analytic/PSScene3Band analytic_dn/PSScene3Band visual/REOrthoTile analytic/REOrthoTile visual)')
    parser_space.set_defaults(func=space_from_parser)

    parser_downloadpl=subparsers.add_parser('downloadpl',help='Tool to download Planet Assets')
    parser_downloadpl.add_argument('--aoi', help='Choose aoi.json file created earlier')
    parser_downloadpl.add_argument('--asst',help='Choose between planet asset types, or append _xml for metadata, e.g. PSOrthoTile analytic_xml. Assets include: (PSOrthoTile analytic/PSOrthoTile analytic_dn/PSOrthoTile visual/PSScene4Band analytic/PSScene4Band analytic_dn/PSScene3Band analytic/PSScene3Band analytic_dn/PSScene3Band visual/REOrthoTile analytic/REOrthoTile visual)')
    parser_downloadpl.add_argument('--pathway',help='Folder path where Planet assets are saved, for example ./PlanetScope ./RapidEye')
    parser_downloadpl.set_defaults(func=downloadpl_from_parser)

    parser_metadata=subparsers.add_parser('metadata',help='Tool to tabulate and convert all metadata files from Planet or Digital Globe Assets')
    parser_metadata.add_argument('--asset', help='Choose PS OrthoTile(PSO)|PS OrthoTile DN(PSO_DN)|PS OrthoTile Visual(PSO_V)|PS4Band Analytic(PS4B)|PS4Band DN(PS4B_DN)|PS4Band SR(PS4B_SR)|PS3Band Analytic(PS3B)|PS3Band DN(PS3B_DN)|PS3Band Visual(PS3B_V)|RE OrthoTile (REO)|RE OrthoTile Visual(REO_V)|DigitalGlobe MultiSpectral(DGMS)|DigitalGlobe Panchromatic(DGP)|PolarGeospatial CenterDEM Strip(PGCDEM)?')
    parser_metadata.add_argument('--mf', help='Metadata folder?')
    parser_metadata.add_argument('--mfile',help='Metadata filename to be exported along with Path.csv')
    parser_metadata.add_argument('--errorlog',default='./errorlog.csv',help='Errorlog to be exported along with Path.csv')
    optional_named = parser_metadata.add_argument_group('Optional named arguments')
    optional_named.add_argument('--dir', help='Path to Image Directory to be used to get ImageTags with metadata. use only with PS4B_SR')
    parser_metadata.set_defaults(func=metadata_from_parser)

    parser_EE1 = subparsers.add_parser(' ', help='-------------------------------------------')
    parser_EE = subparsers.add_parser(' ', help='----Choose from Earth Engine Tools Below----')
    parser_EE2 = subparsers.add_parser(' ', help='-------------------------------------------')

    parser_ee_user = subparsers.add_parser('ee_user', help='Get Earth Engine API Key & Paste it back to Command line/shell to change user')
    parser_ee_user.set_defaults(func=ee_user_from_parser)
    
    parser_create = subparsers.add_parser('create',help='Allows the user to create an asset collection or folder in Google Earth Engine')
    parser_create.add_argument('--typ', help='Specify type: collection or folder', required=True)
    parser_create.add_argument('--path', help='This is the path for the earth engine asset to be created; the full path is needed, e.g. users/johndoe/collection', required=True)
    parser_create.set_defaults(func=create_from_parser)

    parser_upload = subparsers.add_parser('upload', help='Batch Asset Uploader.')
    required_named = parser_upload.add_argument_group('Required named arguments.')
    required_named.add_argument('--source', help='Path to the directory with images for upload.', required=True)
    required_named.add_argument('--dest', help='Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection', required=True)
    optional_named = parser_upload.add_argument_group('Optional named arguments')
    optional_named.add_argument('-m', '--metadata', help='Path to CSV with metadata.')
    optional_named.add_argument('-mf','--manifest',help='Manifest type to be used,for PlanetScope Orthotile|"PSO" or PS4Band Surface Reflectance|"PS4B_SR"')
    optional_named.add_argument('--large', action='store_true', help='(Advanced) Use multipart upload. Might help if upload of large '
                                                                     'files is failing on some systems. Might cause other issues.')
    optional_named.add_argument('--nodata', type=int, help='The value to burn into the raster as NoData (missing data)')

    required_named.add_argument('-u', '--user', help='Google account name (gmail address).')
    optional_named.add_argument('-s', '--service-account', help='Google Earth Engine service account.')
    optional_named.add_argument('-k', '--private-key', help='Google Earth Engine private key file.')
    optional_named.add_argument('-b', '--bucket', help='Google Cloud Storage bucket name.')
    parser_upload.set_defaults(func=upload_from_parser)

    parser_lst = subparsers.add_parser('lst',help='List assets in a folder/collection or write as text file')
    required_named = parser_lst.add_argument_group('Required named arguments.')
    required_named.add_argument('--location', help='This is the location of your folder/collection', required=True)
    required_named.add_argument('--typ', help='Whether you want the list to be printed or output as text [print/report]', required=True)
    optional_named = parser_lst.add_argument_group('Optional named arguments')
    optional_named.add_argument('--items', help="Number of items to list")
    optional_named.add_argument('--output',help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_ee_report = subparsers.add_parser('ee_report',help='Prints a detailed report of all Earth Engine Assets includes Asset Type, Path,Number of Assets,size(MB),unit,owner,readers,writers')
    parser_ee_report.add_argument('--outfile', help='This is the location of your report CSV file', required=True)
    parser_ee_report.set_defaults(func=ee_report_from_parser)

    parser_assetsize = subparsers.add_parser('assetsize',help='Prints collection size in Human Readable form & Number of assets')
    parser_assetsize.add_argument('--asset', help='Earth Engine Asset for which to get size properties', required=True)
    parser_assetsize.set_defaults(func=assetsize_from_parser)

    parser_tasks=subparsers.add_parser('tasks',help='Queries current task status [completed,running,ready,failed,cancelled]')
    parser_tasks.set_defaults(func=tasks_from_parser)

    parser_genreport=subparsers.add_parser('taskreport',help='Create a report of all tasks and exports to a CSV file')
    parser_genreport.add_argument('--r',help='Folder Path where the reports will be saved')
    parser_genreport.set_defaults(func=genreport_from_parser)


    parser_delete = subparsers.add_parser('delete', help='Deletes collection and all items inside. Supports Unix-like wildcards.')
    parser_delete.add_argument('id', help='Full path to asset for deletion. Recursively removes all folders, collections and images.')
    parser_delete.set_defaults(func=delete_collection_from_parser)

    parser_mover=subparsers.add_parser('mover',help='Moves all assets from one collection to another')
    parser_mover.add_argument('--assetpath',help='Existing path of assets')
    parser_mover.add_argument('--finalpath',help='New path for assets')
    parser_mover.set_defaults(func=mover_from_parser)

    parser_copy=subparsers.add_parser('copy',help='Copies all assets from one collection to another: Including copying from other users if you have read permission to their assets')
    parser_copy.add_argument('--initial',help='Existing path of assets')
    parser_copy.add_argument('--final',help='New path for assets')
    parser_copy.set_defaults(func=copy_from_parser)

    parser_access = subparsers.add_parser('access',help='Sets Permissions for Images, Collection or all assets in EE Folder Example: python ee_permissions.py --mode "folder" --asset "users/john/doe" --user "[email protected]:R"')
    parser_access.add_argument('--mode', help='This lets you select whether the permission change applies to a folder, collection or image', required=True)
    parser_access.add_argument('--asset', help='This is the path to the earth engine asset whose permission you are changing folder/collection/image', required=True)
    parser_access.add_argument('--user', help="""This is the email address to whom you want to give read or write permission Usage: "[email protected]:R" or "[email protected]:W" R/W refers to read or write permission""", required=True, default=False)
    parser_access.set_defaults(func=access_from_parser)

    parser_collprop=subparsers.add_parser('collprop',help='Sets Overall Properties for Image Collection')
    parser_collprop.add_argument('--coll',help='Path of Image Collection')
    parser_collprop.add_argument('--p',help='"system:description=Description"/"system:provider_url=url"/"system:tags=tags"/"system:title=title"')
    parser_collprop.set_defaults(func=collprop_from_parser)

    parser_cancel = subparsers.add_parser('cancel', help='Cancel all running tasks')
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)

    args = parser.parse_args()

    ee.Initialize()
    args.func(args)
Example #30
0
from flask import Flask

import config
from apis.v1 import blueprint as api1

config.setup_logging()
SERVER = Flask(__name__)
SERVER.register_blueprint(api1)

if __name__ == "__main__":
    SERVER.env = config.DEV_ENV
    SERVER.run(host=config.DEV_HOST, port=config.DEV_PORT, debug=True)
Example #31
0
def hello():
    setup_logging()
    logger = logging.getLogger(__name__)
    logger.info('Making an entry from another module.')
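
The snippet above relies on a setup_logging helper whose body is not shown here. A minimal hypothetical sketch of what such a helper could look like, using logging.config.dictConfig; the handler names and format string below are assumptions, not taken from any of the projects in these examples:

import logging
import logging.config


def setup_logging(default_level=logging.INFO):
    # Hypothetical helper: configure the root logger with one console handler
    # so that module-level loggers (logging.getLogger(__name__)) propagate
    # their records to stderr with a timestamped format.
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'standard': {'format': '%(asctime)s %(name)s %(levelname)s %(message)s'},
        },
        'handlers': {
            'console': {'class': 'logging.StreamHandler', 'formatter': 'standard'},
        },
        'root': {'handlers': ['console'], 'level': default_level},
    })
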
Example #32
0
def run_model():
    # set up the logger to generate a log file; write entries with logger.debug(msg) or logger.info(msg)
    setup_logging(level='DEBUG',
                  fn=f'{config_dict["proj_dir"]}/{config_dict["log_fn"]}')

    # Start Input GUI
    user_inputs = input_gui()
    logger.info('Starting West Nile Virus Simulation')
    logger.info(f'Simulation Parameters: {user_inputs}')

    # setup arcpy environment
    output_db = config_dict.get('output_gdb_dir')
    arcpy.AddMessage(f'output db: {output_db}')
    aprx_path = set_path(config_dict.get('proj_dir'), 'WestNileOutbreak.aprx')
    aprx = arcpy.mp.ArcGISProject(aprx_path)
    arcpy.AddMessage(f'aprx path: {aprx.filePath}')
    mp = get_map(aprx, 'Map')

    # Buffer Analysis
    buf_fc_list = [
        'Mosquito_Larval_Sites', 'Wetlands_Regulatory', 'Lakes_and_Reservoirs',
        'OSMP_Properties', 'avoid_points'
    ]
    for fc in buf_fc_list:
        buf_distance = user_inputs['buf_distance']
        input_fc_name = fc
        buf_fc_name = f'{fc}_buf'
        buf_fc = set_path(output_db, buf_fc_name)
        buffer(mp, input_fc_name, buf_fc, buf_fc_name, buf_distance)
        aprx.save()

    # Intersect Analysis
    # build intersect_fc_list for the intersect function (each entry resolved to a path inside output_db)
    intersect_fc_list = []
    for fn in buf_fc_list:
        if fn == 'avoid_points':
            arcpy.AddMessage(
                '\nSkipping avoid_points for Intersect Analysis; they will be used for Symmetrical Difference.\n'
            )
        else:
            intersect_fn = set_path(output_db, f'{fn}_buf')
            intersect_fc_list.append(intersect_fn)
    intersect_fc_name = user_inputs['intersect_fc']
    inter = set_path(output_db, intersect_fc_name)
    intersect(mp, intersect_fc_list, inter, intersect_fc_name)
    aprx.save()

    # Query by Location
    logger.debug('Starting Spatial Join geoprocessing.')
    join_output_name = 'IntersectAnalysis_Join_BoulderAddresses'
    jofc = set_path(output_db, join_output_name)
    sp = arcpy.SpatialJoin_analysis('Boulder_Addresses',
                                    inter,
                                    jofc,
                                    join_type="KEEP_COMMON",
                                    match_option="WITHIN")
    check_status(sp)
    logger.debug('Spatial Join geoprocessing complete.')

    # Record Count
    logger.debug('Starting Get Count geoprocessing.')
    record_count = arcpy.GetCount_management(jofc)
    arcpy.AddMessage(f'\nBoulder Addresses at-risk =  {record_count[0]}\n')
    logger.debug('Get Count geoprocessing complete.')

    # Clip (Analysis)
    # https://pro.arcgis.com/en/pro-app/latest/tool-reference/analysis/clip.htm
    logger.debug('Starting Clip geoprocessing.')
    inFeatures = set_path(output_db, 'avoid_points_buf')
    clipFeatures = set_path(output_db, user_inputs['intersect_fc'])
    clipOutput = set_path(output_db, 'clip_intersect')

    # Execute Clip
    c = arcpy.Clip_analysis(inFeatures, clipFeatures, clipOutput)
    check_status(c)
    logger.debug('Clip geoprocessing complete.')

    # Record re-count
    logger.debug('Starting Spatial Join geoprocessing.')
    join_output_name = 'clip_intersect_Join_BoulderAddresses'
    jofc = set_path(output_db, join_output_name)
    sp = arcpy.SpatialJoin_analysis('Boulder_Addresses',
                                    set_path(output_db, 'clip_intersect'),
                                    jofc,
                                    join_type="KEEP_COMMON",
                                    match_option="WITHIN")
    check_status(sp)
    logger.debug('Spatial Join geoprocessing complete.')
    logger.debug('Starting Get Count geoprocessing.')
    record_count = arcpy.GetCount_management(jofc)
    logger.debug('Get Count geoprocessing complete.')
    arcpy.AddMessage(
        f'\nBoulder Addresses in risk zone that need to be opted out of pesticide spraying =  {record_count[0]}\n'
    )

    # Add desired features to output map and colour the features
    map_features = [(user_inputs['intersect_fc'], [255, 235, 190, 100]),
                    ('avoid_points_buf', [115, 178, 255, 100]),
                    ('clip_intersect_Join_BoulderAddresses',
                     [102, 119, 205, 100])]
    for f, c in map_features:
        fc_name = f
        fc = set_path(output_db, f)
        colour = c
        add_feature_to_map(mp, fc_name, fc, colour)
    aprx.save()

    # Export final map
    export_map(user_inputs['map_subtitle'])
    aprx.save()
Example #33
0
def main(args=None):
    setup_logging()
    parser = argparse.ArgumentParser(
        description="Planet Pipeline with Google Earth Engine Batch Addons")

    subparsers = parser.add_subparsers()
    parser_pp1 = subparsers.add_parser(
        " ", help="---------------------------------------")
    parser_P = subparsers.add_parser(
        " ", help="-----Choose from Planet Tools Below-----")
    parser_pp2 = subparsers.add_parser(
        " ", help="---------------------------------------")

    parser_planet_key = subparsers.add_parser("planetkey",
                                              help="Enter your planet API Key")
    optional_named = parser_planet_key.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--type",
                                help="For direct key entry type --type quiet")
    optional_named.add_argument("--key", help="Your Planet API Key")
    parser_planet_key.set_defaults(func=planet_key_entry)

    parser_planet_quota = subparsers.add_parser(
        "pquota", help="Prints your Planet Quota Details")
    parser_planet_quota.set_defaults(func=planet_quota_from_parser)

    parser_dasync = subparsers.add_parser(
        "dasync",
        help=
        "Uses the Planet Client Async Downloader to download Planet Assets: Does not require activation",
    )
    parser_dasync.add_argument(
        "--infile",
        help=
        "Choose a geojson from geojson.io or the aoi-json you created earlier using ppipe aoijson",
        required=True,
    )
    parser_dasync.add_argument(
        "--item",
        help=
        "Choose from Planet Item types Example: PSScene4Band, PSOrthoTile, REOrthoTile etc",
        required=True,
    )
    parser_dasync.add_argument(
        "--asset",
        help=
        "Choose an asset type example: anlaytic, analytic_dn,analytic_sr,analytic_xml etc",
        required=True,
    )
    parser_dasync.add_argument(
        "--local",
        help="Local Path where Planet Item and asset types are saved",
        required=True,
    )
    parser_dasync.add_argument("--start",
                               help="Start date filter format YYYY-MM-DD",
                               required=True)
    parser_dasync.add_argument("--end",
                               help="End date filter format YYYY-MM-DD",
                               required=True)
    optional_named = parser_dasync.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument("--cmin",
                                help="Cloud cover minimum between 0-1",
                                default=None)
    optional_named.add_argument("--cmax",
                                help="Cloud cover maximum between 0-1",
                                default=None)
    parser_dasync.set_defaults(func=dasync_from_parser)

    parser_savedsearch = subparsers.add_parser(
        "savedsearch",
        help="Tool to download saved searches from Planet Explorer")
    parser_savedsearch.add_argument(
        "--name", help="Name of your saved search(It is case sensitive)")
    parser_savedsearch.add_argument(
        "--asset",
        help=
        "Choose asset type analytic, analytic_xml, analytic_sr, analytic_dn etc",
    )
    parser_savedsearch.add_argument(
        "--local",
        help="Local Path (full path address) where PlanetAssets are saved")
    optional_named = parser_savedsearch.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--limit", help="Choose number of assets you want to download")
    parser_savedsearch.set_defaults(func=savedsearch_from_parser)

    parser_metadata = subparsers.add_parser(
        "metadata",
        help=
        "Tool to tabulate and convert all metadata files from Planet or Digital Globe Assets",
    )
    parser_metadata.add_argument(
        "--asset",
        help=
        "Choose PS OrthoTile(PSO)|PS OrthoTile DN(PSO_DN)|PS OrthoTile Visual(PSO_V)|PS4Band Analytic(PS4B)|PS4Band DN(PS4B_DN)|PS4Band SR(PS4B_SR)|PS3Band Analytic(PS3B)|PS3Band DN(PS3B_DN)|PS3Band Visual(PS3B_V)|RE OrthoTile (REO)|RE OrthoTile Visual(REO_V)|DigitalGlobe MultiSpectral(DGMS)|DigitalGlobe Panchromatic(DGP)|PolarGeospatial CenterDEM Strip(PGCDEM)?",
    )
    parser_metadata.add_argument("--mf", help="Metadata folder?")
    parser_metadata.add_argument(
        "--mfile", help="Metadata filename to be exported along with Path.csv")
    parser_metadata.add_argument(
        "--errorlog",
        default="./errorlog.csv",
        help="Errorlog to be exported along with Path.csv",
    )
    optional_named = parser_metadata.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--dir",
        help=
        "Path to Image Directory to be used to get ImageTags with metadata. use only with PS4B_SR",
    )
    parser_metadata.set_defaults(func=metadata_from_parser)

    parser_EE1 = subparsers.add_parser(
        " ", help="-------------------------------------------")
    parser_EE = subparsers.add_parser(
        " ", help="----Choose from Earth Engine Tools Below----")
    parser_EE2 = subparsers.add_parser(
        " ", help="-------------------------------------------")

    parser_update = subparsers.add_parser(
        "update", help="Updates Selenium drivers for firefox")
    parser_update.set_defaults(func=update_from_parser)

    parser_selsetup = subparsers.add_parser(
        "selsetup",
        help=
        "Non headless setup for new google account, use if upload throws errors",
    )
    parser_selsetup.set_defaults(func=selsetup_from_parser)

    parser_ee_user = subparsers.add_parser(
        "ee_user",
        help=
        "Get Earth Engine API Key & Paste it back to Command line/shell to change user",
    )
    parser_ee_user.set_defaults(func=ee_user_from_parser)

    parser_quota = subparsers.add_parser(
        "quota", help="Print Earth Engine total quota and used quota")
    parser_quota.set_defaults(func=quota_from_parser)

    parser_create = subparsers.add_parser(
        "create",
        help=
        "Allows the user to create an asset collection or folder in Google Earth Engine",
    )
    parser_create.add_argument("--typ",
                               help="Specify type: collection or folder",
                               required=True)
    parser_create.add_argument(
        "--path",
        help=
        "This is the path for the earth engine asset to be created full path is needsed eg: users/johndoe/collection",
        required=True,
    )
    parser_create.set_defaults(func=create_from_parser)

    parser_selupload = subparsers.add_parser(
        "selupload",
        help="Batch Asset Uploader for Planet Items & Assets using Selenium",
    )
    required_named = parser_selupload.add_argument_group(
        "Required named arguments.")
    required_named.add_argument(
        "--source",
        help="Path to the directory with images for upload.",
        required=True)
    required_named.add_argument(
        "--dest",
        help=
        "Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection",
        required=True,
    )
    required_named.add_argument("-m",
                                "--metadata",
                                help="Path to CSV with metadata.")
    required_named.add_argument(
        "-mf",
        "--manifest",
        help=
        "Manifest type to be used,Choose PS OrthoTile(PSO)|PS OrthoTile DN(PSO_DN)|PS OrthoTile Visual(PSO_V)|PS4Band Analytic(PS4B)|PS4Band DN(PS4B_DN)|PS4Band SR(PS4B_SR)|PS3Band Analytic(PS3B)|PS3Band DN(PS3B_DN)|PS3Band Visual(PS3B_V)|RE OrthoTile (REO)|RE OrthoTile Visual(REO_V)",
    )
    optional_named = parser_selupload.add_argument_group(
        "Optional named arguments")
    optional_named.add_argument(
        "--nodata",
        type=int,
        help="The value to burn into the raster as NoData (missing data)",
    )
    required_named.add_argument("-u",
                                "--user",
                                help="Google account name (gmail address).")
    optional_named.add_argument("-b",
                                "--bucket",
                                help="Google Cloud Storage bucket name.")

    parser_selupload.set_defaults(func=selupload_from_parser)

    parser_lst = subparsers.add_parser(
        "lst", help="List assets in a folder/collection or write as text file")
    required_named = parser_lst.add_argument_group("Required named arguments.")
    required_named.add_argument(
        "--location",
        help="This it the location of your folder/collection",
        required=True,
    )
    required_named.add_argument(
        "--typ",
        help=
        "Whether you want the list to be printed or output as text[print/report]",
        required=True,
    )
    optional_named = parser_lst.add_argument_group("Optional named arguments")
    optional_named.add_argument("--items", help="Number of items to list")
    optional_named.add_argument(
        "--output", help="Folder location for report to be exported")
    parser_lst.set_defaults(func=lst_from_parser)

    parser_assetsize = subparsers.add_parser(
        "assetsize",
        help="Prints collection size in Human Readable form & Number of assets",
    )
    parser_assetsize.add_argument(
        "--asset",
        help="Earth Engine Asset for which to get size properties",
        required=True,
    )
    parser_assetsize.set_defaults(func=assetsize_from_parser)

    parser_tasks = subparsers.add_parser(
        "tasks",
        help=
        "Queries current task status [completed,running,ready,failed,cancelled]",
    )
    parser_tasks.set_defaults(func=tasks_from_parser)

    parser_access = subparsers.add_parser(
        "access", help="Sets Permissions for items in folder")
    parser_access.add_argument(
        "--asset",
        help=
        "This is the path to the earth engine asset whose permission you are changing folder/collection/image",
        required=True,
    )
    parser_access.add_argument(
        "--user",
        help=
        'Full email address of the user, try using "AllUsers" to make it public',
        required=True,
        default=False,
    )
    parser_access.add_argument("--role",
                               help="Choose between reader, writer or delete",
                               required=True)
    parser_access.set_defaults(func=access_from_parser)

    parser_cancel = subparsers.add_parser("cancel",
                                          help="Cancel all running tasks")
    parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser)

    args = parser.parse_args()

    # ee.Initialize()
    args.func(args)
Example #34
0
import json
import re
from flask import request, make_response, render_template, jsonify
from flask import Flask
from flask import Response

from config import setup_logging, track_stats, setup_sso
from hpc.sso.flask_client import exempt_from_sso

import logging
import sys
import traceback
import db

setup_logging()
app = Flask(__name__)
app.debug = True
app.db = db.get_basic_client()

hulu_sso = setup_sso(app)

logger = logging.getLogger(__name__)


@app.route('/')
@track_stats.timed("ResponseTimeByEndpoint.index")
def index():
    return render_template('index.html')


@app.route('/static/<path:path>')
Example #35
0
"""
Aqui se cargan las configuraciones y se incia el servidor como tal.
"""
import socket
import logging
import sys
from servidor_http.servidor import run
from config import HOST
from config import PORT
from config import setup_logging

Log = logging.getLogger('StarLord.run')


if __name__ == '__main__':

    print(chr(27) + "[2J")  # clear the terminal

    # configure the logging tool
    setup_logging(True)

    try:
        # run the server with the HOST and PORT specified in config.py;
        # when the host is global (i.e. 0.0.0.0) it accepts connections from any IP
        print("Listening on all interfaces on port: " + str(PORT))
        run(host='0.0.0.0', port=PORT)

    except KeyboardInterrupt:
        Log.info('shutting down StarLord...')