def main():
    parser = argparse.ArgumentParser(description='blood_flow_rest: the rest service for blood_flow')
    parser.add_argument('-c', '--config', default="api.json", help="config file, can be overridden by parameters")
    parser.add_argument('-l', '--logfile', default=None, help="Logfile to write to, default is stdout")
    parser.add_argument('-p', '--port', type=int, help="Port to bind to")  # parsed as int so it can go straight into the server config
    parser.add_argument('-v', '--verbose', default=4, action="count", help="Increase the verbosity of logging output")

    args = parser.parse_args()

    config = config_utils.readin_config_file(args.config)

    # command-line arguments override the config file
    if args.port:
        config.server.port = args.port
    if args.logfile:
        config.logfile = args.logfile

    logger.init(name=config.name, log_file=config.logfile)
    logger.set_log_level(args.verbose)

    if 'database' in config:
        global db
        db = <PROJECT>_db.DB()
        db.connect(config.database)

    urls = [('/', RootHandler)] + oauth.init(**config.oauth)

    tornado.run_app(urls, **config.server)
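# A minimal sketch of the api.json structure implied by main() above; the
# keys (name, logfile, server, database, oauth) are the ones the code reads,
# while all values shown are illustrative assumptions:
#
# {
#     "name": "blood_flow_rest",
#     "logfile": null,
#     "server": {"port": 8080},
#     "database": "<connection details passed to db.connect()>",
#     "oauth": {"...": "..."}
# }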
def init(args):
    global config

    if args.config:
        config = config_utils.readin_config_file(args.config[0])
        logger.init(name=program_name, log_file=config.ecc.get('logfile', None))
        logger.set_log_level(args.verbose)
        logger.info(f'{program_name} (v:{version})')
        # raw string so \d reaches the regex engine intact
        config.ecc.name_regex = config.ecc.name_template.format(r"(\d+)")
        ecc.set_config(config)
        ecc.openstack_connect(config.openstack)
        cloudflare_utils.init(config.ecc.cloudflare_apikey, config.ecc.cloudflare_email)
    else:
        logger.init(name=program_name)
        logger.set_log_level(args.verbose)
        logger.info(f'{program_name} (v:{version})')
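# Illustration of the name_regex line above, assuming a hypothetical
# name_template of "ecc{}.example.com":
#
#   "ecc{}.example.com".format(r"(\d+)")  ->  r"ecc(\d+).example.com"
#
# i.e. a regex that matches this cluster's numbered nodes.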
def main():
    parser = argparse.ArgumentParser(description='consumes a mq')
    # the default makes -c optional; required=True would never let it apply
    parser.add_argument('-c', '--config', default="conductor.yml", help="conductor config file")
    parser.add_argument('-l', '--logfile', default=None, help="Logfile to write to, default is stdout")
    parser.add_argument('-v', '--verbose', default=4, action="count", help="Increase the verbosity of logging output")

    args = parser.parse_args()
    config = init(args.config)

    if args.logfile:
        logger.init(name='nga_runner', log_file=args.logfile, rotate_logs=True)
    else:
        logger.init(name='nga_runner')

    logger.set_log_level(args.verbose)
    logger.info(f'startup (v:{version})')

    api_requests.set_token(config['key'])

    global mq
    mq.connect(uri=config['mq_uri'], prefetch_count=1)

    try:
        mq.consume(route='default', callback=do_work)
    except KeyboardInterrupt:
        mq.channel.stop_consuming()

    # Wait for all to complete
    # logger.debug('waiting for threads')
    mq.channel.close()
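# The config handed to init() needs at least the two keys used above; a
# minimal sketch of conductor.yml with placeholder values (the amqp URI
# scheme is an assumption based on the mq/prefetch_count usage):
#
# key: "<token for api_requests.set_token>"
# mq_uri: "amqp://user:password@localhost:5672/vhost"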
def main():
    # Endpoints setup
    parser = argparse.ArgumentParser(description='nels-galaxy-api: extending the functionality of galaxy')
    parser.add_argument('-c', '--config-file', required=True, help="nels-galaxy-api config file")
    parser.add_argument('-l', '--logfile', default=None, help="Logfile to write to, default is stdout")
    parser.add_argument('-v', '--verbose', default=4, action="count", help="Increase the verbosity of logging output")
    parser.add_argument('-D', '--development', default=False, action="store_true", help="run in development mode")

    args = parser.parse_args()

    if args.development:
        global DEV
        DEV = True

    if args.logfile:
        logger.init(name='nels-galaxy-api', log_file=args.logfile)
    else:
        logger.init(name='nels-galaxy-api')

    logger.set_log_level(args.verbose)
    logger.info(f'startup nels_galaxy_api (v:{version})')

    config = init(args.config_file)

    # Base functionality
    urls = [
        ('/', RootHandler),           # Done
        (r'/info/?$', Info),          # Done
        (r'/state/(\w+)/?$', State),  # Done

        # for the cli...
        (r'/users/?$', Users),  # Done
        (r"/user/({email_match})/histories/?$".format(email_match=string_utils.email_match), UserHistories),  # Done
        (r"/user/({email_match})/exports/?$".format(email_match=string_utils.email_match), UserExports),      # all, brief is default  # Done
        (r"/user/({email_match})/imports/?$".format(email_match=string_utils.email_match), UserImports),      # all, brief is default  # Done
        (r"/user/({email_match})/api-key/?$".format(email_match=string_utils.email_match), UserApikey),       # to test

        # for proxying into the usegalaxy tracking api, will get user email and instance from the galaxy client.
        (r"/user/exports/?$", ExportsListProxy),  # done
        (r"/user/export/(\w+)/?$", UserExport),   # done
        (r"/user/imports/?$", UserImportsList),   #
        (r"/user/import/(\w+)/?$", UserImport),   # done
        (r'/user/(\w+)/?$', User),                # Done

        (r'/history/export/request/?$', HistoryExportRequest),  # Register export request  # Done
        (r'/history/import/request/?$', HistoryImportRequest),  #
        (r'/history/export/(\w+)?$', HistoryExport),  # export_id, last one per history is default  # skip
        (r'/history/export/?$', HistoryExport),       # possible to search by history_id  # skip
        (r'/history/import/(\w+)?$', HistoryImport),  # import_id, last one per history is default  # skip
        (r'/history/import/?$', HistoryImport),       # possible to search by history_id  # skip
        (r'/history/exports/(all)/?$', HistoryExportsList),  # for the local instance, all, brief is default  # done
        (r'/history/exports/?$', HistoryExportsList),        # for the local instance, all, brief is default  # done
        (r'/history/imports/(all)/?$', HistoryImportsList),  # for the local instance, all, brief is default  # done
        (r'/history/imports/?$', HistoryImportsList),        # for the local instance, all, brief is default  # done
        (r'/history/download/(\w+)/?$', HistoryDownload),    # fetching exported histories  # skip
    ]

    # Terms of service server:
    if 'tos_server' in config and config['tos_server']:
        urls += [(r'/tos/?$', Tos)]

    # for the orchestrator functionality:
    if 'master' in config and config['master']:
        logger.debug("setting the master endpoints")
        urls += [
            (r'/rabbitmq_status/?$', RabbitMQStatus),      # check rabbitmq connection status
            (r'/export/(\w+)/requeue/?$', RequeueExport),  # requeue export request
            (r'/import/(\w+)/requeue/?$', RequeueImport),  # requeue import request
            (r"/export/(\w+)/(\w+)/?$", Export),  # instance-id, state-id (post)  # done
            (r'/export/(\w+)/?$', Export),        # get or patch an export request  # skip
            (r"/import/(\w+)/?$", Import),        # state-id (post)  #
            (r"/exports/({email_match})/?$".format(email_match=string_utils.email_match), ExportsList),        # user_email  # done
            (r"/exports/({email_match})/(\w+)/?$".format(email_match=string_utils.email_match), ExportsList),  # user_email, instance. If user_email == all, export all entries for instance  # done
            (r"/exports/(all)/(\w+)/?$", ExportsList),  # done
            (r"/imports/({email_match})/?$".format(email_match=string_utils.email_match), ImportsList),        # user_email

            (r'/exports/?$', ExportsList),  # All entries in the table, for the cli (different key?)  # done
            (r'/imports/?$', ImportsList),  # All entries in the table, for the cli (different key?)  # done
            (r'/jobs/?$', JobsList),        # All entries in the table, for the cli (different key?)  # done

            # For testing the setup -> ONLY FOR THE MASTER
            (r'/proxy/?$', ProxyTest),  # an endpoint for testing the proxy connection  # done

            # Might drop these two
            (r'/decrypt/(\w+)/?$', Decrypt),
            (r'/encrypt/(\w+)/?$', Encrypt),
        ]

    if DEV:
        sid = states.set({'id': 1234, 'name': 'tyt'})
        logger.info(f"TEST STATE ID: {sid}")

    logger.info(f"Running on port: {config.get('port', 8008)}")
    try:
        tornado.run_app(urls, port=config.get('port', 8008))
    except KeyboardInterrupt:
        logger.info('stopping nels_galaxy_api')
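# A minimal sketch of the config keys read by main() above (key names come
# from the code; values are illustrative assumptions):
#
# port: 8008          # falls back to 8008 when unset
# tos_server: true    # adds the /tos endpoint
# master: true        # adds the orchestrator endpoints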
def main():
    parser = argparse.ArgumentParser(description='ecc_nodes: dynamic ansible inventory of ecc nodes')
    # parser.add_argument('config_file', metavar='config-file', nargs="*", help="yaml formatted config file",
    #                     default=ecc.utils.find_config_file('ecc.yaml'))
    # nargs='?' so config_file is a single path (readin_config_file expects a string, not a list)
    parser.add_argument('config_file', metavar='config-file', nargs="?", help="yaml formatted config file",
                        default='ecc.yaml')
    parser.add_argument('--list', action='store_true')  # expected by ansible
    parser.add_argument('-H', '--host-group', default='slurm', help='host group to put the nodes in')
    parser.add_argument('-u', '--ansible-user', default='centos', help='ansible user for the nodes')
    parser.add_argument('-t', '--trusted-host', default='yes', help='value for the trusted_host hostvar')

    args = parser.parse_args()
    config = config_utils.readin_config_file(args.config_file)

    logger.init(name='ecc_nodes', log_file=None)
    logger.set_log_level(0)

    hosts = readin_inventory(config.ecc.ansible_dir)

    # note: the original "([01-99])" is a character class equal to [0-9];
    # \d+ also matches multi-digit node numbers, as in init() above
    config.ecc.name_regex = config.ecc.name_template.format(r"(\d+)")
    ecc.openstack_connect(config.openstack)
    nodes = ecc.servers(config.ecc.name_regex)  # get the current nodes

    # instances.update(condor.nodes())
    # nodes = instances.node_state_counts()

    if args.host_group not in hosts:
        hosts[args.host_group] = {"hosts": []}
    hosts.setdefault('_meta', {}).setdefault('hostvars', {})

    for node in nodes:
        # print(node)
        if len(node['ip']) == 0:
            continue
        ip_addr = node['ip'][0]
        node_name = node['name']

        hosts[args.host_group]['hosts'].append(node_name)
        hosts['_meta']['hostvars'][node_name] = {
            'ansible_host': ip_addr,
            'ansible_user': args.ansible_user,
            'trusted_host': args.trusted_host,
        }

    # pp.pprint(hosts)
    print(json.dumps(hosts))
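# Shape of the inventory JSON printed above (host names and addresses are
# made-up examples); this is the layout ansible expects from a dynamic
# inventory script:
#
# {
#     "slurm": {"hosts": ["ecc1"]},
#     "_meta": {"hostvars": {"ecc1": {"ansible_host": "10.0.0.12",
#                                     "ansible_user": "centos",
#                                     "trusted_host": "yes"}}}
# }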