def add_job(args):
    """Add jenkins job."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # if using OAuth token, check it's defined
    if args.token:
        if conf.get('GIT_OAUTH_TOKEN') is None:
            logger.error("Cannot use OAuth token. Undefined in SDS config.")
            return 1
        u = urlparse(args.repo)
        repo_url = u._replace(netloc="{}@{}".format(
            conf.get('GIT_OAUTH_TOKEN'), u.netloc)).geturl()
    else:
        repo_url = args.repo
    logger.debug("repo_url: {}".format(repo_url))

    # add jenkins job for branch or release
    if args.branch is None:
        execute(fab.add_ci_job_release, repo_url, args.storage, roles=['ci'])
    else:
        execute(fab.add_ci_job, repo_url, args.storage,
                args.branch, roles=['ci'])

    # reload
    execute(fab.reload_configuration, roles=['ci'])
def export(args):
    """Export HySDS user rules."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # query for mozart and grq rules
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    rules = {}
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        hits = run_query(es_url, "user_rules",
                         {"query": {"match_all": {}}},
                         doc_type=".percolator")
        if len(hits) == 0:
            logger.error("No user rules found on {}.".format(comp))
            rules[comp] = []
        else:
            rules[comp] = [i['_source'] for i in hits]
    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    # set export directory
    outfile = normpath(args.outfile)
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))

    # create export directory
    validate_dir(export_dir)

    # dump user rules JSON
    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)
def __init__(self, file):
    """Construct YamlConf instance."""
    logger.debug("file: {}".format(file))
    self._file = file
    with open(self._file) as f:
        self._cfg = yaml.load(f, Loader=yaml.FullLoader)
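# Usage sketch (illustrative, not part of the module): SettingsConf
# elsewhere in this codebase reads values via a get() accessor, so a
# YamlConf-backed config is typically consumed the same way; the file
# path below is a hypothetical example.
#
#   conf = YamlConf(os.path.expanduser("~/.sds/config"))
#   cfg = conf._cfg                    # parsed YAML mapping
#   ip = cfg.get("MOZART_ES_PVT_IP")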
def kibana(args):
    """Update Kibana job on SDS components."""
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'update', 'kibana')
    logger.debug("func: %s" % func)
    func(args.job_type, args.debug, args.force)
def ls(args):
    """List HySDS packages."""
    hits = mozart_es.query(index=CONTAINERS_INDEX)  # query for containers
    for hit in hits:
        logger.debug(json.dumps(hit, indent=2))
        print(hit['_id'])
    return
def status(comp, debug=False):
    """Component status."""

    # get user's SDS conf settings
    conf = SettingsConf()

    logger.debug("Status for %s component(s)" % comp)
    status_comp(comp, conf, debug)
def pip_install_with_req(node_type, dest, ndeps):
    """Pip-install an editable checkout into the node's virtualenv,
    optionally skipping dependency resolution."""
    with prefix('source ~/%s/bin/activate' % node_type):
        with cd(dest):
            if ndeps:
                logger.debug("ndeps is set, so running pip with --no-deps")
                run('pip install --no-deps -e .')
            else:
                logger.debug("ndeps is NOT set, so running pip without --no-deps")
                run('pip install -e .')
def get_func(mod_name, func_name):
    """Import function and return."""
    mod = get_module(mod_name)
    logger.debug("mod: %s" % mod)
    try:
        return getattr(mod, func_name)
    except AttributeError:
        logger.error('Failed to get function "%s" from module "%s".' %
                     (func_name, mod_name))
        raise
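# Dynamic-dispatch sketch (illustrative): get_func() resolves a handler
# by module path and attribute name at runtime. The module/function
# names below are hypothetical examples in the style used elsewhere
# here (e.g. storage() builds 'sdscli.cloud.{}.{}' paths).
#
#   func = get_func("sdscli.cloud.aws.storage", "ls")
#   func(args, conf)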
def rabbitmq_queues_flush():
    """Delete all queues on the RabbitMQ server via its management API."""
    ctx = get_context()
    url = 'http://%s:15672/api/queues' % ctx['MOZART_RABBIT_PVT_IP']
    r = requests.get('%s?columns=name' % url,
                     auth=(ctx['MOZART_RABBIT_USER'],
                           ctx['MOZART_RABBIT_PASSWORD']))
    r.raise_for_status()
    res = r.json()
    for i in res:
        # "%2f" is the URL-encoded default vhost "/"
        r = requests.delete('%s/%%2f/%s' % (url, i['name']),
                            auth=(ctx['MOZART_RABBIT_USER'],
                                  ctx['MOZART_RABBIT_PASSWORD']))
        r.raise_for_status()
        logger.debug("Deleted queue %s." % i['name'])
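# Note: the DELETE above removes each queue definition outright. If you
# only want to drain messages while keeping the queues, the RabbitMQ
# management API also supports purging a queue's contents; a sketch
# under the same context/auth assumptions as above:
#
#   r = requests.delete('%s/%%2f/%s/contents' % (url, name),
#                       auth=(ctx['MOZART_RABBIT_USER'],
#                             ctx['MOZART_RABBIT_PASSWORD']))
#   r.raise_for_status()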
def print_rabbitmq_status(user, password, host):
    """Print status of RabbitMQ server."""
    amqp_url = "amqp://{user}:{password}@{host}:5672//".format(
        user=user, password=password, host=host)
    logger.debug("amqp_url: {}".format(amqp_url))
    try:
        conn = kombu.Connection(amqp_url)
        conn.ensure_connection(max_retries=3)
        print("RabbitMQ: ", highlight("RUNNING"))
    except Exception as e:
        print("RabbitMQ: ", blink(highlight("NOT RUNNING", 'red', True)))
        print(e)
def dispatch(args):
    """Dispatch to appropriate function."""

    # turn on debugging
    if args.debug:
        logger.setLevel(logging.DEBUG)
    logger.debug("args: %s" % args)

    if args.func:
        return args.func(args)
    else:
        logger.error("No func specified for args %s" % args)
        return 1
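# Wiring sketch (illustrative): dispatch() assumes each subcommand has
# bound a handler on the parsed args, typically via argparse's
# set_defaults(). Parser and handler names below are hypothetical.
#
#   parser = argparse.ArgumentParser()
#   parser.add_argument("--debug", action="store_true")
#   subparsers = parser.add_subparsers()
#   start_parser = subparsers.add_parser("start")
#   start_parser.set_defaults(func=start)
#   sys.exit(dispatch(parser.parse_args()))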
def import_rules(args):
    """Import HySDS user rules."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # user rules JSON file
    rules_file = normpath(args.file)
    if not os.path.isfile(rules_file):
        logger.error(
            "HySDS user rules file {} doesn't exist.".format(rules_file))
        return 1
    logger.debug("rules_file: {}".format(rules_file))

    # read in user rules
    with open(rules_file) as f:
        rules = json.load(f)
    logger.debug("rules: {}".format(
        json.dumps(rules, indent=2, sort_keys=True)))

    # get ES endpoints
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))

    # index user rules in ES
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        for rule in rules[comp]:
            r = requests.post("{}/user_rules/.percolator/".format(es_url),
                              data=json.dumps(rule))
            logger.debug(r.content)
            r.raise_for_status()
            logger.debug(r.json())
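# Expected rules file shape (matches what export() writes, and what the
# later variant of import_rules() documents): top-level "mozart" and
# "grq" arrays of user rule documents.
#
#   {
#     "mozart": [ { ...rule... } ],
#     "grq":    [ { ...rule... } ]
#   }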
def ls(args):
    """List HySDS packages."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # query for containers
    es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    hits = run_query(es_url, "containers", {"query": {"match_all": {}}})

    # list
    for hit in hits:
        logger.debug(json.dumps(hit, indent=2))
        print(hit['_id'])
    return
def stop(comp, debug=False, force=False):
    """Stop components."""

    # prompt user
    if not force:
        cont = prompt(get_prompt_tokens=lambda x: [
            (Token.Alert,
             "Stopping component[s]: {}. Continue [y/n]: ".format(comp)),
            (Token, " ")
        ], validator=YesNoValidator(), style=prompt_style) == 'y'
        if not cont:
            return 0

    # get user's SDS conf settings
    conf = SettingsConf()

    logger.debug("Stopping %s" % comp)
    stop_comp(comp, conf)
def start(comp, debug=False, force=False):
    """Start TPS components."""

    # prompt user
    if not force:
        cont = prompt(get_prompt_tokens=lambda x: [
            (Token.Alert,
             "Starting TPS on component[s]: {}. Continue [y/n]: ".format(comp)),
            (Token, " ")
        ], validator=YesNoValidator(), style=prompt_style) == 'y'
        if not cont:
            return 0

    # get user's SDS conf settings
    conf = SettingsConf()

    logger.debug("Starting %s" % comp)
    if debug:
        start_comp(comp, conf)
    else:
        with hide('everything'):
            start_comp(comp, conf)
def storage(args):
    """Cloud storage management functions."""

    # print args
    logger.debug("In storage(): {}".format(args))

    # get user's SDS conf settings
    conf = SettingsConf()

    # get func
    try:
        func = get_func(
            'sdscli.cloud.{}.{}'.format(args.cloud, args.subparser),
            args.subparser2)
    except (ImportError, AttributeError):
        logger.error('Not implemented yet. Mahalo for trying. ;)')
        return 1

    # run
    return func(args, conf)
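# Resolution example (hypothetical argument values): with
# args.cloud="aws", args.subparser="storage", and args.subparser2="ls",
# get_func() loads the sdscli.cloud.aws.storage module and returns its
# ls() function, which is then called with (args, conf).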
def kibana(job_type, debug=False, force=False):
    """Update Kibana job on components."""

    # prompt user
    if not force:
        cont = prompt(get_prompt_tokens=lambda x: [
            (Token.Alert,
             "Updating Kibana: {}. Continue [y/n]: ".format(job_type)),
            (Token, " ")
        ], validator=YesNoValidator(), style=prompt_style) == 'y'
        if not cont:
            return 0

    # get user's SDS conf settings
    conf = SettingsConf()

    logger.debug("Processing %s" % job_type)
    if debug:
        process_kibana_job(job_type, conf)
    else:
        with hide('everything'):
            process_kibana_job(job_type, conf)
def ship(args):
    """Ship verdi code/config bundle."""
    logger.debug("got to ship(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'update', 'ship')
    logger.debug("func: %s" % func)
    func(args.encrypt, args.debug)
def update(args):
    """Update SDS components."""
    logger.debug("got to update(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'update', 'update')
    logger.debug("func: %s" % func)
    func(args.component, args.debug, args.force)
def configure(args):
    """Configure SDS config file."""
    logger.debug("got to configure(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'configure', 'configure')
    logger.debug("func: %s" % func)
    func()
def cloud(args):
    """SDS cloud management functions."""
    logger.debug("got to cloud(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'cloud', args.subparser)
    logger.debug("func: %s" % func)
    func(args)
def status(args):
    """SDS component status."""
    logger.debug("got to status(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'status', 'status')
    logger.debug("func: %s" % func)
    func(args.component, args.debug)
def ci(args):
    """Continuous integration functions."""
    logger.debug("got to ci(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'ci', args.subparser)
    logger.debug("func: %s" % func)
    func(args)
def start_tps(args):
    """Start TPS on SDS components."""
    logger.debug("got to start_tps(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'start_tps', 'start')
    logger.debug("func: %s" % func)
    func(args.component, args.debug, args.force)
def stop(args):
    """Stop SDS components."""
    logger.debug("got to stop(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'stop', 'stop')
    logger.debug("func: %s" % func)
    func(args.component, args.debug, args.force)
def reset(args):
    """Reset SDS components."""
    logger.debug("got to reset(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(sds_type, 'reset', 'reset')
    logger.debug("func: %s" % func)
    func(args.component, args.debug, args.force)
def pkg(args):
    """SDS package management functions."""
    logger.debug("got to pkg(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(
        sds_type, 'pkg',
        'import_pkg' if args.subparser == 'import' else args.subparser)
    logger.debug("func: %s" % func)
    func(args)
def rules(args):
    """SDS user rules management functions."""
    logger.debug("got to rules(): %s" % args)
    sds_type = args.type
    logger.debug("sds_type: %s" % sds_type)
    func = get_adapter_func(
        sds_type, 'rules',
        'import_rules' if args.subparser == 'import' else args.subparser)
    logger.debug("func: %s" % func)
    func(args)
def import_rules(args):
    """
    Import HySDS user rules.

    rules json structure:
    {
        "mozart": [...],
        "grq": [...],
    }
    """
    rules_file = normpath(args.file)  # user rules JSON file
    logger.debug("rules_file: {}".format(rules_file))

    if not os.path.isfile(rules_file):
        logger.error(
            "HySDS user rules file {} doesn't exist.".format(rules_file))
        return 1

    with open(rules_file) as f:
        user_rules = json.load(f)  # read in user rules
    logger.debug("rules: {}".format(
        json.dumps(user_rules, indent=2, sort_keys=True)))

    for rule in user_rules['mozart']:
        now = datetime.utcnow().isoformat() + 'Z'
        if not rule.get('creation_time', None):
            rule['creation_time'] = now
        if not rule.get('modified_time', None):
            rule['modified_time'] = now
        # index mozart rules
        result = mozart_es.index_document(index=USER_RULES_MOZART, body=rule)
        logger.debug(result)

    for rule in user_rules['grq']:
        now = datetime.utcnow().isoformat() + 'Z'
        if not rule.get('creation_time', None):
            rule['creation_time'] = now
        if not rule.get('modified_time', None):
            rule['modified_time'] = now
        # index GRQ rules
        result = mozart_es.index_document(index=USER_RULES_GRQ, body=rule)
        logger.debug(result)
def export(args):
    """Export HySDS user rules."""
    rules = {}

    mozart_rules = mozart_es.query(index=USER_RULES_MOZART)
    rules['mozart'] = [rule['_source'] for rule in mozart_rules]
    logger.debug('%d mozart user rules found' % len(mozart_rules))

    grq_rules = mozart_es.query(index=USER_RULES_GRQ)
    rules['grq'] = [rule['_source'] for rule in grq_rules]
    logger.debug('%d grq user rules found' % len(grq_rules))

    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    outfile = normpath(args.outfile)  # set export directory
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))

    validate_dir(export_dir)  # create export directory

    # dump user rules JSON
    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)