def export(args):
    """Export HySDS user rules."""
    # get user's SDS conf settings
    conf = SettingsConf()

    # query for mozart and grq rules
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    rules = {}
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        hits = run_query(es_url, "user_rules", {"query": {"match_all": {}}},
                         doc_type=".percolator")
        if len(hits) == 0:
            logger.error("No user rules found on {}.".format(comp))
            rules[comp] = []
        else:
            rules[comp] = [i['_source'] for i in hits]
    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    # set export directory
    outfile = normpath(args.outfile)
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))

    # create export directory
    validate_dir(export_dir)

    # dump user rules JSON
    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)
def export(args):
    """Export HySDS user rules."""
    rules = {}

    mozart_rules = mozart_es.query(index=USER_RULES_MOZART)
    rules['mozart'] = [rule['_source'] for rule in mozart_rules]
    logger.debug('%d mozart user rules found' % len(mozart_rules))

    grq_rules = mozart_es.query(index=USER_RULES_GRQ)
    rules['grq'] = [rule['_source'] for rule in grq_rules]
    logger.debug('%d grq user rules found' % len(grq_rules))

    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    outfile = normpath(args.outfile)
    export_dir = os.path.dirname(outfile)  # set export directory
    logger.debug("export_dir: {}".format(export_dir))
    validate_dir(export_dir)  # create export directory

    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)  # dump user rules JSON
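# A minimal usage sketch, assuming the surrounding sdscli module context
# (mozart_es, logger, validate_dir, etc.) is available. In the real CLI the
# `args` namespace comes from argparse; the helper and Namespace below are
# hypothetical, for illustration only.
def _example_export_user_rules():
    from argparse import Namespace  # stand-in for the parsed command line
    export(Namespace(outfile='/tmp/user_rules.json'))
    # /tmp/user_rules.json then holds {"grq": [...], "mozart": [...]},
    # one list of rule documents per component.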
def copy_files():
    """Copy template files into the user's SDS config area."""
    files_path = get_user_files_path()
    logger.debug('files_path: %s' % files_path)
    validate_dir(files_path, mode=0o700)
    sds_files_path = resource_filename(
        'sdscli', os.path.join('adapters', 'hysds', 'files'))
    sds_files = glob(os.path.join(sds_files_path, '*'))
    for sds_file in sds_files:
        if os.path.basename(sds_file) == 'cluster.py':
            # cluster.py lives next to the user config file, not under files/
            user_file = os.path.join(os.path.dirname(get_user_config_path()),
                                     os.path.basename(sds_file))
            if not os.path.exists(user_file):
                shutil.copy(sds_file, user_file)
        else:
            # copy only if missing so existing user customizations are preserved
            user_file = os.path.join(files_path, os.path.basename(sds_file))
            if os.path.isdir(sds_file) and not os.path.exists(user_file):
                shutil.copytree(sds_file, user_file)
                logger.debug("Copying dir %s to %s" % (sds_file, user_file))
            elif os.path.isfile(sds_file) and not os.path.exists(user_file):
                shutil.copy(sds_file, user_file)
                logger.debug("Copying file %s to %s" % (sds_file, user_file))
def export(args):
    """Export HySDS package."""
    cont_id = args.id  # container id

    # query for container
    cont = mozart_es.get_by_id(index=CONTAINERS_INDEX, id=cont_id, ignore=404)
    if cont['found'] is False:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1

    cont_info = cont['_source']
    logger.debug("cont_info: %s" % json.dumps(cont_info, indent=2))

    # set export directory
    outdir = normpath(args.outdir)
    export_name = "{}.sdspkg".format(cont_id.replace(':', '-'))
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: %s" % export_dir)

    # if directory exists, stop
    if os.path.exists(export_dir):
        logger.error(
            "SDS package export directory {} exists. Not continuing.".format(export_dir))
        return 1

    validate_dir(export_dir)  # create export directory

    # download container
    get(cont_info['url'], export_dir)
    cont_info['url'] = os.path.basename(cont_info['url'])

    # query job specs
    query = {
        "query": {
            "term": {"container.keyword": cont_id}
        }
    }
    job_specs = mozart_es.query(index=JOB_SPECS_INDEX, body=query)
    job_specs = [job_spec['_source'] for job_spec in job_specs]
    logger.debug("job_specs: %s" % json.dumps(job_specs, indent=2))

    # backwards-compatible query
    if len(job_specs) == 0:
        logger.debug("Got no job_specs. Checking deprecated mappings:")
        query = {
            "query": {
                "query_string": {"query": "container:\"{}\"".format(cont_id)}
            }
        }
        job_specs = mozart_es.query(index=JOB_SPECS_INDEX, body=query)
        job_specs = [job_spec['_source'] for job_spec in job_specs]
        logger.debug("job_specs: %s" % json.dumps(job_specs, indent=2))

    # pull hysds_ios for each job_spec and download any dependency images
    hysds_ios = []
    dep_images = {}
    for job_spec in job_specs:
        # download dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[d['container_image_name']]
            else:
                # download container
                get(d['container_image_url'], export_dir)
                d['container_image_url'] = os.path.basename(d['container_image_url'])
                dep_images[d['container_image_name']] = d['container_image_url']

        # collect hysds_ios from mozart
        query = {
            "query": {
                "term": {"job-specification.keyword": job_spec['id']}
            }
        }
        mozart_hysds_ios = mozart_es.query(index=HYSDS_IOS_MOZART_INDEX, body=query)
        mozart_hysds_ios = [hysds_io['_source'] for hysds_io in mozart_hysds_ios]
        logger.debug("Found %d hysds_ios on mozart for %s." %
                     (len(mozart_hysds_ios), job_spec['id']))

        # backwards-compatible query
        if len(mozart_hysds_ios) == 0:
            logger.debug("Got no hysds_ios from mozart. Checking deprecated mappings:")
            query = {
                "query": {
                    "query_string": {
                        "query": "job-specification:\"{}\"".format(job_spec['id'])
                    }
                }
            }
            mozart_hysds_ios = mozart_es.query(index=HYSDS_IOS_MOZART_INDEX, body=query)
            mozart_hysds_ios = [hysds_io['_source'] for hysds_io in mozart_hysds_ios]
            logger.debug("Found %d hysds_ios on mozart for %s." %
                         (len(mozart_hysds_ios), job_spec['id']))
        hysds_ios.extend(mozart_hysds_ios)

        # collect hysds_ios from grq
        query = {
            "query": {
                "term": {"job-specification.keyword": job_spec['id']}
            }
        }
        grq_hysds_ios = mozart_es.query(index=HYSDS_IOS_GRQ_INDEX, body=query)
        grq_hysds_ios = [hysds_io['_source'] for hysds_io in grq_hysds_ios]
        logger.debug("Found %d hysds_ios on grq for %s." %
                     (len(grq_hysds_ios), job_spec['id']))

        # backwards-compatible query
        if len(grq_hysds_ios) == 0:
            logger.debug("Got no hysds_ios from grq. Checking deprecated mappings:")
            query = {
                "query": {
                    "query_string": {
                        "query": "job-specification:\"{}\"".format(job_spec['id'])
                    }
                }
            }
            grq_hysds_ios = mozart_es.query(index=HYSDS_IOS_GRQ_INDEX, body=query)
            grq_hysds_ios = [hysds_io['_source'] for hysds_io in grq_hysds_ios]
            logger.debug("Found %d hysds_ios on grq for %s." %
                         (len(grq_hysds_ios), job_spec['id']))
        hysds_ios.extend(grq_hysds_ios)

    logger.debug("Found %d hysds_ios total." % (len(hysds_ios)))

    # strip allowed accounts unless exporting them was requested
    if not args.accounts:
        for hysds_io in hysds_ios:
            if 'allowed_accounts' in hysds_io:
                del hysds_io['allowed_accounts']

    # dump manifest JSON
    manifest = {
        "containers": cont_info,
        "job_specs": job_specs,
        "hysds_ios": hysds_ios,
    }
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file, 'w') as f:
        json.dump(manifest, f, indent=2, sort_keys=True)

    # tar up hysds package
    tar_file = os.path.join(outdir, "{}.tar".format(export_name))
    with tarfile.open(tar_file, "w") as tar:
        tar.add(export_dir, arcname=os.path.relpath(export_dir, outdir))

    shutil.rmtree(export_dir)  # remove package dir
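# Hedged sketch of what the exported package looks like. The layout follows
# the code above (container image, dependency images, and manifest.json under
# <id>.sdspkg/, tarred into <id>.sdspkg.tar); the field values shown are
# illustrative, not real output.
#
#   <id>.sdspkg.tar
#   └── <id>.sdspkg/
#       ├── <container and dependency image files>
#       └── manifest.json:
#             {
#               "containers": {"id": "...", "url": "<image basename>", ...},
#               "job_specs": [{"id": "...", "dependency_images": [...], ...}],
#               "hysds_ios": [{"id": "...", ...}]
#             }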
def export(args):
    """Export HySDS package."""
    # get user's SDS conf settings
    conf = SettingsConf()

    # container id
    cont_id = args.id

    # query for container
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    hits = run_query(mozart_es_url, "containers",
                     {"query": {"term": {"_id": cont_id}}})
    if len(hits) == 0:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1
    cont_info = hits[0]['_source']
    logger.debug("cont_info: {}".format(json.dumps(cont_info, indent=2)))

    # set export directory
    outdir = normpath(args.outdir)
    export_name = "{}.sdspkg".format(cont_id)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: {}".format(export_dir))

    # if directory exists, stop
    if os.path.exists(export_dir):
        logger.error(
            "SDS package export directory {} exists. Not continuing.".format(export_dir))
        return 1

    # create export directory
    validate_dir(export_dir)

    # download container
    get(cont_info['url'], export_dir)
    cont_info['url'] = os.path.basename(cont_info['url'])

    # query job specs
    job_specs = [i['_source'] for i in
                 run_query(mozart_es_url, "job_specs",
                           {"query": {"term": {"container.raw": cont_id}}})]
    logger.debug("job_specs: {}".format(json.dumps(job_specs, indent=2)))

    # pull hysds_ios for each job_spec and download any dependency images
    hysds_ios = []
    dep_images = {}
    for job_spec in job_specs:
        # download dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[d['container_image_name']]
            else:
                # download container
                get(d['container_image_url'], export_dir)
                d['container_image_url'] = os.path.basename(d['container_image_url'])
                dep_images[d['container_image_name']] = d['container_image_url']

        # collect hysds_ios from mozart
        mozart_hysds_ios = [
            i['_source'] for i in run_query(
                mozart_es_url, "hysds_ios",
                {"query": {"term": {"job-specification.raw": job_spec['id']}}})
        ]
        logger.debug("Found {} hysds_ios on mozart for {}.".format(
            len(mozart_hysds_ios), job_spec['id']))
        hysds_ios.extend(mozart_hysds_ios)

        # collect hysds_ios from grq
        grq_hysds_ios = [
            i['_source'] for i in run_query(
                grq_es_url, "hysds_ios",
                {"query": {"term": {"job-specification.raw": job_spec['id']}}})
        ]
        logger.debug("Found {} hysds_ios on grq for {}.".format(
            len(grq_hysds_ios), job_spec['id']))
        hysds_ios.extend(grq_hysds_ios)
    logger.debug("Found {} hysds_ios total.".format(len(hysds_ios)))

    # clean out allowed accounts
    for hysds_io in hysds_ios:
        if 'allowed_accounts' in hysds_io:
            del hysds_io['allowed_accounts']

    # dump manifest JSON
    manifest = {
        "containers": cont_info,
        "job_specs": job_specs,
        "hysds_ios": hysds_ios,
    }
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file, 'w') as f:
        json.dump(manifest, f, indent=2, sort_keys=True)

    # tar up hysds package
    tar_file = os.path.join(outdir, "{}.tar".format(export_name))
    with tarfile.open(tar_file, "w") as tar:
        tar.add(export_dir, arcname=os.path.relpath(export_dir, outdir))

    # remove package dir
    shutil.rmtree(export_dir)
def configure():
    """Configure SDS config file for HySDS."""
    # copy templates/files
    copy_files()

    # config file
    cfg_file = get_user_config_path()
    if os.path.exists(cfg_file):
        cont = prompt(get_prompt_tokens=lambda x: [
            (Token, cfg_file),
            (Token, " already exists. "),
            (Token.Alert, "Customizations will be lost or overwritten!"),
            (Token, " Continue [y/n]: ")
        ], validator=YesNoValidator(), style=prompt_style) == 'y'
        if not cont:
            return 0
        with open(cfg_file) as f:
            cfg = yaml.load(f)
    else:
        cfg = {}

    # mozart
    for k, d in CFG_DEFAULTS['mozart']:
        v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                (Token.Param, "%s" % k),
                                                (Token, ": ")],
                   default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # mozart components; reuse is True when the component shares mozart's IPs
    comps = [('mozart-rabbit', 'rabbitMQ'), ('mozart-redis', 'redis'),
             ('mozart-es', 'elasticsearch')]
    for grp, comp in comps:
        reuse = prompt("Is mozart %s on a different IP [y/n]: " % comp,
                       validator=YesNoValidator(), default='n') == 'n'
        for k, d in CFG_DEFAULTS[grp]:
            if reuse:
                if k.endswith('_PVT_IP'):
                    cfg[k] = cfg['MOZART_PVT_IP']
                    continue
                elif k.endswith('_PUB_IP'):
                    cfg[k] = cfg['MOZART_PUB_IP']
                    continue
                elif k.endswith('_FQDN'):
                    cfg[k] = cfg['MOZART_FQDN']
                    continue
            if k == 'MOZART_RABBIT_PASSWORD':
                while True:
                    p1 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Enter RabbitMQ password for user "),
                        (Token.Username, "%s" % cfg['MOZART_RABBIT_USER']),
                        (Token, ": ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    p2 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Re-enter RabbitMQ password for user "),
                        (Token.Username, "%s" % cfg['MOZART_RABBIT_USER']),
                        (Token, ": ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    if p1 == p2:
                        if p1 == "":
                            print("Password can't be empty.")
                            continue
                        v = p1
                        break
                    print("Passwords don't match.")
            elif k == 'MOZART_REDIS_PASSWORD':
                # NOTE: the redis prompts were redacted in the source;
                # reconstructed by analogy with the RabbitMQ block above
                while True:
                    p1 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Enter Redis password: ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    p2 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Re-enter Redis password: ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    if p1 == p2:
                        v = p1
                        break
                    print("Passwords don't match.")
            else:
                v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                        (Token.Param, "%s" % k),
                                                        (Token, ": ")],
                           default=unicode(cfg.get(k, d)), style=prompt_style)
            cfg[k] = v

    # ops
    for k, d in CFG_DEFAULTS['ops']:
        if k == 'OPS_PASSWORD_HASH':
            while True:
                p1 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Enter web interface password for ops user "),
                    (Token.Username, "%s" % cfg['OPS_USER']),
                    (Token, ": ")
                ], default="", style=prompt_style, is_password=True)
                p2 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Re-enter web interface password for ops user "),
                    (Token.Username, "%s" % cfg['OPS_USER']),
                    (Token, ": ")
                ], default="", style=prompt_style, is_password=True)
                if p1 == p2:
                    if p1 == "":
                        print("Password can't be empty.")
                        continue
                    v = hashlib.sha224(p1).hexdigest()
                    break
                print("Passwords don't match.")
        else:
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # metrics
    for k, d in CFG_DEFAULTS['metrics']:
        v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                (Token.Param, "%s" % k),
                                                (Token, ": ")],
                   default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # metrics components
    comps = [('metrics-redis', 'redis'), ('metrics-es', 'elasticsearch')]
    for grp, comp in comps:
        reuse = prompt("Is metrics %s on a different IP [y/n]: " % comp,
                       validator=YesNoValidator(), default='n') == 'n'
        for k, d in CFG_DEFAULTS[grp]:
            if reuse:
                if k.endswith('_PVT_IP'):
                    cfg[k] = cfg['METRICS_PVT_IP']
                    continue
                elif k.endswith('_PUB_IP'):
                    cfg[k] = cfg['METRICS_PUB_IP']
                    continue
                elif k.endswith('_FQDN'):
                    cfg[k] = cfg['METRICS_FQDN']
                    continue
            if k == 'METRICS_REDIS_PASSWORD':
                # redacted in the source; reconstructed like the mozart redis block
                while True:
                    p1 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Enter Redis password: ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    p2 = prompt(get_prompt_tokens=lambda x: [
                        (Token, "Re-enter Redis password: ")
                    ], default=unicode(cfg.get(k, d)), style=prompt_style,
                        is_password=True)
                    if p1 == p2:
                        v = p1
                        break
                    print("Passwords don't match.")
            else:
                v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                        (Token.Param, "%s" % k),
                                                        (Token, ": ")],
                           default=unicode(cfg.get(k, d)), style=prompt_style)
            cfg[k] = v

    # grq
    for k, d in CFG_DEFAULTS['grq']:
        v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                (Token.Param, "%s" % k),
                                                (Token, ": ")],
                   default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # grq components
    comps = [('grq-es', 'elasticsearch')]
    for grp, comp in comps:
        reuse = prompt("Is grq %s on a different IP [y/n]: " % comp,
                       validator=YesNoValidator(), default='n') == 'n'
        for k, d in CFG_DEFAULTS[grp]:
            if reuse:
                if k.endswith('_PVT_IP'):
                    cfg[k] = cfg['GRQ_PVT_IP']
                    continue
                elif k.endswith('_PUB_IP'):
                    cfg[k] = cfg['GRQ_PUB_IP']
                    continue
                elif k.endswith('_FQDN'):
                    cfg[k] = cfg['GRQ_FQDN']
                    continue
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
            cfg[k] = v

    # factotum
    for k, d in CFG_DEFAULTS['factotum']:
        v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                (Token.Param, "%s" % k),
                                                (Token, ": ")],
                   default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # ci
    for k, d in CFG_DEFAULTS['ci']:
        if k in ('JENKINS_API_KEY', 'GIT_OAUTH_TOKEN'):
            while True:
                p1 = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                         (Token.Param, "%s" % k),
                                                         (Token, ": ")],
                            default=unicode(cfg.get(k, d)), style=prompt_style,
                            is_password=True)
                p2 = prompt(get_prompt_tokens=lambda x: [(Token, "Re-enter value for "),
                                                         (Token.Param, "%s" % k),
                                                         (Token, ": ")],
                            default=unicode(cfg.get(k, d)), style=prompt_style,
                            is_password=True)
                if p1 == p2:
                    v = p1
                    break
                print("Values don't match.")
        else:
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # verdi
    for k, d in CFG_DEFAULTS['verdi']:
        v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                (Token.Param, "%s" % k),
                                                (Token, ": ")],
                   default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # webdav
    for k, d in CFG_DEFAULTS['webdav']:
        if k == 'DAV_PASSWORD':
            while True:
                p1 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Enter webdav password for user "),
                    (Token.Username, "%s" % cfg['DAV_USER']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                p2 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Re-enter webdav password for user "),
                    (Token.Username, "%s" % cfg['DAV_USER']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                if p1 == p2:
                    v = p1
                    break
                print("Passwords don't match.")
        else:
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # aws-dataset
    for k, d in CFG_DEFAULTS['aws-dataset']:
        if k == 'DATASET_AWS_SECRET_KEY':
            if cfg['DATASET_AWS_ACCESS_KEY'] == "":
                cfg['DATASET_AWS_SECRET_KEY'] = ""
                continue
            while True:
                p1 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Enter AWS secret key for "),
                    (Token.Username, "%s" % cfg['DATASET_AWS_ACCESS_KEY']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                p2 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Re-enter AWS secret key for "),
                    (Token.Username, "%s" % cfg['DATASET_AWS_ACCESS_KEY']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                if p1 == p2:
                    v = p1
                    break
                print("Keys don't match.")
        elif k == 'DATASET_AWS_ACCESS_KEY':
            v = prompt(get_prompt_tokens=lambda x: [
                (Token, "Enter value for "),
                (Token.Param, "%s" % k),
                (Token, ". If using instance roles, just press enter"),
                (Token, ": ")
            ], default=unicode(cfg.get(k, d)), style=prompt_style)
        else:
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # aws-asg
    for k, d in CFG_DEFAULTS['aws-asg']:
        if k == 'AWS_SECRET_KEY':
            if cfg['AWS_ACCESS_KEY'] == "":
                cfg['AWS_SECRET_KEY'] = ""
                continue
            while True:
                p1 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Enter AWS secret key for "),
                    (Token.Username, "%s" % cfg['AWS_ACCESS_KEY']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                p2 = prompt(get_prompt_tokens=lambda x: [
                    (Token, "Re-enter AWS secret key for "),
                    (Token.Username, "%s" % cfg['AWS_ACCESS_KEY']),
                    (Token, ": ")
                ], default=unicode(cfg.get(k, d)), style=prompt_style,
                    is_password=True)
                if p1 == p2:
                    v = p1
                    break
                print("Keys don't match.")
        elif k == 'AWS_ACCESS_KEY':
            v = prompt(get_prompt_tokens=lambda x: [
                (Token, "Enter value for "),
                (Token.Param, "%s" % k),
                (Token, ". If using instance roles, just press enter"),
                (Token, ": ")
            ], default=unicode(cfg.get(k, d)), style=prompt_style)
        else:
            v = prompt(get_prompt_tokens=lambda x: [(Token, "Enter value for "),
                                                    (Token.Param, "%s" % k),
                                                    (Token, ": ")],
                       default=unicode(cfg.get(k, d)), style=prompt_style)
        cfg[k] = v

    # ensure config directory exists, then render and write the config
    validate_dir(os.path.dirname(cfg_file), mode=0o700)
    yml = CFG_TMPL.format(**cfg)
    with open(cfg_file, 'w') as f:
        f.write(yml)
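# configure() iterates `for k, d in CFG_DEFAULTS[group]`, so each group is
# presumably an ordered sequence of (KEY, default) pairs. A minimal sketch of
# that assumed shape (the group names appear in the code above; the keys and
# defaults below are illustrative, not the actual sdscli defaults):
CFG_DEFAULTS_EXAMPLE = {
    'mozart': [('MOZART_PVT_IP', ''), ('MOZART_PUB_IP', ''), ('MOZART_FQDN', '')],
    'mozart-rabbit': [('MOZART_RABBIT_PVT_IP', ''), ('MOZART_RABBIT_USER', 'guest'),
                      ('MOZART_RABBIT_PASSWORD', '')],
    'ops': [('OPS_USER', 'ops'), ('OPS_PASSWORD_HASH', '')],
}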