Example #1
0
def import_rules(args):
    """Import HySDS user rules.

    Reads a JSON file of user rules keyed by component ('mozart', 'grq')
    and indexes each rule as a percolator document in that component's
    Elasticsearch.

    Returns 1 when the rules file does not exist; None otherwise.
    Raises requests.HTTPError if any ES index request fails.
    """

    # get user's SDS conf settings
    conf = SettingsConf()

    # user rules JSON file
    rules_file = normpath(args.file)
    if not os.path.isfile(rules_file):
        logger.error(
            "HySDS user rules file {} doesn't exist.".format(rules_file))
        return 1
    logger.debug("rules_file: {}".format(rules_file))

    # read in user rules
    with open(rules_file) as f:
        rules = json.load(f)
    # BUG FIX: previously dumped `rules_file` (the path string) instead of
    # the parsed rules themselves
    logger.debug("rules: {}".format(
        json.dumps(rules, indent=2, sort_keys=True)))

    # get ES endpoints
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))

    # index user rules in ES; tolerate a rules file that only contains
    # one of the two components instead of raising KeyError
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        for rule in rules.get(comp, []):
            r = requests.post("{}/user_rules/.percolator/".format(es_url),
                              data=json.dumps(rule))
            logger.debug(r.content)
            r.raise_for_status()
            logger.debug(r.json())
Example #2
0
def export(args):
    """Export HySDS user rules."""

    # load the user's SDS configuration
    conf = SettingsConf()

    # pull percolator rules from both components
    components = (
        ('mozart', "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))),
        ('grq', "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))),
    )
    rules = {}
    for comp, es_url in components:
        query = {"query": {"match_all": {}}}
        hits = run_query(es_url, "user_rules", query, doc_type=".percolator")
        if not hits:
            logger.error("No user rules found on {}.".format(comp))
            rules[comp] = []
        else:
            rules[comp] = [hit['_source'] for hit in hits]
    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    # resolve and create the export directory
    outfile = normpath(args.outfile)
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))
    validate_dir(export_dir)

    # write the collected rules as pretty-printed, sorted JSON
    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)
Example #3
0
def add_job(args):
    """Add jenkins job."""

    # load the user's SDS configuration
    conf = SettingsConf()

    # when OAuth is requested, embed the token into the repo URL netloc
    repo_url = args.repo
    if args.token:
        oauth_token = conf.get('GIT_OAUTH_TOKEN')
        if oauth_token is None:
            logger.error("Cannot use OAuth token. Undefined in SDS config.")
            return 1
        parsed = urlparse(args.repo)
        tokenized_netloc = "{}@{}".format(oauth_token, parsed.netloc)
        repo_url = parsed._replace(netloc=tokenized_netloc).geturl()

    logger.debug("repo_url: {}".format(repo_url))

    # branch given -> branch job; otherwise a release job
    if args.branch is not None:
        execute(fab.add_ci_job, repo_url, args.storage,
                args.branch, roles=['ci'])
    else:
        execute(fab.add_ci_job_release, repo_url, args.storage, roles=['ci'])

    # reload jenkins configuration so the new job is picked up
    execute(fab.reload_configuration, roles=['ci'])
Example #4
0
def ls(args):
    """List HySDS packages."""

    # load the user's SDS configuration
    conf = SettingsConf()

    # fetch every container document from mozart ES
    es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    query = {"query": {"match_all": {}}}
    hits = run_query(es_url, "containers", query)

    # print one package (container) id per line
    for hit in hits:
        logger.debug(json.dumps(hit, indent=2))
        print(hit['_id'])
    return
Example #5
0
def logs(comp, debug=False, follow=False):
    """Show logs for components."""

    # load SDS configuration and delegate to the log viewer
    # NOTE: `debug` is accepted for CLI-signature parity but unused here
    conf = SettingsConf()
    logs_comp(comp, conf, follow)
Example #6
0
def run(comp, cmd):
    """Run command in components."""

    # load SDS configuration and hand off execution
    conf = SettingsConf()
    run_comp(comp, cmd, conf)
Example #7
0
def ps(comp, debug=False):
    """List containers for components."""

    # load SDS configuration and delegate
    # NOTE: `debug` is accepted for CLI-signature parity but unused here
    conf = SettingsConf()
    ps_comp(comp, conf)
Example #8
0
def status(comp, debug=False):
    """Component status."""

    # load the user's SDS configuration
    conf = SettingsConf()

    logger.debug("Status for %s component(s)" % comp)

    # delegate to the per-component status reporter
    status_comp(comp, conf, debug)
Example #9
0
def ship(encrypt, debug=False):
    """Update components."""

    # load the user's SDS configuration
    conf = SettingsConf()

    if not debug:
        # suppress fabric output unless debugging
        with hide('everything'):
            ship_verdi(conf, encrypt)
    else:
        ship_verdi(conf, encrypt)
Example #10
0
def build_job(args):
    """Build jenkins job."""

    # load the user's SDS configuration
    conf = SettingsConf()

    # trigger the branch job when a branch is given, else the release job
    if args.branch is not None:
        execute(fab.build_ci_job, args.repo, args.branch, roles=['ci'])
    else:
        execute(fab.build_ci_job, args.repo, roles=['ci'])
Example #11
0
def remove_job(args):
    """Remove jenkins jobs."""

    # load the user's SDS configuration
    conf = SettingsConf()

    # remove the branch job when a branch is given, else the release job
    if args.branch is not None:
        execute(fab.remove_ci_job, args.repo, args.branch, roles=['ci'])
    else:
        execute(fab.remove_ci_job, args.repo, roles=['ci'])
Example #12
0
def ls(args):
    """List cloud platforms and their configuration status.

    NOTE: the docstring previously claimed this lists HySDS packages;
    the body actually enumerates sdscli.cloud submodules and reports
    whether each is configured.
    """

    # get user's SDS conf settings
    conf = SettingsConf()

    # check which cloud platforms are configured by probing each
    # sdscli.cloud.<name>.utils module's is_configured()
    for importer, mod_name, ispkg in pkgutil.iter_modules(
            sdscli.cloud.__path__):
        mod = get_module('sdscli.cloud.{}.utils'.format(mod_name))
        print("{}: {}".format(
            mod_name,
            highlight("configured", 'green') if mod.is_configured() else
            highlight("unimplemented or not configured", 'red')))
Example #13
0
def stop(comp, debug=False, force=False):
    """Stop components."""

    # confirm with the user unless forced
    if not force:
        msg = "Stopping component[s]: {}. Continue [y/n]: ".format(comp)
        answer = prompt(
            get_prompt_tokens=lambda x: [(Token.Alert, msg), (Token, " ")],
            validator=YesNoValidator(), style=prompt_style)
        if answer != 'y':
            return 0

    # load the user's SDS configuration
    conf = SettingsConf()

    logger.debug("Stopping %s" % comp)

    stop_comp(comp, conf)
Example #14
0
def start(comp, debug=False, force=False):
    """Start TPS components."""

    # confirm with the user unless forced
    if not force:
        msg = "Starting TPS on component[s]: {}. Continue [y/n]: ".format(comp)
        answer = prompt(
            get_prompt_tokens=lambda x: [(Token.Alert, msg), (Token, " ")],
            validator=YesNoValidator(), style=prompt_style)
        if answer != 'y':
            return 0

    # load the user's SDS configuration
    conf = SettingsConf()

    logger.debug("Starting %s" % comp)

    if not debug:
        # suppress fabric output unless debugging
        with hide('everything'):
            start_comp(comp, conf)
    else:
        start_comp(comp, conf)
Example #15
0
def storage(args):
    """Cloud storage management functions."""

    # trace incoming args
    logger.debug("In storage(): {}".format(args))

    # load the user's SDS configuration
    conf = SettingsConf()

    # resolve the handler for the requested cloud/subcommand; bail out
    # politely when it isn't implemented yet
    mod_path = 'sdscli.cloud.{}.{}'.format(args.cloud, args.subparser)
    try:
        func = get_func(mod_path, args.subparser2)
    except (ImportError, AttributeError):
        logger.error('Not implemented yet. Mahalo for trying. ;)')
        return 1

    # run the resolved handler
    return func(args, conf)
Example #16
0
def kibana(job_type, debug=False, force=False):
    """Update components."""

    # confirm with the user unless forced
    if not force:
        msg = "Updating Kibana: {}. Continue [y/n]: ".format(job_type)
        answer = prompt(
            get_prompt_tokens=lambda x: [(Token.Alert, msg), (Token, " ")],
            validator=YesNoValidator(), style=prompt_style)
        if answer != 'y':
            return 0

    # load the user's SDS configuration
    conf = SettingsConf()

    logger.debug("Processing %s" % job_type)

    if not debug:
        # suppress fabric output unless debugging
        with hide('everything'):
            process_kibana_job(job_type, conf)
    else:
        process_kibana_job(job_type, conf)
Example #17
0
def import_pkg(args):
    """Import HySDS package.

    Extracts the package tar file, uploads the container image (and any
    dependency images) to the S3 code bucket, and indexes the container,
    job_spec, and hysds_io documents from the package manifest into
    Elasticsearch via its REST API.

    Returns 1 on error (missing tar file or extracted package dir).
    Raises requests.HTTPError if any ES index request fails.
    """

    # get user's SDS conf settings
    conf = SettingsConf()

    # package tar file
    tar_file = normpath(args.file)
    if not os.path.isfile(tar_file):
        logger.error("HySDS package file {} doesn't exist.".format(tar_file))
        return 1
    logger.debug("tar_file: {}".format(tar_file))

    # extract; the first tar member name is assumed to be the package's
    # top-level directory
    outdir = os.path.dirname(tar_file)
    with tarfile.open(tar_file) as tar:
        export_name = tar.getnames()[0]
        tar.extractall(outdir)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: {}".format(export_dir))

    # detect export dir
    if not os.path.isdir(export_dir):
        logger.error("Cannot find HySDS package dir {}.".format(export_dir))
        return 1

    # read in manifest
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file) as f:
        manifest = json.load(f)
    logger.debug("manifest: {}".format(
        json.dumps(manifest, indent=2, sort_keys=True)))

    # get code bucket
    code_bucket = conf.get('CODE_BUCKET')
    code_bucket_url = "s3://{}/{}".format(conf.get('S3_ENDPOINT'), code_bucket)
    logger.debug("code_bucket: {}".format(code_bucket))
    logger.debug("code_bucket_url: {}".format(code_bucket_url))

    # get ES endpoints
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))

    # upload container image and index container in ES; the manifest's
    # relative image path is rewritten to its final S3 URL before indexing
    cont_info = manifest['containers']
    cont_image = os.path.join(export_dir, cont_info['url'])
    cont_info['url'] = "{}/{}".format(code_bucket_url, cont_info['url'])
    put(cont_image, cont_info['url'])
    r = requests.put("{}/containers/container/{}".format(
        mozart_es_url, cont_info['id']),
                     data=json.dumps(cont_info))
    r.raise_for_status()
    logger.debug(r.json())

    # index job_specs in ES and upload any dependency containers;
    # dep_images caches name -> S3 URL so each dependency image is
    # uploaded only once even when shared by several job_specs
    dep_images = {}
    for job_spec in manifest['job_specs']:
        # upload dependency images (the original comment said "download";
        # put() below uploads to the code bucket)
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # upload container
                dep_img = os.path.join(export_dir, d['container_image_url'])
                d['container_image_url'] = "{}/{}".format(
                    code_bucket_url, d['container_image_url'])
                put(dep_img, d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']
        r = requests.put("{}/job_specs/job_spec/{}".format(
            mozart_es_url, job_spec['id']),
                         data=json.dumps(job_spec))
        r.raise_for_status()
        logger.debug(r.json())

    # index hysds_ios in ES: 'mozart' component docs go to mozart ES,
    # anything else (default 'tosca') goes to grq ES
    for hysds_io in manifest['hysds_ios']:
        component = hysds_io.get('component', 'tosca')
        es_url = mozart_es_url if component == 'mozart' else grq_es_url
        r = requests.put("{}/hysds_ios/hysds_io/{}".format(
            es_url, hysds_io['id']),
                         data=json.dumps(hysds_io))
        r.raise_for_status()
        logger.debug(r.json())

    # remove package dir
    shutil.rmtree(export_dir)
Example #18
0
def configure():
    """Configure SDS config file for SDSKit."""

    # load (and thereby validate) the SDS settings, logging the result
    logger.debug("Got here for SDSKit")
    settings = SettingsConf()
    logger.debug(settings)
Example #19
0
def import_pkg(args):
    """Import HySDS package.

    Extracts the package tar file, uploads the container image (and any
    dependency images) to the S3 code bucket, and indexes the container,
    job_spec, and hysds_io documents from the package manifest into
    Elasticsearch through the mozart_es client.

    Returns 1 on error (missing tar file or extracted package dir).
    """

    conf = SettingsConf()  # get user's SDS conf settings

    # package tar file
    tar_file = normpath(args.file)
    if not os.path.isfile(tar_file):
        logger.error("HySDS package file %s doesn't exist." % tar_file)
        return 1
    logger.debug("tar_file: %s" % tar_file)

    # extract; the first tar member name is assumed to be the package's
    # top-level directory
    outdir = os.path.dirname(tar_file)
    with tarfile.open(tar_file) as tar:
        export_name = tar.getnames()[0]
        tar.extractall(outdir)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: %s" % export_dir)

    # detect export dir
    if not os.path.isdir(export_dir):
        logger.error("Cannot find HySDS package dir %s." % export_dir)
        return 1

    # read in manifest
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file) as f:
        manifest = json.load(f)
    logger.debug("manifest: %s" %
                 json.dumps(manifest, indent=2, sort_keys=True))

    # get code bucket
    code_bucket = conf.get('CODE_BUCKET')
    code_bucket_url = "s3://%s/%s" % (conf.get('S3_ENDPOINT'), code_bucket)
    logger.debug("code_bucket: %s" % code_bucket)
    logger.debug("code_bucket_url: %s" % code_bucket_url)

    # upload container image to s3; the manifest's relative image path is
    # rewritten to its final S3 URL before indexing
    cont_info = manifest['containers']
    cont_image = os.path.join(export_dir, cont_info['url'])
    cont_info['url'] = "{}/{}".format(code_bucket_url, cont_info['url'])
    put(cont_image, cont_info['url'])

    # index container in ES
    indexed_container = mozart_es.index_document(index=CONTAINERS_INDEX,
                                                 body=cont_info,
                                                 id=cont_info['id'])
    logger.debug(indexed_container)

    # index job_specs in ES and upload any dependency containers;
    # dep_images caches name -> S3 URL so each dependency image is
    # uploaded only once even when shared by several job_specs
    dep_images = {}
    for job_spec in manifest['job_specs']:
        # upload dependency images (the original comment said "download";
        # put() below uploads to the code bucket)
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # upload container
                dep_img = os.path.join(export_dir, d['container_image_url'])
                d['container_image_url'] = "%s/%s" % (code_bucket_url,
                                                      d['container_image_url'])
                put(dep_img, d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']

        indexed_job_spec = mozart_es.index_document(index=JOB_SPECS_INDEX,
                                                    body=job_spec,
                                                    id=job_spec['id'])
        logger.debug(indexed_job_spec)

    # index hysds_ios to ES: 'mozart'/'figaro' component docs go to the
    # mozart index, anything else (default 'tosca') to the grq index
    for hysds_io in manifest['hysds_ios']:
        component = hysds_io.get('component', 'tosca')

        hysds_io_id = hysds_io['id']
        if component in ('mozart', 'figaro'):
            indexed_hysds_io = mozart_es.index_document(
                index=HYSDS_IOS_MOZART_INDEX, body=hysds_io, id=hysds_io_id)
            logger.debug(indexed_hysds_io)
        else:
            indexed_hysds_io = mozart_es.index_document(
                index=HYSDS_IOS_GRQ_INDEX, body=hysds_io, id=hysds_io_id)
            logger.debug(indexed_hysds_io)

    shutil.rmtree(export_dir)  # remove package dir
Example #20
0
def rm(args):
    """Remove HySDS package.

    Deletes the container document and its image from the code bucket,
    then every job_spec referencing the container and the hysds_ios on
    both mozart and grq that reference those job_specs.

    Returns 1 when the package id is not found.
    Raises requests.HTTPError if any ES delete request fails.
    """

    # get user's SDS conf settings
    conf = SettingsConf()

    # container id
    cont_id = args.id

    # query for container
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    hits = run_query(mozart_es_url, "containers",
                     {"query": {
                         "term": {
                             "_id": cont_id
                         }
                     }})
    if len(hits) == 0:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1
    cont_info = hits[0]['_source']
    logger.debug("cont_info: {}".format(json.dumps(cont_info, indent=2)))

    # delete container from code bucket and ES
    rmall(cont_info['url'])
    r = requests.delete("{}/containers/container/{}".format(
        mozart_es_url, cont_info['id']))
    r.raise_for_status()
    logger.debug(r.json())

    # query job specs that reference this container
    job_specs = [
        i['_source']
        for i in run_query(mozart_es_url, "job_specs",
                           {"query": {
                               "term": {
                                   "container.raw": cont_id
                               }
                           }})
    ]
    logger.debug("job_specs: {}".format(json.dumps(job_specs, indent=2)))

    # delete job_specs and hysds_ios
    for job_spec in job_specs:
        # collect hysds_ios from mozart
        mozart_hysds_ios = [
            i['_source'] for i in run_query(
                mozart_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on mozart for {}.".format(
            len(mozart_hysds_ios), job_spec['id']))
        for hysds_io in mozart_hysds_ios:
            r = requests.delete("{}/hysds_ios/hysds_io/{}".format(
                mozart_es_url, hysds_io['id']))
            r.raise_for_status()
            logger.debug(r.json())

        # collect hysds_ios from grq (comment previously said "mozart")
        grq_hysds_ios = [
            i['_source'] for i in run_query(
                grq_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on grq for {}.".format(
            len(grq_hysds_ios), job_spec['id']))
        for hysds_io in grq_hysds_ios:
            r = requests.delete("{}/hysds_ios/hysds_io/{}".format(
                grq_es_url, hysds_io['id']))
            r.raise_for_status()
            logger.debug(r.json())

        # delete job_spec from ES (after its hysds_ios are gone)
        r = requests.delete("{}/job_specs/job_spec/{}".format(
            mozart_es_url, job_spec['id']))
        r.raise_for_status()
        logger.debug(r.json())
Example #21
0
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

from sdscli.conf_utils import SettingsConf

# Best-effort: load the SDS settings at import time. A missing or invalid
# config is deliberately tolerated so the module can still be imported;
# note `conf` stays undefined on failure — callers must handle that.
# FIX: dropped the unused `as e` binding.
try:
    conf = SettingsConf()
except Exception:
    pass
Example #22
0
def export(args):
    """Export HySDS package.

    Queries ES for the container and its job_specs/hysds_ios, downloads
    the container and dependency images into an export directory, writes
    a manifest.json, and tars the whole package up as <id>.sdspkg.tar.

    Returns 1 when the package id is not found or the export directory
    already exists.
    """

    # get user's SDS conf settings
    conf = SettingsConf()

    # container id
    cont_id = args.id

    # query for container
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    hits = run_query(mozart_es_url, "containers",
                     {"query": {
                         "term": {
                             "_id": cont_id
                         }
                     }})
    if len(hits) == 0:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1
    cont_info = hits[0]['_source']
    logger.debug("cont_info: {}".format(json.dumps(cont_info, indent=2)))

    # set export directory
    outdir = normpath(args.outdir)
    export_name = "{}.sdspkg".format(cont_id)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: {}".format(export_dir))

    # if directory exists, stop (avoid clobbering a previous export)
    if os.path.exists(export_dir):
        logger.error(
            "SDS package export directory {} exists. Not continuing.".format(
                export_dir))
        return 1

    # create export directory
    validate_dir(export_dir)

    # download container image, then rewrite its manifest URL to the
    # bare filename so imports resolve it relative to the package dir
    get(cont_info['url'], export_dir)
    cont_info['url'] = os.path.basename(cont_info['url'])

    # query job specs that reference this container
    job_specs = [
        i['_source']
        for i in run_query(mozart_es_url, "job_specs",
                           {"query": {
                               "term": {
                                   "container.raw": cont_id
                               }
                           }})
    ]
    logger.debug("job_specs: {}".format(json.dumps(job_specs, indent=2)))

    # pull hysds_ios for each job_spec and download any dependency images;
    # dep_images caches name -> local filename so each image is downloaded
    # only once even when shared by several job_specs
    hysds_ios = []
    dep_images = {}
    for job_spec in job_specs:
        # download dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # download container
                get(d['container_image_url'], export_dir)
                d['container_image_url'] = os.path.basename(
                    d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']

        # collect hysds_ios from mozart
        mozart_hysds_ios = [
            i['_source'] for i in run_query(
                mozart_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on mozart for {}.".format(
            len(mozart_hysds_ios), job_spec['id']))
        hysds_ios.extend(mozart_hysds_ios)

        # collect hysds_ios from grq (comment previously said "mozart")
        grq_hysds_ios = [
            i['_source'] for i in run_query(
                grq_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on grq for {}.".format(
            len(grq_hysds_ios), job_spec['id']))
        hysds_ios.extend(grq_hysds_ios)
    logger.debug("Found {} hysds_ios total.".format(len(hysds_ios)))

    # clean out allowed accounts (deployment-specific; not portable)
    for hysds_io in hysds_ios:
        if 'allowed_accounts' in hysds_io:
            del hysds_io['allowed_accounts']

    # dump manifest JSON
    manifest = {
        "containers": cont_info,
        "job_specs": job_specs,
        "hysds_ios": hysds_ios,
    }
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file, 'w') as f:
        json.dump(manifest, f, indent=2, sort_keys=True)

    # tar up hysds package
    tar_file = os.path.join(outdir, "{}.tar".format(export_name))
    with tarfile.open(tar_file, "w") as tar:
        tar.add(export_dir, arcname=os.path.relpath(export_dir, outdir))

    # remove package dir
    shutil.rmtree(export_dir)