Code Example #1
File: rules.py Project: torresal/sdscli
def import_rules(args):
    """Import HySDS user rules."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # user rules JSON file
    rules_file = normpath(args.file)
    if not os.path.isfile(rules_file):
        logger.error(
            "HySDS user rules file {} doesn't exist.".format(rules_file))
        return 1
    logger.debug("rules_file: {}".format(rules_file))

    # read in user rules
    with open(rules_file) as f:
        rules = json.load(f)
    logger.debug("rules: {}".format(
        json.dumps(rules_file, indent=2, sort_keys=True)))

    # get ES endpoints
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))

    # index user rules in ES
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        for rule in rules[comp]:
            r = requests.post("{}/user_rules/.percolator/".format(es_url),
                              data=json.dumps(rule))
            logger.debug(r.content)
            r.raise_for_status()
            logger.debug(r.json())
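
The rules file read here is keyed by component, matching the structure documented in Code Example #3. A hypothetical minimal file (the field names inside each rule are illustrative, not enforced by this function):

{
  "mozart": [
    {"rule_name": "notify_on_l2", "query_string": "dataset:L2", "job_type": "hysds-io-notify", "enabled": true}
  ],
  "grq": []
}
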
Code Example #2
File: rules.py Project: torresal/sdscli
def export(args):
    """Export HySDS user rules."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # query for mozart and grq rules
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    rules = {}
    for comp, es_url in [('mozart', mozart_es_url), ('grq', grq_es_url)]:
        hits = run_query(es_url, "user_rules",
                         {"query": {"match_all": {}}},
                         doc_type=".percolator")
        if len(hits) == 0:
            logger.error("No user rules found on {}.".format(comp))
            rules[comp] = []
        else:
            rules[comp] = [i['_source'] for i in hits]
    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    # set export directory
    outfile = normpath(args.outfile)
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))

    # create export directory
    validate_dir(export_dir)

    # dump user rules JSON
    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)
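
Taken together, export and import_rules form a backup/restore round trip for user rules. A minimal usage sketch, assuming argparse-style namespaces and hypothetical paths:

import argparse

# dump the current rules to a backup file, then re-import them
# (e.g. after a redeploy or on another cluster)
export(argparse.Namespace(outfile="/tmp/backup/user_rules.json"))
import_rules(argparse.Namespace(file="/tmp/backup/user_rules.json"))
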
Code Example #3
File: rules.py Project: sdskit/sdscli
def import_rules(args):
    """
    Import HySDS user rules.
    rules json structure: {
        "mozart": [...],
        "grq": [...],
    }
    """

    rules_file = normpath(args.file)  # user rules JSON file
    logger.debug("rules_file: {}".format(rules_file))

    if not os.path.isfile(rules_file):
        logger.error(
            "HySDS user rules file {} doesn't exist.".format(rules_file))
        return 1

    with open(rules_file) as f:
        user_rules = json.load(f)  # read in user rules
    logger.debug("rules: {}".format(
        json.dumps(rules_file, indent=2, sort_keys=True)))

    for rule in user_rules['mozart']:
        now = datetime.utcnow().isoformat() + 'Z'

        if not rule.get('creation_time', None):
            rule['creation_time'] = now
        if not rule.get('modified_time', None):
            rule['modified_time'] = now

        result = mozart_es.index_document(index=USER_RULES_MOZART,
                                          body=rule)  # indexing mozart rules
        logger.debug(result)

    for rule in user_rules['grq']:
        now = datetime.utcnow().isoformat() + 'Z'

        if not rule.get('creation_time', None):
            rule['creation_time'] = now
        if not rule.get('modified_time', None):
            rule['modified_time'] = now

        result = mozart_es.index_document(index=USER_RULES_GRQ,
                                          body=rule)  # indexing GRQ rules
        logger.debug(result)
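
The mozart and grq loops above differ only in the source key and the target index. A sketch of one way to collapse the duplication, assuming the same mozart_es client, logger, and index constants used above:

from datetime import datetime

def index_user_rules(user_rules):
    """Stamp missing timestamps and index each rule into its component's index."""
    for comp, index in (('mozart', USER_RULES_MOZART), ('grq', USER_RULES_GRQ)):
        for rule in user_rules.get(comp, []):
            now = datetime.utcnow().isoformat() + 'Z'
            for key in ('creation_time', 'modified_time'):
                if not rule.get(key):  # fill in missing or empty timestamps
                    rule[key] = now
            logger.debug(mozart_es.index_document(index=index, body=rule))
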
Code Example #4
File: rules.py Project: sdskit/sdscli
def export(args):
    """Export HySDS user rules."""
    rules = {}

    mozart_rules = mozart_es.query(index=USER_RULES_MOZART)
    rules['mozart'] = [rule['_source'] for rule in mozart_rules]
    logger.debug('%d mozart user rules found' % len(mozart_rules))

    grq_rules = mozart_es.query(index=USER_RULES_GRQ)
    rules['grq'] = [rule['_source'] for rule in grq_rules]
    logger.debug('%d grq user rules found' % len(grq_rules))

    logger.debug("rules: {}".format(json.dumps(rules, indent=2)))

    outfile = normpath(args.outfile)  # set export directory
    export_dir = os.path.dirname(outfile)
    logger.debug("export_dir: {}".format(export_dir))

    validate_dir(export_dir)  # create export directory

    with open(outfile, 'w') as f:
        json.dump(rules, f, indent=2, sort_keys=True)  # dump user rules JSON
Code Example #5
def export(args):
    """Export HySDS package."""
    cont_id = args.id  # container id

    # query for container
    cont = mozart_es.get_by_id(index=CONTAINERS_INDEX, id=cont_id, ignore=404)
    if cont['found'] is False:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1

    cont_info = cont['_source']
    logger.debug("cont_info: %s" % json.dumps(cont_info, indent=2))

    # set export directory
    outdir = normpath(args.outdir)
    export_name = "{}.sdspkg".format(cont_id.replace(':', '-'))
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: %s" % export_dir)

    if os.path.exists(export_dir):  # if directory exists, stop
        logger.error(
            "SDS package export directory {} exists. Not continuing.".format(
                export_dir))
        return 1

    validate_dir(export_dir)  # create export directory

    # download container
    get(cont_info['url'], export_dir)
    cont_info['url'] = os.path.basename(cont_info['url'])

    # query job specs
    query = {
        "query": {
            "term": {
                "container.keyword": cont_id
            }
        }
    }
    job_specs = mozart_es.query(index=JOB_SPECS_INDEX, body=query)
    job_specs = [job_spec['_source'] for job_spec in job_specs]
    logger.debug("job_specs: %s" % json.dumps(job_specs, indent=2))

    # backwards-compatible query
    if len(job_specs) == 0:
        logger.debug("Got no job_specs. Checking deprecated mappings:")
        query = {
            "query": {
                "query_string": {
                    "query": "container:\"{}\"".format(cont_id)
                }
            }
        }
        job_specs = mozart_es.query(index=JOB_SPECS_INDEX, body=query)
        job_specs = [job_spec['_source'] for job_spec in job_specs]
        logger.debug("job_specs: %s" % json.dumps(job_specs, indent=2))

    # pull hysds_ios for each job_spec and download any dependency images
    hysds_ios = []
    dep_images = {}
    for job_spec in job_specs:
        # download dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # download container
                get(d['container_image_url'], export_dir)
                d['container_image_url'] = os.path.basename(
                    d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']

        # collect hysds_ios from mozart
        query = {
            "query": {
                "term": {
                    "job-specification.keyword": job_spec['id']
                }
            }
        }
        mozart_hysds_ios = mozart_es.query(index=HYSDS_IOS_MOZART_INDEX,
                                           body=query)
        mozart_hysds_ios = [
            hysds_io['_source'] for hysds_io in mozart_hysds_ios
        ]
        logger.debug("Found %d hysds_ios on mozart for %s." %
                     (len(mozart_hysds_ios), job_spec['id']))

        # backwards-compatible query
        if len(mozart_hysds_ios) == 0:
            logger.debug(
                "Got no hysds_ios from mozart. Checking deprecated mappings:")
            query = {
                "query": {
                    "query_string": {
                        "query":
                        "job-specification:\"{}\"".format(job_spec['id'])
                    }
                }
            }
            mozart_hysds_ios = mozart_es.query(index=HYSDS_IOS_MOZART_INDEX,
                                               body=query)
            mozart_hysds_ios = [
                hysds_io['_source'] for hysds_io in mozart_hysds_ios
            ]
            logger.debug("Found %d hysds_ios on mozart for %s." %
                         (len(mozart_hysds_ios), job_spec['id']))
        hysds_ios.extend(mozart_hysds_ios)

        # collect hysds_ios from grq
        query = {
            "query": {
                "term": {
                    "job-specification.keyword": job_spec['id']
                }
            }
        }
        grq_hysds_ios = mozart_es.query(index=HYSDS_IOS_GRQ_INDEX, body=query)
        grq_hysds_ios = [hysds_io['_source'] for hysds_io in grq_hysds_ios]
        logger.debug("Found %d hysds_ios on grq for %s." %
                     (len(grq_hysds_ios), job_spec['id']))

        # backwards-compatible query
        if len(grq_hysds_ios) == 0:
            logger.debug(
                "Got no hysds_ios from grq. Checking deprecated mappings:")
            query = {
                "query": {
                    "query_string": {
                        "query":
                        "job-specification:\"{}\"".format(job_spec['id'])
                    }
                }
            }
            grq_hysds_ios = mozart_es.query(index=HYSDS_IOS_GRQ_INDEX,
                                            body=query)
            grq_hysds_ios = [hysds_io['_source'] for hysds_io in grq_hysds_ios]
            logger.debug("Found %d hysds_ios on grq for %s." %
                         (len(grq_hysds_ios), job_spec['id']))

        hysds_ios.extend(grq_hysds_ios)
    logger.debug("Found %d hysds_ios total." % (len(hysds_ios)))

    # strip allowed accounts unless args.accounts requests exporting them
    if not args.accounts:
        for hysds_io in hysds_ios:
            if 'allowed_accounts' in hysds_io:
                del hysds_io['allowed_accounts']

    # dump manifest JSON
    manifest = {
        "containers": cont_info,
        "job_specs": job_specs,
        "hysds_ios": hysds_ios,
    }
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file, 'w') as f:
        json.dump(manifest, f, indent=2, sort_keys=True)

    # tar up hysds package
    tar_file = os.path.join(outdir, "{}.tar".format(export_name))
    with tarfile.open(tar_file, "w") as tar:
        tar.add(export_dir, arcname=os.path.relpath(export_dir, outdir))

    shutil.rmtree(export_dir)  # remove package dir
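
The exported tarball unpacks to a single <container-id>.sdspkg/ directory. A hypothetical layout (file names illustrative; the image file names are the basenames of the original container URLs):

container-example-v1.0.sdspkg/
    manifest.json                  # {"containers": {...}, "job_specs": [...], "hysds_ios": [...]}
    container-example-v1.0.tar.gz  # main container image
    dependency-image.tar.gz        # any dependency images
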
Code Example #6
def import_pkg(args):
    """Import HySDS package."""

    conf = SettingsConf()  # get user's SDS conf settings

    # package tar file
    tar_file = normpath(args.file)
    if not os.path.isfile(tar_file):
        logger.error("HySDS package file %s doesn't exist." % tar_file)
        return 1
    logger.debug("tar_file: %s" % tar_file)

    # extract
    outdir = os.path.dirname(tar_file)
    with tarfile.open(tar_file) as tar:
        export_name = tar.getnames()[0]
        tar.extractall(outdir)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: %s" % export_dir)

    # detect export dir
    if not os.path.isdir(export_dir):
        logger.error("Cannot find HySDS package dir %s." % export_dir)
        return 1

    # read in manifest
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file) as f:
        manifest = json.load(f)
    logger.debug("manifest: %s" %
                 json.dumps(manifest, indent=2, sort_keys=True))

    # get code bucket
    code_bucket = conf.get('CODE_BUCKET')
    code_bucket_url = "s3://%s/%s" % (conf.get('S3_ENDPOINT'), code_bucket)
    logger.debug("code_bucket: %s" % code_bucket)
    logger.debug("code_bucket_url: %s" % code_bucket_url)

    # upload container image to s3
    cont_info = manifest['containers']
    cont_image = os.path.join(export_dir, cont_info['url'])
    cont_info['url'] = "{}/{}".format(code_bucket_url, cont_info['url'])
    put(cont_image, cont_info['url'])

    # index container in ES
    indexed_container = mozart_es.index_document(index=CONTAINERS_INDEX,
                                                 body=cont_info,
                                                 id=cont_info['id'])
    logger.debug(indexed_container)

    # index job_specs in ES and upload any dependency containers
    dep_images = {}
    for job_spec in manifest['job_specs']:
        # upload dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # upload container
                dep_img = os.path.join(export_dir, d['container_image_url'])
                d['container_image_url'] = "%s/%s" % (code_bucket_url,
                                                      d['container_image_url'])
                put(dep_img, d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']

        indexed_job_spec = mozart_es.index_document(index=JOB_SPECS_INDEX,
                                                    body=job_spec,
                                                    id=job_spec['id'])
        logger.debug(indexed_job_spec)

    # index hysds_ios to ES
    for hysds_io in manifest['hysds_ios']:
        component = hysds_io.get('component', 'tosca')

        hysds_io_id = hysds_io['id']
        if component in ('mozart', 'figaro'):
            indexed_hysds_io = mozart_es.index_document(
                index=HYSDS_IOS_MOZART_INDEX, body=hysds_io, id=hysds_io_id)
            logger.debug(indexed_hysds_io)
        else:
            indexed_hysds_io = mozart_es.index_document(
                index=HYSDS_IOS_GRQ_INDEX, body=hysds_io, id=hysds_io_id)
            logger.debug(indexed_hysds_io)

    shutil.rmtree(export_dir)  # remove package dir
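
A minimal invocation sketch for the import side, assuming an argparse-style namespace and a hypothetical package path:

import argparse

ret = import_pkg(argparse.Namespace(file="/data/container-example-v1.0.sdspkg.tar"))
if ret == 1:
    logger.error("package import failed")
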
Code Example #7
File: pkg.py Project: kennetham/sdscli
def export(args):
    """Export HySDS package."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # container id
    cont_id = args.id

    # query for container
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))
    hits = run_query(mozart_es_url, "containers",
                     {"query": {"term": {"_id": cont_id}}})
    if len(hits) == 0:
        logger.error("SDS package id {} not found.".format(cont_id))
        return 1
    cont_info = hits[0]['_source']
    logger.debug("cont_info: {}".format(json.dumps(cont_info, indent=2)))

    # set export directory
    outdir = normpath(args.outdir)
    export_name = "{}.sdspkg".format(cont_id)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: {}".format(export_dir))

    # if directory exists, stop
    if os.path.exists(export_dir):
        logger.error(
            "SDS package export directory {} exists. Not continuing.".format(
                export_dir))
        return 1

    # create export directory
    validate_dir(export_dir)

    # download container
    get(cont_info['url'], export_dir)
    cont_info['url'] = os.path.basename(cont_info['url'])

    # query job specs
    job_specs = [
        i['_source']
        for i in run_query(mozart_es_url, "job_specs",
                           {"query": {
                               "term": {
                                   "container.raw": cont_id
                               }
                           }})
    ]
    logger.debug("job_specs: {}".format(json.dumps(job_specs, indent=2)))

    # pull hysds_ios for each job_spec and download any dependency images
    hysds_ios = []
    dep_images = {}
    for job_spec in job_specs:
        # download dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # download container
                get(d['container_image_url'], export_dir)
                d['container_image_url'] = os.path.basename(
                    d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']

        # collect hysds_ios from mozart
        mozart_hysds_ios = [
            i['_source'] for i in run_query(
                mozart_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on mozart for {}.".format(
            len(mozart_hysds_ios), job_spec['id']))
        hysds_ios.extend(mozart_hysds_ios)

        # collect hysds_ios from grq
        grq_hysds_ios = [
            i['_source'] for i in run_query(
                grq_es_url, "hysds_ios",
                {"query": {
                    "term": {
                        "job-specification.raw": job_spec['id']
                    }
                }})
        ]
        logger.debug("Found {} hysds_ios on grq for {}.".format(
            len(grq_hysds_ios), job_spec['id']))
        hysds_ios.extend(grq_hysds_ios)
    logger.debug("Found {} hysds_ios total.".format(len(hysds_ios)))

    # clean out allowed accounts
    for hysds_io in hysds_ios:
        if 'allowed_accounts' in hysds_io:
            del hysds_io['allowed_accounts']

    # dump manifest JSON
    manifest = {
        "containers": cont_info,
        "job_specs": job_specs,
        "hysds_ios": hysds_ios,
    }
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file, 'w') as f:
        json.dump(manifest, f, indent=2, sort_keys=True)

    # tar up hysds package
    tar_file = os.path.join(outdir, "{}.tar".format(export_name))
    with tarfile.open(tar_file, "w") as tar:
        tar.add(export_dir, arcname=os.path.relpath(export_dir, outdir))

    # remove package dir
    shutil.rmtree(export_dir)
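
The main functional difference from Code Example #5 is the keyword sub-field used in the term queries, which follows the Elasticsearch mapping convention in use (a ".raw" sub-field under the older ES 2.x-style mappings here, versus the default ".keyword" sub-field in ES 5.x and later). Compare:

# older mapping (this example):
{"query": {"term": {"container.raw": cont_id}}}
# newer mapping (Code Example #5):
{"query": {"term": {"container.keyword": cont_id}}}
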
Code Example #8
File: pkg.py Project: kennetham/sdscli
def import_pkg(args):
    """Import HySDS package."""

    # get user's SDS conf settings
    conf = SettingsConf()

    # package tar file
    tar_file = normpath(args.file)
    if not os.path.isfile(tar_file):
        logger.error("HySDS package file {} doesn't exist.".format(tar_file))
        return 1
    logger.debug("tar_file: {}".format(tar_file))

    # extract
    outdir = os.path.dirname(tar_file)
    with tarfile.open(tar_file) as tar:
        export_name = tar.getnames()[0]
        tar.extractall(outdir)
    export_dir = os.path.join(outdir, export_name)
    logger.debug("export_dir: {}".format(export_dir))

    # detect export dir
    if not os.path.isdir(export_dir):
        logger.error("Cannot find HySDS package dir {}.".format(export_dir))
        return 1

    # read in manifest
    manifest_file = os.path.join(export_dir, 'manifest.json')
    with open(manifest_file) as f:
        manifest = json.load(f)
    logger.debug("manifest: {}".format(
        json.dumps(manifest, indent=2, sort_keys=True)))

    # get code bucket
    code_bucket = conf.get('CODE_BUCKET')
    code_bucket_url = "s3://{}/{}".format(conf.get('S3_ENDPOINT'), code_bucket)
    logger.debug("code_bucket: {}".format(code_bucket))
    logger.debug("code_bucket_url: {}".format(code_bucket_url))

    # get ES endpoints
    mozart_es_url = "http://{}:9200".format(conf.get('MOZART_ES_PVT_IP'))
    grq_es_url = "http://{}:9200".format(conf.get('GRQ_ES_PVT_IP'))

    # upload container image and index container in ES
    cont_info = manifest['containers']
    cont_image = os.path.join(export_dir, cont_info['url'])
    cont_info['url'] = "{}/{}".format(code_bucket_url, cont_info['url'])
    put(cont_image, cont_info['url'])
    r = requests.put("{}/containers/container/{}".format(
        mozart_es_url, cont_info['id']),
                     data=json.dumps(cont_info))
    r.raise_for_status()
    logger.debug(r.json())

    # index job_specs in ES and upload any dependency containers
    dep_images = {}
    for job_spec in manifest['job_specs']:
        # upload dependency images
        for d in job_spec.get('dependency_images', []):
            if d['container_image_name'] in dep_images:
                d['container_image_url'] = dep_images[
                    d['container_image_name']]
            else:
                # upload container
                dep_img = os.path.join(export_dir, d['container_image_url'])
                d['container_image_url'] = "{}/{}".format(
                    code_bucket_url, d['container_image_url'])
                put(dep_img, d['container_image_url'])
                dep_images[
                    d['container_image_name']] = d['container_image_url']
        r = requests.put("{}/job_specs/job_spec/{}".format(
            mozart_es_url, job_spec['id']),
                         data=json.dumps(job_spec))
        r.raise_for_status()
        logger.debug(r.json())

    # index hysds_ios in ES
    for hysds_io in manifest['hysds_ios']:
        component = hysds_io.get('component', 'tosca')
        es_url = mozart_es_url if component == 'mozart' else grq_es_url
        r = requests.put("{}/hysds_ios/hysds_io/{}".format(
            es_url, hysds_io['id']),
                         data=json.dumps(hysds_io))
        r.raise_for_status()
        logger.debug(r.json())

    # remove package dir
    shutil.rmtree(export_dir)
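
As with the user-rule examples, export and import_pkg form a round trip for moving a package between clusters. A minimal sketch with hypothetical identifiers:

import argparse

# on the source cluster
export(argparse.Namespace(id="container-example", outdir="/tmp"))
# on the target cluster, after copying the tarball over
import_pkg(argparse.Namespace(file="/tmp/container-example.sdspkg.tar"))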