Example 1
def whoami(anchore_config):
    """
    Show user data for current user if available
    :param anchore_config:
    :return:
    """
    ecode = 0
    try:
        aa = contexts['anchore_auth']
        if aa and 'username' in aa and 'password' in aa:
            info = {
                'Current user':
                aa['user_info'] if aa['user_info'] else 'anonymous'
            }

            anchore_print(info, do_formatting=True)
        else:
            anchore_print_err(
                'No anchore auth context found. Cannot get user info. Try logging in first'
            )
            ecode = 1

    except Exception as err:
        anchore_print_err('Cannot get user info')
        ecode = 1

    sys.exit(ecode)
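
A minimal sketch of the auth context this example reads. The exact structure of contexts['anchore_auth'] is an assumption here; only the keys whoami() actually touches are shown, with hypothetical values.

contexts = {
    'anchore_auth': {
        'username': 'alice',     # hypothetical credential
        'password': 'secret',    # hypothetical credential
        'user_info': None,       # falsy, so whoami() reports 'anonymous'
    }
}

aa = contexts['anchore_auth']
print({'Current user': aa['user_info'] if aa['user_info'] else 'anonymous'})
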
Example 2
def show(details):
    """
    Show list of Anchore data policies.

    """

    ecode = 0
    try:
        policymeta = anchore_policy.load_policymeta()

        if details:
            anchore_print(policymeta, do_formatting=True)

        else:
            output = {}

            name = policymeta['name']
            output[name] = {}
            output[name]['id'] = policymeta['id']
            output[name]['policies'] = policymeta['policies']
            output[name]['whitelists'] = policymeta['whitelists']
            output[name]['mappings'] = policymeta['mappings']

            anchore_print(output, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 3
def show_schemas(schemaname):
    """
    Show anchore document schemas.
    """

    ecode = 0
    try:
        schemas = {}
        schema_dir = os.path.join(contexts['anchore_config']['pkg_dir'], 'schemas')
        for f in os.listdir(schema_dir):
            sdata = {}
            try:
                with open(os.path.join(schema_dir, f), 'r') as FH:
                    sdata = json.loads(FH.read())
            except:
                anchore_print_err('found schema file but failed to parse: ' + os.path.join(schema_dir, f))

            if sdata and (not schemaname or f in schemaname):
                schemas[f] = sdata

        if not schemas:
            anchore_print_err("no specified schemas were found to show")
        else:
            anchore_print(json.dumps(schemas, indent=4))

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 4
def list(showgroups):
    """
    Show list of Anchore data feeds.
    """
    ecode = 0
    try:
        result = {}
        subscribed = {}
        available = {}
        feedmeta = anchore_feeds.load_anchore_feedmeta()
        for feed in feedmeta.keys():
            if feedmeta[feed]['subscribed']:
                subscribed[feed] = {}
                subscribed[feed]['description'] = feedmeta[feed]['description']
                if showgroups:
                    subscribed[feed]['groups'] = feedmeta[feed]['groups'].keys(
                    )

            else:
                available[feed] = {}
                available[feed]['description'] = feedmeta[feed]['description']
                if showgroups:
                    available[feed]['groups'] = feedmeta[feed]['groups'].keys()

        if available:
            result['Available'] = available
        if subscribed:
            result['Subscribed'] = subscribed

        anchore_print(result, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 5
def purge(dontask):
    ecode = 0

    if not nav:
        sys.exit(1)

    try:
        for i in nav.get_images():
            dodelete = False
            if dontask:
                dodelete = True
            else:
                try:
                    answer = raw_input("Really delete image '"+str(i)+"'? (y/N)")
                except:
                    answer = "n"
                if 'y' == answer.lower():
                    dodelete = True
                else:
                    anchore_print("Skipping delete.")
            if dodelete:
                try:
                    anchore_print("Deleting image '"+str(i)+"'")
                    contexts['anchore_db'].delete_image(i)
                except Exception as err:
                    raise err
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1
    sys.exit(ecode)
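
The purge/delete examples repeat the same ask-before-delete prompt. As a rough sketch (not part of anchore itself), the pattern can be factored into a small helper that works on both Python 2 and Python 3:

def confirm_delete(item):
    """Return True only on an explicit 'y'/'Y' answer; anything else (or any
    input error) counts as a no, roughly matching the code above."""
    try:
        reader = raw_input          # Python 2
    except NameError:
        reader = input              # Python 3
    try:
        answer = reader("Really delete image '" + str(item) + "'? (y/N)")
    except Exception:
        answer = "n"
    return answer.strip().lower() == 'y'
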
Example 6
def status(anchore_config):
    """
    Show state of local anchore images and artifacts.

    Returns structured output with the results of checks of local resources and their staleness compared to
    the upstream service artifacts for items such as vulnerability data and analysis db entries for subscription images.

    The output of this command can be used to determine if/when to run a catalog sync and check if new service data is
    available. This command will use the network to check the service status.
    """

    assert anchore_config is not None

    try:
        result = working_catalog.check_status()
        for k, v in result.items():
            if 'sync' in v:
                result[k] = v['sync']

        anchore_print(result, do_formatting=True)
    except:
        anchore_print_err(
            'Failed checking catalog configuration. Please check config file: %s'
            % anchore_config.config_file)
        exit(1)
Example 7
def image_import(infile):
    """Import image anchore data from a JSON file."""
    ecode = 0

    try:
        with open(infile, 'r') as FH:
            savelist = json.loads(FH.read())
    except Exception as err:
        anchore_print_err("could not load input file: " + str(err))
        ecode = 1

    if ecode == 0:
        for record in savelist:
            try:
                imageId = record['image']['imageId']
                if contexts['anchore_db'].is_image_present(imageId):
                    anchore_print("image (" + str(imageId) +
                                  ") already exists in DB, skipping import.")
                else:
                    imagedata = record['image']['imagedata']
                    try:
                        rc = contexts['anchore_db'].save_image_new(
                            imageId, report=imagedata)
                        if not rc:
                            contexts['anchore_db'].delete_image(imageId)
                            raise Exception("save to anchore DB failed")
                    except Exception as err:
                        contexts['anchore_db'].delete_image(imageId)
                        raise err
            except Exception as err:
                anchore_print_err("could not store image (" + str(imageId) +
                                  ") from import file: " + str(err))
                ecode = 1

    sys.exit(ecode)
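
For reference, the minimal shape of the JSON input file that image_import() iterates over, inferred from the keys it reads (record['image']['imageId'] and record['image']['imagedata']). Real anchore export files carry a full analysis report under 'imagedata'; the values below are hypothetical.

import json

savelist = [
    {
        "image": {
            "imageId": "0123456789ab",   # hypothetical image ID
            "imagedata": {},             # full per-image analysis report goes here
        }
    }
]

# Write a file that image_import() could read back in.
with open("anchore_images_export.json", "w") as OFH:
    OFH.write(json.dumps(savelist, indent=2))
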
Example 8
def add(repos):
    """
    Adds the specified images/tags to the subscription. Tags are formatted as docker image tags. Values are checked
    against the list of available tags from the service; run 'anchore subscriptions show' to see the available
    options. Because values are validated against service data, you must run an initial 'anchore sync catalog' before
    subscriptions can be added.

    Duplicate entries do not result in an error; they are simply discarded.

    Tag/repo examples: ubuntu, centos:7, nginx:latest

    """

    if not working_catalog.has_db():
        anchore_print_err('No local analysis db detected. You probably need to run "anchore sync catalog" first to initialize')
        exit(5)

    repo_list = list(repos)

    try:
        working_catalog.subscribe(repo_list)
        if working_catalog.configuration().cliargs['json']:
            anchore_print(working_catalog.subscription.get(), do_formatting=True)
        else:
            anchore_print('\n'.join(working_catalog.subscription.get()))
    except:
        anchore_print_err('Failed adding %s to subscription' % repo_list)
        exit(1)
Example 9
def sub(feednames):
    """
    Subscribe to the specified feed(s).
    """

    ecode = 0
    current_user_data = contexts.get('anchore_auth', {}).get('user_info', None)
    if not current_user_data:
        current_user_tier = 0
    else:
        current_user_tier = int(current_user_data['tier'])

    try:
        for feed in feednames:
            rc, msg = anchore_feeds.subscribe_anchore_feed(feed, current_user_tier)
            if not rc:
                ecode = 1
                anchore_print_err(msg)
            else:
                anchore_print(msg)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 10
def sub(feednames):
    """
    Subscribe to the specified feed(s).
    """

    ecode = 0
    current_user_data = contexts.get('anchore_auth', {}).get('user_info', None)
    if not current_user_data:
        current_user_tier = 0
    else:
        current_user_tier = int(current_user_data['tier'])

    try:
        for feed in feednames:
            rc, msg = anchore_feeds.subscribe_anchore_feed(
                feed, current_user_tier)
            if not rc:
                ecode = 1
                anchore_print_err(msg)
            else:
                anchore_print(msg)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 11
def show(details):
    """
    Show list of Anchore data policies.

    """

    ecode = 0
    try:
        policymeta = anchore_policy.load_policymeta()

        if details:
            anchore_print(policymeta, do_formatting=True)

        else:
            output = {}

            name = policymeta['name']
            output[name] = {}
            output[name]['id'] = policymeta['id']
            output[name]['policies'] = policymeta['policies']
            output[name]['whitelists'] = policymeta['whitelists']
            output[name]['mappings'] = policymeta['mappings']

            anchore_print(output, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 12
def show_schemas(schemaname):
    """
    Show anchore document schemas.
    """

    ecode = 0
    try:
        schemas = {}
        schema_dir = os.path.join(contexts['anchore_config']['pkg_dir'],
                                  'schemas')
        for f in os.listdir(schema_dir):
            sdata = {}
            try:
                with open(os.path.join(schema_dir, f), 'r') as FH:
                    sdata = json.loads(FH.read())
            except:
                anchore_print_err('found schema file but failed to parse: ' +
                                  os.path.join(schema_dir, f))

            if sdata and (not schemaname or f in schemaname):
                schemas[f] = sdata

        if not schemas:
            anchore_print_err("no specified schemas were found to show")
        else:
            anchore_print(json.dumps(schemas, indent=4))

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 13
def image_import(infile, force):
    """Import image anchore data from a JSON file."""
    ecode = 0
    
    try:
        with open(infile, 'r') as FH:
            savelist = json.loads(FH.read())
    except Exception as err:
        anchore_print_err("could not load input file: " + str(err))
        ecode = 1

    if ecode == 0:
        for record in savelist:
            try:
                imageId = record['image']['imageId']
                if contexts['anchore_db'].is_image_present(imageId) and not force:
                    anchore_print("image ("+str(imageId)+") already exists in DB, skipping import.")
                else:
                    imagedata = record['image']['imagedata']
                    try:
                        rc = contexts['anchore_db'].save_image_new(imageId, report=imagedata)
                        if not rc:
                            contexts['anchore_db'].delete_image(imageId)
                            raise Exception("save to anchore DB failed")
                    except Exception as err:
                        contexts['anchore_db'].delete_image(imageId)
                        raise err
            except Exception as err:
                anchore_print_err("could not store image ("+str(imageId)+") from import file: "+ str(err))
                ecode = 1

    sys.exit(ecode)
Example 14
def feeds(anchore_config):
    global config
    config = anchore_config

    ecode = 0
    emsg = ""
    success = True

    try:
        rc, msg = anchore_feeds.check()
        if not rc:
            anchore_print("initializing feed metadata: ...")
            rc, ret = anchore_feeds.sync_feedmeta()
            if not rc:
                emsg = "could not sync feed metadata from service: " + ret[
                    'text']
                success = False

    except Exception as err:
        anchore_print_err('operation failed')
        sys.exit(1)

    if not success:
        anchore_print_err(emsg)
        sys.exit(1)
Example 15
def exportdb(outdir):
    """Export all anchore images to JSON files"""
    ecode = 0
    try:
        imgdir = os.path.join(outdir, "images")
        feeddir = os.path.join(outdir, "feeds")
        storedir = os.path.join(outdir, "storedfiles")

        for d in [outdir, imgdir, feeddir, storedir]:
            if not os.path.exists(d):
                os.makedirs(d)

        anchore_print("exporting images...")
        imagelist = anchore_utils.get_image_list().keys()
        for imageId in imagelist:
            thefile = os.path.join(imgdir, imageId+".json")
            if not os.path.exists(thefile):
                with open(thefile, 'w') as OFH:
                    OFH.write(json.dumps(contexts['anchore_db'].load_image_new(imageId)))

            stored_namespaces = contexts['anchore_db'].load_files_namespaces(imageId)
            for namespace in stored_namespaces:
                stored_files = contexts['anchore_db'].load_files_tarfile(imageId, namespace)
                if os.path.exists(stored_files):
                    thedir = os.path.join(storedir, imageId, namespace)
                    if not os.path.exists(thedir):
                        os.makedirs(thedir)
                    thefile = os.path.join(thedir, "stored_files.tar.gz")
                    shutil.copy(stored_files, thefile)

        anchore_print("exporting feeds...")
        feedmeta = contexts['anchore_db'].load_feedmeta()
        thefile = os.path.join(feeddir, "feedmeta.json")
        with open(thefile, 'w') as OFH:
            OFH.write(json.dumps(feedmeta))

        for feed in feedmeta:
            feedobj = feedmeta[feed]
            for group in feedobj['groups']:
                groupobj = feedobj['groups'][group]
                datafiles = groupobj.pop('datafiles', [])
                for datafile in datafiles:
                    thedir = os.path.join(feeddir, feed, group)
                    if not os.path.exists(thedir):
                        os.makedirs(thedir)
                    thefile = os.path.join(thedir, datafile)
                    if not os.path.exists(thefile):
                        with open(thefile, 'w') as OFH:
                            OFH.write(json.dumps(contexts['anchore_db'].load_feed_group_data(feed, group, datafile)))

    except Exception as err:
        anchore_print_err("operation failed: " + str(err))
        ecode = 1

    sys.exit(ecode)
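
A small sketch that walks the directory layout produced by exportdb() above and lists the exported image IDs. The path layout (images/<imageId>.json) comes directly from the code; the example path is hypothetical.

import os

def list_exported_image_ids(outdir):
    """Return the image IDs found under <outdir>/images, as written by exportdb()."""
    imgdir = os.path.join(outdir, "images")
    if not os.path.isdir(imgdir):
        return []
    return [f[:-len(".json")] for f in os.listdir(imgdir) if f.endswith(".json")]

# Example (hypothetical path):
# print(list_exported_image_ids("/tmp/anchore_export"))
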
Example 16
def list(showgroups):
    """
    Show list of Anchore data feeds.
    """

    ecode = 0
    try:
        result = {}
        subscribed = {}
        available = {}
        unavailable = {}
        current_user_data = contexts['anchore_auth']['user_info']
        feedmeta = anchore_feeds.load_anchore_feedmeta()

        for feed in list(feedmeta.keys()):
            if feedmeta[feed]['subscribed']:
                subscribed[feed] = {}
                subscribed[feed]['description'] = feedmeta[feed]['description']
                if showgroups:
                    subscribed[feed]['groups'] = list(
                        feedmeta[feed]['groups'].keys())

            else:
                if current_user_data:
                    tier = int(current_user_data['tier'])
                else:
                    tier = 0

                if int(feedmeta[feed]['access_tier']) > tier:
                    collection = unavailable
                else:
                    collection = available

                collection[feed] = {}

                collection[feed]['description'] = feedmeta[feed]['description']
                if showgroups and collection == available:
                    collection[feed]['groups'] = list(
                        feedmeta[feed]['groups'].keys())

        if available:
            result['Available'] = available
        if subscribed:
            result['Subscribed'] = subscribed
        if unavailable:
            result['Unavailable/Insufficient Access Tier'] = unavailable

        anchore_print(result, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 17
def list(showgroups):
    """
    Show list of Anchore data feeds.
    """

    ecode = 0
    try:
        result = {}
        subscribed = {}
        available = {}
        unavailable = {}
        current_user_data = contexts['anchore_auth']['user_info']
        feedmeta = anchore_feeds.load_anchore_feedmeta()

        for feed in feedmeta.keys():
            if feedmeta[feed]['subscribed']:
                subscribed[feed] = {}
                subscribed[feed]['description'] = feedmeta[feed]['description']
                if showgroups:
                    subscribed[feed]['groups'] = feedmeta[feed]['groups'].keys()

            else:
                if current_user_data:
                    tier = int(current_user_data['tier'])
                else:
                    tier = 0

                if int(feedmeta[feed]['access_tier']) > tier:
                    collection = unavailable
                else:
                    collection = available

                collection[feed] = {}

                collection[feed]['description'] = feedmeta[feed]['description']
                if showgroups and collection == available:
                    collection[feed]['groups'] = feedmeta[feed]['groups'].keys()

        if available:
            result['Available'] = available
        if subscribed:
            result['Subscribed'] = subscribed
        if unavailable:
            result['Unavailable/Insufficient Access Tier'] = unavailable

        anchore_print(result, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 18
def user_images(operation):
    """
    Manages fetching and pushing user images. The operations are: 'input', 'output', 'all'.

    By default, 'all' is invoked and will execute the 'input' then 'output' operations, in that order.

    Scripts are located as follows, assuming $INSTALL_LOC = distro-specific location where pip installs python packages

    input: $INSTALL_LOC/anchore/anchore-modules/inputs/
    output: $INSTALL_LOC/anchore/anchore-modules/outputs/

    Scripts are executed in lexicographic order by filename and scripts must be marked as executable to be run.
    See the README file in each directory for more information.

    """

    if operation == 'input' or operation == 'all':
        try:
            anchore_print('Executing input scripts')
            working_catalog.inputs.execute()
            anchore_print('Execution of input scripts complete')
        except:
            anchore_print_err('Failed executing input scripts')
            exit(1)

    if operation == 'output' or operation == 'all':
        try:
            anchore_print('Executing output scripts')
            working_catalog.outputs.execute()
            anchore_print('Execution of output scripts complete')
        except:
            anchore_print_err('Failed executing output scripts')
            exit(1)
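
The docstring above describes the contract for input/output scripts: they run in lexicographic order by filename, and only if marked executable. A rough sketch of that contract follows; this is not the anchore implementation behind working_catalog.inputs/outputs, just an illustration.

import os
import subprocess

def run_module_scripts(script_dir):
    """Run every executable file in script_dir, in lexicographic filename order."""
    for name in sorted(os.listdir(script_dir)):
        path = os.path.join(script_dir, name)
        if os.path.isfile(path) and os.access(path, os.X_OK):
            subprocess.call([path])
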
Example 19
def user_images(operation):
    """
    Manages fetching and pushing user images. The operations are: 'input', 'output', 'all'.

    By default, 'all' is invoked and will execute the 'input' then 'output' operations, in that order.

    Scripts are located as follows, assuming $INSTALL_LOC = distro-specific location where pip installs python packages

    input: $INSTALL_LOC/anchore/anchore-modules/inputs/
    output: $INSTALL_LOC/anchore/anchore-modules/outputs/

    Scripts are executed in lexicographic order by filename and scripts must be marked as executable to be run.
    See the README file in each directory for more information.

    """

    if operation == 'input' or operation == 'all':
        try:
            anchore_print('Executing input scripts')
            working_catalog.inputs.execute()
            anchore_print('Execution of input scripts complete')
        except:
            anchore_print_err('Failed executing input scripts')
            exit(1)

    if operation == 'output' or operation == 'all':
        try:
            anchore_print('Executing output scripts')
            working_catalog.outputs.execute()
            anchore_print('Execution of output scripts complete')
        except:
            anchore_print_err('Failed executing output scripts')
            exit(1)
Example 20
def backup(outputdir):
    """
    Backup an anchore installation to a tarfile.
    """

    ecode = 0
    try:
        anchore_print('Backing up anchore system to directory '+str(outputdir)+' ...')
        backupfile = config.backup(outputdir)
        anchore_print({"anchore_backup_tarball":str(backupfile)}, do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 21
def restore(inputfile, destination_root):
    """
    Restore an anchore installation from a previously backed up tar file.
    """

    ecode = 0
    try:
        anchore_print('Restoring anchore system from backup file %s ...' % (str(inputfile.name)))
        restoredir = config.restore(destination_root, inputfile)
        anchore_print("Anchore restored.")
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 22
def restore(inputfile, destination_root):
    """
    Restore an anchore installation from a previously backed up tar file.
    """

    ecode = 0
    try:
        anchore_print('Restoring anchore system from backup file %s ...' %
                      (str(inputfile.name)))
        restoredir = config.restore(destination_root, inputfile)
        anchore_print("Anchore restored.")
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 23
def show(feed):
    """
    Show detailed feed information

    """
    ecode = 0
    try:
        feedmeta = anchore_feeds.load_anchore_feedmeta()
        if feed in feedmeta:
            result = {}
            groups = list(feedmeta[feed].get('groups', {}).values())
            result['name'] = feed
            result['access_tier'] = int(feedmeta[feed].get('access_tier'))
            result['description'] = feedmeta[feed].get('description')
            result['groups'] = {}
            if 'subscribed' not in feedmeta[feed]:
                result['subscribed'] = False
            else:
                result['subscribed'] = feedmeta[feed]['subscribed']

            for g in groups:
                result['groups'][g['name']] = {
                    'access_tier': int(g.get('access_tier')),
                    'description': g.get('description'),
                    'last_update': datetime.datetime.fromtimestamp(
                        g.get('last_update')).isoformat() if 'last_update' in g else 'None',
                    'prev_update': datetime.datetime.fromtimestamp(
                        g.get('prev_update')).isoformat() if 'prev_update' in g else 'None',
                }

            anchore_print(result, do_formatting=True)
        else:
            anchore_print_err(
                'Unknown feed name. Valid feeds can be seen with the "list" command'
            )
            ecode = 1
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
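
For orientation, the shape of the feed metadata that show() reads, reconstructed only from the keys accessed above. The feed name, group name, and all values here are hypothetical.

feedmeta = {
    "vulnerabilities": {                       # hypothetical feed name
        "description": "OS vulnerability data",
        "access_tier": 0,
        "subscribed": True,
        "groups": {
            "centos:7": {                      # hypothetical group name
                "name": "centos:7",
                "access_tier": 0,
                "description": "CentOS 7 vulnerability group",
                "last_update": 1500000000,     # epoch seconds
                "prev_update": 1499990000,
            }
        },
    }
}
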
Example 24
def backup(outputdir):
    """
    Backup an anchore installation to a tarfile.
    """

    ecode = 0
    try:
        anchore_print('Backing up anchore system to directory ' +
                      str(outputdir) + ' ...')
        backupfile = config.backup(outputdir)
        anchore_print({"anchore_backup_tarball": str(backupfile)},
                      do_formatting=True)
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 25
def backup(anchore_config, outputdir):
    """
    Backup the Anchore data locally to a tarball. Will result in a backup file with the name:
    anchore-backup-<date>.tar.gz

    If the anchore configuration file specifies a different image_data_store outside of the anchore_data_dir tree it will
    be backed up, but may require manual intervention on restore. Backup includes the configuration files as well as data
    files. Backup does *not* include docker images themselves.

    """

    try:
        output_file = anchore.catalog.AnchoreCatalog.backup(
            anchore_config, outputdir)
        anchore_print({'output': output_file}, do_formatting=True)
    except:
        anchore_print_err('Backup of catalog to %s failed' % outputdir)
        exit(1)
Example 26
def show(subscribed, tags, filters):
    """
    Show the subscription options available and/or the current subscription list.

    Examples:

    Show all tags for all available repos for subscription: 'anchore subscriptions show --tags'
    Show all tags for centos available for subscription: 'anchore subscriptions show centos --tags'
    Show only the current subscriptions: 'anchore subscriptions show --subscribed'

    """

    if not working_catalog.has_db():
        anchore_print_err('No local analysis db detected. You probably need to run "anchore sync catalog" first to initialize')
        exit(5)

    results = {}
    try:
        if filters:
            results['Current Subscription'] = filter(lambda x: x in filters, working_catalog.subscription.get())
        else:
            results['Current Subscription'] = working_catalog.subscription.get()

    except:
        anchore_print_err('Failed getting subscription data.')
        exit(1)

    if not subscribed:
        repos = working_catalog.metadata.engine_repos
        tag_set = working_catalog.metadata.engine_tags
        if filters:
            filtered_repos = filter(lambda x: (x in repos) or (x in tag_set), filters)
        else:
            filtered_repos = repos

        if not tags:
            results['Available'] = filtered_repos
        else:
            results['Available'] = {}
            for r in filtered_repos:
                results['Available'][r] = filter(lambda x: x.startswith(r), tag_set)

    anchore_print(results, do_formatting=True)
    return
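
One portability note on the filter() calls above: under Python 2 they return lists, which is what the formatted output expects; under Python 3 they return lazy iterators and would need an explicit list(). A self-contained illustration:

subscriptions = ['centos:7', 'nginx:latest']   # hypothetical subscription list
filters = ['centos:7']

current = list(filter(lambda x: x in filters, subscriptions))
print(current)   # ['centos:7']
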
Example 27
def sync(infile, outfile):
    """
    Sync (download) latest policies from the Anchore.io service.

    """

    ecode = 0
    try:
        rc, ret = anchore_policy.sync_policymeta(bundlefile=infile, outfile=outfile)
        if not rc:
            anchore_print_err(ret['text'])
            ecode = 1
        elif outfile and outfile == '-':
            anchore_print(ret['text'])
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 28
def restore(anchore_config, inputfile, destination_root):
    """
    Restore anchore from a backup to directory root. E.g. "anchore system restore /tmp/anchore_backup.tar.gz /"

    If the image_data_store value has been changed from the default ('data') to a path outside of the anchore_data_dir subtree,
    then manual intervention may be required to modify the restored config file or to move the image_data_store, particularly if
    relative paths were used that are no longer accessible. The restore is simply an untar, so data is placed at the same paths
    relative to the root directory as when the tar was created. If the system being restored to is laid out differently than the
    original backup source, data or config file values may need to be moved manually so that all artifacts end up where the
    system expects to find them.

    """

    anchore_print('Restoring anchore registry from backup file %s to %s' %
                  (inputfile, destination_root))
    try:
        anchore.catalog.AnchoreCatalog.restore(destination_root, inputfile)
    except:
        anchore_print_err('Restore of catalog from %s failed' % inputfile)
        exit(1)
Example 29
def sync(infile, outfile):
    """
    Sync (download) latest policies from the Anchore.io service.

    """

    ecode = 0
    try:
        rc, ret = anchore_policy.sync_policymeta(bundlefile=infile,
                                                 outfile=outfile)
        if not rc:
            anchore_print_err(ret['text'])
            ecode = 1
        elif outfile and outfile == '-':
            anchore_print(ret['text'])
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 30
def sub(feednames):
    """
    Subscribe to the specified feed(s).
    """

    ecode = 0
    try:
        for feed in feednames:
            rc, msg = anchore_feeds.subscribe_anchore_feed(feed)
            if not rc:
                ecode = 1
                anchore_print_err(msg)
            else:
                anchore_print(msg)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 31
def purge(dontask):
    ecode = 0

    try:
        #for i in nav.get_images():
        for i in imagelist:
            imageId = None
            if contexts['anchore_db'].is_image_present(i):
                imageId = i
            else:
                try:
                    ret = anchore_utils.discover_imageId(i)
                    #imageId = ret.keys()[0]
                    imageId = ret
                except:
                    imageId = None

            if imageId:
                dodelete = False
                if dontask:
                    dodelete = True
                else:
                    try:
                        answer = raw_input("Really delete image '" + str(i) +
                                           "'? (y/N)")
                    except:
                        answer = "n"
                    if 'y' == answer.lower():
                        dodelete = True
                    else:
                        anchore_print("Skipping delete.")
                if dodelete:
                    try:
                        anchore_print("Deleting image '" + str(i) + "'")
                        contexts['anchore_db'].delete_image(imageId)
                    except Exception as err:
                        raise err
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1
    sys.exit(ecode)
Example 32
def remove_subscription(repos):
    """
    Removes the specified images/tags from the subscription. Tags are formatted as docker image tags. Accepts values from
    the list of current subscriptions, reachable with 'anchore subscriptions show --subscribed'.
    """

    if not working_catalog.has_db():
        anchore_print_err('No local analysis db detected. You probably need to run "anchore sync catalog" first to initialize')
        exit(5)

    repo_list = list(repos)
    try:
        working_catalog.unsubscribe(repo_list)
        if working_catalog.configuration().cliargs['json']:
            anchore_print(working_catalog.subscription.get(), do_formatting=True)
        else:
            anchore_print('\n'.join(working_catalog.subscription.get()))

    except:
        anchore_print_err('Failed removing %s from subscription' % repo_list)
        exit(1)
Example 33
def login(anchore_config):
    """
    Log into Anchore service using your username/password from anchore.io.
    """
    config = anchore_config
    ecode = 0

    try:
        username = raw_input("Username: ")
        password = getpass.getpass("Password: ")

        # NOTE: the auth-context construction here was redacted in the source
        # listing; the two calls below are a reconstruction of the likely flow
        # and not the verbatim original.
        new_anchore_auth = anchore_auth.anchore_auth_init(username, password)
        rc, ret = anchore_auth.anchore_auth_refresh(new_anchore_auth)
        if not rc:
            anchore_print_err(
                "Failed to log in: check your username/password and try again!"
            )
            anchore_print("Message from server: " + ret['text'])
        else:
            contexts['anchore_auth'].update(new_anchore_auth)
            anchore_print("Login successful.")

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 34
def status(conf):
    """
    Show anchore system status.
    """

    ecode = 0
    try:
        if conf:
            if config.cliargs['json']:
                anchore_print(config.data, do_formatting=True)
            else:
                anchore_print(
                    yaml.safe_dump(config.data,
                                   indent=True,
                                   default_flow_style=False))
        else:
            result = {}
            if contexts['anchore_db'].check():
                result["anchore_db"] = "OK"
            else:
                result["anchore_db"] = "NOTINITIALIZED"

            if anchore_feeds.check():
                result["anchore_feeds"] = "OK"
            else:
                result["anchore_feeds"] = "NOTSYNCED"

            afailed = False
            latest = 0
            for imageId in list(
                    contexts['anchore_db'].load_all_images().keys()):
                amanifest = anchore_utils.load_analyzer_manifest(imageId)
                for module_name in list(amanifest.keys()):
                    try:
                        if amanifest[module_name]['timestamp'] > latest:
                            latest = amanifest[module_name]['timestamp']
                        if amanifest[module_name]['status'] != 'SUCCESS':
                            analyzer_failed_imageId = imageId
                            analyzer_failed_name = module_name
                            afailed = True
                    except:
                        pass

            if latest == 0:
                result["analyzer_status"] = "NODATA"
            elif afailed:
                result[
                    "analyzer_status"] = "FAIL (" + analyzer_failed_imageId + ")"
                result["analyzer_latest_run"] = time.ctime(latest)
            else:
                result["analyzer_status"] = "OK"
                result["analyzer_latest_run"] = time.ctime(latest)

            anchore_print(result, do_formatting=True)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 35
def whoami(anchore_config):
    """
    Show user data for current user if available
    :param anchore_config:
    :return:
    """
    ecode = 0
    try:
        aa = contexts['anchore_auth']
        if aa and 'username' in aa and 'password' in aa:
            info = {'Current user': aa['user_info'] if aa['user_info'] else 'anonymous'}

            anchore_print(info, do_formatting=True)
        else:
            anchore_print_err('No anchore auth context found. Cannot get user info. Try logging in first')
            ecode = 1

    except Exception as err:
        anchore_print_err('Cannot get user info')
        ecode = 1

    sys.exit(ecode)
Example 36
def kubesync():
    """Communicate with kubernetes deployment via kubectl and save image names/IDs to local files"""

    ecode = 0

    try:
        images = anchore_utils.get_images_from_kubectl()
        if images:
            anchore_print("Writing image IDs to ./anchore_imageIds.kube")
            with open("anchore_imageIds.kube", 'w') as OFH:
                for imageId in images:
                    OFH.write(imageId + "\n")
            anchore_print("Writing image names to ./anchore_imageNames.kube")
            with open("anchore_imageNames.kube", 'w') as OFH:
                for imageId in images:
                    OFH.write(images[imageId] + "\n")
                    
    except Exception as err:
        anchore_print_err("operation failed: " + str(err))
        ecode = 1

    sys.exit(ecode)
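
The way kubesync() writes its two files implies that get_images_from_kubectl() returns a mapping from image ID to image name; that shape is an inference from this code, and the entries below are hypothetical.

images = {
    "0123456789ab": "docker.io/library/nginx:latest",
    "fedcba987654": "docker.io/library/centos:7",
}

# Mirror of the two writes above: one file of IDs, one file of names.
with open("anchore_imageIds.kube", 'w') as OFH:
    for imageId in images:
        OFH.write(imageId + "\n")
with open("anchore_imageNames.kube", 'w') as OFH:
    for imageId in images:
        OFH.write(images[imageId] + "\n")
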
Example 37
def delete(dontask):
    ecode = 0

    try:
        for i in imagelist:
            imageId = None
            if contexts['anchore_db'].is_image_present(i):
                imageId = i
            else:
                try:
                    ret = anchore_utils.discover_imageId(i)
                    #imageId = ret.keys()[0]
                    imageId = ret
                except:
                    imageId = None

            if imageId:
                dodelete = False
                if dontask:
                    dodelete = True
                else:
                    try:
                        answer = raw_input("Really delete image '"+str(i)+"'? (y/N)")
                    except:
                        answer = "n"
                    if 'y' == answer.lower():
                        dodelete = True
                    else:
                        anchore_print("Skipping delete.")
                if dodelete:
                    try:
                        anchore_print("Deleting image '"+str(i)+"'")
                        contexts['anchore_db'].delete_image(imageId)
                    except Exception as err:
                        raise err
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1
    sys.exit(ecode)
Example 38
def kubesync():
    """Communicate with kubernetes deployment via kubectl and save image names/IDs to local files"""

    ecode = 0

    try:
        images = anchore_utils.get_images_from_kubectl()
        if images:
            anchore_print("Writing image IDs to ./anchore_imageIds.kube")
            with open("anchore_imageIds.kube", 'w') as OFH:
                for imageId in images:
                    OFH.write(imageId + "\n")
            anchore_print("Writing image names to ./anchore_imageNames.kube")
            with open("anchore_imageNames.kube", 'w') as OFH:
                for imageId in images:
                    OFH.write(images[imageId] + "\n")

    except Exception as err:
        anchore_print_err("operation failed: " + str(err))
        ecode = 1

    sys.exit(ecode)
Example 39
def status(anchore_config):
    """
    Show state of local anchore images and artifacts.

    Returns structured output with the results of checks of local resources and their staleness compared to
    the upstream service artifacts for items such as vulnerability data and analysis db entries for subscription images.

    The output of this command can be used to determine if/when to run a catalog sync and check if new service data is
    available. This command will use the network to check the service status.
    """

    assert anchore_config is not None

    try:
        result = working_catalog.check_status()
        for k,v in result.items():
            if 'sync' in v:
                result[k] = v['sync']

        anchore_print(result, do_formatting=True)
    except:
        anchore_print_err('Failed checking catalog configuration. Please check config file: %s' % anchore_config.config_file)
        exit(1)
Example 40
def feeds(anchore_config):
    global config
    config = anchore_config

    ecode = 0
    emsg = ""
    success = True

    try:
        rc, msg = anchore_feeds.check()
        if not rc:
            anchore_print("initializing feed metadata: ...")
            rc, ret = anchore_feeds.sync_feedmeta()
            if not rc:
                emsg = "could not sync feed metadata from service: " + ret['text']
                success = False

    except Exception as err:
        anchore_print_err('operation failed')
        sys.exit(1)

    if not success:
        anchore_print_err(emsg)
        sys.exit(1)
Example 41
def show(feed):
    """
    Show detailed feed information

    """
    ecode = 0
    try:
        feedmeta = anchore_feeds.load_anchore_feedmeta()
        if feed in feedmeta:
            result = {}
            groups = feedmeta[feed].get('groups',{}).values()
            result['name'] = feed
            result['access_tier'] = int(feedmeta[feed].get('access_tier'))
            result['description'] = feedmeta[feed].get('description')
            result['groups'] = {}
            if 'subscribed' not in feedmeta[feed]:
                result['subscribed'] = False
            else:
                result['subscribed'] = feedmeta[feed]['subscribed']

            for g in groups:
                result['groups'][g['name']] = {
                    'access_tier': int(g.get('access_tier')),
                    'description': g.get('description'),
                    'last_sync': datetime.datetime.fromtimestamp(g.get('last_update')).isoformat() if 'last_update' in g else 'None'
                }

            anchore_print(result, do_formatting=True)
        else:
            anchore_print_err('Unknown feed name. Valid feeds can be seen with the "list" command')
            ecode = 1
    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 42
def status(conf):
    """
    Show anchore system status.
    """

    ecode = 0
    try:
        if conf:
            if config.cliargs['json']:
                anchore_print(config.data, do_formatting=True)
            else:
                anchore_print(yaml.safe_dump(config.data, indent=True, default_flow_style=False))
        else:
            result = {}
            if contexts['anchore_db'].check():
                result["anchore_db"] = "OK"
            else:
                result["anchore_db"] = "NOTINITIALIZED"

            if anchore_feeds.check():
                result["anchore_feeds"] = "OK"
            else:
                result["anchore_feeds"] = "NOTSYNCED"

            afailed = False
            latest = 0
            for imageId in contexts['anchore_db'].load_all_images().keys():
                amanifest = anchore_utils.load_analyzer_manifest(imageId)
                for module_name in amanifest.keys():
                    try:
                        if amanifest[module_name]['timestamp'] > latest:
                            latest = amanifest[module_name]['timestamp']
                        if amanifest[module_name]['status'] != 'SUCCESS':
                            analyzer_failed_imageId = imageId
                            analyzer_failed_name = module_name
                            afailed = True
                    except:
                        pass

            if latest == 0:
                result["analyzer_status"] = "NODATA"
            elif afailed:
                result["analyzer_status"] = "FAIL ("+analyzer_failed_imageId+")"
                result["analyzer_latest_run"] = time.ctime(latest)
            else:
                result["analyzer_status"] = "OK"
                result["analyzer_latest_run"] = time.ctime(latest)
   
            anchore_print(result, do_formatting=True)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
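
The analyzer-status check above only relies on each manifest entry having a 'timestamp' and a 'status' field. A minimal sketch of that assumed shape, with hypothetical module names and values:

import time

amanifest = {
    "analyzer_meta": {"timestamp": 1500000000, "status": "SUCCESS"},
    "file_list":     {"timestamp": 1500000005, "status": "FAIL"},
}

latest = max(m["timestamp"] for m in amanifest.values())
failed = [name for name, m in amanifest.items() if m["status"] != "SUCCESS"]
print(time.ctime(latest), failed)
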
Example 43
def unpack(destdir):
    """Unpack and Squash image to local filesystem"""

    if not nav:
        sys.exit(1)

    ecode = 0
    try:
        anchore_print("Unpacking images: " + ' '.join(nav.get_images()))
        result = nav.unpack(destdir=destdir)
        for imageId in result:
            anchore_print("Unpacked image: " + imageId)
            anchore_print("Unpack directory: "+ result[imageId])
    except:
        anchore_print_err("operation failed")
        ecode = 1

    contexts['anchore_allimages'].clear()
    
    sys.exit(ecode)
Example 44
def unpack(destdir):
    """Unpack and Squash image to local filesystem"""

    if not nav:
        sys.exit(1)

    ecode = 0
    try:
        anchore_print("Unpacking images: " + ' '.join(nav.get_images()))
        result = nav.unpack(destdir=destdir)
        for imageId in result:
            anchore_print("Unpacked image: " + imageId)
            anchore_print("Unpack directory: " + result[imageId])
    except:
        anchore_print_err("operation failed")
        ecode = 1

    contexts['anchore_allimages'].clear()

    sys.exit(ecode)
Example 45
def status(anchore_config, config):
    """
    Print state of local anchore images and artifacts. Includes paths, cache states, and configuration values.

    Use the --config option to dump the current configuration that the system is using. The configuration option
    returns structured output (yaml) or json if using the --json option. All configuration values are populated,
    with defaults if not explicitly overridden in the config file. The output of this is suitable to create a new config
    file.


    """
    assert anchore_config is not None

    try:

        working_catalog = AnchoreCatalog(config=anchore_config)
        if config:
            if anchore_config.cliargs['json']:
                anchore_print(working_catalog.configuration().data,
                              do_formatting=True)
            else:
                anchore_print(
                    yaml.safe_dump(working_catalog.configuration().data,
                                   indent=True,
                                   default_flow_style=False))
        else:
            result = working_catalog.check_status()
            for k, v in result.items():
                if 'local' in v:
                    result[k] = v['local']

            anchore_print(result, do_formatting=True)

    except:
        anchore_print_err(
            'Failed checking local system status. Please check config file: %s'
            % anchore_config.config_file)
        exit(1)
Example 46
def unsub(feednames, delete, dontask):
    """
    Unsubscribe from the specified feed(s).
    """

    ecode = 0
    try:
        for feed in feednames:
            rc, msg = anchore_feeds.unsubscribe_anchore_feed(feed)
            if not rc:
                ecode = 1
                anchore_print_err(msg)
            else:
                anchore_print(msg)
                if delete:
                    dodelete = False
                    if dontask:
                        dodelete = True
                    else:
                        try:
                            answer = input("Really delete feed data (" +
                                           str(feed) + ")? (y/N)")
                        except:
                            answer = "n"
                        if 'y' == answer.lower():
                            dodelete = True
                        else:
                            anchore_print(str(feed) + ": skipping delete.")

                    if dodelete:
                        anchore_print(str(feed) + ": deleting feed.")
                        rc = anchore_feeds.delete_anchore_feed(feed)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 47
def unsub(feednames, delete, dontask):
    """
    Unsubscribe from the specified feed(s).
    """

    ecode = 0
    try:
        for feed in feednames:
            rc, msg = anchore_feeds.unsubscribe_anchore_feed(feed)
            if not rc:
                ecode = 1
                anchore_print_err(msg)
            else:
                anchore_print(msg)
                if delete:
                    dodelete = False
                    if dontask:
                        dodelete = True
                    else:
                        try:
                            answer = raw_input("Really delete feed data ("+str(feed)+")? (y/N)")
                        except:
                            answer = "n"
                        if 'y' == answer.lower():
                            dodelete = True
                        else:
                            anchore_print(str(feed) + ": skipping delete.")

                    if dodelete:
                        anchore_print(str(feed) + ": deleting feed.")
                        rc = anchore_feeds.delete_anchore_feed(feed)

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 48
def analyze(anchore_config, force, image, imagefile, include_allanchore, dockerfile, imagetype, skipgates, layerstrategy, excludefile):
    """
    Invokes the anchore analyzer on the specified image(s).

    To include multiple images use the --imagefile, no option, or --include-allanchore options.
    To exclude specific images from analysis, use the --excludefile option.

    One of --imagetype or --dockerfile should be supplied for an analysis run. Use --dockerfile whenever possible, as including
    the dockerfile for an image associates the dockerfile with the image for later use in queries etc. The --dockerfile option
    is only valid in combination with the --image option. If neither --dockerfile nor --imagetype is supplied, the image is
    analyzed as if --imagetype none had been given.

    When using --imagetype, use 'none' to specify that the image(s) is an unknown or user image, and use 'base' to mark the
    image(s) as approved base images, i.e. images from which other images are meant to be derived.

    Image IDs can be specified as hash ids, repo names (e.g. centos), or tags (e.g. centos:latest).

    """

    success = True
    ecode = 0

    args = {}

    if image and imagefile:
        raise click.BadOptionUsage('Can only use one of --image, --imagefile')

    if dockerfile and not image:
        raise click.BadOptionUsage('Must specify --image option when using --dockerfile option')

    if not imagefile:
        if imagetype:
            if imagetype == "anchorebase":
                args['anchorebase'] = True
            elif imagetype == "base":
                args['isbase'] = True
            elif imagetype == "none":
                pass
            else:
                raise click.BadOptionUsage("Invalid imagetype specified: valid types are 'none' or 'base'")

    try:
        imagedict = build_image_list(anchore_config, image, imagefile, not (image or imagefile), include_allanchore, exclude_file=excludefile, dockerfile=dockerfile)
        imagelist = imagedict.keys()

        try:
            ret = anchore_utils.discover_imageIds(imagelist)
        except ValueError as err:
            raise err
        else:
            #imagelist = ret.keys()
            imagelist = ret

    except Exception as err:
        anchore_print_err("could not load any images")
        ecode = 1
    else:

        step = 1
        count = 0
        allimages = {}
        success = True
        for imageId in imagedict.keys():

            if count % step == 0:
                allimages.clear()
                allimages = {}
                count = 0

            args.update({'dockerfile': imagedict[imageId]['dockerfile'], 'skipgates': skipgates, 'selection_strategy': layerstrategy})

            inlist = [imageId]
            try:
                anchore_print("Analyzing image: " + imageId)
                rc = analyzer.Analyzer(anchore_config=anchore_config, imagelist=inlist, allimages=allimages, force=force, args=args).run()
                if not rc:
                    anchore_print_err("analysis failed.")
                    success = False
                    ecode = 1

            except:
                anchore_print_err('failed to run analyzer')
                allimages.clear()
                success = False
                ecode = 1
                break

            count = count + 1

        allimages.clear()

        if not success:
            anchore_print_err("analysis failed for one or more images.")
            ecode = 1

    sys.exit(ecode)
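
For reference, the keys that may appear in the per-image args dict assembled above and handed to analyzer.Analyzer(). The keys are taken from the code ('anchorebase' and 'isbase' are only set when the corresponding --imagetype is given); the values shown are hypothetical examples.

args = {
    'anchorebase': False,                 # set when --imagetype anchorebase
    'isbase': False,                      # set when --imagetype base
    'dockerfile': '/path/to/Dockerfile',  # hypothetical path from the image list
    'skipgates': False,
    'selection_strategy': 'TaggedLocal',  # hypothetical --layerstrategy value
}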
Example 49
def login(anchore_config, user, passfile):
    """
    Log into Anchore service using your username/password from anchore.io.
    """
    config = anchore_config
    ecode = 0

    try:
        anchore_creds_file = os.path.join(anchore_config.config_dir, 'anchore_creds.json')
        anchore_stored_username = None
        anchore_stored_password = None
        if os.path.exists(anchore_creds_file):
            try:
                with open(anchore_creds_file, 'r') as FH:
                    anchore_stored_creds = json.loads(FH.read())
                    anchore_stored_username = anchore_stored_creds.pop('username', None)
                    anchore_stored_password = anchore_stored_creds.pop('password', None)
            except Exception as err:
                raise err

        if user:
            anchore_print("Using user from cmdline option: " + str(user))
            username = user
        elif os.getenv('ANCHOREUSER'):
            anchore_print("Using user from environment (ANCHOREUSER)")
            username = os.getenv('ANCHOREUSER')
        elif anchore_stored_username:
            anchore_print("Using stored username from anchore_creds.json")
            username = anchore_stored_username
        else:
            username = raw_input("Username: ")

        if passfile:
            anchore_print("Using password from cmdline option: " + str(passfile))
            with open(passfile, "r") as FH:
                password = FH.read().strip()
        elif os.getenv('ANCHOREPASS'):
            anchore_print("Using password from environment (ANCHOREPASS)")
            password = os.getenv('ANCHOREPASS')
        elif anchore_stored_password:
            anchore_print("Using stored password from anchore_creds.json")
            password = anchore_stored_password
        else:
            password = getpass.getpass("Password: ")

        # NOTE: the auth-context construction here was redacted in the source
        # listing; the calls below are a reconstruction of the likely flow and
        # not the verbatim original.
        new_anchore_auth = anchore_auth.anchore_auth_init(username, password)
        rc, ret = anchore_auth.anchore_auth_refresh(new_anchore_auth)
        if not rc:
            anchore_print("Failed to log in: check your username/password and try again!")
            raise Exception("Login failure - message from server: " + str(ret['text']))
        else:
            contexts['anchore_auth'].update(new_anchore_auth)
            anchore_print("Login successful.")

    except Exception as err:
        anchore_print_err('operation failed')
        ecode = 1

    sys.exit(ecode)
Example 50
def gate(anchore_config, force, image, imagefile, include_allanchore, editpolicy, rmpolicy, listpolicy, updatepolicy, policy, run_bundle, bundlefile, usetag, resultsonly, show_gatehelp, show_policytemplate, whitelist, global_whitelist, show_triggerids, show_whitelisted):
    """
    Runs gate checks on the specified image(s) or edits the image's gate policy.
    The --editpolicy option is only valid for a single image.

    The --image and --imagefile options are mutually exclusive.

    Image IDs can be specified as hash ids, repo names (e.g. centos), or tags (e.g. centos:latest).
    """

    ecode = 0
    success = True

    # special option, does not need any image inputs
    if show_gatehelp:        
        try:
            gate_info = anchore_utils.discover_gates()
            anchore_print(gate_info, do_formatting=True)
        except Exception as err:
            anchore_print_err("operation failed: " + str(err))
            sys.exit(1)
        sys.exit(0)

    if show_policytemplate:
        try:
            outstr = "\n"
            gate_info = anchore_utils.discover_gates()
            for g in gate_info.keys():
                for t in gate_info[g].keys():
                    params = list()
                    if 'params' in gate_info[g][t] and gate_info[g][t]['params'] and gate_info[g][t]['params'].lower() != 'none':
                        for p in gate_info[g][t]['params'].split(','):
                            params.append(p+"=<a,b,c>")
                        
                    outstr += ':'.join([g, t, "<STOP|WARN|GO>", ' '.join(params)]) + "\n"
            
            anchore_print(outstr, do_formatting=False)
        except Exception as err:
            anchore_print_err("operation failed: " + str(err))
            sys.exit(1)
        sys.exit(0)

    # the rest require some form of image(s) be given as input
    if image and imagefile:
        raise click.BadOptionUsage('Can only use one of --image, --imagefile')

    if policy and (editpolicy or whitelist or listpolicy or updatepolicy or rmpolicy):
        raise click.BadOptionUsage('Cannot use other policy options when --policy <file> is specified.')

    if (policy and run_bundle):
        raise click.BadOptionUsage('Cannot use both --policy and --run_bundle at the same time.')

    if (run_bundle and (editpolicy or whitelist or listpolicy or updatepolicy or rmpolicy)):
        raise click.BadOptionUsage('Cannot use other policy options when --run_bundle is specified.')

    if (run_bundle and (usetag and resultsonly)):
        raise click.BadOptionUsage('Cannot use --resultsonly if --usetag is specified.')

    if (run_bundle and (usetag and not image)):
        raise click.BadOptionUsage('Cannot specify --usetag unless gating a single image (using --image)')

    try:
        imagedict = build_image_list(anchore_config, image, imagefile, not (image or imagefile), include_allanchore)
        imagelist = imagedict.keys()
        inputimagelist = list(imagelist)

        try:
            ret = anchore_utils.discover_imageIds(imagelist)
        except ValueError as err:
            raise err
        else:
            imagelist = ret

    except Exception as err:
        anchore_print_err("could not load any images")
        sys.exit(1)

    try:
        con = controller.Controller(anchore_config=anchore_config, imagelist=imagelist, allimages=contexts['anchore_allimages'], force=force)
    except Exception as err:
        anchore_print_err("gate operation failed")
        ecode = 1
    else:
        if editpolicy:
            if not con.editpolicy():
                ecode = 1
        elif whitelist:
            if not con.editwhitelist():
                ecode = 1
        elif rmpolicy:
            if not con.rmpolicy():
                ecode = 1
            else:
                anchore_print("policies successfully removed.", do_formatting=True)
        elif updatepolicy:
            if not con.updatepolicy(updatepolicy):
                ecode = 1
            else:
                anchore_print("policies successfully updated.", do_formatting=True)
        elif listpolicy:
            result = con.listpolicy()
            record = {}
            if not result:
                ecode = 1
            else:
                try:
                    for imageId in result.keys():
                        record[imageId] = list()
                        pol = result[imageId]
                        for gate in pol.keys():
                            for trigger in pol[gate].keys():
                                if str(pol[gate][trigger]['params']):
                                    outstr = ":".join([gate, trigger, str(pol[gate][trigger]['action']), str(pol[gate][trigger]['params'])])
                                else:
                                    outstr = ":".join([gate, trigger, str(pol[gate][trigger]['action'])])
                                record[imageId].append(outstr)
                    if record:
                        anchore_print(record, do_formatting=True)
                except Exception as err:
                    anchore_print_err("failed to list policies: " + str(err))
                    ecode = 1
        elif run_bundle:
            try:
                if not anchore_policy.check():
                    anchore_print_err("run-bundle specified, but it appears as though no policy bundles have been synced yet: run 'anchore policybundle sync' to get your latest bundles from anchore.io")
                    ecode = 1
                else:
                    bundle = anchore_policy.load_policymeta(policymetafile=bundlefile)
                    if not bundle:
                        raise Exception("could not load stored bundle - run 'anchore policybundle sync' and try again")

                    bundleId = bundle['id']
                    
                    inputimage = inputimagelist[0]

                    allresults = {}
                    for inputimage in inputimagelist:
                        result, image_ecode = anchore_policy.run_bundle(anchore_config=anchore_config, image=inputimage, matchtags=usetag, bundle=bundle, show_whitelisted=show_whitelisted, show_triggerIds=show_triggerids)
                        allresults.update(result)

                        if image_ecode == 1:
                            ecode = 1
                        elif ecode == 0 and image_ecode > ecode:
                            ecode = image_ecode

                    if not resultsonly:
                        if anchore_config.cliargs['json']:
                            anchore_print(json.dumps(allresults))
                        else:
                            for image in allresults.keys():
                                for gate_result in allresults[image]['evaluations']:
                                    _logger.info("Image="+image + " BundleId="+bundleId+" Policy="+gate_result['policy_name']+" Whitelists="+str(gate_result['whitelist_names']))
                                    anchore_utils.print_result(anchore_config, gate_result['results'])
                    else:
                        final_result = {}
                        for image in allresults.keys():
                            for gate_result in allresults[image]['evaluations']:
                                final_result.update(gate_result['results'])
                        anchore_utils.print_result(anchore_config, final_result)
            except Exception as err:
                anchore_print_err("failed to run gates")
                ecode = 1

        else:
            try:
                # run the gates
                result = con.run_gates(policy=policy, global_whitelist=global_whitelist, show_triggerIds=show_triggerids, show_whitelisted=show_whitelisted)
                if result:
                    anchore_utils.print_result(anchore_config, result)
                    success = True
                    ecode = con.result_get_highest_action(result)
            except Exception as err:
                anchore_print_err("failed to run gates")
                ecode = 1

    contexts['anchore_allimages'].clear()
    sys.exit(ecode)
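
Both the policy-template output and the --listpolicy output above use flat policy lines of the form gate:trigger:action[:params], where action is one of STOP, WARN or GO. A small sketch of turning such a line back into a dict (parse_policy_line and the example gate/trigger names are illustrative, not taken from the anchore gate catalog):

def parse_policy_line(line):
    # split a "gate:trigger:action[:params]" policy line as printed above
    parts = line.strip().split(':', 3)
    if len(parts) < 3:
        raise ValueError("expected at least gate:trigger:action, got: " + line)
    return {
        'gate': parts[0],
        'trigger': parts[1],
        'action': parts[2],                          # STOP, WARN or GO
        'params': parts[3] if len(parts) > 3 else ''
    }


# example (names illustrative): parse_policy_line("SOMEGATE:SOMETRIGGER:WARN:param=a,b,c")
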
Esempio n. 51
0
def analyze(anchore_config, force, image, imagefile, include_allanchore, dockerfile, imagetype, skipgates, excludefile):
    """
    Invokes the anchore analyzer on the specified image(s).

    To include multiple images, use the --imagefile or --include-allanchore options, or give no image option at all.
    To exclude specific images from analysis, use the --excludefile option.

    One of --imagetype or --dockerfile is required for an analysis run. Use --dockerfile whenever possible, as the inclusion
    of the dockerfile for an image associates the dockerfile and image for later use in queries etc. The --dockerfile option
    is only valid in combination with the --image option.

    When using --imagetype, use 'none' to specify that the image(s) are unknown or user images, and use 'base' to specify
    that the image(s) are approved base images used to build other images ('golden' images).

    Image IDs can be specified as hash ids, repo names (e.g. centos), or tags (e.g. centos:latest).

    """

    success = True
    ecode = 0

    args = {}

    if image and imagefile:
        raise click.BadOptionUsage('Can only use one of --image, --imagefile')

    if dockerfile and not image:
        raise click.BadOptionUsage('Must specify --image option when using --dockerfile option')

    if not imagefile:
        if imagetype:
            if imagetype == "anchorebase":
                args['anchorebase'] = True
            elif imagetype == "base":
                args['isbase'] = True
            elif imagetype == "none":
                pass
            else:
                raise click.BadOptionUsage("Invalid imagetype specified: valid types are 'none' or 'base'")
        elif not dockerfile:
            raise click.BadOptionUsage('Must specify either --dockerfile or --imagetype <type>')

    try:
        imagedict = build_image_list(anchore_config, image, imagefile, not (image or imagefile), include_allanchore, exclude_file=excludefile, dockerfile=dockerfile)
        imagelist = imagedict.keys()
    except Exception as err:
        anchore_print_err("could not load any images")
        ecode = 1
    else:

        step = 5
        count = 0
        allimages = {}
        for imageId in imagedict.keys():
            
            if count % step == 0:
                allimages.clear()
                allimages = {}
                count = 0

            args.update({'dockerfile': imagedict[imageId]['dockerfile'], 'skipgates': skipgates})

            inlist = [imageId]
            try:
                anchore_print("Analyzing image: " + imageId)
                success = analyzer.Analyzer(anchore_config=anchore_config, imagelist=inlist, allimages=allimages,force=force, args=args).run()
            except:
                anchore_print_err('failed to run analyzer')
                allimages.clear()
                success = False
                ecode = 1
                break

            count = count + 1
        allimages.clear()

        if not success:
            ecode = 1

    sys.exit(ecode)
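
The analysis loop above works through the image list one image at a time and clears its shared in-memory image cache every few images (the step counter) so memory stays bounded on long lists. The same pattern in isolation (analyze_in_batches is illustrative; process_one stands in for Analyzer(...).run()):

def analyze_in_batches(image_ids, process_one, step=5):
    # clear the shared cache every 'step' images, mirroring the loop above
    allimages = {}
    count = 0
    success = True
    for image_id in image_ids:
        if count % step == 0:
            allimages.clear()
            count = 0
        if not process_one(image_id, allimages):
            success = False          # record the failure, keep processing the rest
        count += 1
    allimages.clear()
    return success
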
Esempio n. 52
0
def setup_module_dev(destdir):
    """
    Sets up a development environment suitable for working on anchore modules (queries, etc) in the specified directory.
    Creates a copied environment in the destination containing the module scripts, unpacked image(s) and helper scripts
    such that a module script that works in this environment can be copied into the real installation location and
    run via 'anchore explore <modulename>' as-is.

    """

    if not nav:
        sys.exit(1)

    ecode = 0
    try:
        anchore_print("Anchore Module Development Environment\n")
        helpstr = "This tool has set up an environment that represents what anchore will normally set up before running an analyzer, gate and/or query module.  Each section below includes some information along with a string that you can use to help develop your own anchore modules.\n"
        anchore_print(fill(helpstr, 80))
        anchore_print("")

        anchore_print("Setting up environment...")
        anchore_print("")

        result = nav.unpack(destdir=destdir)
        if not result:
            raise Exception("unable to unpack input image")

        for imageId in result:
            unpackdir = result[imageId]

            # copy anchore imageDB dir into unpacked environment
            imgdir = '/'.join([config.data['image_data_store'], imageId])
            tmpdatastore = '/'.join([unpackdir, 'data'])
            dstimgdir = '/'.join([tmpdatastore, imageId])

            if not os.path.exists(imgdir):
                anchore_print_err(
                    "Image must exist and have been analyzed before being used for module development."
                )
                break
            if not os.path.exists(tmpdatastore):
                os.makedirs(tmpdatastore)
            shutil.copytree(imgdir, dstimgdir, symlinks=True)

            # copy examples into the unpacked environment
            examples = {}
            basedir = '/'.join([unpackdir, "anchore-modules"])
            if not os.path.exists(basedir):
                os.makedirs(basedir)

                # copy the shell-utils
                os.makedirs('/'.join([basedir, 'shell-utils']))
                for s in os.listdir('/'.join(
                    [config.data['scripts_dir'], 'shell-utils'])):
                    shutil.copy(
                        '/'.join(
                            [config.data['scripts_dir'], 'shell-utils', s]),
                        '/'.join([basedir, 'shell-utils', s]))

            # copy any examples that exist in the anchore egg into the unpack dir
            for d in os.listdir(config.data['scripts_dir']):
                scriptdir = '/'.join([basedir, d])

                if os.path.exists(config.data['scripts_dir'] + "/examples/" +
                                  d):
                    if not os.path.exists(scriptdir):
                        os.makedirs(scriptdir)
                    for s in os.listdir(config.data['scripts_dir'] +
                                        "/examples/" + d):
                        thefile = '/'.join(
                            [config.data['scripts_dir'], "examples", d, s])
                        thefiledst = '/'.join([scriptdir, s])
                        if re.match(".*(\.sh)$", thefile):
                            examples[d] = thefiledst
                            shutil.copy(thefile, thefiledst)

            # all set, show how to use them
            anchore_print("\tImage: " + imageId[0:12])
            anchore_print("\tUnpack Directory: " + result[imageId])
            anchore_print("")
            analyzer_string = ' '.join([
                examples['analyzers'], imageId, tmpdatastore, dstimgdir,
                result[imageId]
            ])
            anchore_print("\tAnalyzer Command:\n\n\t" + analyzer_string)
            anchore_print("")

            anchore_utils.write_plainfile_fromstr(
                result[imageId] + "/queryimages", imageId + "\n")

            queryoutput = '/'.join([result[imageId], "querytmp/"])
            if not os.path.exists(queryoutput):
                os.makedirs(queryoutput)

            query_string = ' '.join([
                examples['queries'], result[imageId] + "/queryimages",
                tmpdatastore, queryoutput, "passwd"
            ])
            anchore_print("Query Command:\n\n\t" + query_string)
            anchore_print("")

            anchore_print("Next Steps: ")
            anchore_print(
                "\tFirst: run the above analyzer command and note the RESULT output"
            )
            anchore_print(
                "\tSecond: run the above query command and note the RESULT output, checking that the query was able to use the analyzer data to perform its search"
            )
            anchore_print(
                "\tThird: modify the analyzer/query modules as you wish, including renaming them and continue running/inspecting output until you are satisfied"
            )
            anchore_print(
                "\tFinally: when you're happy with the analyzer/query, copy them to next to existing anchore analyzer/query modules and anchore will start calling them as part of container analysis/query:\n"
            )
            anchore_print("\tcp " + examples['analyzers'] + " " +
                          config.data['scripts_dir'] +
                          "/analyzers/99_analyzer-example.sh")
            anchore_print("\tcp " + examples['queries'] + " " +
                          config.data['scripts_dir'] + "/queries/")
            anchore_print("\tanchore analyze --force --image " + imageId +
                          " --imagetype none")
            anchore_print("\tanchore query --image " + imageId +
                          " query-example")
            anchore_print("\tanchore query --image " + imageId +
                          " query-example passwd")
            anchore_print("\tanchore query --image " + imageId +
                          " query-example pdoesntexist")

    except:
        anchore_print_err("operation failed")
        ecode = 1

    contexts['anchore_allimages'].clear()

    sys.exit(ecode)
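
The Analyzer Command printed above encodes the argument contract a module is invoked with: the image ID, the temporary data store, the per-image data directory, and the unpack directory, in that order. A hypothetical Python-based module reading those positional arguments (stock anchore example modules are shell scripts; this sketch only illustrates the argument order):

import sys


def main(argv):
    # positional arguments as passed by the analyzer command shown above:
    #   <module> <imageId> <datastore_dir> <image_data_dir> <unpack_dir>
    if len(argv) < 5:
        sys.stderr.write("usage: %s <imageId> <datastore> <imagedatadir> <unpackdir>\n" % argv[0])
        return 1
    image_id, datastore, imagedatadir, unpackdir = argv[1:5]
    # a real module would inspect the unpacked filesystem under unpackdir;
    # this sketch only prints what it was asked to analyze
    print("analyzing image %s (unpacked at %s)" % (image_id, unpackdir))
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
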
Esempio n. 53
0
def importdb(indir):
    """Import a previously exported anchore DB"""
    ecode = 0
    try:
        imgdir = os.path.join(indir, "images")
        feeddir = os.path.join(indir, "feeds")
        storedir = os.path.join(indir, "storedfiles")

        for d in [indir, imgdir, feeddir, storedir]:
            if not os.path.exists(d):
                raise Exception("specified directory " + str(indir) + " does not appear to be complete (missing " + str(d) + ")")

                
        anchore_print("importing images...")
        #imagelist = []
        for ifile in os.listdir(imgdir):
            patt = re.match("(.*)\.json", ifile)
            if patt:
                imageId = patt.group(1)

                if contexts['anchore_db'].is_image_present(imageId):
                    anchore_print("\timage ("+str(imageId)+") already exists in DB, skipping import.")
                else:
                    #imagelist.append(patt.group(1))
                    thefile = os.path.join(imgdir, ifile)
                    with open(thefile, 'r') as FH:
                        imagedata = json.loads(FH.read())
                    try:
                        rc = contexts['anchore_db'].save_image_new(imageId, report=imagedata)
                        if not rc:
                            contexts['anchore_db'].delete_image(imageId)
                            raise Exception("save to anchore DB failed")
                    except Exception as err:
                        contexts['anchore_db'].delete_image(imageId)
                        raise err

                    thedir = os.path.join(storedir, imageId)
                    if os.path.exists(thedir):
                        for namespace in os.listdir(thedir):
                            thefile = os.path.join(thedir, namespace, "stored_files.tar.gz")
                            if os.path.exists(thefile):
                                contexts['anchore_db'].save_files_tarfile(imageId, namespace, thefile)

                    anchore_print("\timage ("+str(imageId)+") imported.")

        anchore_print("importing feeds...")
        thefile = os.path.join(feeddir, "feedmeta.json")
        with open(thefile, 'r') as FH:
            feedmeta = json.loads(FH.read())

        if feedmeta:
            contexts['anchore_db'].save_feedmeta(feedmeta)

        for feed in feedmeta:
            feedobj = feedmeta[feed]
            for group in feedobj['groups']:
                groupobj = feedobj['groups'][group]
                datafiles = groupobj.pop('datafiles', [])
                for datafile in datafiles:
                    thedir = os.path.join(feeddir, feed, group)
                    thefile = os.path.join(thedir, datafile)
                    if os.path.exists(thefile):
                        with open(thefile, 'r') as FH:
                            contexts['anchore_db'].save_feed_group_data(feed, group, datafile, json.loads(FH.read()))
                        anchore_print("\tfeed ("+feed+" " + group + " " + datafile + ") imported")

        #TODO import stored files

    except Exception as err:
        anchore_print_err("operation failed: " + str(err))
        ecode = 1

    sys.exit(ecode)
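
importdb above expects a fixed export layout: images/<imageId>.json for image reports, feeds/feedmeta.json plus feeds/<feed>/<group>/<datafile> for feed data, and storedfiles/<imageId>/<namespace>/stored_files.tar.gz for stored files. A small sketch that validates the top-level layout before importing (check_export_layout is illustrative, not part of the CLI):

import os


def check_export_layout(indir):
    # mirrors the directory checks at the top of importdb
    required = [
        indir,
        os.path.join(indir, "images"),
        os.path.join(indir, "feeds"),
        os.path.join(indir, "storedfiles"),
    ]
    missing = [d for d in required if not os.path.exists(d)]
    if missing:
        raise Exception("export dir " + str(indir) + " is incomplete, missing: " + ", ".join(missing))
    if not os.path.exists(os.path.join(indir, "feeds", "feedmeta.json")):
        raise Exception("export dir is missing feeds/feedmeta.json")
    return True
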
Esempio n. 54
0
def analyze(anchore_config, force, image, imagefile, include_allanchore,
            dockerfile, imagetype, skipgates, layerstrategy, excludefile):
    """
    Invokes the anchore analyzer on the specified image(s).

    To include multiple images, use the --imagefile or --include-allanchore options, or give no image option at all.
    To exclude specific images from analysis, use the --excludefile option.

    One of --imagetype or --dockerfile should be supplied for an analysis run. Use --dockerfile whenever possible, as the inclusion
    of the dockerfile for an image associates the dockerfile with that image for later use in queries etc. The --dockerfile option
    is only valid in combination with the --image option. If neither --dockerfile nor --imagetype is supplied, the image(s) are
    treated as regular user images (equivalent to --imagetype none).

    When using --imagetype, use 'none' to specify that the image(s) are unknown or user images, and use 'base' to specify
    that the image(s) are approved base images from which other images are meant to be derived ('golden' images).

    Image IDs can be specified as hash ids, repo names (e.g. centos), or tags (e.g. centos:latest).

    """

    success = True
    ecode = 0

    args = {}

    if image and imagefile:
        raise click.BadOptionUsage('Can only use one of --image, --imagefile')

    if dockerfile and not image:
        raise click.BadOptionUsage(
            'Must specify --image option when using --dockerfile option')

    if not imagefile:
        if imagetype:
            if imagetype == "anchorebase":
                args['anchorebase'] = True
            elif imagetype == "base":
                args['isbase'] = True
            elif imagetype == "none":
                pass
            else:
                raise click.BadOptionUsage(
                    "Invalid imagetype specified: valid types are 'none' or 'base'"
                )
        #elif not dockerfile:
        #    raise click.BadOptionUsage('Must specify either --dockerfile or --imagetype <type>')

    try:
        imagedict = build_image_list(anchore_config,
                                     image,
                                     imagefile,
                                     not (image or imagefile),
                                     include_allanchore,
                                     exclude_file=excludefile,
                                     dockerfile=dockerfile)
        imagelist = imagedict.keys()

        try:
            ret = anchore_utils.discover_imageIds(imagelist)
        except ValueError as err:
            raise err
        else:
            #imagelist = ret.keys()
            imagelist = ret

    except Exception as err:
        anchore_print_err("could not load any images")
        ecode = 1
    else:

        step = 1
        count = 0
        allimages = {}
        success = True
        for imageId in imagedict.keys():

            if count % step == 0:
                allimages.clear()
                allimages = {}
                count = 0

            args.update({
                'dockerfile': imagedict[imageId]['dockerfile'],
                'skipgates': skipgates,
                'selection_strategy': layerstrategy
            })

            inlist = [imageId]
            try:
                anchore_print("Analyzing image: " + imageId)
                rc = analyzer.Analyzer(anchore_config=anchore_config,
                                       imagelist=inlist,
                                       allimages=allimages,
                                       force=force,
                                       args=args).run()
                if not rc:
                    anchore_print_err("analysis failed.")
                    success = False
                    ecode = 1

            except:
                anchore_print_err('failed to run analyzer')
                allimages.clear()
                success = False
                ecode = 1
                break

            count = count + 1

        allimages.clear()

        if not success:
            anchore_print_err("analysis failed for one or more images.")
            ecode = 1

    sys.exit(ecode)
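
The --imagetype handling above maps the flag onto analyzer arguments: 'anchorebase' sets anchorebase, 'base' sets isbase, 'none' adds nothing, and anything else is rejected. The same mapping in isolation (imagetype_to_args is illustrative, not part of the CLI):

def imagetype_to_args(imagetype):
    # mirrors the --imagetype handling in the analyze command above
    if not imagetype or imagetype == "none":
        return {}                                # unknown/user image: no extra flags
    if imagetype == "anchorebase":
        return {'anchorebase': True}
    if imagetype == "base":
        return {'isbase': True}
    raise ValueError("invalid imagetype '%s': valid types are 'none' or 'base'" % imagetype)
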
Esempio n. 55
0
def setup_module_dev(destdir):
    """
    Sets up a development environment suitable for working on anchore modules (queries, etc) in the specified directory.
    Creates a copied environment in the destination containing the module scripts, unpacked image(s) and helper scripts
    such that a module script that works in this environment can be copied into the real installation location and
    run via 'anchore explore <modulename>' as-is.

    """

    if not nav:
        sys.exit(1)

    ecode = 0
    try:
        anchore_print("Anchore Module Development Environment\n")
        helpstr = "This tool has set up and environment that represents what anchore will normally set up before running and analyzer, gate and/or query module.  Each section below includes some information along with a string that you can use to help develop your own anchore modules.\n"
        anchore_print(fill(helpstr, 80))
        anchore_print("")

        anchore_print("Setting up environment...")
        anchore_print("")

        result = nav.unpack(destdir=destdir)
        for imageId in result:
            unpackdir = result[imageId]

            # copy anchore imageDB dir into unpacked environment
            imgdir = '/'.join([config.data['image_data_store'], imageId])
            tmpdatastore = '/'.join([unpackdir, 'data'])
            dstimgdir = '/'.join([tmpdatastore, imageId])

            if not os.path.exists(imgdir):
                anchore_print_err("Image must exist and have been analyzed before being used for module development.")
                break
            if not os.path.exists(tmpdatastore):
                os.makedirs(tmpdatastore)
            shutil.copytree(imgdir, dstimgdir, symlinks=True)

            # copy examples into the unpacked environment            
            examples = {}
            basedir = '/'.join([unpackdir, "anchore-modules"])
            if not os.path.exists(basedir):
                os.makedirs(basedir)

                # copy the shell-utils
                os.makedirs('/'.join([basedir, 'shell-utils']))
                for s in os.listdir('/'.join([config.data['scripts_dir'], 'shell-utils'])):
                    shutil.copy('/'.join([config.data['scripts_dir'], 'shell-utils', s]), '/'.join([basedir, 'shell-utils', s]))
                            
            # copy any examples that exist in the anchore egg into the unpack dir
            for d in os.listdir(config.data['scripts_dir']):
                scriptdir = '/'.join([basedir, d])

                if os.path.exists(config.data['scripts_dir'] + "/examples/" + d):
                    if not os.path.exists(scriptdir):
                        os.makedirs(scriptdir)
                    for s in os.listdir(config.data['scripts_dir'] + "/examples/" + d):
                        thefile = '/'.join([config.data['scripts_dir'], "examples", d, s])
                        thefiledst = '/'.join([scriptdir, s])
                        if re.match(".*(\.sh)$", thefile):
                            examples[d] = thefiledst
                            shutil.copy(thefile, thefiledst)

            # all set, show how to use them
            anchore_print("\tImage: " + imageId[0:12])
            anchore_print("\tUnpack Directory: " +result[imageId])
            anchore_print("")
            analyzer_string = ' '.join([examples['analyzers'], imageId, tmpdatastore, dstimgdir, result[imageId]])
            anchore_print("\tAnalyzer Command:\n\n\t" +analyzer_string)
            anchore_print("")

            anchore_utils.write_plainfile_fromstr(result[imageId] + "/queryimages", imageId+"\n")

            queryoutput = '/'.join([result[imageId], "querytmp/"])
            if not os.path.exists(queryoutput):
                os.makedirs(queryoutput)

            query_string = ' '.join([examples['queries'], result[imageId] + "/queryimages", tmpdatastore, queryoutput, "passwd"])
            anchore_print("Query Command:\n\n\t" + query_string)
            anchore_print("")
 
            anchore_print("Next Steps: ")
            anchore_print("\tFirst: run the above analyzer command and note the RESULT output")
            anchore_print("\tSecond: run the above query command and note the RESULT output, checking that the query was able to use the analyzer data to perform its search")
            anchore_print("\tThird: modify the analyzer/query modules as you wish, including renaming them and continue running/inspecting output until you are satisfied")
            anchore_print("\tFinally: when you're happy with the analyzer/query, copy them to next to existing anchore analyzer/query modules and anchore will start calling them as part of container analysis/query:\n")
            anchore_print("\tcp " + examples['analyzers'] + " " + config.data['scripts_dir'] + "/analyzers/99_analyzer-example.sh")
            anchore_print("\tcp " + examples['queries'] + " " + config.data['scripts_dir'] + "/queries/")
            anchore_print("\tanchore analyze --force --image " + imageId + " --imagetype none")
            anchore_print("\tanchore explore --image " + imageId + " query query-example")
            anchore_print("\tanchore explore --image " + imageId + " query query-example passwd")
            anchore_print("\tanchore explore --image " + imageId + " query query-example pdoesntexist")
            
    except:
        anchore_print_err("operation failed")
        ecode = 1

    contexts['anchore_allimages'].clear()
    
    sys.exit(ecode)
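
The Query Command printed above follows a similar positional contract: an imagelist file (one image ID per line, written to <unpackdir>/queryimages), the temporary data store, an output directory, and any query parameters such as 'passwd'. A hypothetical Python-based query module reading those arguments (stock example modules are shell scripts; the sketch only illustrates the calling convention):

import sys


def main(argv):
    # positional arguments as passed by the query command shown above:
    #   <module> <imagelist_file> <datastore_dir> <output_dir> [params...]
    if len(argv) < 4:
        sys.stderr.write("usage: %s <imagelist> <datastore> <outputdir> [params...]\n" % argv[0])
        return 1
    imagelist_file, datastore, outputdir = argv[1:4]
    params = argv[4:]                            # e.g. ["passwd"] in the example above
    with open(imagelist_file, 'r') as FH:
        image_ids = [line.strip() for line in FH if line.strip()]
    # a real module would look up analyzer data for each image and write its
    # results into outputdir; this sketch only reports what it was asked to do
    print("querying %d image(s) with params %s" % (len(image_ids), params))
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
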