Ejemplo n.º 1
0
    def _configure(self):
        """
        Load the service configuration and initialize monitor settings.

        Sets self.configuration, self.instance_id, self.fq_name and
        self.versions, then populates self.monitor_kwargs for monitor startup.

        :return: None
        :raises Exception: if service versions cannot be detected
        """
        logger.info('Loading and initializing global configuration')
        self.configuration = self._get_service_configuration(self.global_configuration)
        self.instance_id = localconfig.get_host_id()
        # Fully-qualified service identity: (service name, host instance id)
        self.fq_name = (self.name, self.instance_id)

        # get versions of things
        try:
            self.versions = localconfig.get_versions()
        except Exception as err:
            # Lazy %-formatting and a bare raise preserve the original traceback
            logger.error('cannot detect versions of service: exception - %s', err)
            raise

        # Fall back to a 1-second kick timer when the setting is missing or
        # invalid (previously a bare except that also swallowed SystemExit etc.)
        try:
            kick_timer = int(self.configuration.get('cycle_timer_seconds', 1))
        except (AttributeError, TypeError, ValueError):
            kick_timer = 1

        # cycle_timers is optional; default to no per-cycle overrides
        cycle_timers = {}
        try:
            cycle_timers.update(self.configuration.get('cycle_timers') or {})
        except (AttributeError, TypeError, ValueError):
            cycle_timers = {}

        self.monitor_kwargs['kick_timer'] = kick_timer
        self.monitor_kwargs['cycle_timers'] = cycle_timers
        # Deep copy so runtime mutation of monitor state cannot alter the
        # class-level monitor template
        self.monitor_kwargs['monitors'] = copy.deepcopy(self.__monitors__)
        self.monitor_kwargs['monitor_threads'] = self.monitor_threads
        self.monitor_kwargs['servicename'] = self.name
        logger.info('Configuration complete')
Ejemplo n.º 2
0
    def render_GET(self, request):
        """
        Render the service version/health document as a JSON response.

        :param request: twisted web request; its response headers are mutated
            in place to set the JSON content type
        :return: JSON-encoded bytes with service and db version info
        """
        # Best-effort version lookup; an empty dict yields null version fields.
        # Narrowed from a bare except, which also swallowed SystemExit and
        # KeyboardInterrupt.
        try:
            versions = localconfig.get_versions()
        except Exception:
            versions = {}

        request.responseHeaders.addRawHeader(b"Content-Type", b"application/json")

        ret = {
            'service': {
                'version': versions.get('service_version', None),
            },
            'api': {
            },
            'db': {
                'schema_version': versions.get('db_version', None),
            }
        }

        # Serialization of this plain dict should not fail, but the health
        # endpoint must never raise — fall back to an empty JSON object
        try:
            response = utils.ensure_bytes(json.dumps(ret))
        except Exception:
            response = utils.ensure_bytes(json.dumps({}))

        return response
Ejemplo n.º 3
0
 def _init_versions(self):
     """
     Initialize the service versions

     :return: None
     :raises Exception: propagates any failure from version detection
     """
     try:
         self.versions = localconfig.get_versions()
     except Exception as err:
         # Lazy %-formatting defers string building; bare raise (instead of
         # 'raise err') re-raises with the original traceback intact
         logger.error('cannot detect versions of service: exception - %s', err)
         raise
Ejemplo n.º 4
0
    def _configure(self):
        """
        Load service configuration

        Sets configuration, instance id, versions, task-handler enablement,
        and the monitor keyword arguments used to start service monitors.

        :return: None
        :raises Exception: if service versions cannot be detected
        """
        logger.info('Loading and initializing global configuration')
        self.configuration = self._get_service_configuration(
            self.global_configuration)
        self.instance_id = localconfig.get_host_id()
        self.fq_name = (self.name, self.instance_id)

        # get versions of things
        try:
            self.versions = localconfig.get_versions()
        except Exception as err:
            logger.error('cannot detect versions of service: exception - %s',
                         err)
            raise

        # Task handlers run only if enabled in config AND not disabled via the
        # ANCHORE_ENGINE_DISABLE_MONITORS environment variable.
        self.task_handlers_enabled = self.configuration.get(
            'task_handlers_enabled', True)
        env_enabled = not os.environ.get('ANCHORE_ENGINE_DISABLE_MONITORS',
                                         'false').lower() == 'true'
        self.task_handlers_enabled = self.task_handlers_enabled and env_enabled

        if not self.task_handlers_enabled:
            # BUGFIX: the two messages were swapped — the environment message
            # was logged when env_enabled was True, i.e. when the env var was
            # NOT the cause of the disablement.
            if not env_enabled:
                logger.warn(
                    'Task handlers disabled by setting ANCHORE_ENGINE_DISABLE_MONITORS in environment'
                )
            else:
                logger.warn(
                    'Task handlers disabled by configuration file value')

        # Fall back to a 1-second kick timer when missing or invalid
        # (previously a bare except that also swallowed SystemExit etc.)
        try:
            kick_timer = int(self.configuration.get('cycle_timer_seconds', 1))
        except (AttributeError, TypeError, ValueError):
            kick_timer = 1

        # cycle_timers is optional; default to no per-cycle overrides
        cycle_timers = {}
        try:
            cycle_timers.update(self.configuration.get('cycle_timers') or {})
        except (AttributeError, TypeError, ValueError):
            cycle_timers = {}

        self.monitor_kwargs['kick_timer'] = kick_timer
        self.monitor_kwargs['cycle_timers'] = cycle_timers
        # Deep copy so runtime monitor state never mutates the class template
        self.monitor_kwargs['monitors'] = copy.deepcopy(self.__monitors__)
        self.monitor_kwargs['monitor_threads'] = self.monitor_threads
        self.monitor_kwargs['servicename'] = self.name
        logger.info('Configuration complete')
Ejemplo n.º 5
0
    def render_GET(self, request):
        """
        Render the service version document as a JSON response body.

        :param request: twisted web request (unused beyond the framework call
            contract in this variant)
        :return: JSON-encoded bytes with service and db version info
        """
        # Best-effort version lookup; narrowed from a bare except, which also
        # swallowed SystemExit and KeyboardInterrupt
        try:
            versions = localconfig.get_versions()
        except Exception:
            versions = {}

        ret = {
            'service': {
                'version': versions.get('service_version', None),
            },
            'api': {},
            'db': {
                'schema_version': versions.get('db_version', None),
            }
        }

        # Serialization of this plain dict should not fail, but never let the
        # endpoint raise — fall back to an empty JSON object
        try:
            response = utils.ensure_bytes(json.dumps(ret))
        except Exception:
            response = utils.ensure_bytes(json.dumps({}))

        return response
Ejemplo n.º 6
0
def exec(docker_archive, anchore_archive, digest, parent_digest, image_id, tag,
         account_id, manifest, dockerfile, created_at, annotation):
    """
    Analyze a local image stored as a docker archive (output result of 'docker save'), and generate an anchore image archive tarball ready for import into an anchore engine.

    DOCKER_ARCHIVE : Location of input docker archive tarfile to analyze
    ANCHORE_ARCHIVE : Location of output anchore image archive to write

    """

    # NOTE: 'exec' shadows the Python builtin; presumably the name is the CLI
    # command name (click), so renaming would change the user-facing command.
    global config

    # this could be improved to allow use to input timestamps (created_at, analyzed_at, etc)
    now = int(time.time())
    try:
        # --- Stage 1: input validation and normalization ---
        try:
            imageDigest = None
            input_manifest_data = None
            rawmanifest = None

            # Exactly one of --digest / --manifest must be supplied
            if (not manifest and not digest) or (manifest and digest):
                raise Exception(
                    "must supply either an image digest or a valid manifest, but not both"
                )

            # Refuse to overwrite an existing output archive
            if os.path.exists(anchore_archive):
                raise Exception(
                    "the supplied anchore archive file ({}) already exists, please remove and try again"
                    .format(anchore_archive))

            if manifest:
                try:
                    with open(manifest, 'r') as FH:
                        # TODO implement manifest validator for anchore requirements, specifically
                        rawmanifest = FH.read()
                        input_manifest_data = json.loads(rawmanifest)
                        # Digest is derived from the manifest content itself
                        imageDigest = manifest_to_digest(rawmanifest)
                except Exception as err:
                    raise ValueError(
                        "cannot calculate digest from supplied manifest - exception: {}"
                        .format(err))

            if digest:
                # NOTE(review): inside a character class '|' is a literal, so
                # this pattern also accepts '|' characters — harmless for real
                # sha256 digests but slightly looser than intended.
                if re.match("^sha256:[\d|a-f]{64}$", digest):
                    imageDigest = digest
                else:
                    raise ValueError(
                        "input digest does not validate - must be sha256:<64 hex characters>"
                    )

            if parent_digest:
                # Same '|'-in-class looseness as the digest pattern above
                if re.match("^sha256:[\d|a-f]{64}$", parent_digest):
                    parentDigest = parent_digest
                else:
                    raise ValueError(
                        "input parent_digest does not validate - must be sha256:<64 hex characters>"
                    )
            else:
                # Default parent digest to the image digest itself
                parentDigest = imageDigest

            if image_id:
                if re.match("^[\d|a-f]{64}$", image_id):
                    imageId = image_id
                else:
                    raise ValueError("input image_id does not validate")
            else:
                # TODO this could be improved to generate imageId from configuration hash
                imageId = "{}".format(''.join(
                    [random.choice('0123456789abcdef') for x in range(0, 64)]))

            if account_id:
                userId = account_id
            else:
                # Default account when none supplied
                userId = 'admin'

            if created_at:
                try:
                    # Must be a unix timestamp between 0 and "now" (+1s slack)
                    if int(created_at) < 0 or int(created_at) > now + 1:
                        raise Exception()
                except Exception as err:
                    raise ValueError(
                        "created_at must by a unix timestamp between 0 and now ({})"
                        .format(now))
            else:
                created_at = now

            try:
                inputTag = tag
                # Normalize the tag into registry/repo:tag and registry/repo@digest
                image_info = parse_dockerimage_string(inputTag)
                fulltag = "{}/{}:{}".format(image_info['registry'],
                                            image_info['repo'],
                                            image_info['tag'])
                fulldigest = "{}/{}@{}".format(image_info['registry'],
                                               image_info['repo'], imageDigest)
            except Exception as err:
                raise ValueError(
                    "input tag does not validate - exception: {}".format(err))

            # Dockerfile contents are stored base64-encoded; mode records
            # whether they were supplied ("Actual") or inferred ("Guessed")
            dockerfile_mode = "Guessed"
            dockerfile_contents = None
            if dockerfile:
                with open(dockerfile, 'r') as FH:
                    dockerfile_contents = ensure_str(
                        base64.b64encode(ensure_bytes(FH.read())))
                    dockerfile_mode = "Actual"

            # Parse repeated --annotation key=value options into a dict
            annotations = {}
            if annotation:
                for a in annotation:
                    try:
                        (k, v) = a.split('=', 1)
                        if k and v:
                            annotations[k] = v
                        else:
                            raise Exception("found null in key or value")
                    except Exception as err:
                        raise ValueError(
                            "annotation format error - annotations must be of the form (--annotation key=value), found: {}"
                            .format(a))

            workspace_root = config['tmp_dir']
        except Exception as err:
            # input setup/validation failure
            raise err

        logger.debug(
            "input has been prepared: imageDigest={} parentDigest={} imageId={} inputTag={} fulltag={} fulldigest={} userId={} annotations={} created_at={}"
            .format(imageDigest, parentDigest, imageId, inputTag, fulltag,
                    fulldigest, userId, annotations, created_at))

        # create an image record
        # --- Stage 2: synthesize a catalog-style image record marked as
        # already analyzed, with all timestamps pinned to created_at ---
        try:
            image_record = make_image_record(userId,
                                             'docker',
                                             None,
                                             image_metadata={
                                                 'tag': fulltag,
                                                 'digest': fulldigest,
                                                 'imageId': imageId,
                                                 'parentdigest': parentDigest,
                                                 'created_at': created_at,
                                                 'dockerfile':
                                                 dockerfile_contents,
                                                 'dockerfile_mode':
                                                 dockerfile_mode,
                                                 'annotations': annotations
                                             },
                                             registry_lookup=False,
                                             registry_creds=(None, None))
            image_record['created_at'] = created_at
            image_record['last_updated'] = created_at
            image_record['analyzed_at'] = now
            image_record['analysis_status'] = 'analyzed'
            image_record['image_status'] = 'active'
            image_record['record_state_key'] = 'active'
            for image_detail in image_record['image_detail']:
                image_detail['created_at'] = created_at
                image_detail['last_updated'] = created_at
                image_detail['tag_detected_at'] = created_at
                image_detail['record_state_key'] = 'active'
        except Exception as err:
            # image record setup fail
            raise err

        # perform analysis
        # --- Stage 3: run the analyzer against the docker archive and
        # extract per-content-type data (best-effort per type) ---
        try:
            image_data, analyzed_manifest_data = analyze_image(
                userId,
                rawmanifest,
                image_record,
                workspace_root,
                config,
                registry_creds=[],
                use_cache_dir=None,
                image_source='docker-archive',
                image_source_meta=docker_archive)

            image_content_data = {}
            for content_type in anchore_engine.common.image_content_types + anchore_engine.common.image_metadata_types:
                try:
                    image_content_data[
                        content_type] = anchore_engine.common.helpers.extract_analyzer_content(
                            image_data,
                            content_type,
                            manifest=input_manifest_data)
                except:
                    # Best-effort: missing/failed content types become empty
                    image_content_data[content_type] = {}

            anchore_engine.common.helpers.update_image_record_with_analysis_data(
                image_record, image_data)
            image_record['image_size'] = int(image_record['image_size'])
        except Exception as err:
            # image analysis fail
            raise err

        # generate an output image archive tarball
        # --- Stage 4: write manifest metadata plus three artifacts
        # (analysis, image_content, image_manifest) into the archive ---
        archive_file = anchore_archive
        try:
            with ImageArchive.for_writing(archive_file) as img_archive:

                img_archive.account = userId
                img_archive.image_digest = imageDigest
                img_archive.manifest.metadata = {
                    'versions': localconfig.get_versions(),
                    'image_id': imageId,
                    'image_record': json.dumps(image_record, sort_keys=True)
                }

                pack_data = {'document': image_data}
                data = ensure_bytes(json.dumps(pack_data, sort_keys=True))
                img_archive.add_artifact('analysis',
                                         source=ObjectStoreLocation(
                                             bucket='analysis_data',
                                             key=imageDigest),
                                         data=data,
                                         metadata=None)

                pack_data = {'document': image_content_data}
                data = ensure_bytes(json.dumps(pack_data, sort_keys=True))
                img_archive.add_artifact('image_content',
                                         source=ObjectStoreLocation(
                                             bucket='image_content_data',
                                             key=imageDigest),
                                         data=data,
                                         metadata=None)

                pack_data = {'document': input_manifest_data}
                data = ensure_bytes(json.dumps(pack_data, sort_keys=True))
                img_archive.add_artifact('image_manifest',
                                         source=ObjectStoreLocation(
                                             bucket='manifest_data',
                                             key=imageDigest),
                                         data=data,
                                         metadata=None)
        except Exception as err:
            # archive tarball generate fail
            raise err

    except Exception as err:
        # Any failure above is reported in CLI error format and exits non-zero
        logger.error(
            anchore_manager.cli.utils.format_error_output(
                click_config, 'db', {}, err))
        sys.exit(2)

    click.echo(
        "Analysis complete for image {} - archive file is located at {}".
        format(imageDigest, archive_file))
Ejemplo n.º 7
0
    def _execute(self):
        """
        Do the archiving of data

        Builds a tarball image archive (analysis data, vuln history, policy
        evaluations) in a temp dir, writes it to the archive object store, and
        updates the tracking record. On failure the tracking record is deleted
        and any partially-written tarball is best-effort cleaned up.

        :return: tuple of (status string, detail message string)
        """

        src_obj_mgr = object_store.get_manager()
        dest_archive_mgr = archive.get_manager()
        # Tracks whether the tarball reached the destination store, so the
        # failure path knows whether cleanup of stored data is needed
        data_written = False

        with session_scope() as session:
            # The archive record must already exist to track task state
            record = db_archived_images.get(session, self.account, self.image_digest)

            if not record:
                raise Exception('No analysis archive record found to track state')

            try:
                # Build the tarball under the configured tmp_dir; the temp dir
                # is removed automatically when the block exits
                with tempfile.TemporaryDirectory(dir=localconfig.get_config().get('tmp_dir')) as tempdir:
                    with ImageArchive.for_writing(os.path.join(tempdir, 'analysis_archive.tar.gz')) as img_archive:
                        img_archive.account = self.account
                        img_archive.image_digest = self.image_digest

                        # Image id comes from the first image_detail entry, if any
                        if self._catalog_record.get('image_detail'):
                            image_id = self._catalog_record.get('image_detail')[0]['imageId']
                        else:
                            image_id = None

                        img_archive.manifest.metadata = {
                            'versions': localconfig.get_versions(),
                            'image_id': image_id,
                            'image_record': json.dumps(self._catalog_record, sort_keys=True)
                        }

                        # Required artifacts must all archive or the task fails
                        self.archive_required(src_obj_mgr, self.required_artifacts, img_archive)

                        try:
                            vuln_artifacts = self.archive_vuln_history(img_archive)
                        except:
                            logger.exception('Error saving vuln history')
                            raise

                        try:
                            eval_artifacts = self.archive_policy_evaluations(src_obj_mgr, img_archive, session)
                        except:
                            logger.exception('Error saving policy evals')
                            raise

                        self.manifest = img_archive.manifest

                    # Closed tarball, now write it.

                    archive_bucket = self.__archive_bucket__
                    archive_key = '{}.tar.gz'.format(self.image_digest)
                    record.manifest_bucket = archive_bucket
                    record.manifest_key = archive_key

                    # Write the archive out to object store
                    # NOTE(review): reads the whole tarball into memory; fine
                    # for typical sizes but worth confirming for large images
                    with open(img_archive.backing_file_path, 'r+b') as tb:
                        tarball_data = tb.read()
                        size = len(tarball_data)

                    if not dest_archive_mgr.put(self.account, bucket=archive_bucket, archiveId=archive_key, data=tarball_data):
                        raise Exception("Could not write archive manifest")

                    data_written = True
                    record.archive_size_bytes = size
                    record.status = 'archived'

                    add_event(ImageArchived(self.account, self.image_digest, self.id))
                    return record.status, 'Completed successfully'
            except Exception as ex:
                record.status = 'error'

                # If the tarball already landed in the destination store,
                # attempt to remove it so we don't leak storage
                if data_written:
                    logger.info('Cleaning up after failed analysis archive task for {}/{}'.format(self.account,
                                                                                                  self.image_digest))
                    try:
                        resp = dest_archive_mgr.delete(self.account, record.manifest_bucket, record.manifest_key)
                    except Exception as ex:
                        logger.warn('Could not delete the analysis archive tarball in storage. May have leaked. Err: {}'.format(ex))

                # Drop the tracking record and emit a failure event; the outer
                # session_scope commits these changes
                session.delete(record)
                add_event(ImageArchivingFailed(self.account, self.image_digest, self.id, err=str(ex)))
                return 'error', str(ex)