Exemplo n.º 1
0
 def to_json(self):
     """Serialize to JSON, extending the parent representation with status and rfc3339 timestamps."""
     payload = super().to_json()
     payload.update(
         {
             "status": self.status.value,
             "expires_at": datetime_to_rfc3339(self.expires_at),
             "created_at": datetime_to_rfc3339(self.created_at),
             "last_update": datetime_to_rfc3339(self.last_updated),
         }
     )
     return payload
Exemplo n.º 2
0
class TestExecutableWhitelistItem:
    """Unit tests for ExecutableWhitelistItem expiration logic."""

    # Reference timestamps straddling "now" for expiration checks.
    ten_min_ago = datetime_to_rfc3339(
        anchore_now_datetime() - datetime.timedelta(minutes=10)
    )
    ten_min_from_now = datetime_to_rfc3339(
        anchore_now_datetime() + datetime.timedelta(minutes=10)
    )

    @pytest.mark.parametrize(
        "expires_on,expected", [(ten_min_ago, True), (ten_min_from_now, False)]
    )
    def test_is_expired(self, expires_on, expected):
        """A past expiry is expired; a future one is not."""
        whitelist_item = ExecutableWhitelistItem(
            {"expires_on": expires_on, "gate": "fake_gate"}, None
        )
        assert whitelist_item.is_expired() == expected

    @pytest.mark.parametrize(
        "expires_on,exception,expected",
        [(None, None, False), ("", None, False), ("2020-10-15", Exception, None)],
    )
    def test_is_expired_bad_input(self, expires_on, exception, expected):
        """Empty/None expiry means not expired; malformed dates raise on construction."""
        raw = {"expires_on": expires_on, "gate": "fake_gate"}
        if exception:
            with pytest.raises(exception):
                ExecutableWhitelistItem(raw, None)
        else:
            whitelist_item = ExecutableWhitelistItem(raw, None)
            assert whitelist_item.is_expired() == expected
Exemplo n.º 3
0
def user_db_to_msg(user):
    """Convert a user DB record into its API message representation, or None for None input."""
    if user is None:
        return None

    created = datetime.datetime.utcfromtimestamp(user['created_at'])
    updated = datetime.datetime.utcfromtimestamp(user['last_updated'])
    return {
        'username': user['username'],
        'created_at': datetime_to_rfc3339(created),
        'last_updated': datetime_to_rfc3339(updated),
    }
Exemplo n.º 4
0
def account_db_to_msg(account):
    """Convert an account DB record into its API message representation.

    :param account: dict-like account record, or None
    :return: dict message, or None when account is None
    """
    if account is None:
        return None

    # 'state' and 'type' may arrive either as plain strings or as enum
    # members; normalize both to their string value. isinstance() is the
    # idiomatic check (type(x) != str would also mis-handle str subclasses).
    state = account['state']
    if not isinstance(state, str):
        state = state.value
    acct_type = account['type']
    if not isinstance(acct_type, str):
        acct_type = acct_type.value

    return {
        'name': account['name'],
        'email': account['email'],
        'state': state,
        'type': acct_type,
        'created_at': datetime_to_rfc3339(datetime.datetime.utcfromtimestamp(account['created_at'])),
        'last_updated': datetime_to_rfc3339(datetime.datetime.utcfromtimestamp(account['last_updated']))
    }
Exemplo n.º 5
0
def test_rfc3339():
    """Round-trip checks for rfc3339 string / epoch / datetime conversions."""

    # parsing/validation and conversion symmetry
    for rfc_str, expectations in rfc3339_examples:
        print("testing input string: {}".format(rfc_str))
        epoch_result = utils.rfc3339str_to_epoch(rfc_str)
        print("\trfc3339_to_epoch: {}".format(epoch_result))
        assert epoch_result == expectations['epoch']
        print("\tepoch assertion passed")

        dt_result = utils.rfc3339str_to_datetime(rfc_str)
        print("\trfc3339_to_datetime: {}".format(dt_result))
        assert dt_result == expectations['dt']
        print("\tdatetime assertion passed")

    for epoch, expectations in epoch_examples:
        print("testing input epoch: {}".format(epoch))
        rfc_result = utils.epoch_to_rfc3339(epoch)
        print("\tepoch_to_rfc3339: {}".format(rfc_result))
        assert rfc_result == expectations['rfc3339']
        print("\tdatetime assertion passed")

    for dt, expectations in dt_examples:
        print("testing input datetime: {}".format(dt))
        rfc_result = utils.datetime_to_rfc3339(dt)
        print("\tdatetime_to_rfc3339: {}".format(rfc_result))
        assert rfc_result == expectations['rfc3339']
        print("\tdatetime assertion passed")
Exemplo n.º 6
0
def list_import_content(operation_id: str, account: str, content_type: str):
    """
    Generic way to list content of a given type from the db entries

    :param operation_id:
    :param account:
    :param content_type:
    :return: (response body, http status) tuple
    """
    try:
        with session_scope() as db_session:
            # Restrict to content rows belonging to this account's operation.
            matching = (
                db_session.query(ImageImportContent)
                .join(ImageImportContent.operation)
                .filter(
                    ImageImportOperation.account == account,
                    ImageImportOperation.uuid == operation_id,
                    ImageImportContent.content_type == content_type,
                )
            )
            resp = [
                {
                    "created_at": datetime_to_rfc3339(row.created_at),
                    "digest": row.digest,
                }
                for row in matching
            ]

        return resp, 200
    except Exception as ex:
        return make_response_error(ex, in_httpcode=500), 500
Exemplo n.º 7
0
def credential_db_to_msg(credential):
    """Convert a credential DB record into its API message, masking the secret value.

    :param credential: dict-like credential record, or None
    :return: dict message with the value replaced by '*' per character, or None
    """
    if credential is None:
        return None

    return {
        'type': credential['type'].value,
        # Mask the secret: one '*' per character; string multiplication is
        # simpler and faster than joining a per-char list.
        'value': '*' * len(credential['value']),
        'created_at': datetime_to_rfc3339(datetime.datetime.utcfromtimestamp(credential['created_at']))
    }
Exemplo n.º 8
0
def test_groups():
    """FeedGroupMetadata.to_json should emit all fields with rfc3339 timestamps."""
    now = datetime.datetime.utcnow()
    day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    group = FeedGroupMetadata()
    group.name = "group"
    group.enabled = True
    group.created_at = day_ago
    group.updated_at = now
    group.last_sync = now
    group.record_count = 0
    expected = {
        "name": "group",
        "enabled": True,
        "created_at": datetime_to_rfc3339(day_ago),
        "updated_at": datetime_to_rfc3339(now),
        "last_sync": datetime_to_rfc3339(now),
        "record_count": 0,
    }
    assert group.to_json() == expected
Exemplo n.º 9
0
def user_db_to_msg(user):
    """Convert a user DB record into its API message representation, or None for None input."""
    if user is None:
        return None

    created = datetime.datetime.utcfromtimestamp(user["created_at"])
    last_updated = datetime.datetime.utcfromtimestamp(user["last_updated"])
    return {
        "username": user["username"],
        "type": user["type"].value,
        "source": user["source"],
        "created_at": datetime_to_rfc3339(created),
        "last_updated": datetime_to_rfc3339(last_updated),
    }
Exemplo n.º 10
0
 def _map_type(obj):
     """Recursively convert obj into JSON-serializable primitives."""
     # Exact type() comparisons (not isinstance) are preserved deliberately,
     # so subclasses are not silently coerced by these branches.
     obj_type = type(obj)
     if obj_type == datetime.datetime:
         return datetime_to_rfc3339(obj)
     if obj_type in (list, set):
         return [SimpleJsonModel._map_type(item) for item in obj]
     if obj_type == dict:
         return {key: SimpleJsonModel._map_type(val) for key, val in obj.items()}
     if isinstance(obj, SimpleJsonModel):
         return obj.to_json()
     return obj
Exemplo n.º 11
0
def test_feeds():
    """FeedMetadata.to_json with and without nested groups."""
    feed = FeedMetadata()
    feed.name = "feed1"
    updated = datetime.datetime.utcnow()
    feed.updated_at = updated
    assert feed.to_json() == {
        "name": "feed1",
        "updated_at": datetime_to_rfc3339(updated),
        "groups": None,
        "enabled": None,
        "last_full_sync": None,
        "created_at": None,
    }

    # Attach a single group and verify it is serialized inline.
    group = FeedGroupMetadata()
    group.name = "group1"
    group.record_count = 10
    group.enabled = True
    feed.groups = [group]

    assert feed.to_json() == {
        "name": "feed1",
        "updated_at": datetime_to_rfc3339(updated),
        "enabled": None,
        "last_full_sync": None,
        "created_at": None,
        "groups": [
            {
                "name": "group1",
                "enabled": True,
                "record_count": 10,
                "created_at": None,
                "updated_at": None,
                "last_sync": None,
            }
        ],
    }
Exemplo n.º 12
0
def content_upload(operation_id, content_type, request):
    """
    Generic handler for multiple types of content uploads. Still operates at the API layer

    :param operation_id:
    :param content_type:
    :param request:
    :return:
    """
    try:
        with session_scope() as db_session:
            # Look up the import operation scoped to the caller's account.
            record = (db_session.query(ImageImportOperation).filter_by(
                account=ApiRequestContextProxy.namespace(),
                uuid=operation_id).one_or_none())
            if not record:
                raise api_exceptions.ResourceNotFound(resource=operation_id,
                                                      detail={})

            # Uploads are only allowed while the operation is still active.
            if not record.status.is_active():
                raise api_exceptions.ConflictingRequest(
                    message="import operation status does not allow uploads",
                    detail={"status": record.status},
                )

            # Require a declared size and enforce the upload cap.
            if not request.content_length:
                raise api_exceptions.BadRequest(
                    message="Request must contain content-length header",
                    detail={})
            elif request.content_length > MAX_UPLOAD_SIZE:
                raise api_exceptions.BadRequest(
                    message=
                    "too large. Max size of 100MB supported for content",
                    detail={"content-length": request.content_length},
                )

            digest, created_at = save_import_content(db_session, operation_id,
                                                     request.data,
                                                     content_type)

        # Session committed at this point; report the stored content identity.
        resp = {
            "digest": digest,
            "created_at": datetime_to_rfc3339(created_at)
        }

        return resp, 200
    except api_exceptions.AnchoreApiError as ex:
        # Known API errors map to their declared HTTP status codes.
        return (
            make_response_error(ex, in_httpcode=ex.__response_code__),
            ex.__response_code__,
        )
    except Exception as ex:
        # Anything unexpected becomes a generic 500 with logged traceback.
        logger.exception("Unexpected error in api processing")
        return make_response_error(ex, in_httpcode=500), 500
Exemplo n.º 13
0
def account_db_to_msg(account):
    """Convert an account DB record into its API message representation.

    :param account: dict-like account record, or None
    :return: dict message, or None when account is None
    """
    if account is None:
        return None

    # 'state' and 'type' may arrive either as plain strings or as enum
    # members; normalize both to their string value. isinstance() is the
    # idiomatic check (type(x) != str would also mis-handle str subclasses).
    state = account["state"]
    if not isinstance(state, str):
        state = state.value
    acct_type = account["type"]
    if not isinstance(acct_type, str):
        acct_type = acct_type.value

    return {
        "name": account["name"],
        "email": account["email"],
        "state": state,
        "type": acct_type,
        "created_at": datetime_to_rfc3339(
            datetime.datetime.utcfromtimestamp(account["created_at"])),
        "last_updated": datetime_to_rfc3339(
            datetime.datetime.utcfromtimestamp(account["last_updated"])),
    }
Exemplo n.º 14
0
def credential_db_to_msg(credential):
    """Convert a credential DB record into its API message, masking the secret value.

    :param credential: dict-like credential record, or None
    :return: dict message with the value replaced by '*' per character, or None
    """
    if credential is None:
        return None

    return {
        "type": credential["type"].value,
        # Mask the secret: one '*' per character; string multiplication is
        # simpler and faster than joining a per-char list.
        "value": "*" * len(credential["value"]),
        "created_at": datetime_to_rfc3339(
            datetime.datetime.utcfromtimestamp(credential["created_at"])),
    }
Exemplo n.º 15
0
    def archive_required(self, src_mgr: ObjectStorageManager, artifacts: list, img_archive: ImageArchive) -> list:
        """
        Copy each required artifact from the source object store into the image archive.

        Raises if any required artifact's content is missing from the source store.

        :param src_mgr: source object storage manager to read artifact content from
        :param artifacts: list of artifact records to copy
        :param img_archive: destination archive that content is added to
        :return: the archive manifest's artifact list after all copies
        """
        for artifact in artifacts:
            data = src_mgr.get(self.account, artifact.source.bucket, artifact.source.key)

            if not data:
                raise Exception('Required artifact not found for migration: {}'.format(artifact.name))

            # Record copy stats and destination on the artifact before adding it.
            artifact.metadata['completed_at'] = datetime_to_rfc3339(datetime.datetime.utcnow())
            artifact.metadata['bytes_copied'] = len(data)
            artifact.dest = TarballLocation(tarfile_path=artifact.name)
            img_archive.add_artifact(artifact.name, source=artifact.source, data=data, metadata=artifact.metadata)

        return img_archive.manifest.artifacts
Exemplo n.º 16
0
    def archive_policy_evaluations(self, src_obj_mgr: ObjectStorageManager, img_archive: ImageArchive, session) -> list:
        """
        Copy this image's previously generated policy evaluation histories into the archive.

        Policy evaluation histories are only moved, not generated, so only previously
        generated evaluations are migrated.

        :param src_obj_mgr: source object storage manager to read evaluation content from
        :param img_archive: destination archive that content is added to
        :param session: active db session used to look up evaluation records
        :return: list of Artifact records appended to the archive
        """

        logger.debug("Copying policy evaluation history to archive")

        policy_evaluations = db_policyeval.get_all_bydigest(self.account, self.image_digest, session)

        artifacts = []

        for eval_rec in policy_evaluations:
            artifact = Artifact(name='policy_evaluation-' + eval_rec['evalId'], metadata=None, source=None, dest=None)

            artifact.source = ObjectStoreLocation(bucket='policy_evaluations', key=eval_rec['evalId'])

            eval_content = src_obj_mgr.get(self.account, artifact.source.bucket, artifact.source.key)

            # Keep the db record and copy stats alongside the archived content.
            meta = {
                'record': eval_rec,
                'record_type': 'policy_evaluation',
                'completed_at': datetime_to_rfc3339(datetime.datetime.utcnow()),
                'bytes_copied': len(eval_content)
            }

            artifacts.append(artifact)
            img_archive.add_artifact(artifact.name, source=None, data=eval_content, metadata=meta)

        return artifacts
Exemplo n.º 17
0
    def archive_vuln_history(self, img_archive: ImageArchive) -> list:
        """
        Copy the image's vulnerability report from the policy engine into the archive.

        :param img_archive: destination archive that content is added to
        :return: list containing the added artifact, or [] when no image id is available
        :raises Exception: re-raises any failure from the policy engine fetch/archive add
        """
        logger.debug("Migrating image vulnerability history to archive")

        image_id = img_archive.manifest.metadata.get('image_id')
        if not image_id:
            # logger.warn() is a deprecated alias; use warning().
            logger.warning('No image id found in archive metadata for getting vuln history.')
            return []

        try:
            pe_client = internal_client_for(PolicyEngineClient, userId=self.account)
            vuln_report = pe_client.get_image_vulnerabilities(self.account, image_id)
            # sort_keys for a deterministic serialization (stable digests/diffs).
            data = ensure_bytes(json.dumps(vuln_report, sort_keys=True))

            metadata = {'completed_at': datetime_to_rfc3339(datetime.datetime.utcnow()),
                        'bytes_copied': len(data)}

            a = img_archive.add_artifact('vulnerabilities', source=None, data=data, metadata=metadata)

            return [a]
        except Exception:
            logger.exception("Error flushing policy engine state for image")
            # Bare raise preserves the original traceback (raise ex would truncate it).
            raise
Exemplo n.º 18
0
 def to_json(self):
     """Serialize to JSON, extending the parent representation with rfc3339 timestamps."""
     payload = super().to_json()
     payload.update(
         {
             "created_at": datetime_to_rfc3339(self.created_at),
             "last_update": datetime_to_rfc3339(self.last_updated),
         }
     )
     return payload
Exemplo n.º 19
0
class TestImageAddWorkflow:
    """Unit tests for catalog_impl image-add workflow helpers."""

    # NOTE(review): datetime.now() is naive local time; confirm whether UTC
    # (utcnow) is intended here, as used elsewhere in the codebase.
    now_str = datetime_to_rfc3339(datetime.now())

    @pytest.mark.parametrize(
        "param",
        [
            pytest.param(
                {
                    "input": {},
                    "expected_dockerfile": None,
                    "expected_dockerfile_mode": None,
                    "expected_error_prefix": None,
                },
                id="no-dockerfile-data",
            ),
            pytest.param(
                {
                    "input": {
                        "dockerfile": "not-encoded!"
                    },
                    "expected_dockerfile":
                    None,
                    "expected_dockerfile_mode":
                    None,
                    "expected_error_prefix":
                    "input dockerfile data must be base64 encoded - exception on decode",
                },
                id="not-encoded",
            ),
            pytest.param(
                {
                    "input": {
                        "dockerfile":
                        str(
                            base64.b64encode(
                                "dockerfile contents".encode("utf-8")),
                            "utf-8",
                        )
                    },
                    "expected_dockerfile":
                    str(
                        base64.b64encode(
                            "dockerfile contents".encode("utf-8")), "utf-8"),
                    "expected_dockerfile_mode":
                    "Actual",
                    "expected_error_prefix":
                    None,
                },
                id="success",
            ),
        ],
    )
    def test_get_dockerfile_info(self, param):
        """get_dockerfile_info decodes base64 dockerfile data or raises on bad input."""
        if param["expected_error_prefix"] is not None:
            with pytest.raises(Exception) as err:
                catalog_impl.get_dockerfile_info(param["input"])
            # Fix: the assertion must run AFTER the raises-block. Inside the
            # block, statements following the raising call never execute, so
            # the original assertion was dead code. err.value is the raised
            # exception itself.
            assert str(err.value).startswith(param["expected_error_prefix"])
        else:
            (
                actual_dockerfile,
                actual_dockerfile_mode,
            ) = catalog_impl.get_dockerfile_info(param["input"])
            assert actual_dockerfile == param["expected_dockerfile"]
            assert actual_dockerfile_mode == param["expected_dockerfile_mode"]

    @pytest.mark.parametrize(
        "param",
        [
            pytest.param(
                {
                    "params": {
                        "digest":
                        "sha256:714511030a442b48f37791a817ce6e124d9ea4b0158f93ce914520549bd6bc30",
                    },
                    "image_info": {
                        "repo": "anchore/kai",
                        "registry": "docker.io",
                        "tag": None,
                    },
                    "input_string": "test",
                    "expected_overrides": {},
                    "expected_input_string": "test",
                },
                id="missing-tag",
            ),
            pytest.param(
                {
                    "params": {
                        "tag": "anchore/kai:latest"
                    },
                    "image_info": {
                        "repo": "anchore/kai",
                        "registry": "docker.io",
                        "tag": None,
                    },
                    "input_string": "test",
                    "expected_overrides": {},
                    "expected_input_string": "test",
                },
                id="missing-digest",
            ),
            pytest.param(
                {
                    "params": {
                        "tag":
                        "anchore/kai:latest",
                        "digest":
                        "sha256:714511030a442b48f37791a817ce6e124d9ea4b0158f93ce914520549bd6bc30",
                    },
                    "image_info": {
                        "repo": "anchore/kai",
                        "registry": "docker.io",
                        "tag": "latest",
                    },
                    "input_string":
                    "test",
                    "expected_overrides": {
                        "fulltag": "anchore/kai:latest",
                        "tag": "latest",
                    },
                    "expected_input_string":
                    "docker.io/anchore/kai@sha256:714511030a442b48f37791a817ce6e124d9ea4b0158f93ce914520549bd6bc30",
                },
                id="success-no-created-at",
            ),
            pytest.param(
                {
                    "params": {
                        "tag": "anchore/kai:latest",
                        "digest":
                        "sha256:714511030a442b48f37791a817ce6e124d9ea4b0158f93ce914520549bd6bc30",
                        "created_at": now_str,
                    },
                    "image_info": {
                        "repo": "anchore/kai",
                        "registry": "docker.io",
                        "tag": "latest",
                    },
                    "input_string":
                    "test",
                    "expected_overrides": {
                        "fulltag": "anchore/kai:latest",
                        "tag": "latest",
                        "created_at_override": now_str,
                    },
                    "expected_input_string":
                    "docker.io/anchore/kai@sha256:714511030a442b48f37791a817ce6e124d9ea4b0158f93ce914520549bd6bc30",
                },
                id="success-with-created-at",
            ),
        ],
    )
    def test_resolve_image_info_overrides_and_input_string(self, param):
        """Overrides and the canonical input string require both tag and digest."""
        (
            actual_overrides,
            actual_input_string,
        ) = catalog_impl.resolve_image_info_overrides_and_input_string(
            param["params"], param["image_info"], param["input_string"])
        assert actual_overrides == param["expected_overrides"]
        assert actual_input_string == param["expected_input_string"]

    @pytest.mark.parametrize(
        "param",
        [
            pytest.param(
                {
                    "image_info": {},
                    "expected_manifest":
                    None,
                    "expected_error_prefix":
                    "could not fetch/parse manifest - exception: no manifest from get_image_info",
                },
                id="no-manifest",
            ),
            pytest.param(
                {
                    "image_info": {
                        "manifest": "hey there"
                    },
                    "expected_manifest":
                    None,
                    "expected_error_prefix":
                    "could not fetch/parse manifest - exception: ",
                },
                id="non-json-manifest",
            ),
            # NOTE(review): this case sets both expected_manifest and
            # expected_error_prefix; the error branch wins below. Confirm the
            # intended expectation for a valid dict manifest.
            pytest.param(
                {
                    "image_info": {
                        "manifest": {
                            "layers": [{
                                "this": "is a layer"
                            }]
                        }
                    },
                    "expected_manifest":
                    json.dumps({"layers": [{
                        "this": "is a layer"
                    }]}),
                    "expected_error_prefix":
                    "could not fetch/parse manifest - exception: no manifest from get_image_info",
                },
                id="valid-manifest",
            ),
        ],
    )
    def test_get_manifest(self, param):
        """get_manifest serializes a parsed manifest or raises with a descriptive message."""
        if param["expected_error_prefix"] is not None:
            with pytest.raises(Exception) as err:
                catalog_impl.get_manifest(param["image_info"])
            # Fix: assertion moved outside the raises-block (it was dead code
            # inside); err.value is the raised exception.
            assert str(err.value).startswith(param["expected_error_prefix"])
        else:
            actual_manifest = catalog_impl.get_manifest(param["image_info"])
            assert actual_manifest == param["expected_manifest"]

    @pytest.mark.parametrize(
        "param",
        [
            pytest.param(
                {
                    "image_key":
                    catalog_impl.ImageKey(
                        tag="docker.io/anchore/test_images:centos8",
                        digest=""),
                    "expected":
                    "docker.io/anchore/test_images:centos8",
                },
                id="no-digest",
            ),
            pytest.param(
                {
                    "image_key":
                    catalog_impl.ImageKey(
                        tag="docker.io/anchore/test_images:centos8",
                        digest="unknown"),
                    "expected":
                    "docker.io/anchore/test_images:centos8",
                },
                id="digest-unknown",
            ),
            pytest.param(
                {
                    "image_key": catalog_impl.ImageKey(tag="", digest=""),
                    "expected": "",
                },
                id="no-digest-no-tag",
            ),
            pytest.param(
                {
                    "image_key":
                    catalog_impl.ImageKey(
                        tag="docker.io/anchore/test_images:centos8",
                        digest="sha256:1234abcd",
                    ),
                    "expected":
                    "docker.io/anchore/test_images@sha256:1234abcd",
                },
                id="valid-digest-valid-tag",
            ),
        ],
    )
    def test_get_input_string(self, param):
        """get_input_string prefers repo@digest when a real digest is present, else the tag."""
        actual = catalog_impl.get_input_string(param["image_key"])
        assert actual == param["expected"]