Пример #1
0
    async def environment_modify(
        self,
        environment_id: uuid.UUID,
        name: str,
        repository: Optional[str],
        branch: Optional[str],
        project_id: Optional[uuid.UUID] = None,
        description: Optional[str] = None,
        icon: Optional[str] = None,
    ) -> model.Environment:
        """
        Update an existing environment, changing only the fields that were supplied.

        :raises NotFound: When the environment does not exist.
        :raises BadRequest: On a name clash in the target project, a missing
            target project, an invalid icon, or oversized icon/description.
        """
        env = await data.Environment.get_by_id(environment_id)
        if env is None:
            raise NotFound("The environment id does not exist.")
        # Snapshot the pre-modification state so listeners can diff old vs new.
        previous_dto = env.to_dto()

        # The uniqueness check targets the project the environment will end up
        # in: the requested one when moving, the current one otherwise.
        target_project = project_id or env.project

        clashing = await data.Environment.get_list(project=target_project, name=name)
        if clashing and clashing[0].id != environment_id:
            raise BadRequest(
                f"Project with id={target_project} already has an environment with name {name}"
            )

        updates = {"name": name}
        if repository is not None:
            updates["repo_url"] = repository
        if branch is not None:
            updates["repo_branch"] = branch
        if project_id is not None:
            # Moving to another project is only allowed when it exists.
            destination = await data.Project.get_by_id(project_id)
            if not destination:
                raise BadRequest(f"Project with id={project_id} doesn't exist")
            updates["project"] = project_id
        if description is not None:
            updates["description"] = description
        if icon is not None:
            self.validate_icon(icon)
            updates["icon"] = icon

        try:
            await env.update_fields(connection=None, **updates)
        except StringDataRightTruncationError:
            raise BadRequest(
                "Maximum size of the icon data url or the description exceeded"
            )
        await self.notify_listeners(EnvironmentAction.updated, env.to_dto(),
                                    previous_dto)
        return env.to_dto()
Пример #2
0
    async def environment_create(
        self,
        project_id: uuid.UUID,
        name: str,
        repository: Optional[str],
        branch: Optional[str],
        environment_id: Optional[uuid.UUID],
        description: str = "",
        icon: str = "",
    ) -> model.Environment:
        """
        Create a new environment in the given project.

        :param project_id: Project the environment belongs to; must exist.
        :param name: Environment name, unique within the project.
        :param repository: Repository URL; must be set together with branch.
            (Annotation fixed to Optional[str]: the body explicitly handles None.)
        :param branch: Repository branch; must be set together with repository.
        :param environment_id: Explicit id for the new environment; a random
            uuid4 is generated when None.
        :param description: Optional free-form description.
        :param icon: Optional icon as a data url; "" means no icon.
        :raises BadRequest: When only one of repository/branch is set, or the
            icon is invalid, or the icon/description is too large.
        :raises NotFound: When the project does not exist.
        :raises ServerError: When the name is already used in the project.
        """
        if environment_id is None:
            environment_id = uuid.uuid4()

        # Repository and branch only make sense as a pair (XOR check).
        if (repository is None) != (branch is None):
            raise BadRequest("Repository and branch should be set together.")

        # fetch the project first
        project = await data.Project.get_by_id(project_id)
        if project is None:
            raise NotFound(
                "The project id for the environment does not exist.")

        # check if an environment with this name is already defined in this project
        envs = await data.Environment.get_list(project=project_id, name=name)
        if len(envs) > 0:
            # NOTE(review): environment_modify raises BadRequest for the same
            # clash; kept as ServerError here to preserve the API contract.
            raise ServerError(
                f"Project {project.name} (id={project.id}) already has an environment with name {name}"
            )

        self.validate_icon(icon)

        env = data.Environment(
            id=environment_id,
            name=name,
            project=project_id,
            repo_url=repository,
            repo_branch=branch,
            description=description,
            icon=icon,
        )
        try:
            await env.insert()
        except StringDataRightTruncationError:
            raise BadRequest(
                "Maximum size of the icon data url or the description exceeded"
            )
        await self.notify_listeners(EnvironmentAction.created, env.to_dto())
        return env.to_dto()
Пример #3
0
 def validate_icon(self, icon: str) -> None:
     """Raise BadRequest unless ``icon`` is empty or a valid base64 data url."""
     if icon == "":
         # No icon supplied: nothing to validate.
         return
     match = self.icon_regex.match(icon)
     if not match or len(match.groups()) != 3:
         raise BadRequest(
             "The value supplied for the icon parameter is invalid")
     # The third capture group holds the base64 payload of the data url.
     payload = match.groups()[2]
     try:
         base64.b64decode(payload, validate=True)
     except binascii.Error:
         raise BadRequest(
             "The icon is not a valid base64 encoded string")
Пример #4
0
    async def get_reports(self,
                          env: data.Environment,
                          start: Optional[str] = None,
                          end: Optional[str] = None,
                          limit: Optional[int] = None) -> Apireturn:
        """
        Return up to ``limit`` compile reports for ``env``, newest first,
        optionally bounded by the ``start``/``end`` timestamps.

        :raises BadRequest: When limit exceeds APILIMIT.
        """
        if env is None:
            return 404, {"message": "The given environment id does not exist!"}

        if limit is None:
            limit = APILIMIT
        elif limit > APILIMIT:
            raise BadRequest(
                f"limit parameter can not exceed {APILIMIT}, got {limit}.")

        # Parse the optional timestamp strings into datetime boundaries.
        start_time = dateutil.parser.parse(start) if start is not None else None
        end_time = dateutil.parser.parse(end) if end is not None else None

        reports = await data.Compile.get_list_paged(
            page_by_column="started",
            order_by_column="started",
            order="DESC",
            limit=limit,
            start=start_time,
            end=end_time,
            no_obj=False,
            connection=None,
            environment=env.id,
        )

        return 200, {"reports": [report.to_dict() for report in reports]}
Пример #5
0
    async def list_version(self,
                           env: data.Environment,
                           start: Optional[int] = None,
                           limit: Optional[int] = None) -> Apireturn:
        """
        List configuration model versions of an environment, paged.

        :param env: The environment whose versions are listed.
        :param start: Offset of the first version; must be given with limit.
        :param limit: Page size, capped at APILIMIT; must be given with start.
        :raises ServerError: When only one of start/limit is provided.
        :raises BadRequest: When limit exceeds APILIMIT.
        """
        # XOR check: start and limit must be provided together.
        if (start is None) != (limit is None):
            raise ServerError("Start and limit should always be set together.")

        if start is None or limit is None:
            start = 0
            # Use the same APILIMIT constant as the cap below; the original
            # mixed data.APILIMIT and APILIMIT, which name the same value.
            limit = APILIMIT

        if limit > APILIMIT:
            raise BadRequest(
                f"limit parameter can not exceed {APILIMIT}, got {limit}.")

        versions = await data.ConfigurationModel.get_versions(
            env.id, start, limit)

        return 200, {
            "versions": versions,
            "start": start,
            "limit": limit,
            "count": len(versions),
        }
Пример #6
0
 async def _validate_version_parameters(self, env: uuid.UUID,
                                        first_version: int,
                                        other_version: int) -> None:
     """Ensure the pair is strictly increasing and both versions exist."""
     if not first_version < other_version:
         raise BadRequest(
             f"Invalid version parameters: ({first_version}, {other_version}). "
             "The second version number should be strictly greater than the first"
         )
     await self._check_version_exists(env, first_version)
     await self._check_version_exists(env, other_version)
Пример #7
0
    async def get_version(
        self,
        env: data.Environment,
        version_id: int,
        include_logs: Optional[bool] = None,
        log_filter: Optional[str] = None,
        limit: Optional[int] = 0,
    ) -> Apireturn:
        """
        Return one configuration model version together with its resources and
        unknown parameters; optionally attach resource action logs.

        :param env: The environment the version belongs to.
        :param version_id: Version number of the configuration model.
        :param include_logs: When truthy, each resource dict gets an "actions"
            list populated with its resource action log entries.
        :param log_filter: Optional filter forwarded to the log query.
        :param limit: Cap on the number of log entries fetched (not on the
            resources); None means APILIMIT. NOTE(review): the default is 0
            while sibling endpoints default to None — confirm 0 is intended.
        :raises BadRequest: When limit exceeds APILIMIT.
        :return: (404, message) when the version or its resources are missing,
            otherwise (200, payload).
        """
        version = await data.ConfigurationModel.get_version(env.id, version_id)
        if version is None:
            return 404, {
                "message": "The given configuration model does not exist yet."
            }

        resources = await data.Resource.get_resources_for_version(env.id,
                                                                  version_id,
                                                                  no_obj=True)
        if resources is None:
            return 404, {
                "message": "The given configuration model does not exist yet."
            }

        # The limit only applies to the log query below, which is why it is
        # validated after the resources have already been fetched.
        if limit is None:
            limit = APILIMIT
        elif limit > APILIMIT:
            raise BadRequest(
                f"limit parameter can not exceed {APILIMIT}, got {limit}."
                f" To retrieve more entries, use /api/v2/resource_actions")

        resources_out: List[JsonType] = []
        d = {"model": version, "resources": resources_out}
        # Maps a resource version id to the "actions" list embedded in the
        # corresponding resource dict, so logs can be attached in one pass.
        resource_action_lookup: Dict[ResourceVersionIdStr,
                                     List[data.ResourceAction]] = {}

        for res_dict in resources:
            resources_out.append(res_dict)
            if bool(include_logs):
                actions: List[data.ResourceAction] = []
                res_dict["actions"] = actions
                resource_action_lookup[
                    res_dict["resource_version_id"]] = actions

        if include_logs:
            # get all logs, unsorted
            all_logs = await data.ResourceAction.get_logs_for_version(
                env.id, version_id, log_filter, limit)
            for log in all_logs:
                # NOTE(review): assumes every rvid referenced by a log belongs
                # to this version (else this would raise KeyError) — confirm.
                for resource_version_id in log.resource_version_ids:
                    resource_action_lookup[resource_version_id].append(log)

        d["unknowns"] = await data.UnknownParameter.get_list(
            environment=env.id, version=version_id)

        return 200, d
Пример #8
0
    def upload_file_internal(self, file_hash: str, content: bytes) -> None:
        """
        Persist ``content`` in the server's file storage under its hash.

        :raises ServerError: When a file with this hash already exists.
        :raises BadRequest: When the content does not hash to ``file_hash``.
        """
        target_path = os.path.join(self.server_slice._server_storage["files"],
                                   file_hash)

        if os.path.exists(target_path):
            raise ServerError("A file with this id already exists.")

        # The file name doubles as a content hash, so the payload must match it.
        if hash_file(content) != file_hash:
            raise BadRequest("The hash does not match the content")

        with open(target_path, "wb+") as handle:
            handle.write(content)
Пример #9
0
 def validate_arguments(self, values: Dict[str, Any]) -> Dict[str, Any]:
     """
     Validate and convert a method's argument map (kwargs-style key/value
     pairs), coercing types where needed (e.g. str to int). The returned dict
     carries the correct typing to dispatch to method handlers.

     :raises BadRequest: When validation fails; the details contain the
         individual validation errors.
     """
     try:
         validated = self.argument_validator(**values)
         # Re-extract each declared field from the validated model.
         return {name: getattr(validated, name) for name in validated.__fields__.keys()}
     except ValidationError as e:
         error_msg = f"Failed to validate argument\n{str(e)}"
         LOGGER.exception(error_msg)
         raise BadRequest(error_msg, {"validation_errors": e.errors()})
Пример #10
0
 async def update_notification(
     self,
     env: data.Environment,
     notification_id: uuid.UUID,
     read: Optional[bool] = None,
     cleared: Optional[bool] = None,
 ) -> Notification:
     """
     Update the read and/or cleared flag of a notification.

     :raises NotFound: When the notification does not exist in ``env``.
     :raises BadRequest: When neither flag is provided.
     """
     notification = await data.Notification.get_one(environment=env.id, id=notification_id)
     if not notification:
         raise NotFound(f"Notification with id {notification_id} not found")
     # Only forward the flags that were actually supplied.
     changes = {}
     if read is not None:
         changes["read"] = read
     if cleared is not None:
         changes["cleared"] = cleared
     if not changes:
         raise BadRequest("At least one of {read, cleared} should be specified for a valid update")
     await notification.update(**changes)
     return notification.to_dto()
Пример #11
0
    async def list_notifications(
        self,
        env: data.Environment,
        limit: Optional[int] = None,
        first_id: Optional[uuid.UUID] = None,
        last_id: Optional[uuid.UUID] = None,
        start: Optional[datetime.datetime] = None,
        end: Optional[datetime.datetime] = None,
        filter: Optional[Dict[str, List[str]]] = None,
        sort: str = "created.desc",
    ) -> ReturnValue[List[Notification]]:
        """
        List notifications of an environment with paging, filtering and sorting.

        :param env: The environment whose notifications are listed.
        :param limit: Page size; defaults to APILIMIT and may not exceed it.
        :param first_id: Paging boundary: id of the first item of the page.
        :param last_id: Paging boundary: id of the last item of the page.
        :param start: Lower boundary for the sort column.
        :param end: Upper boundary for the sort column.
        :param filter: Map of field name to accepted values, validated below.
        :param sort: "<column>.<asc|desc>" sort specification.
        :raises BadRequest: For an invalid limit, filter or sort, or when the
            underlying query rejects a parameter.
        """
        if limit is None:
            limit = APILIMIT
        elif limit > APILIMIT:
            raise BadRequest(f"limit parameter can not exceed {APILIMIT}, got {limit}.")

        # Translate the raw filter map into typed query constraints.
        query: Dict[str, Tuple[QueryType, object]] = {}
        if filter:
            try:
                query.update(NotificationFilterValidator().process_filters(filter))
            except InvalidFilter as e:
                raise BadRequest(e.message) from e

        try:
            notification_order = NotificationOrder.parse_from_string(sort)
        except InvalidSort as e:
            raise BadRequest(e.message) from e

        try:
            dtos = await data.Notification.list_notifications(
                database_order=notification_order,
                limit=limit,
                environment=env.id,
                first_id=first_id,
                last_id=last_id,
                start=start,
                end=end,
                connection=None,
                **query,
            )
        except (data.InvalidQueryParameter, data.InvalidFieldNameException) as e:
            raise BadRequest(e.message)

        # Compute paging metadata (counts before/after this page) and the
        # matching navigation links for the response envelope.
        paging_handler = NotificationPagingHandler(NotificationPagingCountsProvider())
        metadata = await paging_handler.prepare_paging_metadata(
            QueryIdentifier(environment=env.id), dtos, query, limit, notification_order
        )
        links = await paging_handler.prepare_paging_links(
            dtos,
            filter,
            notification_order,
            limit,
            first_id=first_id,
            last_id=last_id,
            start=start,
            end=end,
            has_next=metadata.after > 0,
            has_prev=metadata.before > 0,
        )

        return ReturnValueWithMeta(response=dtos, links=links if links else {}, metadata=vars(metadata))
Пример #12
0
    async def upload_code_batched(self, env: data.Environment, code_id: int,
                                  resources: JsonType) -> Apireturn:
        """
        Store source-code metadata for a model version, one Code row per
        resource type, after validating the payload shape and file references.

        :raises BadRequest: On a malformed payload or missing file references.
        :raises ServerError: When an entry clashes with stored source refs.
        """
        # validate the shape of the payload before touching the database
        for resource_type, source_map in resources.items():
            if not isinstance(resource_type, str):
                raise BadRequest(
                    "All keys in the resources map must be strings")
            if not isinstance(source_map, dict):
                raise BadRequest(
                    "All values in the resources map must be dicts")

            for file_ref, source_info in source_map.items():
                if not isinstance(file_ref, str):
                    raise BadRequest(
                        "All keys in the sources map must be strings")
                if not isinstance(source_info, (list, tuple)):
                    raise BadRequest(
                        "All values in the sources map must be lists or tuple")
                well_formed = (
                    len(source_info) == 3
                    and isinstance(source_info[0], str)
                    and isinstance(source_info[1], str)
                    and isinstance(source_info[2], list)
                )
                if not well_formed:
                    raise BadRequest(
                        "The values in the source map should be of the form (filename, module, [requirements])"
                    )

        # every referenced file must already be present on the server
        allrefs = [
            ref for sourcemap in resources.values()
            for ref in sourcemap.keys()
        ]
        missing = self.file_slice.stat_file_internal(allrefs)
        if len(missing) != 0:
            raise BadRequest("Not all file references provided are valid",
                             details={"references": missing})

        existing = await data.Code.get_versions(environment=env.id,
                                                version=code_id)
        oldmap: Dict[str, data.Code] = {c.resource: c for c in existing}

        # split into brand-new entries and entries clashing with stored ones;
        # identical re-uploads are silently skipped
        new = {k: v for k, v in resources.items() if k not in oldmap}
        conflict = [
            k for k, v in resources.items()
            if k in oldmap and oldmap[k].source_refs != v
        ]
        if len(conflict) > 0:
            raise ServerError(
                "Some of these items already exists, but with different source files",
                details={"references": conflict})

        newcodes = [
            data.Code(environment=env.id,
                      version=code_id,
                      resource=resource,
                      source_refs=hashes) for resource, hashes in new.items()
        ]
        await data.Code.insert_many(newcodes)

        return 200
Пример #13
0
    async def get_parameters(
        self,
        env: data.Environment,
        limit: Optional[int] = None,
        first_id: Optional[uuid.UUID] = None,
        last_id: Optional[uuid.UUID] = None,
        start: Optional[Union[datetime.datetime, str]] = None,
        end: Optional[Union[datetime.datetime, str]] = None,
        filter: Optional[Dict[str, List[str]]] = None,
        sort: str = "name.asc",
    ) -> ReturnValue[List[Parameter]]:
        """
        List parameters of an environment with paging, filtering and sorting.

        :param env: The environment whose parameters are listed.
        :param limit: Page size; defaults to APILIMIT and may not exceed it.
        :param first_id: Paging boundary: id of the first item of the page.
        :param last_id: Paging boundary: id of the last item of the page.
        :param start: Lower boundary for the sort column; coerced to the
            column's type below.
        :param end: Upper boundary for the sort column; coerced likewise.
        :param filter: Map of field name to accepted values, validated below.
        :param sort: "<column>.<asc|desc>" sort specification.
        :raises BadRequest: For an invalid limit, filter or sort, or when the
            underlying query rejects a parameter.
        """
        if limit is None:
            limit = APILIMIT
        elif limit > APILIMIT:
            raise BadRequest(
                f"limit parameter can not exceed {APILIMIT}, got {limit}.")

        # Translate the raw filter map into typed query constraints.
        query: Dict[str, Tuple[QueryType, object]] = {}
        if filter:
            try:
                query.update(
                    ParameterFilterValidator().process_filters(filter))
            except InvalidFilter as e:
                raise BadRequest(e.message) from e
        try:
            parameter_order = ParameterOrder.parse_from_string(sort)
        except InvalidSort as e:
            raise BadRequest(e.message) from e

        # Paging boundaries may arrive as strings; coerce them to the type of
        # the column being sorted on.
        typed_start, typed_end = None, None
        if start is not None:
            typed_start = parameter_order.ensure_boundary_type(start)
        if end is not None:
            typed_end = parameter_order.ensure_boundary_type(end)

        try:
            dtos = await data.Parameter.get_parameter_list(
                database_order=parameter_order,
                limit=limit,
                environment=env.id,
                first_id=first_id,
                last_id=last_id,
                start=typed_start,
                end=typed_end,
                connection=None,
                **query,
            )
        except (data.InvalidQueryParameter,
                data.InvalidFieldNameException) as e:
            raise BadRequest(e.message)

        # Compute paging metadata (counts before/after this page) and the
        # matching navigation links for the response envelope.
        paging_handler = ParameterPagingHandler(
            ParameterPagingCountsProvider())
        paging_metadata = await paging_handler.prepare_paging_metadata(
            QueryIdentifier(environment=env.id), dtos, query, limit,
            parameter_order)
        links = await paging_handler.prepare_paging_links(
            dtos,
            filter,
            parameter_order,
            limit,
            first_id=first_id,
            last_id=last_id,
            start=typed_start,
            end=typed_end,
            has_next=paging_metadata.after > 0,
            has_prev=paging_metadata.before > 0,
        )

        return ReturnValueWithMeta(response=dtos,
                                   links=links if links else {},
                                   metadata=vars(paging_metadata))
Пример #14
0
    async def put_version(
        self,
        env: data.Environment,
        version: int,
        resources: List[JsonType],
        resource_state: Dict[ResourceIdStr, const.ResourceState],
        unknowns: List[Dict[str, PrimitiveTypes]],
        version_info: JsonType,
        compiler_version: Optional[str] = None,
    ) -> Apireturn:
        """
        Store a new configuration model version: its resources, unknown
        parameters and a "store" resource action; trigger auto-deploy when
        the environment enables it.

        :param resources: a list of serialized resources
        :param resource_state: explicit state per resource id; resources in an
            undeployable state propagate "skipped for undeployable" to their
            transitive dependents.
        :param unknowns: dict with the following structure
        {
         "resource": ResourceIdStr,
         "parameter": str,
         "source": str
         }
        :param version_info: exporter metadata, inspected for compile state.
        :param compiler_version: must be set; older compilers are rejected.
        :return: 200 on success.
        :raises BadRequest: for a missing compiler version or an invalid
            version number.
        :raises ServerError: when the version already exists.
        """

        if not compiler_version:
            raise BadRequest(
                "Older compiler versions are no longer supported, please update your compiler"
            )

        # The version number must come from a prior reservation and be positive.
        if version > env.last_version:
            raise BadRequest(
                f"The version number used is {version} "
                f"which is higher than the last outstanding reservation {env.last_version}"
            )
        if version <= 0:
            raise BadRequest(
                f"The version number used ({version}) is not positive")

        started = datetime.datetime.now().astimezone()

        agents = set()
        # lookup for all RV's, lookup by resource id
        rv_dict: Dict[ResourceVersionIdStr, data.Resource] = {}
        # reverse dependency tree, Resource.provides [:] -- Resource.requires as resource_id
        provides_tree: Dict[str, List[str]] = defaultdict(lambda: [])
        # list of all resources which have a cross agent dependency, as a tuple, (dependant,requires)
        cross_agent_dep = []
        # list of all resources which are undeployable
        undeployable: List[data.Resource] = []

        resource_objects = []
        resource_version_ids = []
        for res_dict in resources:
            res_obj = data.Resource.new(env.id, res_dict["id"])
            if res_obj.resource_id in resource_state:
                # NOTE(review): looks up the enum member by name — assumes the
                # wire format sends state names; confirm against the client.
                res_obj.status = const.ResourceState[resource_state[
                    res_obj.resource_id]]
                if res_obj.status in const.UNDEPLOYABLE_STATES:
                    undeployable.append(res_obj)

            # collect all agents
            agents.add(res_obj.agent)

            # everything except the id becomes a resource attribute
            attributes = {}
            for field, value in res_dict.items():
                if field != "id":
                    attributes[field] = value

            res_obj.attributes = attributes
            resource_objects.append(res_obj)
            resource_version_ids.append(res_obj.resource_version_id)

            rv_dict[res_obj.resource_id] = res_obj

            # find cross agent dependencies
            agent = res_obj.agent
            resc_id = res_obj.resource_id
            if "requires" not in attributes:
                LOGGER.warning(
                    "Received resource without requires attribute (%s)" %
                    res_obj.resource_id)
            else:
                for req in attributes["requires"]:
                    rid = Id.parse_id(req)
                    provides_tree[rid.resource_str()].append(resc_id)
                    if rid.get_agent_name() != agent:
                        # it is a CAD
                        cross_agent_dep.append((res_obj, rid))

        # hook up all CADs
        for f, t in cross_agent_dep:
            res_obj = rv_dict[t.resource_str()]
            res_obj.provides.append(f.resource_version_id)

        # detect failed compiles
        def safe_get(input: JsonType, key: str, default: object) -> object:
            # tolerant lookup: returns default when input is not a dict
            if not isinstance(input, dict):
                return default
            if key not in input:
                return default
            return input[key]

        metadata: JsonType = safe_get(version_info, const.EXPORT_META_DATA, {})
        compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "")
        # NOTE(review): compile_state originates from JSON metadata (likely a
        # str) but is compared against an enum member — confirm this matches.
        failed = compile_state == const.Compilestate.failed

        resources_to_purge: List[data.Resource] = []
        if not failed and (await env.get(PURGE_ON_DELETE)):
            # search for deleted resources (purge_on_delete)
            resources_to_purge = await data.Resource.get_deleted_resources(
                env.id, version, set(rv_dict.keys()))

            previous_requires = {}
            for res in resources_to_purge:
                LOGGER.warning("Purging %s, purged resource based on %s" %
                               (res.resource_id, res.resource_version_id))

                # add a purging copy of the deleted resource to this version
                attributes = res.attributes.copy()
                attributes["purged"] = True
                attributes["requires"] = []
                res_obj = data.Resource.new(
                    env.id,
                    resource_version_id=ResourceVersionIdStr(
                        "%s,v=%s" % (res.resource_id, version)),
                    attributes=attributes,
                )
                resource_objects.append(res_obj)

                previous_requires[
                    res_obj.resource_id] = res.attributes["requires"]
                resource_version_ids.append(res_obj.resource_version_id)
                agents.add(res_obj.agent)
                rv_dict[res_obj.resource_id] = res_obj

            # invert dependencies on purges
            for res_id, requires in previous_requires.items():
                res_obj = rv_dict[res_id]
                for require in requires:
                    req_id = Id.parse_id(require)

                    if req_id.resource_str() in rv_dict:
                        req_res = rv_dict[req_id.resource_str()]

                        req_res.attributes["requires"].append(
                            res_obj.resource_version_id)
                        res_obj.provides.append(req_res.resource_version_id)

        undeployable_ids: List[str] = [res.resource_id for res in undeployable]
        # get skipped for undeployable
        # worklist traversal of the provides tree: everything that transitively
        # depends on an undeployable resource becomes skippable
        work = list(undeployable_ids)
        skippeable: Set[str] = set()
        while len(work) > 0:
            current = work.pop()
            if current in skippeable:
                continue
            skippeable.add(current)
            work.extend(provides_tree[current])

        skip_list = sorted(list(skippeable - set(undeployable_ids)))

        try:
            cm = data.ConfigurationModel(
                environment=env.id,
                version=version,
                date=datetime.datetime.now().astimezone(),
                total=len(resources),
                version_info=version_info,
                undeployable=undeployable_ids,
                skipped_for_undeployable=skip_list,
            )
            await cm.insert()
        except asyncpg.exceptions.UniqueViolationError:
            raise ServerError(
                "The given version is already defined. Versions should be unique."
            )

        await data.Resource.insert_many(resource_objects)
        # purged resources were appended after the model total was computed
        await cm.update_fields(total=cm.total + len(resources_to_purge))

        for uk in unknowns:
            if "resource" not in uk:
                uk["resource"] = ""

            if "metadata" not in uk:
                uk["metadata"] = {}

            up = data.UnknownParameter(
                resource_id=uk["resource"],
                name=uk["parameter"],
                source=uk["source"],
                environment=env.id,
                version=version,
                metadata=uk["metadata"],
            )
            await up.insert()

        for agent in agents:
            await self.agentmanager_service.ensure_agent_registered(env, agent)

        # Don't log ResourceActions without resource_version_ids, because
        # no API call exists to retrieve them.
        if resource_version_ids:
            now = datetime.datetime.now().astimezone()
            log_line = data.LogLine.log(
                logging.INFO,
                "Successfully stored version %(version)d",
                version=version)
            self.resource_service.log_resource_action(env.id,
                                                      resource_version_ids,
                                                      logging.INFO, now,
                                                      log_line.msg)
            ra = data.ResourceAction(
                environment=env.id,
                version=version,
                resource_version_ids=resource_version_ids,
                action_id=uuid.uuid4(),
                action=const.ResourceAction.store,
                started=started,
                finished=now,
                messages=[log_line],
            )
            await ra.insert()

        LOGGER.debug("Successfully stored version %d", version)

        self.resource_service.clear_env_cache(env)

        auto_deploy = await env.get(data.AUTO_DEPLOY)
        if auto_deploy:
            LOGGER.debug("Auto deploying version %d", version)
            push_on_auto_deploy = cast(bool, await
                                       env.get(data.PUSH_ON_AUTO_DEPLOY))
            agent_trigger_method_on_autodeploy = cast(
                str, await env.get(data.AGENT_TRIGGER_METHOD_ON_AUTO_DEPLOY))
            agent_trigger_method_on_autodeploy = const.AgentTriggerMethod[
                agent_trigger_method_on_autodeploy]
            await self.release_version(env, version, push_on_auto_deploy,
                                       agent_trigger_method_on_autodeploy)

        return 200