Example 1
    async def upload_code(self, env: data.Environment, code_id: int,
                          resource: str, sources: JsonType) -> Apireturn:
        code = await data.Code.get_version(environment=env.id,
                                           version=code_id,
                                           resource=resource)
        if code is not None:
            raise ServerError(
                "Code for this version has already been uploaded.")

        hasherrors = any((k != hash_file(content[2].encode())
                          for k, content in sources.items()))
        if hasherrors:
            return 400, {
                "message": "Hashes in source map do not match to source_code"
            }

        for file_hash in self.file_slice.stat_file_internal(sources.keys()):
            self.file_slice.upload_file_internal(
                file_hash, sources[file_hash][2].encode())

        compact = {
            code_hash: (file_name, module, req)
            for code_hash, (file_name, module, _, req) in sources.items()
        }

        code = data.Code(environment=env.id,
                         version=code_id,
                         resource=resource,
                         source_refs=compact)
        await code.insert()

        return 200
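From the validation above one can infer the expected shape of the sources map: keys are content hashes and values are 4-tuples whose third element is the source code itself. The following is a minimal sketch of a payload that would pass the hash check; hash_file is assumed to be a plain hex digest over the encoded source, and all names and requirements are hypothetical:

import hashlib

def hash_file(content: bytes) -> str:
    # assumption: hash_file returns a SHA-1 hex digest of the raw bytes
    return hashlib.sha1(content).hexdigest()

source_code = "def plugin(): ..."
sources = {
    # code_hash: (file_name, module, source_code, [requirements])
    hash_file(source_code.encode()): (
        "plugins/demo.py",   # hypothetical file name
        "demo",              # hypothetical module name
        source_code,
        ["jinja2"],          # hypothetical requirements
    ),
}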
Example 2
    async def list_version(self,
                           env: data.Environment,
                           start: Optional[int] = None,
                           limit: Optional[int] = None) -> Apireturn:
        if (start is None and limit is not None) or (limit is None
                                                     and start is not None):
            raise ServerError("Start and limit should always be set together.")

        if start is None or limit is None:
            start = 0
            limit = data.APILIMIT

        if limit > data.APILIMIT:
            raise BadRequest(
                f"limit parameter cannot exceed {data.APILIMIT}, got {limit}.")

        models = await data.ConfigurationModel.get_versions(
            env.id, start, limit)
        count = len(models)

        d = {
            "versions": models,
            "start": start,
            "limit": limit,
            "count": count,
        }

        return 200, d
Example 3
    async def project_create(self, name: str,
                             project_id: Optional[uuid.UUID]) -> model.Project:
        if project_id is None:
            project_id = uuid.uuid4()

        try:
            project = data.Project(id=project_id, name=name)
            await project.insert()
        except asyncpg.exceptions.UniqueViolationError:
            raise ServerError(f"A project with name {name} already exists.")

        return project.to_dto()
Example 4
    def upload_file_internal(self, file_hash: str, content: bytes) -> None:
        file_name = os.path.join(self.server_slice._server_storage["files"],
                                 file_hash)

        if os.path.exists(file_name):
            raise ServerError("A file with this id already exists.")

        if hash_file(content) != file_hash:
            raise BadRequest("The hash does not match the content")

        with open(file_name, "wb+") as fd:
            fd.write(content)
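The method implements a small content-addressable store: each file is saved under its own hash, and the hash is verified before anything is written to disk. A standalone sketch of the same pattern, assuming hash_file is a plain SHA-1 hex digest of the raw bytes:

import hashlib
import os

def store_blob(storage_dir: str, content: bytes) -> str:
    # compute the content address; SHA-1 is an assumption here
    file_hash = hashlib.sha1(content).hexdigest()
    file_name = os.path.join(storage_dir, file_hash)
    # skip the write if the blob already exists: a file stored under
    # this name necessarily has the same content
    if not os.path.exists(file_name):
        with open(file_name, "wb") as fd:
            fd.write(content)
    return file_hash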
Example 5
    async def project_modify(self, project_id: uuid.UUID,
                             name: str) -> model.Project:
        try:
            project = await data.Project.get_by_id(project_id)
            if project is None:
                raise NotFound("The project with given id does not exist.")

            await project.update_fields(name=name)

            return project.to_dto()

        except asyncpg.exceptions.UniqueViolationError:
            raise ServerError(f"A project with name {name} already exists.")
Example 6
    async def set_setting(self, env: data.Environment, key: str,
                          value: model.EnvSettingType) -> Apireturn:
        try:
            original_env = env.to_dto()
            await env.set(key, value)
            warnings = await self._setting_change(env, key)
            await self.notify_listeners(EnvironmentAction.updated,
                                        env.to_dto(), original_env)
            return attach_warnings(200, None, warnings)
        except KeyError:
            raise NotFound()
        except ValueError as e:
            raise ServerError(f"Invalid value. {e}")
Example 7
    def get_file_internal(self, file_hash: str) -> bytes:
        """get_file, but on return code 200, content is not encoded """

        file_name = os.path.join(self.server_slice._server_storage["files"],
                                 file_hash)

        if not os.path.exists(file_name):
            raise NotFound()

        with open(file_name, "rb") as fd:
            content = fd.read()
            actualhash = hash_file(content)
            if actualhash == file_hash:
                return content

            # handle corrupt file
            if opt.server_delete_currupt_files.get():
                LOGGER.error(
                    "File corrupt, expected hash %s but found %s at %s, Deleting file",
                    file_hash, actualhash, file_name)
                try:
                    os.remove(file_name)
                except OSError:
                    LOGGER.exception("Failed to delete file %s", file_name)
                    raise ServerError(
                        f"File corrupt, expected hash {file_hash} but found {actualhash}. Failed to delete file, please "
                        "contact the server administrator")

                raise ServerError(
                    f"File corrupt, expected hash {file_hash} but found {actualhash}. "
                    "Deleting file, please re-upload the corrupt file.")
            else:
                LOGGER.error(
                    "File corrupt, expected hash %s but found %s at %s",
                    file_hash, actualhash, file_name)
                raise ServerError(
                    f"File corrupt, expected hash {file_hash} but found {actualhash}, please contact the server administrator"
                )
Example 8
    async def environment_create(
        self,
        project_id: uuid.UUID,
        name: str,
        repository: str,
        branch: str,
        environment_id: Optional[uuid.UUID],
        description: str = "",
        icon: str = "",
    ) -> model.Environment:
        if environment_id is None:
            environment_id = uuid.uuid4()

        if (repository is None
                and branch is not None) or (repository is not None
                                            and branch is None):
            raise BadRequest("Repository and branch should be set together.")

        # fetch the project first
        project = await data.Project.get_by_id(project_id)
        if project is None:
            raise NotFound(
                "The project id for the environment does not exist.")

        # check if an environment with this name is already defined in this project
        envs = await data.Environment.get_list(project=project_id, name=name)
        if len(envs) > 0:
            raise ServerError(
                f"Project {project.name} (id={project.id}) already has an environment with name {name}"
            )

        self.validate_icon(icon)

        env = data.Environment(
            id=environment_id,
            name=name,
            project=project_id,
            repo_url=repository,
            repo_branch=branch,
            description=description,
            icon=icon,
        )
        try:
            await env.insert()
        except StringDataRightTruncationError:
            raise BadRequest(
                "Maximum size of the icon data url or the description exceeded"
            )
        await self.notify_listeners(EnvironmentAction.created, env.to_dto())
        return env.to_dto()
Example 9
    async def environment_settings_set(
            self, env: data.Environment, key: str,
            value: model.EnvSettingType) -> ReturnValue[None]:
        try:
            original_env = env.to_dto()
            await env.set(key, value)
            warnings = await self._setting_change(env, key)
            result: ReturnValue[None] = ReturnValue(response=None)
            if warnings:
                result.add_warnings(warnings)
            await self.notify_listeners(EnvironmentAction.updated,
                                        env.to_dto(), original_env)
            return result
        except KeyError:
            raise NotFound()
        except ValueError:
            raise ServerError("Invalid value")
Example 10
    async def upload_code_batched(self, env: data.Environment, code_id: int,
                                  resources: JsonType) -> Apireturn:
        # validate
        for rtype, sources in resources.items():
            if not isinstance(rtype, str):
                raise BadRequest(
                    "All keys in the resources map must be strings")
            if not isinstance(sources, dict):
                raise BadRequest(
                    "All values in the resources map must be dicts")

            for name, refs in sources.items():
                if not isinstance(name, str):
                    raise BadRequest(
                        "All keys in the sources map must be strings")
                if not isinstance(refs, (list, tuple)):
                    raise BadRequest(
                        "All values in the sources map must be lists or tuple")
                if (len(refs) != 3 or not isinstance(refs[0], str)
                        or not isinstance(refs[1], str)
                        or not isinstance(refs[2], list)):
                    raise BadRequest(
                        "The values in the source map should be of the form (filename, module, [requirements])"
                    )

        allrefs = [
            ref for sourcemap in resources.values()
            for ref in sourcemap.keys()
        ]

        val = self.file_slice.stat_file_internal(allrefs)

        if len(val) != 0:
            raise BadRequest("Not all file references provided are valid",
                             details={"references": val})

        code = await data.Code.get_versions(environment=env.id,
                                            version=code_id)
        oldmap: Dict[str, data.Code] = {c.resource: c for c in code}

        new = {k: v for k, v in resources.items() if k not in oldmap}
        conflict = [
            k for k, v in resources.items()
            if k in oldmap and oldmap[k].source_refs != v
        ]

        if len(conflict) > 0:
            raise ServerError(
                "Some of these items already exists, but with different source files",
                details={"references": conflict})

        newcodes = [
            data.Code(environment=env.id,
                      version=code_id,
                      resource=resource,
                      source_refs=hashes) for resource, hashes in new.items()
        ]

        await data.Code.insert_many(newcodes)

        return 200
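The validation loop fixes the wire format of the resources argument: a map of resource types to source maps, where each source map entry is a file-content hash pointing at a (filename, module, [requirements]) triple. A hypothetical payload that would pass all the checks above (the type name, hash, and values are made up):

resources = {
    # resource type -> {code hash -> (filename, module, [requirements])}
    "std::File": {
        "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33": (
            "plugins/file.py",   # hypothetical filename
            "file",              # hypothetical module
            ["requests>=2.0"],   # hypothetical requirements list
        ),
    },
}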
Example 11
    async def put_version(
        self,
        env: data.Environment,
        version: int,
        resources: List[JsonType],
        resource_state: Dict[ResourceIdStr, const.ResourceState],
        unknowns: List[Dict[str, PrimitiveTypes]],
        version_info: JsonType,
        compiler_version: Optional[str] = None,
    ) -> Apireturn:
        """
        :param resources: a list of serialized resources
        :param unknowns: a list of dicts, each with the following structure:
            {
                "resource": ResourceIdStr,
                "parameter": str,
                "source": str
            }
        :param version_info:
        :param compiler_version:
        :return:
        """

        if not compiler_version:
            raise BadRequest(
                "Older compiler versions are no longer supported, please update your compiler"
            )

        if version > env.last_version:
            raise BadRequest(
                f"The version number used is {version} "
                f"which is higher than the last outstanding reservation {env.last_version}"
            )
        if version <= 0:
            raise BadRequest(
                f"The version number used ({version}) is not positive")

        started = datetime.datetime.now().astimezone()

        agents = set()
        # lookup table for all resource versions, keyed by resource id
        rv_dict: Dict[ResourceIdStr, data.Resource] = {}
        # reverse dependency tree: maps a resource id to the resources that require it
        provides_tree: Dict[str, List[str]] = defaultdict(lambda: [])
        # all cross-agent dependencies, as (dependent, required) tuples
        cross_agent_dep = []
        # list of all resources which are undeployable
        undeployable: List[data.Resource] = []

        resource_objects = []
        resource_version_ids = []
        for res_dict in resources:
            res_obj = data.Resource.new(env.id, res_dict["id"])
            if res_obj.resource_id in resource_state:
                res_obj.status = const.ResourceState[resource_state[
                    res_obj.resource_id]]
                if res_obj.status in const.UNDEPLOYABLE_STATES:
                    undeployable.append(res_obj)

            # collect all agents
            agents.add(res_obj.agent)

            attributes = {}
            for field, value in res_dict.items():
                if field != "id":
                    attributes[field] = value

            res_obj.attributes = attributes
            resource_objects.append(res_obj)
            resource_version_ids.append(res_obj.resource_version_id)

            rv_dict[res_obj.resource_id] = res_obj

            # find cross agent dependencies
            agent = res_obj.agent
            resc_id = res_obj.resource_id
            if "requires" not in attributes:
                LOGGER.warning(
                    "Received resource without requires attribute (%s)",
                    res_obj.resource_id)
            else:
                for req in attributes["requires"]:
                    rid = Id.parse_id(req)
                    provides_tree[rid.resource_str()].append(resc_id)
                    if rid.get_agent_name() != agent:
                        # it is a CAD
                        cross_agent_dep.append((res_obj, rid))

        # hook up all CADs
        for f, t in cross_agent_dep:
            res_obj = rv_dict[t.resource_str()]
            res_obj.provides.append(f.resource_version_id)

        # detect failed compiles
        def safe_get(input: JsonType, key: str, default: object) -> object:
            if not isinstance(input, dict):
                return default
            if key not in input:
                return default
            return input[key]

        metadata: JsonType = safe_get(version_info, const.EXPORT_META_DATA, {})
        compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "")
        failed = compile_state == const.Compilestate.failed

        resources_to_purge: List[data.Resource] = []
        if not failed and (await env.get(PURGE_ON_DELETE)):
            # search for deleted resources (purge_on_delete)
            resources_to_purge = await data.Resource.get_deleted_resources(
                env.id, version, set(rv_dict.keys()))

            previous_requires = {}
            for res in resources_to_purge:
                LOGGER.warning("Purging %s, purged resource based on %s" %
                               (res.resource_id, res.resource_version_id))

                attributes = res.attributes.copy()
                attributes["purged"] = True
                attributes["requires"] = []
                res_obj = data.Resource.new(
                    env.id,
                    resource_version_id=ResourceVersionIdStr(
                        "%s,v=%s" % (res.resource_id, version)),
                    attributes=attributes,
                )
                resource_objects.append(res_obj)

                previous_requires[
                    res_obj.resource_id] = res.attributes["requires"]
                resource_version_ids.append(res_obj.resource_version_id)
                agents.add(res_obj.agent)
                rv_dict[res_obj.resource_id] = res_obj

            # invert dependencies on purges
            for res_id, requires in previous_requires.items():
                res_obj = rv_dict[res_id]
                for require in requires:
                    req_id = Id.parse_id(require)

                    if req_id.resource_str() in rv_dict:
                        req_res = rv_dict[req_id.resource_str()]

                        req_res.attributes["requires"].append(
                            res_obj.resource_version_id)
                        res_obj.provides.append(req_res.resource_version_id)

        undeployable_ids: List[str] = [res.resource_id for res in undeployable]
        # get skipped for undeployable
        work = list(undeployable_ids)
        skippeable: Set[str] = set()
        while len(work) > 0:
            current = work.pop()
            if current in skippeable:
                continue
            skippeable.add(current)
            work.extend(provides_tree[current])

        skip_list = sorted(list(skippeable - set(undeployable_ids)))

        try:
            cm = data.ConfigurationModel(
                environment=env.id,
                version=version,
                date=datetime.datetime.now().astimezone(),
                total=len(resources),
                version_info=version_info,
                undeployable=undeployable_ids,
                skipped_for_undeployable=skip_list,
            )
            await cm.insert()
        except asyncpg.exceptions.UniqueViolationError:
            raise ServerError(
                "The given version is already defined. Versions should be unique."
            )

        await data.Resource.insert_many(resource_objects)
        await cm.update_fields(total=cm.total + len(resources_to_purge))

        for uk in unknowns:
            if "resource" not in uk:
                uk["resource"] = ""

            if "metadata" not in uk:
                uk["metadata"] = {}

            up = data.UnknownParameter(
                resource_id=uk["resource"],
                name=uk["parameter"],
                source=uk["source"],
                environment=env.id,
                version=version,
                metadata=uk["metadata"],
            )
            await up.insert()

        for agent in agents:
            await self.agentmanager_service.ensure_agent_registered(env, agent)

        # Don't log ResourceActions without resource_version_ids, because
        # no API call exists to retrieve them.
        if resource_version_ids:
            now = datetime.datetime.now().astimezone()
            log_line = data.LogLine.log(
                logging.INFO,
                "Successfully stored version %(version)d",
                version=version)
            self.resource_service.log_resource_action(env.id,
                                                      resource_version_ids,
                                                      logging.INFO, now,
                                                      log_line.msg)
            ra = data.ResourceAction(
                environment=env.id,
                version=version,
                resource_version_ids=resource_version_ids,
                action_id=uuid.uuid4(),
                action=const.ResourceAction.store,
                started=started,
                finished=now,
                messages=[log_line],
            )
            await ra.insert()

        LOGGER.debug("Successfully stored version %d", version)

        self.resource_service.clear_env_cache(env)

        auto_deploy = await env.get(data.AUTO_DEPLOY)
        if auto_deploy:
            LOGGER.debug("Auto deploying version %d", version)
            push_on_auto_deploy = cast(
                bool, await env.get(data.PUSH_ON_AUTO_DEPLOY))
            agent_trigger_method_on_autodeploy = cast(
                str, await env.get(data.AGENT_TRIGGER_METHOD_ON_AUTO_DEPLOY))
            agent_trigger_method_on_autodeploy = const.AgentTriggerMethod[
                agent_trigger_method_on_autodeploy]
            await self.release_version(env, version, push_on_auto_deploy,
                                       agent_trigger_method_on_autodeploy)

        return 200
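The worklist near the end of put_version propagates undeployability through the reverse-dependency tree: any resource that transitively requires an undeployable resource ends up in the skipped-for-undeployable list. A self-contained toy run of that loop (the resource ids are made up):

from collections import defaultdict
from typing import Dict, List, Set

# toy provides tree: resource -> resources that require it
provides_tree: Dict[str, List[str]] = defaultdict(list)
provides_tree["a"] = ["b", "c"]   # b and c require a
provides_tree["c"] = ["d"]        # d requires c

undeployable_ids: List[str] = ["a"]

work = list(undeployable_ids)
skippeable: Set[str] = set()
while work:
    current = work.pop()
    if current in skippeable:
        continue
    skippeable.add(current)
    work.extend(provides_tree[current])

# everything downstream of "a" is skipped, minus "a" itself
print(sorted(skippeable - set(undeployable_ids)))  # ['b', 'c', 'd']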