Example No. 1
    def get(cls, username: str, password: str) -> "DeveloperToken":
        response = requests.post(
            "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
            auth=('developer', ''),
            data={
                "username": username,
                "password": password,
                "grant_type": "password",
            },
        )
        print(
            f"Get DevToken response:\n{json.dumps(response.json(), indent=4)}"
        )
        response.raise_for_status()
        payload = ensureJsonObject(response.json())

        return DeveloperToken(
            access_token=ensureJsonString(payload.get("access_token")),
            expires_in=ensureJsonInt(payload.get("expires_in")),
            not_before_policy=ensureJsonInt(payload.get("not-before-policy")),
            refresh_expires_in=ensureJsonInt(
                payload.get("refresh_expires_in")),
            refresh_token=ensureJsonString(payload.get("refresh_token")),
            scope=ensureJsonString(payload.get("scope")),
            token_type=ensureJsonString(payload.get("token_type")),
        )
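All of these snippets rely on a family of ensureJson* validators (ensureJsonObject, ensureJsonInt, ensureJsonString, ensureJsonArray, ...). Their real implementations ship with the project these examples come from; the sketch below is only an illustration of the assumed contract: each helper narrows a loosely typed JSON value to the expected Python type and raises on a mismatch.

# Hypothetical minimal sketch of the ensureJson* helpers assumed by the
# examples on this page; the actual project provides its own implementations.
from typing import Any, Dict

def ensureJsonObject(value: Any) -> Dict[str, Any]:
    # Narrow a parsed JSON value to a dict, or fail loudly
    if not isinstance(value, dict):
        raise TypeError(f"Expected JSON object, got {value!r}")
    return value

def ensureJsonInt(value: Any) -> int:
    # bool is a subclass of int in Python, so reject it explicitly
    if isinstance(value, bool) or not isinstance(value, int):
        raise TypeError(f"Expected JSON int, got {value!r}")
    return value

def ensureJsonString(value: Any) -> str:
    if not isinstance(value, str):
        raise TypeError(f"Expected JSON string, got {value!r}")
    return value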
Example No. 2
 def from_json_data(cls, data: JsonValue) -> "Color":
     data_dict = ensureJsonObject(data)
     return Color(
         r=np.uint8(ensureJsonInt(data_dict.get("r", 0))),
         g=np.uint8(ensureJsonInt(data_dict.get("g", 0))),
         b=np.uint8(ensureJsonInt(data_dict.get("b", 0))),
     )
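Assuming from_json_data is exposed as a classmethod on Color (the decorator is cropped out of the snippet above), a call site might look like the following; the input dict is made up for illustration:

# Hypothetical usage of the Color parser shown above
color = Color.from_json_data({"r": 255, "g": 128})
# The missing "b" channel falls back to 0 and each channel is stored as np.uint8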
Example No. 3
 def from_json_value(cls, value: JsonValue) -> "JobResources":
     value_obj = ensureJsonObject(value)
     return JobResources(
         Memory=Memory.from_json_value(value_obj.get("Memory")),
         Runtime=Seconds(ensureJsonInt(value_obj.get("Runtime"))),
         CPUs=ensureJsonInt(value_obj.get("CPUs")),
         Nodes=ensureJsonInt(value_obj.get("Nodes")),
         CPUsPerNode=ensureJsonInt(value_obj.get("CPUsPerNode")),
         Reservation=ensureJsonString(value_obj.get("Reservation")),
     )
Example No. 4
def wait_until_jobs_completed(workflow: PixelClassificationWorkflow,
                              timeout: float = 50):
    wait_time = 0.5
    while timeout > 0:
        export_status: JsonObject = workflow.export_applet._get_json_state()
        jobs = ensureJsonArray(export_status["jobs"])
        for job in jobs:
            job_obj = ensureJsonObject(job)
            num_args = ensureJsonInt(job_obj["num_args"])
            num_completed_steps = ensureJsonInt(job_obj["num_completed_steps"])
            if num_completed_steps < num_args:
                # At least one job is still in progress: wait and poll again
                print("Jobs not done yet. Waiting...")
                time.sleep(wait_time)
                timeout -= wait_time
                break
        else:
            # The for loop finished without a break, so every job is complete
            return
    raise TimeoutError("Waiting on jobs timed out!")
Example No. 5
 def from_json_value(cls, value: JsonValue) -> "BucketObject":
     value_dict = ensureJsonObject(value)
     return BucketObject(
         hash_=ensureJsonString(value_dict.get("hash")),
         last_modified=datetime.fromisoformat(
             ensureJsonString(value_dict.get("last_modified"))),
         bytes_=ensureJsonInt(value_dict.get("bytes")),
         name=PurePosixPath(ensureJsonString(value_dict.get("name"))),
         content_type=ensureJsonString(value_dict.get("content_type")),
     )
Example No. 6
 def from_json_value(cls, data: JsonValue):
     data_dict = ensureJsonObject(data)
     return PrecomputedChunksInfo(
         type_=ensureJsonString(data_dict.get("type")),
         data_type=np.dtype(ensureJsonString(data_dict.get("data_type"))), #type: ignore
         num_channels=ensureJsonInt(data_dict.get("num_channels")),
         scales=tuple(
             PrecomputedChunksScale.from_json_value(raw_scale)
             for raw_scale in ensureJsonArray(data_dict.get("scales"))
         )
     )
Example No. 7
 def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScaleSink":
     value_obj = ensureJsonObject(value)
     return PrecomputedChunksScaleSink(
         filesystem=JsonableFilesystem.from_json_value(
             value_obj.get("filesystem")),
         info_dir=PurePosixPath(ensureJsonString(
             value_obj.get("info_dir"))),
         scale=PrecomputedChunksScale.from_json_value(
             value_obj.get("scale")),
         dtype=np.dtype(ensureJsonString(value_obj.get("dtype"))),
         num_channels=ensureJsonInt(value_obj.get("num_channels")))
Example No. 8
 def from_json_value(cls, data: JsonValue):
     data_dict = ensureJsonObject(data)
     type_ = ensureJsonString(data_dict.get("type"))
     if type_ != "image":
         raise ValueError(f"Bad 'type' marker value: {type_}")
     return PrecomputedChunksInfo(
         type_=type_,
         data_type=np.dtype(ensureJsonString(data_dict.get("data_type"))),
         num_channels=ensureJsonInt(data_dict.get("num_channels")),
         scales=tuple(
             PrecomputedChunksScale.from_json_value(raw_scale)
             for raw_scale in ensureJsonArray(data_dict.get("scales"))))
Example No. 9
    def get(cls, *, client_id: str, client_secret: str) -> "ServiceToken":
        response = requests.post(
            "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
            auth=('developer', ''),
            data={
                "client_id": client_id,
                "client_secret": client_secret,
                "scope": "email profile team group clb.wiki.read clb.wiki.write", #FIXME
                "grant_type": "client_credentials",
            },
        )
        print(f"Get ServiceToken response:\n:{json.dumps(response.json(), indent=4)}")
        response.raise_for_status()
        payload = ensureJsonObject(response.json())

        return ServiceToken(
            access_token=ensureJsonString(payload.get("access_token")),
            expires_in=ensureJsonInt(payload.get("expires_in")),
            not_before_policy=ensureJsonInt(payload.get("not-before-policy")),
            refresh_expires_in=ensureJsonInt(payload.get("refresh_expires_in")),
            scope=ensureJsonString(payload.get("scope")),
            token_type=ensureJsonString(payload.get("token_type")),
        )
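A hypothetical call site for the client-credentials flow above, assuming get is a classmethod on ServiceToken; the environment variable names are made up for illustration:

import os

# Hypothetical usage; credentials would come from wherever the deployment keeps them
token = ServiceToken.get(
    client_id=os.environ["EBRAINS_CLIENT_ID"],
    client_secret=os.environ["EBRAINS_CLIENT_SECRET"],
)
print(token.token_type, token.expires_in)  # e.g. "Bearer" and a lifetime in seconds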
Example No. 10
 def from_json_data(cls, data: JsonValue) -> "Bzip2Compressor":
     return Bzip2Compressor(blockSize=ensureJsonInt(
         ensureJsonObject(data).get("blockSize", 9)))
Example No. 11
 def from_json_data(cls, data: JsonValue) -> "GzipCompressor":
     return GzipCompressor(
         level=ensureJsonInt(ensureJsonObject(data).get("level", 1)))
Example No. 12
 def from_json_data(cls, data: JsonValue) -> "XzCompressor":
     return XzCompressor(
         preset=ensureJsonInt(ensureJsonObject(data).get("preset")))
Example No. 13
    def from_json_value(cls, value: JsonValue) -> "OidcClient":
        value_obj = ensureJsonObject(value)
        raw_rootUrl = ensureJsonString(value_obj.get("rootUrl"))
        rootUrl = Url.parse(raw_rootUrl)
        if rootUrl is None:
            rootUrl = Url.parse(
                raw_rootUrl +
                "/")  # it's possible to register a rootUrl without a path -.-
        assert rootUrl is not None

        redirectUris: List[Url] = []
        for raw_redirect_uri in ensureJsonStringArray(
                value_obj.get("redirectUris")):
            try:
                redirect_uri = Url.parse(raw_redirect_uri)
                assert redirect_uri is not None
                redirectUris.append(redirect_uri)
            except ValueError:
                uri = rootUrl.joinpath(
                    PurePosixPath(raw_redirect_uri)
                )  # FIXME: do leading slashes mean root here too?
                redirectUris.append(uri)

        baseUrl = Url.parse(ensureJsonString(value_obj.get("baseUrl")))
        assert baseUrl is not None
        return OidcClient(
            alwaysDisplayInConsole=ensureJsonBoolean(
                value_obj.get("alwaysDisplayInConsole")),
            baseUrl=baseUrl,
            bearerOnly=ensureJsonBoolean(value_obj.get("bearerOnly")),
            clientAuthenticatorType=ensureJsonString(
                value_obj.get("clientAuthenticatorType")),
            clientId=ensureJsonString(value_obj.get("clientId")),
            consentRequired=ensureJsonBoolean(
                value_obj.get("consentRequired")),
            defaultClientScopes=ensureJsonStringArray(
                value_obj.get("defaultClientScopes")),
            description=ensureJsonString(value_obj.get("description")),
            directAccessGrantsEnabled=ensureJsonBoolean(
                value_obj.get("directAccessGrantsEnabled")),
            enabled=ensureJsonBoolean(value_obj.get("enabled")),
            frontchannelLogout=ensureJsonBoolean(
                value_obj.get("frontchannelLogout")),
            fullScopeAllowed=ensureJsonBoolean(
                value_obj.get("fullScopeAllowed")),
            id=ensureJsonString(value_obj.get("id")),
            implicitFlowEnabled=ensureJsonBoolean(
                value_obj.get("implicitFlowEnabled")),
            name=ensureJsonString(value_obj.get("name")),
            nodeReRegistrationTimeout=ensureJsonInt(
                value_obj.get("nodeReRegistrationTimeout")),
            notBefore=ensureJsonInt(value_obj.get("notBefore")),
            optionalClientScopes=set(
                Scope.from_json_value(s) for s in ensureJsonArray(
                    value_obj.get("optionalClientScopes"))),
            protocol=ensureJsonString(value_obj.get("protocol")),
            publicClient=ensureJsonBoolean(value_obj.get("publicClient")),
            redirectUris=tuple(redirectUris),
            registrationAccessToken=ensureJsonString(
                value_obj.get("registrationAccessToken")),
            rootUrl=rootUrl,
            secret=ensureJsonString(value_obj.get("secret")),
            serviceAccountsEnabled=ensureJsonBoolean(
                value_obj.get("serviceAccountsEnabled")),
            standardFlowEnabled=ensureJsonBoolean(
                value_obj.get("standardFlowEnabled")),
            surrogateAuthRequired=ensureJsonBoolean(
                value_obj.get("surrogateAuthRequired")),
            webOrigins=ensureJsonStringArray(value_obj.get("webOrigins")),
        )
Example No. 14
 def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScale5D":
     value_obj = ensureJsonObject(value)
     raw_scale = PrecomputedChunksScale.from_json_value(value)
     return PrecomputedChunksScale5D.from_raw_scale(raw_scale, num_channels=ensureJsonInt(value_obj.get("num_channels")))
Example No. 15
    async def spawn_session(self, ebrains_login: EbrainsLogin,
                            request: web.Request) -> web.Response:
        raw_payload = await request.content.read()
        try:
            payload_dict = ensureJsonObject(
                json.loads(raw_payload.decode('utf8')))
            session_duration = Minutes(
                ensureJsonInt(payload_dict.get("session_duration_minutes")))
        except Exception:
            return web.json_response({"error": "Bad payload"}, status=400)
        session_id = uuid.uuid4()
        quota: NodeSeconds = NodeSeconds(100 * 60 * 60)  #FIXME

        user_info = await ebrains_login.user_token.get_userinfo(
            self.http_client_session)

        if user_info.sub != uuid.UUID(
                "bdca269c-f207-4cdb-8b68-a562e434faed"):  #FIXME
            return web.json_response(
                {"error": "This user can't allocate sessions yet"}, status=400)

        if user_info.sub not in self.session_user_locks:
            self.session_user_locks[user_info.sub] = asyncio.Lock()
        async with self.session_user_locks[user_info.sub]:
            this_months_jobs_result = await self.session_launcher.get_jobs(
                starttime=datetime.today().replace(day=1))
            if isinstance(this_months_jobs_result, Exception):
                print(
                    f"Could not get session information:\n{this_months_jobs_result}\n",
                    file=sys.stderr)
                return web.json_response(
                    {"error": "Could not get session information"}, status=500)

            for job in this_months_jobs_result:
                if not job.belongs_to(user_info=user_info):
                    continue
                if job.is_running():
                    return web.json_response(
                        {"error": "Already running a session"}, status=400)
                # Deduct node-seconds already consumed by this user's jobs this month
                quota = NodeSeconds(quota - job.duration * job.num_nodes)

            if quota <= 0:  #FIXME
                return web.json_response({"error": "Out of quota"}, status=400)

            session_result = await self.session_launcher.launch(
                user_info=user_info,
                time=Minutes(min(quota, session_duration)),
                ebrains_user_token=ebrains_login.user_token,
                session_id=session_id,
            )

            if isinstance(session_result, Exception):
                print(f"Could not create compute session:\n{session_result}",
                      file=sys.stderr)
                return web.json_response(
                    {"error": "Could not create compute session"}, status=500)

            return web.json_response(
                {
                    "id": str(session_id),
                    "url": self._make_session_url(session_id).raw,
                })