def search_resources_post(project_id, location, dataset_id, fhir_store_id):
    """Searches for resources in the given FHIR store.

    Uses the _search POST method and a query string containing the
    information to search for. In this sample, the search criteria is
    'family:exact=Smith' on a Patient resource.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "json" module. The function imports every
    # other dependency locally but used json.dumps without importing json,
    # which raises NameError unless the enclosing module happens to import it.
    import json

    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    )
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"]
    )
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)

    fhir_store_path = "{}/datasets/{}/fhirStores/{}/fhir".format(
        url, dataset_id, fhir_store_id
    )

    resource_path = "{}/Patient/_search?family:exact=Smith".format(fhir_store_path)

    # Sets required application/fhir+json header on the request
    headers = {"Content-Type": "application/fhir+json;charset=utf-8"}

    response = session.post(resource_path, headers=headers)
    response.raise_for_status()

    resources = response.json()

    print(
        "Using POST request, found a total of {} Patient resources:".format(
            resources["total"]
        )
    )
    print(json.dumps(resources, indent=2))

    return resources
def create_session_from_gcp_credentials(
        google_credentials: Optional[Any] = None) -> requests.Session:
    """Creates an authorized session for Google Cloud Platform.

    Parameters
    ----------
    google_credentials: Any
        Google Cloud credentials (see
        https://cloud.google.com/docs/authentication/production for more
        information on Google Cloud authentication). If not set, will be
        initialized to ``google.auth.default()``.

    Returns
    -------
    requests.Session
        Google Cloud authorized session.

    Note
    ----
    Credentials will be read from environment variable
    ``GOOGLE_APPLICATION_CREDENTIALS`` if set.
    """
    credentials = google_credentials
    # Fall back to Application Default Credentials when none were supplied.
    if credentials is None:
        credentials, _ = google.auth.default(
            scopes=['https://www.googleapis.com/auth/cloud-platform'])
    return google_requests.AuthorizedSession(credentials)
def classify_by_package(self, query):
    """Classify the given query text with the Google Natural Language API.

    Sends the text to the v1beta2 ``documents:classifyText`` endpoint using
    the ``question_classification_v2_0`` model and returns the name of the
    first category in the response.

    Args:
        query: Plain text to classify.

    Returns:
        The ``name`` of the first classification category.
    """
    credentials = service_account.Credentials.from_service_account_file(
        "CommsMedia-e6083b82e793.json")
    credentials = credentials.with_scopes(
        ['https://www.googleapis.com/auth/cloud-platform'])
    # Create a requests Session object with the credentials.
    session = requests.AuthorizedSession(credentials)

    payload = {
        "document": {
            "type": "PLAIN_TEXT",
            "content": query
        },
        "classificationConfig": {
            "model": "question_classification_v2_0"
        }
    }
    response = session.request(
        'POST',
        'https://language.googleapis.com/v1beta2/documents:classifyText',
        headers={'content-type': "application/json"},
        data=json.dumps(payload))
    return response.json()['categories'][0]['name']
def get_raster_profile(request):
    """
    Cloud function that gets a raster from Google Cloud Storage and returns
    its `rasterio` profile.

    :param request: flask.Request with ``filename`` and ``bucket`` query args.
    :return: JSON response containing the raster's rasterio profile.
    """
    try:
        filename = request.args["filename"]
    except KeyError:
        logging.error("'filename' parameter is missing", exc_info=True)
        flask.abort(400)
    try:
        bucket = request.args["bucket"]
    except KeyError:
        logging.error("'bucket' parameter is missing", exc_info=True)
        flask.abort(400)

    # Get the credentials and project ID from the environment.
    # NOTE: the original printed credentials.to_json() (leaking secret key
    # material to logs) and evaluated the stray expression `google.auth.C`,
    # which raises AttributeError at runtime; both removed.
    credentials, project = google.auth.default()

    # Open the raster through rasterio's GCS session and read its profile.
    with rasterio.Env(session_class=GSSession(credentials.to_json())):
        with rasterio.open(f"gs://{bucket}/{filename}") as src:
            profile = src.profile
    return flask.jsonify(profile)
def __init__(
        self,
        bucket,
        blob,
        min_part_size=_DEFAULT_MIN_PART_SIZE,
        client=None,  # type: google.cloud.storage.Client
):
    """Open a resumable-upload writer for ``gs://<bucket>/<blob>``.

    Args:
        bucket: Name of the destination GCS bucket.
        blob: Name of the destination blob within the bucket.
        min_part_size: Minimum bytes buffered before a part is uploaded;
            must be a multiple of 256KB and at least the enforced minimum.
        client: Optional pre-built storage client; a default client is
            created when omitted.
    """
    storage_client = client if client is not None else google.cloud.storage.Client()
    self._client = storage_client
    self._credentials = self._client._credentials  # noqa
    self._blob = self._client.bucket(bucket).blob(
        blob)  # type: google.cloud.storage.Blob

    # The GCS resumable-upload protocol requires 256KB-aligned chunks.
    assert min_part_size % _REQUIRED_CHUNK_MULTIPLE == 0, 'min part size must be a multiple of 256KB'
    assert min_part_size >= _MIN_MIN_PART_SIZE, 'min part size must be greater than 256KB'
    self._min_part_size = min_part_size

    # Upload progress counters and the in-memory buffer for the current part.
    self._total_size = 0
    self._total_parts = 0
    self._bytes_uploaded = 0
    self._current_part = io.BytesIO()

    self._session = google_requests.AuthorizedSession(self._credentials)

    #
    # https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#start-resumable
    #
    self._resumable_upload_url = self._blob.create_resumable_upload_session()

    #
    # This member is part of the io.BufferedIOBase interface.
    #
    self.raw = None
def __init__(
    self,
    endpoint: str,
    auth_type: AuthType,
    service_account_json: Optional[str] = "",
    username: Optional[str] = "",
    password: Optional[str] = "",
    client_id: Optional[str] = "",
):
    """Configure an HTTP client for *endpoint* with the chosen auth scheme.

    Args:
        endpoint: Base URL of the service being called.
        auth_type: One of the AuthType members selecting the OAuth flow.
        service_account_json: Path to a service-account key file
            (SERVICE_ACCOUNT flow only).
        username: Resource-owner username (PASSWORD flow only).
        password: Resource-owner password (PASSWORD flow only).
        client_id: OAuth client ID (PASSWORD flow only).

    Raises:
        ValueError: If *auth_type* is not a recognized AuthType.
    """
    self._endpoint = endpoint
    self.token_response_cache: Dict[Tuple[str, str], Dict[str, Any]] = {}
    self._req_params: Dict[str, str] = {}
    self.req = requests.Session()
    if auth_type is AuthType.SERVICE_ACCOUNT:
        credentials = service_account.Credentials.from_service_account_file(
            service_account_json).with_scopes(["email"])
        self.req = google_requests.AuthorizedSession(credentials)
        self._req_params = {"grant_type": "client_credentials"}
    elif auth_type is AuthType.PASSWORD:
        self._req_params = {
            "grant_type": "password",
            "username": username,
            "password": password,
            "client_id": client_id,
        }
    elif auth_type is AuthType.NONE:
        # No special setup required
        pass
    else:
        # Something unknown was passed in. The original raised a bare string,
        # which is itself a TypeError in Python 3; raise a real exception.
        raise ValueError("Unknown OAuth authentication Type")
    self.parameterized_url = False
def create_session_from_gcp_credentials(
    google_credentials: Optional[Any] = None
) -> requests.Session:
    '''Creates an authorized session for Google Cloud Platform.

    Parameters
    ----------
    google_credentials: Any
        Google cloud credentials.
        (see https://cloud.google.com/docs/authentication/production
        for more information on Google cloud authentication).
        If not set, will be initialized to ``google.auth.default()``

    Returns
    -------
    requests.Session
        Google cloud authorized session

    '''
    # The GCP transport is an optional extra; import lazily and translate a
    # missing dependency into an actionable install hint.
    try:
        from google.auth.transport import requests as google_requests
    except ImportError:
        raise ImportError(
            'The dicomweb-client package needs to be installed with the '
            '"gcp" extra requirements to support interaction with the '
            'Google Cloud Healthcare API: pip install dicomweb-client[gcp]'
        )
    if google_credentials is None:
        try:
            import google.auth
        except ImportError:
            raise ImportError(
                'The dicomweb-client package needs to be installed with the '
                '"gcp" extra requirements to support interaction with the '
                'Google Cloud Healthcare API: pip install dicomweb-client[gcp]'
            )
        google_credentials, _ = google.auth.default(
            scopes=['https://www.googleapis.com/auth/cloud-platform']
        )
    logger.debug('initialize, authenticate and authorize HTTP session')
    return google_requests.AuthorizedSession(google_credentials)
def initialize_http_session(
    credentials_file_path: Optional[Union[str, pathlib.Path]],
    scopes: Optional[Sequence[str]] = None) -> requests.AuthorizedSession:
  """Initializes an authorized HTTP session, based on the given credentials.

  Args:
    credentials_file_path: Absolute or relative path to a JSON file containing
      the private OAuth 2.0 credentials of a Google Cloud Platform service
      account. Optional - the default is ".chronicle_credentials.json" in the
      user's home directory. Keep it secret, keep it safe.
    scopes: A list of OAuth scopes (https://oauth.net/2/scope/) that are
      associated with the end points to be accessed. The default is the
      Chronicle API scope.

  Returns:
    HTTP session object to send authorized requests and receive responses.

  Raises:
    OSError: Failed to read the given file, e.g. not found, no read access
      (https://docs.python.org/library/exceptions.html#os-exceptions).
    ValueError: Invalid file contents.
  """
  key_file = str(credentials_file_path or DEFAULT_CREDENTIALS_FILE)
  requested_scopes = scopes or AUTHORIZATION_SCOPES
  credentials = service_account.Credentials.from_service_account_file(
      key_file, scopes=requested_scopes)
  return requests.AuthorizedSession(credentials)
def com():
    """Call the hello-server Cloud Endpoints API with default credentials.

    Uses Application Default Credentials to issue an authorized GET and
    prints the HTTP status code, reason phrase and response body.
    """
    credentials, project_id = google.auth.default()
    authed_session = requests.AuthorizedSession(credentials)
    url = "https://www.endpoints.robotrent.cloud.goog/apis/hello-server"
    response = authed_session.request("GET", url)
    print(response.status_code, response.reason, response.text)
def execute_bundle(
    project_id,
    location,
    dataset_id,
    fhir_store_id,
    bundle,
):
    """Executes the operations in the given bundle.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "json" module. The function imports every
    # other dependency locally but used json.dumps without importing json,
    # which raises NameError unless the enclosing module happens to import it.
    import json

    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    )
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"]
    )
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    # bundle = 'bundle.json'  # replace with the bundle file
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)

    resource_path = "{}/datasets/{}/fhirStores/{}/fhir".format(
        url, dataset_id, fhir_store_id
    )

    headers = {"Content-Type": "application/fhir+json;charset=utf-8"}

    with open(bundle, "r") as bundle_file:
        bundle_file_content = bundle_file.read()

    response = session.post(resource_path, headers=headers, data=bundle_file_content)
    response.raise_for_status()

    resource = response.json()

    print("Executed bundle from file: {}".format(bundle))
    print(json.dumps(resource, indent=2))

    return resource
def search_resources_get(
    project_id,
    location,
    dataset_id,
    fhir_store_id,
    resource_type,
):
    """
    Uses the searchResources GET method to search for resources in the given
    FHIR store.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "json" module. The function imports every
    # other dependency locally but used json.dumps without importing json,
    # which raises NameError unless the enclosing module happens to import it.
    import json

    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    )
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"]
    )
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    # resource_type = 'Patient'  # replace with the FHIR resource type
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)

    resource_path = "{}/datasets/{}/fhirStores/{}/fhir/{}".format(
        url, dataset_id, fhir_store_id, resource_type
    )

    response = session.get(resource_path)
    response.raise_for_status()

    resources = response.json()

    print(
        "Using GET request, found a total of {} {} resources:".format(
            resources["total"], resource_type
        )
    )
    print(json.dumps(resources, indent=2))

    return resources
def get_resource(
    project_id,
    location,
    dataset_id,
    fhir_store_id,
    resource_type,
    resource_id,
):
    """Gets a FHIR resource.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "json" module. The function imports every
    # other dependency locally but used json.dumps without importing json,
    # which raises NameError unless the enclosing module happens to import it.
    import json

    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    )
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"]
    )
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    # resource_type = 'Patient'  # replace with the FHIR resource type
    # resource_id = 'b682d-0e-4843-a4a9-78c9ac64'  # replace with the FHIR resource's ID
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)

    resource_path = "{}/datasets/{}/fhirStores/{}/fhir/{}/{}".format(
        url, dataset_id, fhir_store_id, resource_type, resource_id
    )

    # Sets required application/fhir+json header on the request
    headers = {"Content-Type": "application/fhir+json;charset=utf-8"}

    response = session.get(resource_path, headers=headers)
    response.raise_for_status()

    resource = response.json()

    print("Got {} resource:".format(resource["resourceType"]))
    print(json.dumps(resource, indent=2))

    return resource
def get_session(service_account_json):
    """Build an AuthorizedSession from a service-account key file.

    Args:
        service_account_json: Path to the service-account JSON key file.

    Returns:
        An AuthorizedSession scoped to the cloud-platform scope.
    """
    credentials = service_account.Credentials.from_service_account_file(
        service_account_json)
    scoped = credentials.with_scopes(
        ['https://www.googleapis.com/auth/cloud-platform'])
    return requests.AuthorizedSession(scoped)
def delete_resource(
    project_id,
    location,
    dataset_id,
    fhir_store_id,
    resource_type,
    resource_id,
):
    """
    Deletes a FHIR resource.

    Regardless of whether the operation succeeds or fails, the server returns
    a 200 OK HTTP status code. To check that the resource was successfully
    deleted, search for or get the resource and see if it exists.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/master/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    )
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"]
    )
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    # resource_type = 'Patient'  # replace with the FHIR resource type
    # resource_id = 'b682d-0e-4843-a4a9-78c9ac64'  # replace with the FHIR resource's ID
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)

    resource_path = "{}/datasets/{}/fhirStores/{}/fhir/{}/{}".format(
        url, dataset_id, fhir_store_id, resource_type, resource_id
    )

    response = session.delete(resource_path)
    # Surface transport/auth failures instead of printing success
    # unconditionally; matches the other samples in this file.
    response.raise_for_status()

    print("Deleted {} resource with ID {}.".format(resource_type, resource_id))

    return response
def __init__(self, token_endpoint: str, service_account_json: str):
    """Set up an OAuth session backed by a service-account key file.

    Args:
        token_endpoint: URL of the OAuth token endpoint.
        service_account_json: Path to the service-account JSON key file.
    """
    super().__init__()
    # Scope the key-file credentials to the "email" scope only.
    credentials = service_account.Credentials.from_service_account_file(
        service_account_json).with_scopes(['email'])
    self._oauth_token_endpoint = token_endpoint
    self._oauth_session = google_requests.AuthorizedSession(credentials)
def _get_authorized_session(self) -> requests.AuthorizedSession:
    """Get an authorized HTTP session for calling AutoML API.

    Credentials come from the Airflow environment via the base hook.

    Returns:
        An AuthorizedSession object.
    """
    return requests.AuthorizedSession(credentials=self._get_credentials())
def create_patient(project_id, location, dataset_id, fhir_store_id):
    """Creates a new Patient resource in a FHIR store.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/fhir
    before running the sample.
    """
    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"])
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"])
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # fhir_store_id = 'my-fhir-store'  # replace with the FHIR store ID
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)
    fhir_store_path = "{}/datasets/{}/fhirStores/{}/fhir/Patient".format(
        url, dataset_id, fhir_store_id)

    # Sets required application/fhir+json header on the request
    headers = {"Content-Type": "application/fhir+json;charset=utf-8"}

    # Minimal Patient resource body for the sample.
    patient = {
        "name": [{
            "use": "official",
            "family": "Smith",
            "given": ["Darcy"]
        }],
        "gender": "female",
        "birthDate": "1970-01-01",
        "resourceType": "Patient",
    }

    response = session.post(fhir_store_path, headers=headers, json=patient)
    response.raise_for_status()
    resource = response.json()

    print("Created Patient resource with ID {}".format(resource["id"]))

    return response
def get_session():
    """Creates an authorized Requests Session.

    Reads the service-account key file named by the
    GOOGLE_APPLICATION_CREDENTIALS environment variable and scopes the
    credentials to cloud-platform.
    """
    credentials = service_account.Credentials.from_service_account_file(
        filename=os.environ["GOOGLE_APPLICATION_CREDENTIALS"],
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    return requests.AuthorizedSession(credentials)
def get_session(self, credential):
    """Return the cached AuthorizedSession, creating one on first use.

    Args:
        credential: Credentials for the session; when falsy, application
            default credentials are used.

    Returns:
        The (possibly newly created) AuthorizedSession.
    """
    if self.session:
        return self.session
    # If the credential is not provided, use the default credential.
    if not credential:
        credential, _ = default()
    session = requests.AuthorizedSession(credential)
    self.session = session
    return session
def init_session(
    credentials: service_account.Credentials) -> requests.AuthorizedSession:
  """Initializes an authorized HTTP session, based on the given credentials.

  Args:
    credentials: OAuth 2.0 credentials object.

  Returns:
    HTTP session object to send authorized requests and receive responses.
  """
  session = requests.AuthorizedSession(credentials)
  return session
def __init__(self, config: types.Config, override_project: Optional[str] = None):
    """Build an authorized session and storage client from *config*.

    Args:
        config: Configuration holding the project name and user credential
            info (``user_info``).
        override_project: Optional project ID that takes precedence over
            the one in *config*.
    """
    self._project = override_project or config.project
    self._config = config

    # Service-account key info and authorized-user info need different
    # credential constructors; dispatch on the recorded "type" field.
    user_info = config.user_info
    if user_info.get('type') == 'service_account':
        creds = service_account.Credentials.from_service_account_info(
            user_info, scopes=constants.SCOPES)
    else:
        creds = google_credentials.Credentials.from_authorized_user_info(
            user_info, scopes=constants.SCOPES)

    self._session = google_auth_requests.AuthorizedSession(creds)
    self._storage = storage_client.Client(
        project=self._project, _http=self._session)
def __init__(self, project, user):
    """Bind this helper to *project*/*user* and open an authorized session.

    Credentials and scopes are read from the project's stored
    ``service-acc-key`` and ``access-scopes`` entries.
    """
    self._project = project
    self._user = user
    self._session = None
    self._private_key = None
    self._public_key = None

    key_path = project.credentials["service-acc-key"]
    key_scopes = project.credentials["access-scopes"]
    credentials = service_account.Credentials.from_service_account_file(
        key_path, scopes=key_scopes)
    self._session = requests.AuthorizedSession(credentials=credentials)
def get_gcs_file_http(file_name):
    """Fetch a GCS object's contents through the JSON REST API.

    Args:
        file_name: Object name within the bucket; '/' is percent-encoded so
            nested paths address a single object.

    Returns:
        The response body parsed as JSON.
    """
    escaped_name = file_name.replace('/', '%2F')
    api_endpoint = "{}/b/{}/o/{}?alt=media".format(
        GCS_BASE_URL, BUCKET_NAME, escaped_name)
    credential, _ = default()
    session = requests.AuthorizedSession(credential)
    response = session.get(api_endpoint)
    response.raise_for_status()
    return response.json()
def dicomweb_retrieve_study(project_id, location, dataset_id, dicom_store_id,
                            study_uid):
    """Handles the GET requests specified in the DICOMweb standard.

    See
    https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/dicom
    before running the sample.
    """
    # Imports Python's built-in "os" module
    import os

    # Imports the google.auth.transport.requests transport
    from google.auth.transport import requests

    # Imports a module to allow authentication using a service account
    from google.oauth2 import service_account

    # Gets credentials from the environment.
    credentials = service_account.Credentials.from_service_account_file(
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"])
    scoped_credentials = credentials.with_scopes(
        ["https://www.googleapis.com/auth/cloud-platform"])
    # Creates a requests Session object with the credentials.
    session = requests.AuthorizedSession(scoped_credentials)

    # URL to the Cloud Healthcare API endpoint and version
    base_url = "https://healthcare.googleapis.com/v1"

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the parent dataset's ID
    # dicom_store_id = 'my-dicom-store'  # replace with the DICOM store ID
    # study_uid = '1.3.6.1.4.1.5062.55.1.227'  # replace with the study UID
    url = "{}/projects/{}/locations/{}".format(base_url, project_id, location)
    dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/studies/{}".format(
        url, dataset_id, dicom_store_id, study_uid)

    # When specifying the output file, use an extension like ".multipart."
    # Then, parse the downloaded multipart file to get each individual
    # DICOM file.
    file_name = "study.multipart"

    response = session.get(dicomweb_path)
    response.raise_for_status()

    with open(file_name, "wb") as f:
        f.write(response.content)

    print("Retrieved study and saved to {} in current directory".format(
        file_name))

    return response
def main(args):
    """Upload or download a file to/from the word2vec GCS blob folder.

    ``args.operation`` selects "download" or "upload"; ``args.file_location``
    names the local file. Transfers go through the google-resumable-media
    ChunkedDownload / ResumableUpload helpers with an AuthorizedSession
    transport.

    Raises:
        DataError: On a missing source, or when an upload fails to initiate.
    """
    file_location = Path(args.file_location)
    file_name = file_location.name
    local_file = file_location

    client = storage.Client()
    blob_folder = "word2vec_service/v2"
    bucket_name = "hutoma-datasets"
    bucket = client.get_bucket(bucket_name)
    blob_path = "{}/{}".format(blob_folder, file_name)
    blob = bucket.blob(blob_path)
    bytes_in_1MB = 1024 * 1024

    print("Operation {}: blob is {}, local file is {}".format(
        args.operation, blob_path, local_file))
    transport = g_requests.AuthorizedSession(credentials=client._credentials)

    if args.operation == "download":
        if not blob.exists():
            raise DataError("Blob {} doesn't exist".format(blob_path))
        if local_file.exists():
            confirm_prompt("File {} exists, overwrite?".format(local_file))

        url = ("https://www.googleapis.com/download/storage/v1/b/"
               "{bucket}/o/{blob_name}?alt=media").format(
                   bucket=bucket_name,
                   blob_name=urllib.parse.quote_plus(blob_path))
        chunk_size = bytes_in_1MB * 5  # 5MB
        with local_file.open("wb") as file_stream:
            download = ChunkedDownload(url, chunk_size, file_stream)
            # The original evaluated `download.finished` here as a bare,
            # no-op expression; removed.
            response = download.consume_next_chunk(transport)
            if not download.finished:
                process_operation(transport, download)
    elif args.operation == "upload":
        if not local_file.exists():
            # Fixed message: report the missing local file, not the blob.
            raise DataError("File {} doesn't exist".format(local_file))
        if blob.exists():
            # Fixed message: the existing object is the blob, not the file.
            confirm_prompt("Blob {} exists, overwrite?".format(blob_path))

        url = ("https://www.googleapis.com/upload/storage/v1/b/{bucket}" +
               "/o?uploadType=resumable").format(bucket=bucket_name)
        chunk_size = bytes_in_1MB  # 1MB
        upload = ResumableUpload(url, chunk_size)
        metadata = {"name": blob_path}
        content_type = "application/octet-stream"
        with local_file.open("rb") as file_stream:
            response = upload.initiate(
                transport, file_stream, metadata, content_type)
            if response.status_code != 200:
                raise DataError("Failed to initiate upload")
            process_operation(transport, upload)
def __init__(self, project):
    """Bind this helper to *project* and open an authorized session.

    Credentials and scopes come from the project's stored
    ``service-acc-key`` and ``access-scopes`` entries.
    """
    self._project = project
    self._session = None
    self._created_status = False
    self._deleted_status = False
    self._data = None
    self._exceptions = []

    key_path = project.credentials["service-acc-key"]
    key_scopes = project.credentials["access-scopes"]
    credentials = service_account.Credentials.from_service_account_file(
        key_path, scopes=key_scopes)
    self._session = requests.AuthorizedSession(credentials=credentials)
def make_adc_session() -> requests.AuthorizedSession:
    """Create a scoped, authorized requests session using ADC

    Returns:
        requests.AuthorizedSession: The authorized requests session

    Raises:
        DefaultCredentialsError if no credentials found
    """
    # Application Default Credentials scoped to Pub/Sub only.
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/pubsub"])
    return requests.AuthorizedSession(credentials)
def __init__(self, request: WSGIRequest = None, bucket: str = None) -> None:
    """Prepare a streaming upload handler targeting *bucket*.

    Args:
        request: Incoming WSGI request being handled.
        bucket: GCS bucket name substituted into the upload URL template.
    """
    super(GCPStreamingFileUploadHandler, self).__init__(request)
    self.upload_url: str = self.upload_url.format(bucket=bucket)
    # Authorized transport from the configured storage key, limited to
    # read/write access on Cloud Storage.
    credentials = Credentials.from_service_account_file(
        settings.GCP_STORAGE_KEY,
        scopes=["https://www.googleapis.com/auth/devstorage.read_write"])
    self.transport = tr_requests.AuthorizedSession(credentials=credentials)
    self.data = AutoTruncatingChunkedStreamable(self.chunk_size)
    self.file = None  # type: Optional[ResumableUpload]
def delete_dicom_store(project_id, dataset_id, region, dicom_store_id):
    """Delete an existing DICOM store and return the HTTP status code.

    Args:
        project_id: GCP project containing the dataset.
        dataset_id: Healthcare dataset containing the DICOM store.
        region: Location of the dataset.
        dicom_store_id: ID of the DICOM store to delete.

    Returns:
        The HTTP status code of the delete request.
    """
    credential, _ = default()
    session = requests.AuthorizedSession(credential)

    api_endpoint = "{}/projects/{}/locations/{}".format(
        HEALTHCARE_BASE_URL, project_id, region)
    # base of dicomweb path.
    dicomweb_path = "{}/datasets/{}/dicomStores/{}".format(
        api_endpoint, dataset_id, dicom_store_id)

    response = session.delete(dicomweb_path)
    response.raise_for_status()
    return response.status_code
def get_auth_session(sa_name, service_scopes):
  """Creates AuthorizedSession for given service account.

  Args:
    sa_name: Service account name.
    service_scopes: A list of cloud account service scopes.

  Returns:
    AuthorizedSession.
  """
  # Key file is expected next to the process as "<sa_name>.json".
  key_file = '{}.json'.format(sa_name)
  credentials = service_account.Credentials.from_service_account_file(
      key_file, scopes=service_scopes)
  return google_auth_requests.AuthorizedSession(credentials)