def set_kaggle_user_agent(client_info: ClientInfo):
    """Attach the Kaggle GCP user agent to *client_info* for usage attribution.

    When no ClientInfo is supplied, a fresh one is created; otherwise the
    existing object's ``user_agent`` is overwritten in place. The (possibly
    new) ClientInfo is returned either way.
    """
    if client_info is None:
        return ClientInfo(user_agent=KAGGLE_GCP_CLIENT_USER_AGENT)
    client_info.user_agent = KAGGLE_GCP_CLIENT_USER_AGENT
    return client_info
def main():
    """Main function"""
    # Ansible module entry point: performs a single GCS object operation
    # (download / upload / delete) selected by the "action" parameter.
    module = GcpModule(argument_spec=dict(
        action=dict(type="str", choices=["download", "upload", "delete"]),
        src=dict(type="path"),
        dest=dict(type="path"),
        bucket=dict(type="str"),
    ))

    # Bail out early if the google-cloud-storage client library is missing.
    if not HAS_GOOGLE_STORAGE_LIBRARY:
        module.fail_json(
            msg="Please install the google-cloud-storage Python library")

    # Default to full-control storage scope when the caller supplied none.
    if not module.params["scopes"]:
        module.params["scopes"] = [
            "https://www.googleapis.com/auth/devstorage.full_control"
        ]

    # Build an authenticated storage client tagged with the Ansible user agent.
    creds = GcpSession(module, "storage")._credentials()
    client = storage.Client(
        project=module.params['project'],
        credentials=creds,
        client_info=ClientInfo(user_agent="Google-Ansible-MM-object"))

    bucket = client.get_bucket(module.params['bucket'])

    remote_file_exists = Blob(remote_file_path(module), bucket).exists()
    local_file_exists = os.path.isfile(local_file_path(module))

    # Check if files exist.
    results = {}
    if module.params["action"] == "delete" and not remote_file_exists:
        module.fail_json(msg="File does not exist in bucket")

    if module.params["action"] == "download" and not remote_file_exists:
        module.fail_json(msg="File does not exist in bucket")

    if module.params["action"] == "upload" and not local_file_exists:
        module.fail_json(msg="File does not exist on disk")

    if module.params["action"] == "delete":
        # remote_file_exists is already guaranteed by the fail_json guard
        # above, so this inner check is effectively always true here.
        if remote_file_exists:
            results = delete_file(module, client, module.params["src"])
            results["changed"] = True
            # NOTE(review): writing "changed" into module.params looks
            # unintentional — exit_json reports from `results`, not params;
            # confirm before relying on it.
            module.params["changed"] = True

    elif module.params["action"] == "download":
        results = download_file(module, client, module.params["src"],
                                module.params["dest"])
        results["changed"] = True

    # Upload
    else:
        results = upload_file(module, client, module.params["src"],
                              module.params["dest"])
        results["changed"] = True

    module.exit_json(**results)
def _create_client_info(application_name):
    """Build a ClientInfo whose user agent optionally leads with *application_name*.

    The default agent string embeds the current ibis version; when an
    application name is given it is prepended, separated by a space.
    """
    default_agent = _USER_AGENT_DEFAULT_TEMPLATE.format(ibis.__version__)
    if application_name:
        return ClientInfo(
            user_agent="{} {}".format(application_name, default_agent))
    return ClientInfo(user_agent=default_agent)
def __init__(self, client, client_info=None):
    """Store *client* and a ClientInfo, defaulting to a fresh ClientInfo.

    Also initializes an empty mapping for extra request headers.
    """
    self._client = client
    self._client_info = ClientInfo() if client_info is None else client_info
    self._extra_headers = {}
def __init__(self, application, request, **kwargs):
    """Initialize the handler, creating the class-level BigQuery client once.

    The client is cached on GetQueryDetailsHandler so all handler
    instances share a single connection, tagged with the extension's
    version-specific user agent.
    """
    super().__init__(application, request, **kwargs)
    if GetQueryDetailsHandler.bigquery_client is None:
        agent = 'jupyterlab_gcpextension/jupyterlab_bigquery-{}'.format(
            VERSION)
        GetQueryDetailsHandler.bigquery_client = bigquery.Client(
            client_info=ClientInfo(user_agent=agent))
def __init__(self, client=None, datacatalog_client=None):
    """Store the BigQuery and Data Catalog clients, building defaults lazily.

    Args:
        client: Optional pre-built ``bigquery.Client``. When omitted, one is
            constructed with the extension's user agent.
        datacatalog_client: Optional pre-built ``DataCatalogClient``. When
            omitted, one is constructed with the extension's user agent.

    Fix: the previous defaults called ``bigquery.Client(...)`` and
    ``DataCatalogClient(...)`` directly in the parameter list. Python
    evaluates default arguments once, at function-definition time, so both
    clients were created when the module was imported — forcing credential
    resolution at import and sharing a single client object across every
    instance. Using a ``None`` sentinel defers construction to call time.
    """
    user_agent = 'jupyterlab_gcpextension/jupyterlab_bigquery-{}'.format(
        VERSION)
    if client is None:
        client = bigquery.Client(
            client_info=ClientInfo(user_agent=user_agent))
    if datacatalog_client is None:
        datacatalog_client = DataCatalogClient(
            client_info=DataCatalogClientInfo(user_agent=user_agent))
    self._client = client
    self._datacatalog_client = datacatalog_client
def __init__(self, application, request, **kwargs):
    """Set up the thread pool and the shared, lazily-created BigQuery client.

    The client (and the project it was created with) is cached on
    PagedQueryHandler so it is built only once across all instances.
    """
    super().__init__(application, request, **kwargs)
    self.pool = Pool(NUM_THREADS)
    if PagedQueryHandler.client is None:
        agent = 'jupyterlab_gcpextension/jupyterlab_bigquery-{}'.format(
            VERSION)
        PagedQueryHandler.client = bigquery.Client(
            client_info=ClientInfo(user_agent=agent))
        # Remember the project the client started with so it can be
        # restored later (assumed capture-once semantics — confirm).
        PagedQueryHandler.orig_project = PagedQueryHandler.client.project
def client(self) -> storage.Client:
    """Return a GCS Client authenticated from the configured key material.

    The explicit ``project_id`` attribute wins; otherwise the project
    derived from the credentials is used. The client is tagged with an
    Airflow-versioned client_info for usage attribution.
    """
    credentials, derived_project = get_credentials_and_project_id(
        key_path=self.gcp_key_path,
        keyfile_dict=self.gcp_keyfile_dict,
        scopes=self.scopes,
        disable_logging=True)
    info = ClientInfo(client_library_version='airflow_v' + version.version)
    project = self.project_id if self.project_id else derived_project
    return storage.Client(
        credentials=credentials, client_info=info, project=project)
def client_info(self) -> ClientInfo:
    """Return a ClientInfo used to build a user-agent for API calls.

    The Airflow version is embedded so API usage can be attributed and
    errors tracked per release. Note this object is only consumed by the
    google-cloud-* libraries built for Google Cloud Platform; the
    Discovery-based Google APIs Python Client does not support it.
    """
    return ClientInfo(client_library_version='airflow_v' + version.version)
def test_ctor_w_client_info(self):
    """Constructor should thread a caller-supplied ClientInfo into the connection."""
    from google.api_core.client_info import ClientInfo
    from google.cloud.dns._http import Connection

    info = ClientInfo()
    credentials = _make_credentials()
    http = object()

    client = self._make_one(
        project=self.PROJECT,
        credentials=credentials,
        _http=http,
        client_info=info,
    )

    conn = client._connection
    self.assertIsInstance(conn, Connection)
    self.assertIs(conn.credentials, credentials)
    self.assertIs(conn.http, http)
    self.assertIs(conn._client_info, info)
def get_connection(self, new: bool = False, project_id: str = None) -> bigquery.client.Client:
    """Return the cached singleton BigQuery client, rebuilding when needed.

    A new client is constructed when explicitly requested via ``new``, when
    a ``project_id`` override is supplied, or when no client exists yet;
    otherwise the cached instance is reused.
    """
    if new or project_id or self._client is None:
        job_config = bigquery.QueryJobConfig(use_legacy_sql=False)
        effective_project = (
            project_id if project_id else self._gcp_credentials.project_id)
        self._client = bigquery.Client(
            default_query_job_config=job_config,
            credentials=self._gcp_credentials.credentials,
            project=effective_project,
            client_info=ClientInfo(user_agent=USER_AGENT_TAG),
        )
    # Both branches of the original returned self._client, so a single
    # return suffices.
    return self._client
def create_storage_client():
    """Construct a GCS client tagged with the file-browser extension's user agent."""
    agent = 'jupyterlab_gcsfilebrowser/{}'.format(VERSION)
    return storage.Client(client_info=ClientInfo(user_agent=agent))
def create_bigquery_client():
    """Construct a BigQuery client tagged with the extension's versioned user agent."""
    agent = 'jupyterlab_gcpextension/jupyterlab_bigquery-{}'.format(VERSION)
    return bigquery.Client(client_info=ClientInfo(user_agent=agent))