def test_get_secret_validates_label(self):
    """An empty secret label must be rejected with ValidationError."""
    env = EnvironmentVarGuard()
    env.set(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME, _TEST_JWT)
    with env:
        client = UserSecretsClient()
        with self.assertRaises(ValidationError):
            client.get_secret("")
def refresh(self, request):
    """Refresh this credential with a fresh BigQuery access token.

    Args:
        request: Unused; present to satisfy the google-auth
            ``Credentials.refresh`` interface.

    Raises:
        RefreshError: if the token could not be fetched.
    """
    print("Calling Kaggle.UserSecrets to refresh token.")
    try:
        client = UserSecretsClient()
        # BUG FIX: the fresh token was previously fetched into a local
        # variable (`fresh_token`) and discarded, so the credential object
        # was never actually updated. Store it on self like the sibling
        # refresh() implementations do.
        self.token, self.expiry = client.get_bigquery_access_token()
    except Exception as e:
        raise RefreshError('Unable to refresh access token.') from e
def refresh(self, request):
    """Refresh the token/expiry pair for whichever GCP target this
    credential was built for (BigQuery, GCS, or CloudAI)."""
    try:
        client = UserSecretsClient()
        # Dispatch table instead of an if/elif ladder; unknown targets
        # fall through without touching self.token (same as the original).
        fetchers = {
            GcpTarget.BIGQUERY: client.get_bigquery_access_token,
            GcpTarget.GCS: client._get_gcs_access_token,
            GcpTarget.CLOUDAI: client._get_cloudai_access_token,
        }
        fetch = fetchers.get(self.target)
        if fetch is not None:
            self.token, self.expiry = fetch()
    except ConnectionError as e:
        Log.error(f"Connection error trying to refresh access token: {e}")
        print(
            "There was a connection error trying to fetch the access token. "
            f"Please ensure internet is on in order to use the {self.target.service} Integration."
        )
        raise RefreshError(
            'Unable to refresh access token due to connection error.'
        ) from e
    except Exception as e:
        Log.error(f"Error trying to refresh access token: {e}")
        if not get_integrations().has_integration(self.target):
            Log.error(f"No {self.target.service} integration found.")
            print(
                f"Please ensure you have selected a {self.target.service} account in the Notebook Add-ons menu."
            )
        raise RefreshError('Unable to refresh access token.') from e
def test_set_tensorflow_credential(self, mock_configure_gcs):
    """set_tensorflow_credential should persist the credential under $HOME,
    point GOOGLE_APPLICATION_CREDENTIALS at it, and configure GCS."""
    credential = (
        '{"client_id":"fake_client_id",'
        '"client_secret":"fake_client_secret",'
        '"refresh_token":"not a refresh token",'
        '"type":"authorized_user"}'
    )
    env = EnvironmentVarGuard()
    env.set('HOME', '/tmp')
    env.set('GOOGLE_APPLICATION_CREDENTIALS', '')
    # Required by UserSecretsClient, but irrelevant to this test.
    env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
    env.set('KAGGLE_KERNEL_INTEGRATIONS', 'CLOUDAI')

    user_secrets = UserSecretsClient()
    user_secrets.set_tensorflow_credential(credential)

    credential_path = '/tmp/gcloud_credential.json'
    self.assertEqual(credential_path,
                     os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
    with open(credential_path, 'r') as f:
        self.assertEqual(credential, f.read())
    mock_configure_gcs.assert_called_with(credentials=credential)
def refresh(self, request):
    """Fetch a fresh BigQuery access token; on failure, hint that the
    BigQuery integration may not be attached to the kernel."""
    try:
        self.token, self.expiry = UserSecretsClient().get_bigquery_access_token()
    except Exception as err:
        if not get_integrations().has_bigquery():
            print(
                'Please ensure you have selected a BigQuery account in the Kernels Settings sidebar.')
        raise RefreshError('Unable to refresh access token.') from err
def refresh(self, request):
    """Refresh the BigQuery token pair, guiding the user toward the
    Kernels Settings sidebar when no integration is attached."""
    try:
        secrets_client = UserSecretsClient()
        self.token, self.expiry = secrets_client.get_bigquery_access_token()
    except Exception as err:
        if not get_integrations().has_bigquery():
            print(
                'Please ensure you have selected a BigQuery account in the Kernels Settings sidebar.'
            )
        raise RefreshError('Unable to refresh access token.') from err
def get_gcs_path_of_kaggle_data(data_name, is_private=False):
    """Return the GCS path of a Kaggle dataset.

    For private datasets the user's Cloud SDK credential is first fetched
    and handed to TensorFlow so the private bucket becomes readable.
    """
    if is_private:
        from kaggle_secrets import UserSecretsClient
        secrets = UserSecretsClient()
        # Fetch the gcloud credential, then register it with TensorFlow.
        secrets.set_tensorflow_credential(secrets.get_gcloud_credential())
    # Resolve the dataset's GCS path (works for public and private data).
    from kaggle_datasets import KaggleDatasets
    return KaggleDatasets().get_gcs_path(data_name)
def refresh(self, request):
    """Refresh the BigQuery access token, distinguishing connectivity
    failures from other errors in the diagnostics it logs."""
    try:
        self.token, self.expiry = UserSecretsClient().get_bigquery_access_token()
    except ConnectionError as err:
        Log.error(f"Connection error trying to refresh access token: {err}")
        print("There was a connection error trying to fetch the access token. "
              "Please ensure internet is on in order to use the BigQuery Integration.")
        raise RefreshError('Unable to refresh access token due to connection error.') from err
    except Exception as err:
        Log.error(f"Error trying to refresh access token: {err}")
        if not get_integrations().has_bigquery():
            Log.error(f"No bigquery integration found.")
            print(
                'Please ensure you have selected a BigQuery account in the Kernels Settings sidebar.')
        raise RefreshError('Unable to refresh access token.') from err
def kaggle_settings():
    """Load notebook secrets into environment variables.

    NOTE: only usable inside a Kaggle notebook, where the user-secrets
    service is available.
    """
    from kaggle_secrets import UserSecretsClient
    secrets = UserSecretsClient()
    labels = ('NEPTUNE_API_TOKEN', 'GITHUB_PAT')
    fetched = {label: secrets.get_secret(label) for label in labels}
    # For gcsfs
    UserSecretsClient().set_gcloud_credentials()
    os.environ.update(fetched)
def test_fn():
    """set_gcloud_credentials must wire env vars, gcloud config values, and
    the on-disk credential file consistently."""
    UserSecretsClient().set_gcloud_credentials(project=project, account=account)

    self.assertEqual(project, os.environ['GOOGLE_CLOUD_PROJECT'])
    self.assertEqual(project, get_gcloud_config_value('project'))
    self.assertEqual(account, os.environ['GOOGLE_ACCOUNT'])
    self.assertEqual(account, get_gcloud_config_value('account'))

    creds_file = '/tmp/gcloud_credential.json'
    self.assertEqual(creds_file, os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
    self.assertEqual(
        creds_file,
        get_gcloud_config_value('auth/credential_file_override'))
    with open(creds_file, 'r') as f:
        self.assertEqual(secret, '\n'.join(f.readlines()))
def refresh(self, request):
    """Refresh the token/expiry pair for whichever GCP service this
    credential targets (BigQuery, GCS, AutoML, Translation, NL, Video
    Intelligence, or Vision)."""
    try:
        client = UserSecretsClient()
        # One entry per supported target; building the table only binds
        # the methods, nothing is called until dispatch below.
        fetchers = {
            GcpTarget.BIGQUERY: client.get_bigquery_access_token,
            GcpTarget.GCS: client._get_gcs_access_token,
            GcpTarget.AUTOML: client._get_automl_access_token,
            GcpTarget.TRANSLATION: client._get_translation_access_token,
            GcpTarget.NATURAL_LANGUAGE: client._get_natural_language_access_token,
            GcpTarget.VIDEO_INTELLIGENCE: client._get_video_intelligence_access_token,
            GcpTarget.VISION: client._get_vision_access_token,
        }
        fetch = fetchers.get(self.target)
        if fetch is not None:
            self.token, self.expiry = fetch()
    except ConnectionError as e:
        Log.error(f"Connection error trying to refresh access token: {e}")
        print(
            "There was a connection error trying to fetch the access token. "
            f"Please ensure internet is on in order to use the {self.target.service} Integration."
        )
        raise RefreshError(
            'Unable to refresh access token due to connection error.'
        ) from e
    except Exception as e:
        Log.error(f"Error trying to refresh access token: {e}")
        if not get_integrations().has_integration(self.target):
            Log.error(f"No {self.target.service} integration found.")
            print(
                f"Please ensure you have selected a {self.target.service} account in the Notebook Add-ons menu."
            )
        raise RefreshError('Unable to refresh access token.') from e
def call_get_gcs_access_token():
    """GCS token endpoint should return (secret, now + 1 hour)."""
    token_pair = UserSecretsClient()._get_gcs_access_token()
    self.assertEqual(token_pair, (secret, now + timedelta(seconds=3600)))
def call_get_access_token():
    """A backend failure surfaces as BackendError."""
    secrets_client = UserSecretsClient()
    with self.assertRaises(BackendError):
        secrets_client.get_bigquery_access_token()
def call_get_secret():
    """A missing gcloud credential raises NotFoundError."""
    secrets_client = UserSecretsClient()
    with self.assertRaises(NotFoundError):
        secrets_client.get_gcloud_credential()
isLocalhost = True else: # print("We are running in Kaggle") isLocalhost = False if isLocalhost: # INPUT_FILES = '../output/cropped-cats-and-dogs/*.jpg' INPUT_FILES = "../output/cropped-cats-and-dogs/*.jpg" else: INPUT_FILES = "/kaggle/input/cropped-cats-and-dogs/*.jpg" from kaggle_secrets import UserSecretsClient user_secrets = UserSecretsClient() USER_ID = user_secrets.get_secret("user-id") API_TOKEN = user_secrets.get_secret("api-token") OUTPUT_DATASET_ID = "augmented-cats-and-dogs" OUTPUT_DATASET_NAME = "Augmented Cats and Dogs" OUTPUT_PATH = "./output/augmented-cats-and-dogs-multiprocessing" NUM_AUGMENTATIONS = 10 # final image size # Same size is used in Crop Cats and Dogs X_SIZE = 224 Y_SIZE = 224
def refresh(self, request):
    """Fetch and store a fresh BigQuery access token and its expiry."""
    try:
        self.token, self.expiry = UserSecretsClient().get_bigquery_access_token()
    except Exception as err:
        raise RefreshError('Unable to refresh access token.') from err
def call_get_secret():
    """get_gcloud_credential returns the stored secret verbatim."""
    self.assertEqual(UserSecretsClient().get_gcloud_credential(), secret)
def call_get_secret():
    """Fetching a labelled secret returns its stored value."""
    fetched = UserSecretsClient().get_secret("secret_label")
    self.assertEqual(fetched, secret)
def call_get_secret():
    """Backend failures while fetching a secret raise BackendError."""
    secrets_client = UserSecretsClient()
    with self.assertRaises(BackendError):
        secrets_client.get_secret("secret_label")
credentials and identity. It is inside the `if not tfc.remote()` block to ensure that it is only run in the notebook, and will not be run when the notebook code is sent to Google Cloud. Note: For Kaggle Notebooks click on "Add-ons"->"Google Cloud SDK" before running the cell below. """ # Using tfc.remote() to ensure this code only runs in notebook if not tfc.remote(): # Authentication for Kaggle Notebooks if "kaggle_secrets" in sys.modules: from kaggle_secrets import UserSecretsClient UserSecretsClient().set_gcloud_credentials(project=GCP_PROJECT_ID) # Authentication for Colab Notebooks if "google.colab" in sys.modules: from google.colab import auth auth.authenticate_user() os.environ["GOOGLE_CLOUD_PROJECT"] = GCP_PROJECT_ID """ ## Model and data setup From here we are following the basic procedure for setting up a simple Keras model to run classification on the MNIST dataset. ### Load and split data
def test_no_token_fails(self):
    """Without the secrets-token env var, construction raises CredentialError."""
    env = EnvironmentVarGuard()
    env.unset(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME)
    with env:
        with self.assertRaises(CredentialError):
            UserSecretsClient()
def _kaggle_gcp_authority():
    """Propagate the user's gcloud credential into TensorFlow."""
    secrets = UserSecretsClient()
    secrets.set_tensorflow_credential(secrets.get_gcloud_credential())
def call_get_access_token():
    """The BigQuery token endpoint returns (secret, now + 1 hour)."""
    token_pair = UserSecretsClient().get_bigquery_access_token()
    self.assertEqual(token_pair, (secret, now + timedelta(seconds=3600)))
def call_get_access_token():
    """The returned access token matches the stored secret."""
    self.assertEqual(UserSecretsClient().get_bigquery_access_token(), secret)
from kaggle_secrets import UserSecretsClient

YANDEX_API_KEY = UserSecretsClient().get_secret("YANDEX_API_KEY")

import requests


def translate(x, key, src='ru', dest='en'):
    """Translate the unique values of *x* via the Yandex Translate API
    and map every element of *x* to its translation.

    Values the API response does not cover fall back to the original.
    """
    unique_values = x.unique()
    url = 'https://translate.yandex.net/api/v1.5/tr.json/translate'
    params = dict(
        key=key,
        lang=src + '-' + dest
    )
    payload = {'text': unique_values}
    response = requests.post(url=url, params=params, data=payload)
    translated = response.json()['text']
    # Map original -> translated, preserving untranslated values as-is.
    mapping = dict(zip(unique_values, translated))
    return [mapping.get(item, item) for item in x]


import pandas as pd

categories = pd.read_csv('/kaggle/input/competitive-data-science-predict-future-sales/item_categories.csv')
categories['item_category_name_en'] = translate(categories['item_category_name'], YANDEX_API_KEY)
categories.sample(10)