def test_refresh_token(self, get_info, get_token):
    """First fetch yields token 'A' (already expired), second yields 'B'.

    ``get_info`` and ``get_token`` are mock-patched metadata helpers
    injected by the patch decorators on this test.
    """
    fake_http = object()
    creds = gce.AppAssertionCredentials()
    creds.invalid = False
    creds.service_account_email = '*****@*****.**'

    # No token is present before the first fetch.
    self.assertIsNone(creds.access_token)

    creds.get_access_token(http=fake_http)
    self.assertEqual(creds.access_token, 'A')
    self.assertTrue(creds.access_token_expired)
    get_token.assert_called_with(fake_http, service_account='*****@*****.**')

    # An expired token forces a second fetch from the metadata server.
    creds.get_access_token(http=fake_http)
    self.assertEqual(creds.access_token, 'B')
    self.assertFalse(creds.access_token_expired)
    get_token.assert_called_with(fake_http, service_account='*****@*****.**')

    # Only the token endpoint is used; the info endpoint is never hit.
    get_info.assert_not_called()
def AcquireFromGCE(account=None):
    """Build and refresh credentials from a GCE metadata server.

    Args:
        account: str, The account name to use. If None, the default is used.

    Returns:
        client.Credentials, Credentials taken from the metadata server.

    Raises:
        c_gce.CannotConnectToMetadataServerException: If the metadata server
            cannot be reached.
        TokenRefreshError: If the credentials fail to refresh.
        TokenRefreshReauthError: If the credentials fail to refresh due to
            reauth.
    """
    gce_credentials = oauth2client_gce.AppAssertionCredentials(email=account)
    # Refresh eagerly so connectivity/auth problems surface here, not later.
    Refresh(gce_credentials)
    return gce_credentials
def test_refresh_token_failed_fetch(self):
    """A non-200 metadata response surfaces as HttpAccessTokenRefreshError."""
    error_headers = {
        'status': http_client.NOT_FOUND,
        'content-type': 'application/json',
    }
    body = json.dumps({'access_token': 'a', 'expires_in': 100})
    http = http_mock.HttpMock(headers=error_headers, data=body)

    creds = gce.AppAssertionCredentials()
    creds.invalid = False
    creds.service_account_email = '*****@*****.**'
    with self.assertRaises(client.HttpAccessTokenRefreshError):
        creds._refresh(http)

    # Verify the single metadata-server request the mock recorded.
    self.assertEqual(http.requests, 1)
    self.assertEqual(http.uri, _metadata.METADATA_ROOT + METADATA_PATH)
    self.assertEqual(http.method, 'GET')
    self.assertIsNone(http.body)
    self.assertEqual(http.headers, _metadata.METADATA_HEADERS)
def test_token_info(self):
    """Live check: a freshly minted GCE token validates against tokeninfo."""
    creds = gce.AppAssertionCredentials([])
    http = transport.get_http_object()

    # Refresh once so the credentials actually hold an access token.
    self.assertIsNone(creds.access_token)
    creds.refresh(http)
    self.assertIsNotNone(creds.access_token)

    # Ask the token-info API about the token we just obtained.
    query = urllib.parse.urlencode({'access_token': creds.access_token})
    token_uri = oauth2client.GOOGLE_TOKEN_INFO_URI + '?' + query
    response, content = transport.request(http, token_uri)
    self.assertEqual(response.status, http_client.OK)

    payload = json.loads(content.decode('utf-8'))
    self.assertEqual(payload['access_type'], 'offline')
    self.assertLessEqual(int(payload['expires_in']), 3600)
def init(args):  # pragma: no cover
    """Initialize the scraper library.

    The discovery interface means that the contents of some libraries is
    determined at runtime.  Also, applications need to be authorized to use
    the necessary services.  This performs both library initialization as
    well as application authorization.
    """
    rsync_url = 'rsync://{}:{}/{}'.format(args.rsync_host, args.rsync_port,
                                          args.rsync_module)

    # Set up logging; embed the rsync URL in every record for traceability.
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s %(levelname)s %(filename)s:%(lineno)d ' +
               rsync_url + '] %(message)s')
    logging.info('Scraping from %s, putting the results in %s', rsync_url,
                 args.bucket)

    # Authorize this application to use Google APIs.
    credentials = gce.AppAssertionCredentials()

    # Cloud datastore and its dependencies.
    datastore_service = cloud_datastore.Client(
        namespace=args.datastore_namespace)
    status = SyncStatus(datastore_service, rsync_url)
    logging.getLogger().addHandler(SyncStatusLogHandler(status))

    # Cloud storage.
    storage_service = apiclient.discovery.build('storage', 'v1',
                                                credentials=credentials)

    # Ensure the destination directory exists.
    destination = os.path.join(args.data_dir, args.rsync_host,
                               args.rsync_module)
    if not os.path.isdir(destination):
        os.makedirs(destination)

    return (rsync_url, status, destination, storage_service)
def validate_input(self):
    """Validate service input, persist GCP credentials, and connect.

    Side effects:
        * writes ``client_secret.json`` into the opereto workspace and
          points GOOGLE_APPLICATION_CREDENTIALS at it
        * sets ``self.deployment_name``, ``self.gcp_http_handler`` and
          ``self.gcp_deploy_manager``

    Raises:
        Whatever ``JsonSchemeValidator.validate`` raises on invalid input.
    """
    input_scheme = {
        "type": "object",
        "properties": {
            "deployment_name": {
                "type": "string",
                "minLength": 1
            },
            "gcp_access_credentials": {
                "type": "object"
            }
        },
        # BUG FIX: "required" and "additionalProperties" were nested inside
        # "properties", where JSON Schema treats them as ordinary property
        # definitions -- the required-fields check never ran.  They are
        # object-level keywords and belong here.
        "required": ['deployment_name', 'gcp_access_credentials'],
        "additionalProperties": True
    }
    validator = JsonSchemeValidator(self.input, input_scheme)
    validator.validate()

    self.deployment_name = self.input['deployment_name']

    self._print_step_title('Connecting to GCP..')
    # Persist the caller-supplied service-account JSON so Google client
    # libraries pick it up via GOOGLE_APPLICATION_CREDENTIALS.
    current_credential_file = os.path.join(self.input['opereto_workspace'],
                                           'client_secret.json')
    with open(current_credential_file, 'w') as cf:
        cf.write(json.dumps(self.input['gcp_access_credentials'], indent=4))
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = current_credential_file
    credentials = gce.AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/cloud-platform')
    self.gcp_http_handler = credentials.authorize(httplib2.Http())
    self.gcp_deploy_manager = build('deploymentmanager', 'v2')
    print('Connected.')
def test_to_json(self):
    """Serializing GCE credentials to JSON is unsupported."""
    creds = gce.AppAssertionCredentials()
    with self.assertRaises(NotImplementedError):
        creds.to_json()
def _get_bigquery_service(self):
    """Build an authorized BigQuery v2 service client via GCE credentials."""
    creds = gce.AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    authorized_http = creds.authorize(httplib2.Http())
    return build("bigquery", "v2", http=authorized_http)
# Authorize server-to-server interactions from Google Compute Engine.
import httplib2

from oauth2client.contrib import gce

# Read/write scope for Google Cloud Storage.
_DEVSTORAGE_RW_SCOPE = 'https://www.googleapis.com/auth/devstorage.read_write'

credentials = gce.AppAssertionCredentials(scope=_DEVSTORAGE_RW_SCOPE)
http = credentials.authorize(httplib2.Http())
def testIsGceAccountCredentials(self):
    """Both oauth2client and google-auth GCE credentials are recognized."""
    oauth2client_cred = oauth2client_gce.AppAssertionCredentials()
    self.assertTrue(auth_util.IsGceAccountCredentials(oauth2client_cred))

    google_auth_cred = google_auth_gce_creds.Credentials()
    self.assertTrue(auth_util.IsGceAccountCredentials(google_auth_cred))
def test_serialization_data(self):
    """Reading serialization_data on GCE credentials is unsupported."""
    creds = gce.AppAssertionCredentials()
    with self.assertRaises(NotImplementedError):
        getattr(creds, 'serialization_data')
def validate_input(self):
    """Validate deployment input, inject agent bootstrap metadata, connect to GCP.

    Validates ``self.input`` against a JSON schema, rewrites the deployment
    template(s) so GCE instances labelled as Opereto agents get a
    startup-script that installs the agent, writes the supplied GCP
    service-account JSON to the workspace, and builds compute/deployment
    manager API clients.

    NOTE(review): "required" and "additionalProperties" are nested inside
    "properties" in the top-level schema, where JSON Schema treats them as
    plain property names -- the required-fields check likely never runs.
    Verify intended schema shape.
    NOTE(review): the schema declares 'opereto_core_tools' /
    'opereto_container_tools' but the code reads 'install_core_tools' /
    'install_container_tools' -- confirm which keys the service actually
    receives.
    NOTE(review): this chunk's original line breaks (notably inside the
    triple-quoted YAML user-data templates) were lost; the string layout
    below is a reconstruction -- confirm against the original file.
    """
    input_scheme = {
        "type": "object",
        "properties": {
            "deployment_name": {
                "type": "string",
                "minLength": 1
            },
            "gcp_project_id": {
                "type": "string",
                "minLength": 1
            },
            "deployment_template": {
                "type": ["string", "null"]
            },
            "deployment_import_templates": {
                "type": ["string", "null"]
            },
            "deployment_parameters": {
                "type": ["object", "null"]
            },
            "opereto_core_tools": {
                "type": "boolean"
            },
            "opereto_container_tools": {
                "type": "boolean"
            },
            "disable_rollback": {
                "type": "boolean"
            },
            "gcp_access_credentials": {
                "type": "object"
            },
            "agent_package_url": {
                "type": "object",
                "properties": {
                    "windows": {
                        "type": "string",
                        "minLength": 1
                    },
                    "linux": {
                        "type": "string",
                        "minLength": 1
                    }
                },
                "required": ['windows', 'linux'],
                "additionalProperties": True
            },
            "required": ['gcp_project_id', 'deployment_template', 'gcp_access_credentials'],
            "additionalProperties": True
        }
    }
    validator = JsonSchemeValidator(self.input, input_scheme)
    validator.validate()

    self.install_core_tools = self.input['install_core_tools']
    self.install_container_tools = self.input['install_container_tools']
    # Only these OS values are accepted in the 'opereto-agent-os' label.
    self.agent_valid_os = ['linux', 'windows']
    self.deployment_name = self.input['deployment_name']

    # Container tools require the core tools to be installed first.
    if self.install_container_tools and not self.install_core_tools:
        raise Exception('Opereto container tools is dependant on opereto core tools. Please check the "install_core_tools" checkbox too.')

    source_user = self.client.input['opereto_originator_username']
    agent_user = self.client.input['opereto_user']
    self.users = [source_user, agent_user]
    self.owners = [source_user, agent_user]

    def linux_user_data(agent_name):
        # Build the GCE metadata 'startup-script' entry that downloads and
        # installs the Opereto agent on a Linux instance.
        agent_install_command = './install.sh -h {} -t {} -n {}'.format(self.input['opereto_host'], self.token, agent_name)
        # Older (pre-v3) Opereto servers use basic-auth install flags.
        if get_opereto_major_release(self.client) < 3:
            agent_install_command = './install.sh -b {} -u {} -p {} -n {}'.format(self.input['opereto_host'], agent_user, self.input['opereto_password'], agent_name)
        data = """
items:
- key: startup-script
  value: |
    #! /bin/bash
    curl -O {}
    tar -zxvf opereto-agent-latest.tar.gz
    cd opereto-agent-latest
    sudo chmod 777 -R *
    {}""".format(self.input['agent_package_url']['linux'], agent_install_command)
        return data

    def windows_user_data(agent_name):
        # Same idea for Windows: a PowerShell startup script that downloads,
        # unzips and installs the agent.  Double braces escape .format().
        data = """
items:
- key: startup-script
  value: |
    <powershell>
    Add-Type -AssemblyName System.IO.Compression.FileSystem
    function Unzip {{
        param([string]$zipfile, [string]$outpath)
        [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath)
    }}
    $MyDir = "c:"
    $filename = Join-Path -Path $MyDir -ChildPath "opereto-agent-latest.zip"
    $WebClient = New-Object System.Net.WebClient
    $WebClient.DownloadFile("{}", "$filename")
    Unzip "$MyDir\opereto-agent-latest.zip" "$MyDir\opereto"
    cd "$MyDir\opereto\opereto-agent-latest"
    ./opereto-install.bat {} {} {} javaw
    ./opereto-start.bat
    Remove-Item $filename
    </powershell>
    <persist>true</persist>""".format(self.input['agent_package_url']['windows'], self.input['opereto_host'], self.token, agent_name)
        return data

    def _add_agent_installation(json_template):
        # Scan compute.v1.instance resources for 'opereto-agent-*' labels,
        # register each agent, and replace the instance metadata with a
        # placeholder that is later swapped for the real user-data block.
        if json_template.get('resources'):
            for resource_data in json_template['resources']:
                if resource_data["type"] == "compute.v1.instance":
                    agent_os = None
                    agent_name = None
                    agent_display_name = None
                    agent_description = None
                    agent_id_found = False
                    if resource_data['properties'].get("labels"):
                        for key, value in resource_data['properties']["labels"].items():
                            if key == 'opereto-agent-os':
                                agent_os = value
                            elif key == 'opereto-agent-id':
                                agent_id_found = True
                                agent_name = value
                            elif key == 'opereto-agent-name':
                                agent_display_name = value
                            elif key == 'opereto-agent-desc':
                                agent_description = value
                        json_template['resources'][json_template['resources'].index(resource_data)]['properties']["labels"]['opereto-agent-id'] = agent_name.lower()  ## match gcp label value policy
                        # del json_template['resources'][json_template['resources'].index(resource_data)]['properties']["labels"]['opereto-agent-name']
                        # del json_template['resources'][json_template['resources'].index(resource_data)]['properties']["labels"]['opereto-agent-desc']
                    if agent_os:
                        if agent_os not in self.agent_valid_os:
                            raise OperetoRuntimeError('OperetoAgentOs must be one of the following: {}'.format(str(self.agent_valid_os)))
                        if not agent_name:
                            # No explicit id label: generate a short unique one.
                            agent_name = 'agent' + str(uuid.uuid4())[:10]
                        else:
                            try:
                                JsonSchemeValidator(agent_name, default_variable_name_scheme).validate()
                            except Exception as e:
                                raise OperetoRuntimeError('Invalid agent identifier: {}'.format(str(e)))
                        if agent_display_name:
                            try:
                                JsonSchemeValidator(agent_display_name, default_entity_name_scheme).validate()
                            except Exception as e:
                                raise OperetoRuntimeError('Invalid agent agent_install_commandname: {}'.format(str(e)))
                        if agent_description:
                            try:
                                JsonSchemeValidator(agent_description, default_entity_description_scheme).validate()
                            except Exception as e:
                                raise OperetoRuntimeError('Invalid agent description: {}'.format(str(e)))
                        if agent_os == 'windows':
                            agent_data = windows_user_data(agent_name)
                        else:
                            agent_data = linux_user_data(agent_name)
                        self.agent_data_map[agent_name] = agent_data
                        ## currently override user data, add fix to handle addition to existing user data ##
                        json_template['resources'][json_template['resources'].index(resource_data)]['properties']["metadata"] = agent_name + '-meta-placeholder'
                        self.agents[agent_name] = {
                            'agent_display_name': agent_display_name,
                            'agent_description': agent_description
                        }
        # Serialize to YAML, then splice each agent's user-data block in
        # place of its placeholder string.
        template_in_yaml = yaml.dump(json_template)
        for agent_name, agent_data in self.agent_data_map.items():
            new_template = template_in_yaml.replace(agent_name + '-meta-placeholder', agent_data)
            template_in_yaml = new_template
        return template_in_yaml

    # Rewrite the main template and any import templates with agent bootstrap.
    self.deployment_template = _add_agent_installation(yaml.load(self.input['deployment_template'], Loader=yaml.SafeLoader))
    self.deployment_import_templates = []
    if self.input['deployment_import_templates']:
        for name, content in yaml.load(self.input['deployment_import_templates'], Loader=yaml.SafeLoader).items():
            content = _add_agent_installation(content)
            entry = {
                "name": name,
                "content": """{}
""".format(content)
            }
            self.deployment_import_templates.append(entry)
    self.deployment_exist = False

    self._print_step_title('Connecting to GCP..')
    # Persist the caller-supplied service-account JSON so Google client
    # libraries pick it up via GOOGLE_APPLICATION_CREDENTIALS.
    current_credential_file = os.path.join(self.input['opereto_workspace'], 'client_secret.json')
    with open(current_credential_file, 'w') as cf:
        cf.write(json.dumps(self.input['gcp_access_credentials'], indent=4))
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = current_credential_file
    credentials = gce.AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/cloud-platform'
    )
    self.gcp_http_handler = credentials.authorize(httplib2.Http())
    self.gcp_compute_manager = build('compute', 'v1')
    self.gcp_deploy_manager = build('deploymentmanager', 'v2')
    print('Connected.')
def test_constructor(self):
    """A freshly constructed AppAssertionCredentials starts unset/invalid."""
    credentials = gce.AppAssertionCredentials()
    # BUG FIX: the second positional argument of assertIsNone is the failure
    # *message*; the trailing ``None`` was a no-op leftover (likely from an
    # assertEqual-style call) and has been removed.
    self.assertIsNone(credentials.assertion_type)
    self.assertIsNone(credentials.service_account_email)
    self.assertIsNone(credentials.scopes)
    self.assertTrue(credentials.invalid)
def test_sign_blob_not_implemented(self):
    """GCE credentials cannot sign blobs locally."""
    creds = gce.AppAssertionCredentials([])
    with self.assertRaises(NotImplementedError):
        creds.sign_blob(b'blob')
def test_create_scoped_required(self):
    """Explicit scopes are not required for GCE metadata credentials."""
    creds = gce.AppAssertionCredentials()
    self.assertFalse(creds.create_scoped_required())
def credentials(self):
    """Return fresh GCE metadata-server application credentials."""
    gce_creds = oauth2client_gce.AppAssertionCredentials()
    return gce_creds
def main():
    # Obtain GCE service-account credentials scoped for read/write access to
    # Google Cloud Storage, and wrap an httplib2 client with them.
    credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/devstorage.read_write')
    http = credentials.authorize(httplib2.Http())
    # Load the headline test data.  NOTE(review): `http` is unused in the
    # visible portion of this function, and the function may continue beyond
    # this chunk -- confirm against the full file.  Presumably the CSV holds
    # headline text for evaluation; verify the expected columns.
    trainData = pd.read_csv("Headline_Testing.csv")
} self.deployment_import_templates.append(entry) self.deployment_exist = False self._print_step_title('Connecting to GCP..') current_credential_file = os.path.join(self.input['opereto_workspace'], 'client_secret.json') with open(current_credential_file, 'w') as cf: cf.write(json.dumps(self.input['gcp_access_credentials'], indent=4)) os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = current_credential_file credentials = gce.AppAssertionCredentials( scope='https://www.googleapis.com/auth/cloud-platform') self.gcp_http_handler = credentials.authorize(httplib2.Http()) self.gcp_compute_manager = build('compute', 'v1') self.gcp_deploy_manager = build('deploymentmanager', 'v2') print 'Connected.' def process(self): @retry(10, 60, 1) def verify_that_all_agents_connected(): for agent_name, attr in self.agents.items(): print 'Checking if agent %s is up and running' % agent_name try: self.client.get_agent_properties(agent_name) except: print 'Agent %s is not up yet. Recheck in one minute..' % agent_name