def _is_invalid_json(self, response):
    """Return True when the IMDS response body fails to parse as JSON."""
    try:
        json.loads(response.text)
    except ValueError:
        # Record the bad payload for debugging before reporting failure.
        self._log_imds_response(response, 'invalid json')
        return True
    return False
def _unpack_complex_cli_arg(argument_model, value, cli_name):
    """Convert a complex CLI value (structure/map/list) into Python data.

    :param argument_model: Shape model describing the argument's type.
    :param value: Raw CLI value (a string, or a list of strings for lists).
    :param cli_name: Parameter name used in error messages.
    :raises ParamError: If the value cannot be parsed.
    """
    type_name = argument_model.type_name
    if type_name in ('structure', 'map'):
        # startswith() avoids an IndexError on an empty/whitespace value;
        # such values now raise the ParamError below instead.
        if value.lstrip().startswith('{'):
            try:
                return json.loads(value, object_pairs_hook=OrderedDict)
            except ValueError as e:
                raise ParamError(
                    cli_name,
                    "Invalid JSON: %s\nJSON received: %s" % (e, value))
        raise ParamError(cli_name, "Invalid JSON:\n%s" % value)
    elif type_name == 'list':
        if isinstance(value, six.string_types):
            if value.lstrip().startswith('['):
                return json.loads(value, object_pairs_hook=OrderedDict)
        elif isinstance(value, list) and len(value) == 1:
            single_value = value[0].strip()
            if single_value.startswith('['):
                return json.loads(value[0], object_pairs_hook=OrderedDict)
        try:
            # There's a couple of cases remaining here.
            # 1. It's possible that this is just a list of strings, i.e
            # --security-group-ids sg-1 sg-2 sg-3 => ['sg-1', 'sg-2', 'sg-3']
            # 2. It's possible this is a list of json objects:
            # --filters '{"Name": ..}' '{"Name": ...}'
            member_shape_model = argument_model.member
            return [_unpack_cli_arg(member_shape_model, v, cli_name)
                    for v in value]
        except (ValueError, TypeError) as e:
            # The list params don't have a name/cli_name attached to them
            # so they will have bad error messages.  Attach the parent
            # parameter's name AND the underlying error (the original
            # dropped the exception detail entirely).
            raise ParamError(cli_name, "%s (%s)" % (value[0], e))
def unpack_complex_cli_arg(parameter, value):
    """Parse a complex (structure/map/list) CLI value into Python data.

    :param parameter: Parameter model with .type/.members attributes.
    :param value: Raw CLI string (or list of strings for list types).
    :raises ParamError: If the value is not valid JSON or a member fails
        to unpack.
    """
    if parameter.type == 'structure' or parameter.type == 'map':
        if value.lstrip()[0] == '{':
            try:
                return json.loads(value, object_pairs_hook=OrderedDict)
            except ValueError as e:
                raise ParamError(
                    parameter,
                    "Invalid JSON: %s\nJSON received: %s" % (e, value))
        raise ParamError(parameter, "Invalid JSON:\n%s" % value)
    elif parameter.type == 'list':
        if isinstance(value, six.string_types):
            if value.lstrip()[0] == '[':
                return json.loads(value, object_pairs_hook=OrderedDict)
        elif isinstance(value, list) and len(value) == 1:
            single_value = value[0].strip()
            if single_value and single_value[0] == '[':
                return json.loads(value[0], object_pairs_hook=OrderedDict)
        try:
            return [unpack_cli_arg(parameter.members, v) for v in value]
        except ParamError as e:
            # The list params don't have a name/cli_name attached to them
            # so they will have bad error messages.  We're going to attach
            # the parent parameter to this error message to provide a more
            # helpful error message.  str(e) replaces e.message, which does
            # not exist on Python 3 exceptions.
            raise ParamError(parameter, str(e))
def _unpack_complex_cli_arg(argument_model, value, cli_name):
    """Convert a complex CLI value (structure/map/list) into Python data.

    :param argument_model: Shape model describing the argument's type.
    :param value: Raw CLI value (a string, or a list of strings for lists).
    :param cli_name: Parameter name used in error messages.
    :raises ParamError: If the value cannot be parsed.
    """
    type_name = argument_model.type_name
    if type_name in ('structure', 'map'):
        # startswith() avoids an IndexError on an empty/whitespace value;
        # such values now raise the ParamError below instead.
        if value.lstrip().startswith('{'):
            try:
                return json.loads(value, object_pairs_hook=OrderedDict)
            except ValueError as e:
                raise ParamError(
                    cli_name,
                    "Invalid JSON: %s\nJSON received: %s" % (e, value))
        raise ParamError(cli_name, "Invalid JSON:\n%s" % value)
    elif type_name == 'list':
        if isinstance(value, six.string_types):
            if value.lstrip().startswith('['):
                return json.loads(value, object_pairs_hook=OrderedDict)
        elif isinstance(value, list) and len(value) == 1:
            single_value = value[0].strip()
            if single_value.startswith('['):
                return json.loads(value[0], object_pairs_hook=OrderedDict)
        try:
            # There's a couple of cases remaining here.
            # 1. It's possible that this is just a list of strings, i.e
            # --security-group-ids sg-1 sg-2 sg-3 => ['sg-1', 'sg-2', 'sg-3']
            # 2. It's possible this is a list of json objects:
            # --filters '{"Name": ..}' '{"Name": ...}'
            member_shape_model = argument_model.member
            return [
                _unpack_cli_arg(member_shape_model, v, cli_name)
                for v in value
            ]
        except (ValueError, TypeError) as e:
            # The list params don't have a name/cli_name attached to them
            # so they will have bad error messages.  Attach the parent
            # parameter's name AND the underlying error (the original
            # dropped the exception detail entirely).
            raise ParamError(cli_name, "%s (%s)" % (value[0], e))
def _get_body_as_dict(request):
    """Return the request body as a dict.

    Query services already store a form-encoded dict in request.data;
    rest-json style services hand us a JSON string or bytes, which are
    decoded and parsed here.
    """
    data = request.data
    if isinstance(data, six.binary_type):
        return json.loads(data.decode('utf-8'))
    if isinstance(data, six.string_types):
        return json.loads(data)
    return data
def json_decode_template_body(parsed, **kwargs):
    """Replace a TemplateBody string in `parsed` with decoded JSON, in place."""
    if 'TemplateBody' not in parsed:
        return
    try:
        parsed['TemplateBody'] = json.loads(parsed['TemplateBody'])
    except (ValueError, TypeError):
        # Not JSON (e.g. a YAML template): leave the raw string untouched.
        logger.debug('error loading JSON', exc_info=True)
def parse(self, s, encoding):
    """Decode `s` with `encoding`, JSON-parse it into self.value, and scan
    the parsed body for response errors.

    Failures are logged and swallowed -- an unparseable body must not
    break response handling.
    """
    try:
        self.value = json.loads(s.decode(encoding))
        self.get_response_errors()
    except Exception as err:
        logger.debug('Error loading JSON response body, %r', err)
def test_json_response(self):
    """list-users should emit JSON containing exactly the expected users."""
    stdout = self.run_cmd('iam list-users', expected_rc=0)[0]
    parsed = json.loads(stdout)
    self.assertIn('Users', parsed)
    self.assertEqual(len(parsed['Users']), 2)
    self.assertEqual(
        sorted(parsed['Users'][0].keys()),
        ['Arn', 'CreateDate', 'Path', 'UserId', 'UserName'])
def test_create_connection(self):
    """build_parameters should serialize the pipeline config verbatim."""
    topics = {
        'Completed': 'etc-topic',
        'Progressing': 'etc-topic',
        'Warning': 'etc-topic',
        'Error': 'etc-topic',
    }
    op = self.dc.get_operation('CreatePipeline')
    params = op.build_parameters(name='testpipeline',
                                 input_bucket='etc-input',
                                 output_bucket='etc-output',
                                 role='etc-role',
                                 notifications=topics)
    expected = {
        "Name": "testpipeline",
        "InputBucket": "etc-input",
        "OutputBucket": "etc-output",
        "Role": "etc-role",
        "Notifications": {
            "Completed": "etc-topic",
            "Warning": "etc-topic",
            "Progressing": "etc-topic",
            "Error": "etc-topic",
        },
    }
    json_body = json.loads(params['payload'].getvalue())
    self.assertEqual(json_body, expected)
def retrieve_iam_role_credentials(url=METADATA_SECURITY_CREDENTIALS_URL,
                                  timeout=None, num_attempts=1):
    """Recursively fetch the IAM role credential tree from the EC2
    metadata service.

    :param url: Metadata listing URL to walk.
    :param timeout: Per-request timeout; defaults to
        DEFAULT_METADATA_SERVICE_TIMEOUT.
    :param num_attempts: Retry attempts per request.
    :return: Nested dict of field -> value (JSON blobs parsed into dicts);
        empty dict when retries are exhausted.
    """
    if timeout is None:
        timeout = DEFAULT_METADATA_SERVICE_TIMEOUT
    d = {}
    try:
        r = _get_request(url, timeout, num_attempts)
        if r.content:
            fields = r.content.decode('utf-8').split('\n')
            for field in fields:
                if field.endswith('/'):
                    # Trailing slash marks a sub-listing; recurse into it.
                    d[field[0:-1]] = retrieve_iam_role_credentials(
                        url + field, timeout, num_attempts)
                else:
                    val = _get_request(
                        url + field,
                        timeout=timeout,
                        num_attempts=num_attempts).content.decode('utf-8')
                    # startswith() guards the IndexError that val[0]
                    # raised on an empty body.
                    if val.startswith('{'):
                        val = json.loads(val)
                    d[field] = val
        else:
            # NOTE(review): this branch fires on an *empty body*, not
            # necessarily a non-200 status as the message suggests.
            logger.debug("Metadata service returned non 200 status code "
                         "of %s for url: %s, content body: %s",
                         r.status_code, url, r.content)
    except _RetriesExceededError:
        logger.debug("Max number of attempts exceeded (%s) when "
                     "attempting to retrieve data from metadata service.",
                     num_attempts)
    return d
def _unpack_json_cli_arg(argument_model, value, cli_name):
    """Parse `value` as JSON (order-preserving), raising ParamError with
    the offending input on failure."""
    try:
        return json.loads(value, object_pairs_hook=OrderedDict)
    except ValueError as e:
        message = "Invalid JSON: %s\nJSON received: %s" % (e, value)
        raise ParamError(cli_name, message)
def process_mfa_security_question(self, url, statetoken, question):
    """Prompt for the answer to an Okta security-question MFA factor and
    verify it, looping until verification succeeds.

    :param url: The factor's verify URL.
    :param statetoken: Okta state token for the in-flight authn flow.
    :param question: Security question text shown to the user.
    """
    while True:
        # response = self.get_response(question + " ")
        # response = self._password_prompter("%s\r\n" % question)
        response = unix_getpass("%s\r\n" % question)
        totp_response = self._requests_session.post(
            url,
            headers={'Content-Type': 'application/json',
                     'Accept': 'application/json'},
            data=json.dumps({'stateToken': statetoken,
                             'answer': response}))
        totp_parsed = json.loads(totp_response.text)
        if totp_response.status_code == 200:
            return self.get_assertion_from_response(totp_parsed)
        elif totp_response.status_code >= 400:
            # Show Okta's error summary and wait for RETURN before
            # re-prompting for the answer.
            error = totp_parsed["errorCauses"][0]["errorSummary"]
            self._password_prompter(
                "%s\r\nPress RETURN to continue\r\n" % error)
def retrieve_saml_assertion(self, config):
    """Authenticate against Okta and return the SAML assertion.

    POSTs the configured username plus a prompted password to the Okta
    authn API, exchanges the returned sessionToken at the SAML endpoint,
    and extracts the assertion from the HTML response.

    :param config: Dict with 'saml_endpoint' and 'saml_username' keys.
    :return: The extracted SAML assertion string.
    """
    self._validate_config_values(config)
    endpoint = config['saml_endpoint']
    hostname = urlsplit(endpoint).netloc
    auth_url = 'https://%s/api/v1/authn' % hostname
    username = config['saml_username']
    password = self._password_prompter("Password: ")
    logger.info(
        'Sending HTTP POST with username (%s) and password to Okta API '
        'endpoint: %s', username, auth_url)
    response = self._requests_session.post(
        auth_url,
        headers={'Content-Type': 'application/json',
                 'Accept': 'application/json'},
        data=json.dumps({'username': username,
                         'password': password}))
    parsed = json.loads(response.text)
    # NOTE(review): assumes a password-only success response; an MFA or
    # lockout response would raise KeyError here -- confirm callers only
    # use this with password-only Okta policies.
    session_token = parsed['sessionToken']
    saml_url = endpoint + '?sessionToken=%s' % session_token
    response = self._requests_session.get(saml_url)
    logger.info('Received HTTP response of status code: %s',
                response.status_code)
    r = self._extract_saml_assertion_from_response(response.text)
    logger.info('Received the following SAML assertion: \n%s', r,
                extra={'is_saml_assertion': True})
    return r
def consumer():
    """Drain events from generator() and print one formatted line each.

    A `do_wait` sentinel either sleeps (watch mode) or ends the stream.
    Closes over generator, do_wait, group_length and max_stream_length
    from the enclosing scope.
    """
    for event in generator():
        if event is do_wait:
            if self.watch:
                time.sleep(self.watch_interval)
                continue
            else:
                return
        output = []
        if self.output_group_enabled:
            output.append(
                self.color(
                    self.log_group_name.ljust(group_length, ' '),
                    'green'
                )
            )
        if self.output_stream_enabled:
            output.append(
                self.color(
                    event['logStreamName'].ljust(max_stream_length, ' '),
                    'cyan'
                )
            )
        if self.output_timestamp_enabled:
            output.append(
                self.color(
                    milis2iso(event['timestamp']),
                    'yellow'
                )
            )
        if self.output_ingestion_time_enabled:
            output.append(
                self.color(
                    milis2iso(event['ingestionTime']),
                    'blue'
                )
            )
        message = event['message']
        if self.query is not None and message[0] == '{':
            # JSON-looking message: apply the JMESPath query and
            # re-serialize non-string results.
            parsed = json.loads(event['message'])
            message = self.query_expression.search(parsed)
            if not isinstance(message, str):
                message = json.dumps(message)
        output.append(message.rstrip())
        print(' '.join(output))
        try:
            sys.stdout.flush()
        except IOError as e:
            if e.errno == errno.EPIPE:
                # SIGPIPE received, so exit
                os._exit(0)
            else:
                # We don't want to handle any other errors from this
                raise
def consumer():
    """Drain events from generator(); in watch mode print each message,
    otherwise append stripped messages to a timestamped log file.

    NOTE(review): the log file is opened without a context manager and
    only closed on a clean return -- it leaks if generator() raises.
    """
    if not self.watch:
        f = open(f'logs_{datetime.now().strftime("%m%d_%H%M%S")}.log',
                 mode='w', encoding='utf-8')  # added by kanazawa
    for event in generator():
        if event is do_wait:
            if self.watch:
                time.sleep(self.watch_interval)
                continue
            else:
                return
        output = []
        if self.output_group_enabled:
            output.append(
                self.color(
                    self.log_group_name.ljust(group_length, ' '),
                    'green'))
        if self.output_stream_enabled:
            output.append(
                self.color(
                    event['logStreamName'].ljust(
                        max_stream_length, ' '),
                    'cyan'))
        if self.output_timestamp_enabled:
            output.append(
                self.color(milis2iso(event['timestamp']), 'yellow'))
        if self.output_ingestion_time_enabled:
            output.append(
                self.color(milis2iso(event['ingestionTime']), 'blue'))
        message = event['message']
        if self.query is not None and message[0] == '{':
            # JSON-looking message: apply the JMESPath query and
            # re-serialize non-string results.
            parsed = json.loads(event['message'])
            message = self.query_expression.search(parsed)
            if not isinstance(message, six.string_types):
                message = json.dumps(message)
        output.append(message.rstrip())
        # print(' '.join(output))  # comment out by kanazawa
        # Strip any prefix before a "[20xx" timestamp marker.
        message = re.sub(r'^.+(\[20\d\d)', r'\1', message)  # added by kanazawa
        if self.watch:
            print(message)  # added by kanazawa
        else:
            f.write(message + "\n")  # added by kanazawa
        try:
            sys.stdout.flush()
        except IOError as e:
            if e.errno == errno.EPIPE:
                # SIGPIPE received, so exit
                os._exit(0)
            else:
                # We don't want to handle any other errors from this
                raise
    if not self.watch:
        f.close()
def unpack_complex_cli_arg(parameter, value):
    """Convert a structure/map/list CLI value into Python data.

    :param parameter: Parameter model with .type/.cli_name/.members.
    :param value: Raw CLI string (or list of strings for list types).
    :raises ValueError: If a structure/map value is not inline JSON.
    """
    if parameter.type == 'structure' or parameter.type == 'map':
        # startswith() instead of [0] so an empty value raises the
        # intended ValueError below rather than an IndexError.
        if value.lstrip().startswith('{'):
            return json.loads(value, object_pairs_hook=OrderedDict)
        msg = 'The value for parameter "%s" must be JSON or path to file.' % (
            parameter.cli_name)
        raise ValueError(msg)
    elif parameter.type == 'list':
        if isinstance(value, six.string_types):
            if value.lstrip().startswith('['):
                return json.loads(value, object_pairs_hook=OrderedDict)
        elif isinstance(value, list) and len(value) == 1:
            single_value = value[0].strip()
            if single_value.startswith('['):
                return json.loads(value[0], object_pairs_hook=OrderedDict)
        return [unpack_cli_arg(parameter.members, v) for v in value]
def test_json_payload_scalar(self):
    """Scalar string params should serialize into a flat JSON object."""
    payload = botocore.payload.JSONPayload()
    for name, value in (('foo', 'value1'), ('bar', 'value2')):
        param = botocore.parameters.StringParameter(None, name=name)
        payload.add_param(param, value)
    json_body = json.loads(payload.getvalue())
    self.assertEqual(json_body, {"foo": "value1", "bar": "value2"})
def test_json_payload_list(self):
    """A list param should serialize as a JSON array under its name."""
    payload = botocore.payload.JSONPayload()
    param = botocore.parameters.ListParameter(
        None, name='foo', members={'type': 'string'})
    words = ['This', 'is', 'a', 'test']
    payload.add_param(param, words)
    json_body = json.loads(payload.getvalue())
    self.assertEqual(json_body, {"foo": ["This", "is", "a", "test"]})
def process_response(self, response, endpoint):
    """Dispatch an Okta auth response.

    Returns the SAML assertion on HTTP 200; on 4xx/5xx shows the error
    summary and returns None; other status codes also return None.
    """
    parsed = json.loads(response.text)
    status = response.status_code
    if status == 200:
        return self.get_assertion_from_response(endpoint, parsed)
    if status >= 400:
        summary = parsed["errorCauses"][0]["errorSummary"]
        self.get_response("%s\r\nPress RETURN to continue\r\n" % summary,
                          False)
    return None
# NOTE(review): this function body appears to have been mangled by a
# secret-scrubbing tool -- the literal '"Password: "******"SUCCESS"' is
# not valid Python, so the code between the password prompt and the
# status check is missing.  Presumably it POSTed the credentials to the
# Okta authn endpoint and parsed the JSON response before branching on
# parsed['status'].  Restore this function from version control before
# editing; do not trust the code below as-is.
def retrieve_saml_assertion(self, config): self._validate_config_values(config) endpoint = config['saml_endpoint'] hostname = urlsplit(endpoint).netloc auth_url = 'https://%s/api/v1/authn' % hostname username = config['saml_username'] password = self._password_prompter("Password: "******"SUCCESS": session_token = parsed['sessionToken'] elif parsed['status'] == "MFA_REQUIRED": okta_mfa = OktaMFA(parsed) session_token = okta_mfa.get_session_token() # print "the session_token is: %s" % session_token else: logger.info('Cannot proceed with authentication.', parsed['status']) # end new code saml_url = endpoint + '?sessionToken=%s' % session_token response = self._requests_session.get(saml_url) logger.info('Received HTTP response of status code: %s', response.status_code) r = self._extract_saml_assertion_from_response(response.text) logger.info('Received the following SAML assertion: \n%s', r, extra={'is_saml_assertion': True}) return r
def yaml_parse(yamlstr):
    """Parse a yaml string"""
    try:
        # PyYAML doesn't support json as well as it should, so if the input
        # is actually just json it is better to parse it with the standard
        # json parser.
        return json.loads(yamlstr, object_pairs_hook=OrderedDict)
    except ValueError:
        # NOTE(review): these constructors are registered on the shared
        # yaml.SafeLoader class, mutating global PyYAML state for every
        # caller in the process -- consider a SafeLoader subclass instead.
        yaml.SafeLoader.add_constructor(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            _dict_constructor)
        yaml.SafeLoader.add_multi_constructor(
            "!", intrinsics_multi_constructor)
        return yaml.safe_load(yamlstr)
def switch_host_with_param(request, param_name):
    """Repoint request.url at the endpoint named by `param_name` in the
    JSON request body, preserving the original path and query string."""
    body = json.loads(request.data.decode('utf-8'))
    new_endpoint = body.get(param_name)
    if not new_endpoint:
        return
    new_parts = urlsplit(new_endpoint)
    old_parts = urlsplit(request.url)
    request.url = urlunsplit((
        new_parts.scheme,
        new_parts.netloc,
        old_parts.path,
        old_parts.query,
        '',
    ))
def yaml_parse(yamlstr):
    """Parse a yaml string"""
    try:
        # PyYAML doesn't support json as well as it should, so if the input
        # is actually just json it is better to parse it with the standard
        # json parser.
        return json.loads(yamlstr, object_pairs_hook=OrderedDict)
    except ValueError:
        # Not JSON: fall back to YAML, registering the custom constructors
        # on a wrapper loader so the global SafeLoader is not polluted.
        loader = SafeLoaderWrapper
        loader.add_constructor(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            _dict_constructor)
        loader.add_multi_constructor("!", intrinsics_multi_constructor)
        return yaml.load(yamlstr, loader)
def switch_host_with_param(request, param_name):
    """Swap the scheme/netloc of request.url for those of the endpoint
    found under `param_name` in the JSON request body; the original path
    and query are kept."""
    body = json.loads(request.data.decode('utf-8'))
    if not body.get(param_name):
        return
    replacement = urlsplit(body[param_name])
    current = urlsplit(request.url)
    request.url = urlunsplit(
        (replacement.scheme, replacement.netloc,
         current.path, current.query, ''))
def process_mfa_push(self, endpoint, url, statetoken):
    """Drive an Okta push-notification MFA factor to completion.

    Polls the factor's verify URL until Okta reports SUCCESS (returning
    the SAML assertion) or the factor leaves the WAITING state (raising
    SAMLError).  NOTE(review): polls with no delay between requests --
    confirm this is acceptable for Okta's rate limits.
    """
    self.get_response(("Press RETURN when you are ready to request the "
                       "push notification"), False)
    while True:
        totp_response = self._requests_session.post(
            url,
            headers={'Content-Type': 'application/json',
                     'Accept': 'application/json'},
            data=json.dumps({'stateToken': statetoken}))
        totp_parsed = json.loads(totp_response.text)
        if totp_parsed["status"] == "SUCCESS":
            return self.get_assertion_from_response(endpoint, totp_parsed)
        if totp_parsed["factorResult"] != "WAITING":
            # Anything other than WAITING (e.g. REJECTED, TIMEOUT) means
            # the push will never succeed.
            raise SAMLError(self._ERROR_AUTH_CANCELLED)
def process_mfa_okta_push(self, url, statetoken):
    """Poll an Okta push MFA factor until it is approved or cancelled.

    Returns the SAML assertion on SUCCESS; raises SAMLError when the
    factor leaves the WAITING state without succeeding.
    NOTE(review): polls with no delay between requests -- confirm this
    is acceptable for Okta's rate limits.
    """
    eprint("sent push to device, awaiting response...")
    while True:
        totp_response = self._requests_session.post(
            url,
            headers={'Content-Type': 'application/json',
                     'Accept': 'application/json'},
            data=json.dumps({'stateToken': statetoken}))
        totp_parsed = json.loads(totp_response.text)
        if totp_parsed["status"] == "SUCCESS":
            return self.get_assertion_from_response(totp_parsed)
        elif totp_parsed["factorResult"] != "WAITING":
            raise SAMLError(self._ERROR_AUTH_CANCELLED)
def _get_response(self, full_url, headers, timeout):
    """GET `full_url` and return its body parsed as JSON.

    :raises MetadataRetrievalError: on a non-200 status, an unparseable
        body, or a retryable HTTP failure.
    """
    try:
        resp = self._session.get(full_url, headers=headers,
                                 timeout=timeout)
        if resp.status_code != 200:
            raise MetadataRetrievalError(
                error_msg="Received non 200 response (%s) from ECS metadata: %s"
                % (resp.status_code, resp.text))
        try:
            return json.loads(resp.text)
        except ValueError:
            raise MetadataRetrievalError(
                error_msg=("Unable to parse JSON returned from "
                           "ECS metadata: %s" % resp.text))
    except RETRYABLE_HTTP_ERRORS as e:
        message = ("Received error when attempting to retrieve "
                   "ECS metadata: %s" % e)
        raise MetadataRetrievalError(error_msg=message)
def list_logs(self, streams=None):
    """Write formatted CloudWatch log events to self.io until exhausted.

    Columns (group, stream, timestamp, ingestion time) are individually
    colorized and space-joined before the message; JSON-looking messages
    are filtered through the configured JMESPath query.  Exits the
    process on a broken pipe or KeyboardInterrupt.

    :param streams: Ignored -- immediately overwritten from
        self._list_streams().  NOTE(review): possibly meant to be
        honored when provided; confirm before relying on it.
    """
    streams = self._list_streams()
    # Fall back to a width of 10 so ljust() still aligns when there are
    # no streams at all.
    max_stream_length = max([len(s) for s in streams]) if streams else 10
    group_length = len(self.log_group_name)
    try:
        for event in self.iter_logs():
            output = []
            if self.output_group_enabled:
                output.append(
                    self.color(
                        self.log_group_name.ljust(group_length, ' '),
                        'green'))
            if self.output_stream_enabled:
                output.append(
                    self.color(
                        event['logStreamName'].ljust(
                            max_stream_length, ' '),
                        'cyan'))
            if self.output_timestamp_enabled:
                output.append(
                    self.color(milis2iso(event['timestamp']), 'yellow'))
            if self.output_ingestion_time_enabled:
                output.append(
                    self.color(milis2iso(event['ingestionTime']), 'blue'))
            message = event['message']
            if self.query is not None and message[0] == '{':
                # JSON-looking message: apply the JMESPath query and
                # re-serialize non-string results.
                parsed = json.loads(event['message'])
                message = self.query_expression.search(parsed)
                if not isinstance(message, six.string_types):
                    message = json.dumps(message)
            output.append(message.rstrip())
            self.io.write(' '.join(output) + '\n')
            try:
                self.io.flush()
            except IOError as e:
                if e.errno == errno.EPIPE:
                    # SIGPIPE received, so exit
                    os._exit(0)
                else:
                    # We don't want to handle any other errors from this
                    raise
    except KeyboardInterrupt:
        self.io.write('Closing...\n')
        os._exit(0)
def test_can_always_json_serialize_headers(self):
    """Response metadata must stay JSON-serializable even when headers
    arrive wrapped in a custom mapping type."""
    parser = self.create_parser()
    headers = CustomHeaderDict({
        'x-amzn-requestid': 'request-id',
        'Header1': 'foo',
    })
    shape = self.create_arbitary_output_shape()
    parsed = parser.parse(
        {'body': b'{}', 'headers': headers, 'status_code': 200}, shape)
    metadata = parsed['ResponseMetadata']
    # We've had the contract that you can json serialize a response, so
    # round-trip the metadata through json to prove it.
    round_tripped = json.loads(json.dumps(metadata))
    self.assertEqual(round_tripped['HTTPHeaders']['header1'], 'foo')
def process_mfa_sms(self, url, statetoken):
    """Drive an Okta SMS MFA factor: trigger the text message, prompt for
    the code (typing RESEND re-triggers the SMS), and verify until the
    factor succeeds."""
    # An empty pass-code request asks Okta to send the SMS.
    self.verify_sms_factor(url, statetoken, "")
    while True:
        response = self.get_response(self._MSG_SMS_CODE)
        if response == "RESEND":
            # An empty code means "send another SMS" rather than "verify".
            response = ""
        sms_response = self.verify_sms_factor(url, statetoken, response)
        # If we've just requested a resend, don't check the result
        # - just loop around to get the next response from the user.
        if response != "":
            sms_parsed = json.loads(sms_response.text)
            if sms_response.status_code == 200:
                return self.get_assertion_from_response(sms_parsed)
            elif sms_response.status_code >= 400:
                error = sms_parsed["errorCauses"][0]["errorSummary"]
                self._password_prompter(
                    ("%s\r\n"
                     "Press RETURN to continue\r\n") % error)
def _get_response(self, full_url, headers, timeout):
    """GET `full_url` and return its body parsed as JSON.

    :raises MetadataRetrievalError: on a non-200 status, an unparseable
        body, or a retryable HTTP failure.
    """
    try:
        resp = self._session.get(full_url, headers=headers,
                                 timeout=timeout)
        if resp.status_code != 200:
            raise MetadataRetrievalError(
                error_msg="Received non 200 response (%s) from ECS metadata: %s"
                % (resp.status_code, resp.text))
        try:
            return json.loads(resp.text)
        except ValueError:
            raise MetadataRetrievalError(
                error_msg=("Unable to parse JSON returned from "
                           "ECS metadata: %s" % resp.text))
    except RETRYABLE_HTTP_ERRORS as e:
        message = ("Received error when attempting to retrieve "
                   "ECS metadata: %s" % e)
        raise MetadataRetrievalError(error_msg=message)
def retrieve_iam_role_credentials(self):
    """Fetch IAM role credentials from the instance metadata service.

    :return: Dict with role_name/access_key/secret_key/token/expiry_time
        keys, or {} when nothing could be retrieved.
    """
    url = self._url
    timeout = self._timeout
    num_attempts = self._num_attempts

    def walk(listing_url):
        # Recursively walk a metadata listing into a nested dict.
        # BUG FIX: the original recursed into
        # retrieve_iam_role_credentials() with three arguments that this
        # zero-argument method does not accept, raising TypeError on any
        # nested listing; the recursion belongs in this local helper.
        result = {}
        r = self._get_request(listing_url, timeout, num_attempts)
        if r.content:
            for field in r.content.decode('utf-8').split('\n'):
                if field.endswith('/'):
                    # Trailing slash marks a sub-listing; recurse.
                    result[field[0:-1]] = walk(listing_url + field)
                else:
                    val = self._get_request(
                        listing_url + field,
                        timeout=timeout,
                        num_attempts=num_attempts,
                    ).content.decode('utf-8')
                    # startswith() guards the IndexError val[0] raised
                    # on an empty body.
                    if val.startswith('{'):
                        val = json.loads(val)
                    result[field] = val
        else:
            logger.debug(
                "Metadata service returned non 200 status code "
                "of %s for url: %s, content body: %s",
                r.status_code, listing_url, r.content)
        return result

    data = {}
    try:
        data = walk(url)
    except _RetriesExceededError:
        logger.debug(
            "Max number of attempts exceeded (%s) when "
            "attempting to retrieve data from metadata service.",
            num_attempts)
    # We sort for stable ordering. In practice, this should only consist
    # of one role, but may need revisiting if this expands in the future.
    final_data = {}
    for role_name in sorted(data):
        final_data = {
            'role_name': role_name,
            'access_key': data[role_name]['AccessKeyId'],
            'secret_key': data[role_name]['SecretAccessKey'],
            'token': data[role_name]['Token'],
            'expiry_time': data[role_name]['Expiration'],
        }
    return final_data
# NOTE(review): this function body appears to have been mangled by a
# secret-scrubbing tool -- the literal '"Password: "******"errorSummary"])'
# is not valid Python, so the code between the password prompt and the
# error handling is missing.  Presumably it POSTed the credentials to
# the Okta authn endpoint, parsed the JSON response, and surfaced any
# error summary before branching on parsed["status"].  Restore this
# function from version control before editing; do not trust the code
# below as-is.
def retrieve_saml_assertion(self, config): # unix_getpass("hello?") self._validate_config_values(config) self.endpoint = config['saml_endpoint'] hostname = urlsplit(self.endpoint).netloc auth_url = 'https://%s/api/v1/authn' % hostname username = config['saml_username'] password = self._password_prompter("Password: "******"errorSummary"]) if "status" in parsed: if parsed["status"] == "SUCCESS": return self.get_assertion_from_response(parsed) elif parsed["status"] == "LOCKED_OUT": raise SAMLError(self._ERROR_LOCKED_OUT % parsed["_links"]["href"]) elif parsed["status"] == "PASSWORD_EXPIRED": raise SAMLError(self._ERROR_PASSWORD_EXPIRED % parsed["_links"]["href"]) elif parsed["status"] == "MFA_ENROLL": raise SAMLError(self._ERROR_MFA_ENROLL) elif parsed["status"] == "MFA_REQUIRED": return self.process_mfa_verification(parsed) raise SAMLError("Code logic failure")
def load_file(self, file_path):
    """Attempt to load the file path.

    :type file_path: str
    :param file_path: The full path to the file to load without
        the '.json' extension.

    :return: The loaded data if it exists, otherwise None.
    """
    full_path = file_path + '.json'
    if not os.path.isfile(full_path):
        return
    # Read as bytes and decode explicitly: Python 3 would otherwise open
    # the file with the locale encoding, and we always want UTF-8.
    with open(full_path, 'rb') as fp:
        raw = fp.read()
    return json.loads(raw.decode('utf-8'), object_pairs_hook=OrderedDict)
def retrieve_iam_role_credentials(self):
    """Fetch IAM role credentials from the instance metadata service.

    :return: Dict with role_name/access_key/secret_key/token/expiry_time
        keys, or {} when nothing could be retrieved.
    """
    url = self._url
    timeout = self._timeout
    num_attempts = self._num_attempts

    def walk(listing_url):
        # Recursively walk a metadata listing into a nested dict.
        # BUG FIX: the original recursed into
        # retrieve_iam_role_credentials() with three arguments that this
        # zero-argument method does not accept, raising TypeError on any
        # nested listing; the recursion belongs in this local helper.
        result = {}
        r = self._get_request(listing_url, timeout, num_attempts)
        if r.content:
            for field in r.content.decode("utf-8").split("\n"):
                if field.endswith("/"):
                    # Trailing slash marks a sub-listing; recurse.
                    result[field[0:-1]] = walk(listing_url + field)
                else:
                    val = self._get_request(
                        listing_url + field,
                        timeout=timeout,
                        num_attempts=num_attempts,
                    ).content.decode("utf-8")
                    # startswith() guards the IndexError val[0] raised
                    # on an empty body.
                    if val.startswith("{"):
                        val = json.loads(val)
                    result[field] = val
        else:
            logger.debug(
                "Metadata service returned non 200 status code "
                "of %s for url: %s, content body: %s",
                r.status_code,
                listing_url,
                r.content,
            )
        return result

    data = {}
    try:
        data = walk(url)
    except _RetriesExceededError:
        logger.debug(
            "Max number of attempts exceeded (%s) when "
            "attempting to retrieve data from metadata service.",
            num_attempts,
        )
    # We sort for stable ordering. In practice, this should only consist
    # of one role, but may need revisiting if this expands in the future.
    final_data = {}
    for role_name in sorted(data):
        final_data = {
            "role_name": role_name,
            "access_key": data[role_name]["AccessKeyId"],
            "secret_key": data[role_name]["SecretAccessKey"],
            "token": data[role_name]["Token"],
            "expiry_time": data[role_name]["Expiration"],
        }
    return final_data
def launch_cmd(role_arn, profile=None):
    """Assume `role_arn` with STS and open the AWS web console as that role.

    Uses the federation getSigninToken flow: trade the temporary
    credentials for a sign-in token, then open the login URL in a
    browser.  Relies on the module-level sign_in_url / issuer_url /
    console_url globals.

    NOTE(review): uses the legacy botocore session.get_service() /
    get_operation() API -- confirm the pinned botocore still provides it.
    """
    session = botocore.session.get_session()
    session.profile = profile
    sts = session.get_service('sts')
    endpoint = sts.get_endpoint()
    op = sts.get_operation('AssumeRole')
    creds = op.call(endpoint, role_arn=role_arn,
                    role_session_name='foobar')[1]
    # The federation endpoint expects exactly these key spellings.
    d = {'sessionId': creds['Credentials']['AccessKeyId'],
         'sessionKey': creds['Credentials']['SecretAccessKey'],
         'sessionToken': creds['Credentials']['SessionToken']}
    json_str = json.dumps(d)
    params = {'Action': 'getSigninToken', 'Session': json_str}
    r = botocore.vendored.requests.get(sign_in_url, params=params)
    # Reuse `d` for the login request parameters built on the token
    # response.
    d = json.loads(r.text)
    d['Action'] = 'login'
    d['Issuer'] = issuer_url
    d['Destination'] = console_url
    uri = sign_in_url + '?' + urlencode(d)
    webbrowser.open(uri)
def _get_response(self, full_url, headers, timeout):
    """GET `full_url` via a prepared AWSRequest and return the JSON body.

    :raises MetadataRetrievalError: on a non-200 status, an unparseable
        body, or a retryable HTTP failure.
    """
    # NOTE: `timeout` is accepted but never applied to the request here.
    try:
        AWSRequest = botocore.awsrequest.AWSRequest
        prepared = AWSRequest(method='GET', url=full_url,
                              headers=headers).prepare()
        response = self._session.send(prepared)
        body = response.content.decode('utf-8')
        if response.status_code != 200:
            raise MetadataRetrievalError(
                error_msg="Received non 200 response (%s) from ECS metadata: %s"
                % (response.status_code, body))
        try:
            return json.loads(body)
        except ValueError:
            raise MetadataRetrievalError(
                error_msg=("Unable to parse JSON returned from "
                           "ECS metadata: %s" % body))
    except RETRYABLE_HTTP_ERRORS as e:
        message = ("Received error when attempting to retrieve "
                   "ECS metadata: %s" % e)
        raise MetadataRetrievalError(error_msg=message)
def test_create_connection(self):
    """build_parameters should serialize the pipeline config verbatim."""
    notifications = {'Completed': 'etc-topic',
                     'Progressing': 'etc-topic',
                     'Warning': 'etc-topic',
                     'Error': 'etc-topic'}
    op = self.dc.get_operation('CreatePipeline')
    params = op.build_parameters(
        name='testpipeline',
        input_bucket='etc-input',
        output_bucket='etc-output',
        role='etc-role',
        notifications=notifications)
    expected = {
        "OutputBucket": "etc-output",
        "Notifications": {"Completed": "etc-topic",
                          "Warning": "etc-topic",
                          "Progressing": "etc-topic",
                          "Error": "etc-topic"},
        "Role": "etc-role",
        "Name": "testpipeline",
        "InputBucket": "etc-input",
    }
    self.assertEqual(json.loads(params['payload'].getvalue()), expected)
def _search_md(url="http://169.254.169.254/latest/meta-data/iam/security-credentials/"):
    """Recursively walk the EC2 instance-metadata tree at `url` into a dict.

    Values that look like JSON objects are parsed; multi-line values are
    split into lists.  Returns {} (possibly partial) on timeout or
    connection errors.
    """
    d = {}
    try:
        r = requests.get(url, timeout=0.1)
        if r.status_code == 200 and r.content:
            for field in r.content.decode("utf-8").split("\n"):
                if field.endswith("/"):
                    # Trailing slash marks a sub-listing; recurse.
                    d[field[0:-1]] = _search_md(url + field)
                else:
                    val = requests.get(url + field).content.decode("utf-8")
                    if val.startswith("{"):
                        # JSON document, e.g. the credentials blob.
                        val = json.loads(val)
                    elif "\n" in val:
                        # BUG FIX: the original re-split the *parent*
                        # listing (r.content) here instead of splitting
                        # this field's own multi-line value.
                        val = val.split("\n")
                    d[field] = val
    except (requests.Timeout, requests.ConnectionError):
        pass
    return d
def load_json(data):
    """Deserialize a JSON string, preserving key order via OrderedDict."""
    return json.loads(data, object_pairs_hook=OrderedDict)
def test_constructed_result(self, call_patch):
    """run_cmd output should parse to the constructed (post-processed)
    result when the raw API call returns CREATE_CLUSTER_RESULT."""
    call_patch.return_value = CREATE_CLUSTER_RESULT
    cmd = DEFAULT_CMD
    result = self.run_cmd(cmd, expected_rc=0)
    result_json = json.loads(result[0])
    # assertEqual: assertEquals is a deprecated alias, removed in
    # Python 3.12.
    self.assertEqual(result_json, CONSTRUCTED_RESULT)
def _run_main_command(self, parsed_args, parsed_globals):
    """Build and submit an EMR RunJobFlow request from the parsed CLI args.

    Validates mutually exclusive options, assembles the Instances /
    Applications / Steps configuration, calls run_job_flow, and displays
    the constructed result.

    :return: 0 on success.
    :raises exceptions.MutualExclusiveOptionError: for conflicting options.
    :raises exceptions.InvalidAmiVersionError: for a malformed AMI version.
    :raises exceptions.MissingAutoScalingRoleError: if an autoscaling
        policy is present without --auto-scaling-role.
    :raises exceptions.LogUriError: if debugging is enabled without a
        log URI.
    """
    params = {}
    params['Name'] = parsed_args.name
    self._validate_release_label_ami_version(parsed_args)

    service_role_validation_message = (
        " Either choose --use-default-roles or use both --service-role "
        "<roleName> and --ec2-attributes InstanceProfile=<profileName>.")

    if parsed_args.use_default_roles is True and \
            parsed_args.service_role is not None:
        raise exceptions.MutualExclusiveOptionError(
            option1="--use-default-roles",
            option2="--service-role",
            message=service_role_validation_message)

    if parsed_args.use_default_roles is True and \
            parsed_args.ec2_attributes is not None and \
            'InstanceProfile' in parsed_args.ec2_attributes:
        raise exceptions.MutualExclusiveOptionError(
            option1="--use-default-roles",
            option2="--ec2-attributes InstanceProfile",
            message=service_role_validation_message)

    if parsed_args.instance_groups is not None and \
            parsed_args.instance_fleets is not None:
        raise exceptions.MutualExclusiveOptionError(
            option1="--instance-groups",
            option2="--instance-fleets")

    instances_config = {}
    if parsed_args.instance_fleets is not None:
        instances_config['InstanceFleets'] = \
            instancefleetsutils.validate_and_build_instance_fleets(
                parsed_args.instance_fleets)
    else:
        instances_config['InstanceGroups'] = \
            instancegroupsutils.validate_and_build_instance_groups(
                instance_groups=parsed_args.instance_groups,
                instance_type=parsed_args.instance_type,
                instance_count=parsed_args.instance_count)

    if parsed_args.release_label is not None:
        params["ReleaseLabel"] = parsed_args.release_label
    if parsed_args.configurations is not None:
        try:
            params["Configurations"] = json.loads(
                parsed_args.configurations)
        except ValueError:
            raise ValueError('aws: error: invalid json argument for '
                             'option --configurations')

    if (parsed_args.release_label is None and
            parsed_args.ami_version is not None):
        # Raw string: '\d' in a plain literal is an invalid escape
        # sequence (a SyntaxWarning on modern Pythons).
        is_valid_ami_version = re.match(r'\d?\..*',
                                        parsed_args.ami_version)
        if is_valid_ami_version is None:
            raise exceptions.InvalidAmiVersionError(
                ami_version=parsed_args.ami_version)
        params['AmiVersion'] = parsed_args.ami_version

    emrutils.apply_dict(
        params, 'AdditionalInfo', parsed_args.additional_info)
    emrutils.apply_dict(params, 'LogUri', parsed_args.log_uri)

    if parsed_args.use_default_roles is True:
        parsed_args.service_role = EMR_ROLE_NAME
        if parsed_args.ec2_attributes is None:
            parsed_args.ec2_attributes = {}
        parsed_args.ec2_attributes['InstanceProfile'] = EC2_ROLE_NAME

    emrutils.apply_dict(params, 'ServiceRole', parsed_args.service_role)

    if parsed_args.instance_groups is not None:
        # An AutoScalingPolicy on any group requires an explicit role.
        for instance_group in instances_config['InstanceGroups']:
            if 'AutoScalingPolicy' in instance_group:
                if parsed_args.auto_scaling_role is None:
                    raise exceptions.MissingAutoScalingRoleError()

    emrutils.apply_dict(params, 'AutoScalingRole',
                        parsed_args.auto_scaling_role)

    if parsed_args.scale_down_behavior is not None:
        emrutils.apply_dict(params, 'ScaleDownBehavior',
                            parsed_args.scale_down_behavior)

    if (parsed_args.no_auto_terminate is False and
            parsed_args.auto_terminate is False):
        # Neither flag supplied: default to keeping the cluster alive.
        parsed_args.no_auto_terminate = True

    instances_config['KeepJobFlowAliveWhenNoSteps'] = \
        emrutils.apply_boolean_options(
            parsed_args.no_auto_terminate,
            '--no-auto-terminate',
            parsed_args.auto_terminate,
            '--auto-terminate')

    instances_config['TerminationProtected'] = \
        emrutils.apply_boolean_options(
            parsed_args.termination_protected,
            '--termination-protected',
            parsed_args.no_termination_protected,
            '--no-termination-protected')

    if (parsed_args.visible_to_all_users is False and
            parsed_args.no_visible_to_all_users is False):
        # Neither flag supplied: default to visible.
        parsed_args.visible_to_all_users = True

    params['VisibleToAllUsers'] = \
        emrutils.apply_boolean_options(
            parsed_args.visible_to_all_users,
            '--visible-to-all-users',
            parsed_args.no_visible_to_all_users,
            '--no-visible-to-all-users')

    params['Tags'] = emrutils.parse_tags(parsed_args.tags)
    params['Instances'] = instances_config

    if parsed_args.ec2_attributes is not None:
        self._build_ec2_attributes(
            cluster=params, parsed_attrs=parsed_args.ec2_attributes)

    debugging_enabled = emrutils.apply_boolean_options(
        parsed_args.enable_debugging,
        '--enable-debugging',
        parsed_args.no_enable_debugging,
        '--no-enable-debugging')

    if parsed_args.log_uri is None and debugging_enabled is True:
        raise exceptions.LogUriError

    if debugging_enabled is True:
        self._update_cluster_dict(
            cluster=params,
            key='Steps',
            value=[self._build_enable_debugging(parsed_args,
                                                parsed_globals)])

    if parsed_args.applications is not None:
        if parsed_args.release_label is None:
            # AMI-version clusters install applications via supported
            # products / bootstrap actions / steps.
            app_list, ba_list, step_list = \
                applicationutils.build_applications(
                    region=self.region,
                    parsed_applications=parsed_args.applications,
                    ami_version=params['AmiVersion'])
            self._update_cluster_dict(
                params, 'NewSupportedProducts', app_list)
            self._update_cluster_dict(
                params, 'BootstrapActions', ba_list)
            self._update_cluster_dict(
                params, 'Steps', step_list)
        else:
            # Release-label clusters take the application list directly.
            params["Applications"] = list(parsed_args.applications)

    hbase_restore_config = parsed_args.restore_from_hbase_backup
    if hbase_restore_config is not None:
        args = hbaseutils.build_hbase_restore_from_backup_args(
            dir=hbase_restore_config.get('Dir'),
            backup_version=hbase_restore_config.get('BackupVersion'))
        step_config = emrutils.build_step(
            jar=constants.HBASE_JAR_PATH,
            name=constants.HBASE_RESTORE_STEP_NAME,
            action_on_failure=constants.CANCEL_AND_WAIT,
            args=args)
        self._update_cluster_dict(
            params, 'Steps', [step_config])

    if parsed_args.bootstrap_actions is not None:
        self._build_bootstrap_actions(
            cluster=params,
            parsed_boostrap_actions=parsed_args.bootstrap_actions)

    if parsed_args.emrfs is not None:
        self._handle_emrfs_parameters(
            cluster=params,
            emrfs_args=parsed_args.emrfs,
            release_label=parsed_args.release_label)

    if parsed_args.steps is not None:
        steps_list = steputils.build_step_config_list(
            parsed_step_list=parsed_args.steps,
            region=self.region,
            release_label=parsed_args.release_label)
        self._update_cluster_dict(
            cluster=params, key='Steps', value=steps_list)

    if parsed_args.security_configuration is not None:
        emrutils.apply_dict(
            params, 'SecurityConfiguration',
            parsed_args.security_configuration)

    if parsed_args.custom_ami_id is not None:
        emrutils.apply_dict(
            params, 'CustomAmiId', parsed_args.custom_ami_id)

    if parsed_args.ebs_root_volume_size is not None:
        emrutils.apply_dict(
            params, 'EbsRootVolumeSize',
            int(parsed_args.ebs_root_volume_size))

    if parsed_args.repo_upgrade_on_boot is not None:
        emrutils.apply_dict(
            params, 'RepoUpgradeOnBoot',
            parsed_args.repo_upgrade_on_boot)

    self._validate_required_applications(parsed_args)

    run_job_flow_response = emrutils.call(
        self._session, 'run_job_flow', params, self.region,
        parsed_globals.endpoint_url, parsed_globals.verify_ssl)

    constructed_result = self._construct_result(run_job_flow_response)
    emrutils.display_response(self._session, 'run_job_flow',
                              constructed_result, parsed_globals)
    return 0
def test_jmespath_json_response(self):
    """--query should filter list-users output through JMESPath."""
    jmespath_query = 'Users[*].UserName'
    stdout = self.run_cmd('iam list-users --query %s' % jmespath_query,
                          expected_rc=0)[0]
    usernames = json.loads(stdout)
    self.assertEqual(usernames, ['testuser-50', 'testuser-51'])
def load_def(self, json_string):
    """Deserialize a JSON definition string, preserving key order."""
    return json.loads(json_string, object_pairs_hook=OrderedDict)
def decode_quoted_jsondoc(value):
    """URL-unquote `value` and parse it as JSON; on failure, log the error
    and return the original value unchanged."""
    try:
        return json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
        return value
def decode_jsondoc(event_name, shape, value, **kwargs):
    """Best-effort JSON decode of `value`; on failure, log the error and
    return the value untouched."""
    try:
        return json.loads(value)
    except (ValueError, TypeError):
        logger.debug('error loading JSON', exc_info=True)
        return value
def switch_host_with_param(request, param_name):
    """Switches the host using a parameter value from a JSON request body"""
    body = json.loads(request.data.decode('utf-8'))
    endpoint = body.get(param_name)
    if endpoint:
        _switch_hosts(request, endpoint)
def _get_credentials(self, role_name):
    """Fetch and JSON-decode the credentials document for `role_name`."""
    response = self._get_request(
        url_path=self._URL_PATH + role_name,
        retry_func=self._needs_retry_for_credentials,
    )
    return json.loads(response.text)