def assert_equal(first, second, prefix):
    """Assert ``first == second``; on mismatch raise AssertionError whose
    message starts with *prefix* and shows both operands, pretty-printed
    as JSON when they are serializable.
    """
    template = "%s (actual != expected)\n%s !=\n%s"
    try:
        _assert_equal(first, second)
    except Exception:
        try:
            rendered = (json.dumps(first, indent=2),
                        json.dumps(second, indent=2))
        except (ValueError, TypeError):
            # Operands are not JSON-serializable; show them via plain %s.
            rendered = (first, second)
        raise AssertionError(template % ((prefix,) + tuple(rendered)))
def assert_equal(first, second, prefix):
    """Compare *first* and *second*; raise AssertionError with a message
    built from *prefix* and both values when they differ.

    Values are JSON-pretty-printed when possible so diffs are readable.
    """
    try:
        _assert_equal(first, second)
    except Exception:
        msg_fmt = "%s (actual != expected)\n%s !=\n%s"
        try:
            detail = msg_fmt % (prefix,
                                json.dumps(first, indent=2),
                                json.dumps(second, indent=2))
        except (ValueError, TypeError):
            # Fall back to the raw values for non-serializable operands.
            detail = msg_fmt % (prefix, first, second)
        raise AssertionError(detail)
def test_happy_path_unicode(self, getsize_patch):
    """Verify create-provisioning-artifact handles non-ASCII name and
    description, and prints the response (minus ResponseMetadata) as
    non-ASCII-preserving JSON."""
    # Arrange
    self.args.provisioning_artifact_name = u'\u05d1\u05e8\u05d9\u05e6'
    self.args.provisioning_artifact_description = u'\u00fd\u00a9\u0194'
    self.servicecatalog_client.create_provisioning_artifact\
        .return_value = self.get_create_provisioning_artifact_output()
    expected_pa_detail = self.get_create_provisioning_artifact_output()
    # ResponseMetadata is stripped before the detail is shown to the user.
    del expected_pa_detail['ResponseMetadata']
    # ensure_ascii=False keeps the unicode characters readable in stdout.
    expected_response_output = json.dumps(expected_pa_detail,
                                          indent=2,
                                          ensure_ascii=False)
    # Act
    with capture_output() as captured:
        result = self.cmd._run_main(self.args, self.global_args)
    # Assert
    self.session.create_client.assert_called_with(
        'servicecatalog',
        region_name=self.global_args.region,
        endpoint_url=None,
        verify=None)
    self.servicecatalog_client.create_provisioning_artifact.\
        assert_called_once_with(
            ProductId=self.args.product_id,
            Parameters=self.get_provisioning_artifact_parameters(
                self.args.provisioning_artifact_name,
                self.args.provisioning_artifact_description,
                self.args.provisioning_artifact_type
            )
        )
    self.assertEqual(expected_response_output, captured.stdout.getvalue())
    self.assertEqual(0, result)
def add_auth(self, request):
    """Populate the form fields and policy for an S3 presigned POST:
    inject the access key (and session token, if any), base64-encode the
    policy document, and sign it."""
    context = request.context
    fields = context.get('s3-presign-post-fields')
    if fields is None:
        fields = {}
    policy = context.get('s3-presign-post-policy')
    if policy is None:
        policy = {}
    conditions = policy.get('conditions')
    if conditions is None:
        conditions = []
    policy['conditions'] = conditions
    fields['AWSAccessKeyId'] = self.credentials.access_key
    token = self.credentials.token
    if token is not None:
        # Temporary credentials need the session token both as a form
        # field and as a policy condition.
        fields['x-amz-security-token'] = token
        conditions.append({'x-amz-security-token': token})
    # The policy document is sent (and signed) base64-encoded.
    fields['policy'] = base64.b64encode(
        json.dumps(policy).encode('utf-8')).decode('utf-8')
    fields['signature'] = self.sign_string(fields['policy'])
    context['s3-presign-post-fields'] = fields
    context['s3-presign-post-policy'] = policy
def process_mfa_security_question(self, url, statetoken, question):
    """Prompt for the answer to an Okta MFA security question and submit
    it, retrying until Okta accepts the answer (HTTP 200)."""
    json_headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    while True:
        # Read the answer without echoing it to the terminal.
        answer = unix_getpass("%s\r\n" % question)
        payload = json.dumps({'stateToken': statetoken, 'answer': answer})
        reply = self._requests_session.post(url, headers=json_headers,
                                            data=payload)
        parsed = json.loads(reply.text)
        if reply.status_code == 200:
            return self.get_assertion_from_response(parsed)
        if reply.status_code >= 400:
            # Surface Okta's error and wait for acknowledgement before
            # prompting for the answer again.
            summary = parsed["errorCauses"][0]["errorSummary"]
            self._password_prompter("%s\r\nPress RETURN to continue\r\n"
                                    % summary)
def retrieve_saml_assertion(self, config):
    """Authenticate against the Okta API with the configured username and
    a prompted password, then exchange the session token for a SAML
    assertion and return it."""
    self._validate_config_values(config)
    endpoint = config['saml_endpoint']
    hostname = urlsplit(endpoint).netloc
    auth_url = 'https://%s/api/v1/authn' % hostname
    username = config['saml_username']
    password = self._password_prompter("Password: ")
    logger.info(
        'Sending HTTP POST with username (%s) and password to Okta API '
        'endpoint: %s', username, auth_url)
    auth_response = self._requests_session.post(
        auth_url,
        headers={'Content-Type': 'application/json',
                 'Accept': 'application/json'},
        data=json.dumps({'username': username, 'password': password}))
    parsed = json.loads(auth_response.text)
    session_token = parsed['sessionToken']
    # The session token is exchanged for the assertion via the endpoint.
    saml_url = endpoint + '?sessionToken=%s' % session_token
    saml_response = self._requests_session.get(saml_url)
    logger.info('Received HTTP response of status code: %s',
                saml_response.status_code)
    assertion = self._extract_saml_assertion_from_response(
        saml_response.text)
    logger.info('Received the following SAML assertion: \n%s', assertion,
                extra={'is_saml_assertion': True})
    return assertion
def consumer():
    # Consume rendered log events from generator() (closure variable) and
    # print each one to stdout as colored, space-separated columns.
    for event in generator():
        if event is do_wait:
            # Sentinel meaning "no more events right now": in watch mode
            # sleep and poll again, otherwise we are finished.
            if self.watch:
                time.sleep(self.watch_interval)
                continue
            else:
                return
        output = []
        if self.output_group_enabled:
            output.append(
                self.color(
                    self.log_group_name.ljust(group_length, ' '),
                    'green'
                )
            )
        if self.output_stream_enabled:
            output.append(
                self.color(
                    event['logStreamName'].ljust(max_stream_length, ' '),
                    'cyan'
                )
            )
        if self.output_timestamp_enabled:
            output.append(
                self.color(
                    milis2iso(event['timestamp']),
                    'yellow'
                )
            )
        if self.output_ingestion_time_enabled:
            output.append(
                self.color(
                    milis2iso(event['ingestionTime']),
                    'blue'
                )
            )
        message = event['message']
        # When a query is configured and the message looks like JSON,
        # show the query result instead of the raw message (presumably a
        # JMESPath expression -- confirm against query_expression's type).
        if self.query is not None and message[0] == '{':
            parsed = json.loads(event['message'])
            message = self.query_expression.search(parsed)
            if not isinstance(message, str):
                message = json.dumps(message)
        output.append(message.rstrip())
        print(' '.join(output))
        try:
            sys.stdout.flush()
        except IOError as e:
            if e.errno == errno.EPIPE:
                # SIGPIPE received, so exit
                os._exit(0)
            else:
                # We don't want to handle any other errors from this
                raise
def getvalue(self):
    """Return the payload as a JSON string.

    A truthy ``_value`` is serialized with ``json.dumps``; otherwise the
    pre-rendered ``_literal_value`` is returned unchanged.
    """
    if self._value:
        return json.dumps(self._value)
    return self._literal_value
def _create_cache_key(self): cache_key_kwargs = { 'provider_name': self._provider_name, 'saml_config': self._config.copy() } cache_key_kwargs = json.dumps(cache_key_kwargs, sort_keys=True) argument_hash = sha1(cache_key_kwargs.encode('utf-8')).hexdigest() return self._make_file_safe(argument_hash)
def consumer():
    # Variant of the log-stream consumer, locally patched (see "added by
    # kanazawa" comments): outside watch mode, cleaned-up messages are
    # written to a timestamped log file instead of being printed with
    # colored columns.
    if not self.watch:
        f = open(f'logs_{datetime.now().strftime("%m%d_%H%M%S")}.log',
                 mode='w', encoding='utf-8')  # added by kanazawa
    for event in generator():
        if event is do_wait:
            # Sentinel: in watch mode sleep and poll again, else stop.
            if self.watch:
                time.sleep(self.watch_interval)
                continue
            else:
                return
        output = []
        if self.output_group_enabled:
            output.append(
                self.color(
                    self.log_group_name.ljust(group_length, ' '),
                    'green'))
        if self.output_stream_enabled:
            output.append(
                self.color(
                    event['logStreamName'].ljust(
                        max_stream_length, ' '),
                    'cyan'))
        if self.output_timestamp_enabled:
            output.append(
                self.color(milis2iso(event['timestamp']), 'yellow'))
        if self.output_ingestion_time_enabled:
            output.append(
                self.color(milis2iso(event['ingestionTime']), 'blue'))
        message = event['message']
        # Apply the configured query to JSON-looking messages.
        if self.query is not None and message[0] == '{':
            parsed = json.loads(event['message'])
            message = self.query_expression.search(parsed)
            if not isinstance(message, six.string_types):
                message = json.dumps(message)
        output.append(message.rstrip())
        # print(' '.join(output))  # comment out by kanazawa
        # Strip everything before a "[20xx" timestamp marker.
        # NOTE(review): `message` here is NOT the rstripped copy appended
        # to `output` above, so a trailing newline from the event may
        # produce blank lines in the file -- confirm intended.
        message = re.sub(r'^.+(\[20\d\d)', r'\1', message)  # added by kanazawa
        if self.watch:
            print(message)  # added by kanazawa
        else:
            f.write(message + "\n")  # added by kanazawa
        try:
            sys.stdout.flush()
        except IOError as e:
            if e.errno == errno.EPIPE:
                # SIGPIPE received, so exit
                os._exit(0)
            else:
                # We don't want to handle any other errors from this
                raise
    if not self.watch:
        f.close()
def assert_create_default_role(self, role, assume_policy):
    """Assert the second recorded operation is a CreateRole call for
    *role* with *assume_policy* as its JSON-encoded trust document."""
    called = self.operations_called[1]
    self.assertEqual(called[0].name, 'CreateRole')
    self.assertEqual(called[1]['RoleName'], role)
    self.assertEqual(called[1]['AssumeRolePolicyDocument'],
                     json.dumps(assume_policy))
def add_describe_endpoints_response(self, stubber, discovered_endpoint):
    """Queue two stubbed HTTP responses on *stubber*: a DescribeEndpoints
    payload advertising *discovered_endpoint*, then an empty success body
    for the follow-up call."""
    endpoints_payload = {
        'Endpoints': [
            {'Address': discovered_endpoint, 'CachePeriodInMinutes': 1},
        ],
    }
    stubber.add_response(
        status=200, body=json.dumps(endpoints_payload).encode())
    stubber.add_response(status=200, body=b'{}')
def _convert_header_value(self, shape, value):
    """Render *value* for use in an HTTP header according to *shape*."""
    if shape.type_name == 'timestamp':
        # Headers carry timestamps in RFC 822 form.
        as_datetime = parse_to_aware_datetime(value)
        epoch_seconds = calendar.timegm(as_datetime.utctimetuple())
        return self._timestamp_rfc822(epoch_seconds)
    if is_json_value_header(shape):
        # Serialize with no spaces after separators to save space in
        # the header.
        return self._get_base64(json.dumps(value, separators=(',', ':')))
    return value
# NOTE(review): this block is CORRUPTED -- the text between the password
# prompt and the status check was replaced by a "******" redaction marker
# (likely by a secret-scrubbing tool), so the function below is not valid
# Python as-is.  Recover the missing POST/json.loads section from version
# control before editing further.
def retrieve_saml_assertion(self, config): self._validate_config_values(config) endpoint = config['saml_endpoint'] hostname = urlsplit(endpoint).netloc auth_url = 'https://%s/api/v1/authn' % hostname username = config['saml_username'] password = self._password_prompter("Password: "******"SUCCESS": session_token = parsed['sessionToken'] elif parsed['status'] == "MFA_REQUIRED": okta_mfa = OktaMFA(parsed) session_token = okta_mfa.get_session_token() # print "the session_token is: %s" % session_token else: logger.info('Cannot proceed with authentication.', parsed['status']) # end new code saml_url = endpoint + '?sessionToken=%s' % session_token response = self._requests_session.get(saml_url) logger.info('Received HTTP response of status code: %s', response.status_code) r = self._extract_saml_assertion_from_response(response.text) logger.info('Received the following SAML assertion: \n%s', r, extra={'is_saml_assertion': True}) return r
def verify_sms_factor(self, url, statetoken, passcode):
    """POST an SMS-factor verification to Okta and return the raw
    response.

    An empty *passcode* omits the passCode field (requesting that a code
    be sent); a non-empty one submits it for verification.
    """
    payload = {'stateToken': statetoken}
    if passcode != "":
        payload['passCode'] = passcode
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    return self._requests_session.post(url, headers=headers,
                                       data=json.dumps(payload))
def test_constructed_result(self, role_exists_patch,
                            instance_profile_exists_patch,
                            create_role_patch, get_role_policy_patch):
    """The command prints the constructed result as indented JSON when
    the role is created and the instance profile already exists."""
    role_exists_patch.side_effect = self.toggle_for_check_if_exists
    instance_profile_exists_patch.return_value = True
    create_role_patch.return_value = self.CREATE_DATAPIPELINE_ROLE_RESULT
    get_role_policy_patch.return_value = self.DATAPIPELINE_ROLE_POLICY
    result = self.run_cmd(self.prefix, 0)
    expected_output = json.dumps(self.CONSTRUCTED_RESULT_OUTPUT,
                                 indent=4) + '\n'
    # assertEquals is a deprecated alias removed in Python 3.12; use
    # assertEqual instead.
    self.assertEqual(result[0], expected_output)
def test_constructed_result(self, role_exists_patch,
                            instance_profile_exists_patch,
                            create_role_patch):
    """create-default-roles in cn-north-1 prints the constructed result
    as indented JSON when the roles do not yet exist."""
    role_exists_patch.side_effect = side_effect_of_check_if_role_exists
    instance_profile_exists_patch.return_value = False
    create_role_patch.return_value = (http_response, CREATE_EC2_ROLE_RESULT)
    cmdline = self.prefix + ' --region cn-north-1'
    result = self.run_cmd(cmdline, 0)
    expected_output = json.dumps(CONSTRUCTED_RESULT_OUTPUT, indent=4) + '\n'
    # assertEquals is a deprecated alias removed in Python 3.12; use
    # assertEqual instead.
    self.assertEqual(result[0], expected_output)
def serialize_to_request(self, parameters, operation_model):
    """Serialize *parameters* into a JSON-RPC request dict for
    *operation_model*: X-Amz-Target header, versioned content type, and
    the parameters JSON-dumped as the body."""
    metadata = operation_model.metadata
    serialized = self._create_default_request()
    serialized['method'] = operation_model.http.get('method',
                                                    self.DEFAULT_METHOD)
    serialized['headers'] = {
        'X-Amz-Target': '%s.%s' % (metadata['targetPrefix'],
                                   operation_model.name),
        'Content-Type': ('application/x-amz-json-%s'
                         % metadata['jsonVersion']),
    }
    serialized['body'] = json.dumps(parameters)
    return serialized
def _convert_header_value(self, shape, value):
    """Render *value* for use in an HTTP header based on *shape*'s
    type."""
    if shape.type_name == 'timestamp':
        as_datetime = parse_to_aware_datetime(value)
        epoch_seconds = calendar.timegm(as_datetime.utctimetuple())
        # Individual shapes may override the default header timestamp
        # format via their serialization traits.
        fmt = shape.serialization.get('timestampFormat',
                                      self.HEADER_TIMESTAMP_FORMAT)
        return self._convert_timestamp_to_str(epoch_seconds, fmt)
    if is_json_value_header(shape):
        # Serialize with no spaces after separators to save space in
        # the header.
        return self._get_base64(json.dumps(value, separators=(',', ':')))
    return value
def test_happy_path_unicode(self, getsize_patch):
    """Verify create-product handles a non-ASCII product name and support
    description, and prints the product view detail (minus
    ResponseMetadata) as indented JSON."""
    # Arrange
    self.args.product_name = u'\u05d1\u05e8\u05d9\u05e6\u05e7\u05dc\u05d4'
    self.args.support_description = u'\u00fd\u00a9\u0194\u0292'
    actual_product_view_detail = self.get_product_view_detail()
    self.servicecatalog_client.create_product.return_value = \
        actual_product_view_detail
    expected_product_view_detail = self.get_product_view_detail()
    # ResponseMetadata is stripped from the displayed response.
    del expected_product_view_detail["ResponseMetadata"]
    expected_response_output = json.dumps(expected_product_view_detail,
                                          indent=2)
    expected_args = self.get_args_dict()
    # Act
    with capture_output() as captured:
        result = self.cmd._run_main(self.args, self.global_args)
    # Assert
    self.session.create_client.assert_called_with(
        'servicecatalog',
        region_name=self.global_args.region,
        endpoint_url=None,
        verify=None)
    self.servicecatalog_client.create_product.assert_called_once_with(
        Name=expected_args['product-name'],
        Owner=expected_args['product-owner'],
        Description=expected_args['product-description'],
        Distributor=expected_args['product-distributor'],
        SupportDescription=expected_args['support-description'],
        SupportEmail=expected_args['support-email'],
        ProductType=expected_args['product-type'],
        Tags=expected_args['tags'],
        ProvisioningArtifactParameters=(
            self.get_provisioning_artifact_parameters(
                self.args.provisioning_artifact_name,
                self.args.provisioning_artifact_description,
                self.args.provisioning_artifact_type
            )
        )
    )
    self.assertEqual(expected_response_output, captured.stdout.getvalue())
    # assertEquals is a deprecated alias removed in Python 3.12; use
    # assertEqual instead.
    self.assertEqual(0, result)
def set_http_responses_to(self, *responses):
    """Configure the mocked HTTP GET to return *responses* in order.

    Each item may be an Exception (simulating an error condition), a
    pre-created fake response (anything with a ``status_code``), or a
    plain object that is JSON-encoded into a 200 fake response.
    """
    prepared = []
    for response in responses:
        if isinstance(response, Exception):
            prepared.append(response)
        elif hasattr(response, 'status_code'):
            prepared.append(response)
        else:
            prepared.append(self.fake_response(
                status_code=200, body=json.dumps(response)))
    self.http.get.side_effect = prepared
def test_endpoint_discovery_with_invalid_endpoint(self):
    """An InvalidEndpointException during endpoint discovery surfaces to
    the caller as a ClientError."""
    error_body = json.dumps({
        'Error': {
            'Code': 'InvalidEndpointException',
            'Message': 'Test Error',
        }
    }).encode()
    config = Config(endpoint_discovery_enabled=True)
    client, http_stubber = self.create_client(config=config)
    with http_stubber as stubber:
        # 421 tells the client the endpoint it used was not valid.
        stubber.add_response(status=421, body=error_body)
        with self.assertRaises(ClientError):
            client.describe_table(TableName='sometable')
def process_mfa_okta_push(self, url, statetoken):
    """Poll Okta until the push factor is approved; raise SAMLError if it
    is cancelled, rejected, or expired."""
    eprint("sent push to device, awaiting response...")
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    body = json.dumps({'stateToken': statetoken})
    while True:
        poll = self._requests_session.post(url, headers=headers, data=body)
        parsed = json.loads(poll.text)
        if parsed["status"] == "SUCCESS":
            return self.get_assertion_from_response(parsed)
        if parsed["factorResult"] != "WAITING":
            # Anything other than WAITING means the push was not approved.
            raise SAMLError(self._ERROR_AUTH_CANCELLED)
def _create_request_object(self, operation, params):
    """Build an AWSRequest for the JSON-RPC *operation* carrying *params*
    as the JSON body."""
    json_version = '1.0'
    if hasattr(self.service, 'json_version'):
        json_version = str(self.service.json_version)
    headers = {
        'User-Agent': self.session.user_agent(),
        'X-Amz-Target': '%s.%s' % (self.service.target_prefix,
                                   operation.name),
        'Content-Type': 'application/x-amz-json-%s' % json_version,
        'Content-Encoding': 'amz-1.0',
    }
    return AWSRequest(method='POST', url=self.host,
                      data=json.dumps(params), headers=headers)
def test_dynamodb_endpoint_discovery_enabled(self):
    """With discovery enabled, the follow-up request is routed to the
    address advertised by DescribeEndpoints."""
    discovered_endpoint = 'https://discovered.domain'
    discovery_body = json.dumps({
        'Endpoints': [{
            'Address': discovered_endpoint,
            'CachePeriodInMinutes': 1,
        }]
    }).encode()
    with self.http_stubber as stubber:
        stubber.add_response(status=200, body=discovery_body)
        stubber.add_response(status=200, body=b'{}')
        self.client.describe_table(TableName='sometable')
        self.assertEqual(len(self.http_stubber.requests), 2)
        discover_request = self.http_stubber.requests[1]
        self.assertEqual(discover_request.url, discovered_endpoint)
def process_mfa_push(self, endpoint, url, statetoken):
    """Wait for the user to request the push factor, then poll Okta until
    it is approved (SUCCESS) or cancelled."""
    self.get_response(("Press RETURN when you are ready to request the "
                       "push notification"), False)
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    payload = json.dumps({'stateToken': statetoken})
    while True:
        poll = self._requests_session.post(url, headers=headers,
                                           data=payload)
        parsed = json.loads(poll.text)
        if parsed["status"] == "SUCCESS":
            return self.get_assertion_from_response(endpoint, parsed)
        if parsed["factorResult"] != "WAITING":
            # The push was cancelled, rejected, or expired.
            raise SAMLError(self._ERROR_AUTH_CANCELLED)
def serialize_to_request(self, parameters, operation_model):
    """Serialize *parameters* to a JSON-protocol request dict: target
    header, versioned content type, and the encoded JSON body built by
    walking the operation's input shape."""
    metadata = operation_model.metadata
    serialized = self._create_default_request()
    serialized['method'] = operation_model.http.get('method',
                                                    self.DEFAULT_METHOD)
    serialized['headers'] = {
        'X-Amz-Target': '%s.%s' % (metadata['targetPrefix'],
                                   operation_model.name),
        'Content-Type': ('application/x-amz-json-%s'
                         % metadata['jsonVersion']),
    }
    body = {}
    input_shape = operation_model.input_shape
    if input_shape is not None:
        # Walk the input shape to build the wire representation.
        self._serialize(body, parameters, input_shape)
    serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING)
    return serialized
def test_can_always_json_serialize_headers(self):
    """ResponseMetadata must stay JSON-serializable even when the HTTP
    headers come from a custom dict type."""
    parser = self.create_parser()
    headers = CustomHeaderDict({
        'x-amzn-requestid': 'request-id',
        'Header1': 'foo',
    })
    output_shape = self.create_arbitary_output_shape()
    parsed = parser.parse(
        {'body': b'{}', 'headers': headers, 'status_code': 200},
        output_shape)
    metadata = parsed['ResponseMetadata']
    # We've had the contract that you can json serialize a response, so
    # round-trip the metadata through json to prove it still holds.
    round_tripped = json.loads(json.dumps(metadata))
    self.assertEqual(round_tripped['HTTPHeaders']['header1'], 'foo')
def serialize_to_request(self, parameters, operation_model):
    """Build the JSON-protocol request dict for *operation_model* from
    *parameters*."""
    target = '.'.join([operation_model.metadata['targetPrefix'],
                       operation_model.name])
    serialized = self._create_default_request()
    serialized['method'] = operation_model.http.get('method',
                                                    self.DEFAULT_METHOD)
    content_type = ('application/x-amz-json-%s'
                    % operation_model.metadata['jsonVersion'])
    serialized['headers'] = {'X-Amz-Target': target,
                             'Content-Type': content_type}
    body = {}
    if operation_model.input_shape is not None:
        # Only serialize a body when the operation declares inputs.
        self._serialize(body, parameters, operation_model.input_shape)
    serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING)
    return serialized
def list_logs(self, streams=None):
    # Render every log event to self.io as colored, space-separated
    # columns; Ctrl-C exits cleanly via os._exit.
    # NOTE(review): the ``streams`` parameter is immediately overwritten
    # -- streams are always re-fetched via _list_streams().  Confirm the
    # parameter is dead before removing it.
    streams = self._list_streams()
    # Column width for stream names; 10 is the fallback when none exist.
    max_stream_length = max([len(s) for s in streams]) if streams else 10
    group_length = len(self.log_group_name)
    try:
        for event in self.iter_logs():
            output = []
            if self.output_group_enabled:
                output.append(
                    self.color(
                        self.log_group_name.ljust(group_length, ' '),
                        'green'))
            if self.output_stream_enabled:
                output.append(
                    self.color(
                        event['logStreamName'].ljust(
                            max_stream_length, ' '),
                        'cyan'))
            if self.output_timestamp_enabled:
                output.append(
                    self.color(milis2iso(event['timestamp']), 'yellow'))
            if self.output_ingestion_time_enabled:
                output.append(
                    self.color(milis2iso(event['ingestionTime']), 'blue'))
            message = event['message']
            # Apply the configured query to JSON-looking messages and
            # show the query result instead of the raw message.
            if self.query is not None and message[0] == '{':
                parsed = json.loads(event['message'])
                message = self.query_expression.search(parsed)
                if not isinstance(message, six.string_types):
                    message = json.dumps(message)
            output.append(message.rstrip())
            self.io.write(' '.join(output) + '\n')
            try:
                self.io.flush()
            except IOError as e:
                if e.errno == errno.EPIPE:
                    # SIGPIPE received, so exit
                    os._exit(0)
                else:
                    # We don't want to handle any other errors from this
                    raise
    except KeyboardInterrupt:
        self.io.write('Closing...\n')
        os._exit(0)
def _run_main(self, parsed_args, parsed_globals):
    """Create a Service Catalog product from the parsed CLI arguments and
    print the response as indented JSON.  Returns 0 on success."""
    super(GenerateProductCommand, self)._run_main(parsed_args,
                                                  parsed_globals)
    self.region = self.get_and_validate_region(parsed_globals)
    self.s3_url = self.create_s3_url(parsed_args.bucket_name,
                                     parsed_args.file_path)
    self.scs_client = self._session.create_client(
        'servicecatalog',
        region_name=self.region,
        endpoint_url=parsed_globals.endpoint_url,
        verify=parsed_globals.verify_ssl)
    product_args = self.build_args(parsed_args, self.s3_url)
    response = self.create_product(product_args, parsed_globals)
    # ensure_ascii=False keeps unicode product names readable on stdout.
    sys.stdout.write(json.dumps(response, indent=2, ensure_ascii=False))
    return 0
# NOTE(review): this block is CORRUPTED -- the text between the password
# prompt and the error handling was replaced by a "******" redaction
# marker (likely by a secret-scrubbing tool), so the function below is
# not valid Python as-is.  Recover the missing POST/json.loads section
# from version control before editing further.
def retrieve_saml_assertion(self, config): # unix_getpass("hello?") self._validate_config_values(config) self.endpoint = config['saml_endpoint'] hostname = urlsplit(self.endpoint).netloc auth_url = 'https://%s/api/v1/authn' % hostname username = config['saml_username'] password = self._password_prompter("Password: "******"errorSummary"]) if "status" in parsed: if parsed["status"] == "SUCCESS": return self.get_assertion_from_response(parsed) elif parsed["status"] == "LOCKED_OUT": raise SAMLError(self._ERROR_LOCKED_OUT % parsed["_links"]["href"]) elif parsed["status"] == "PASSWORD_EXPIRED": raise SAMLError(self._ERROR_PASSWORD_EXPIRED % parsed["_links"]["href"]) elif parsed["status"] == "MFA_ENROLL": raise SAMLError(self._ERROR_MFA_ENROLL) elif parsed["status"] == "MFA_REQUIRED": return self.process_mfa_verification(parsed) raise SAMLError("Code logic failure")
def process_mfa_security_question(self, endpoint, url, statetoken):
    """Prompt for the security-question answer and submit it, repeating
    until process_response yields a non-None result."""
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    while True:
        answer = self.get_response(self._MSG_ANSWER)
        reply = self._requests_session.post(
            url,
            headers=headers,
            data=json.dumps({'stateToken': statetoken, 'answer': answer}))
        result = self.process_response(reply, endpoint)
        if result is not None:
            return result
def _run_main(self, parsed_args, parsed_globals):
    """Entry point: create the product described by *parsed_args* and
    emit the service response as indented JSON on stdout."""
    super(GenerateProductCommand, self)._run_main(parsed_args,
                                                  parsed_globals)
    self.region = self.get_and_validate_region(parsed_globals)
    self.s3_url = self.create_s3_url(parsed_args.bucket_name,
                                     parsed_args.file_path)
    client_kwargs = {
        'region_name': self.region,
        'endpoint_url': parsed_globals.endpoint_url,
        'verify': parsed_globals.verify_ssl,
    }
    self.scs_client = self._session.create_client('servicecatalog',
                                                  **client_kwargs)
    response = self.create_product(
        self.build_args(parsed_args, self.s3_url), parsed_globals)
    sys.stdout.write(json.dumps(response, indent=2, ensure_ascii=False))
    return 0
def launch_cmd(role_arn, profile=None):
    """Assume *role_arn* via STS and open the AWS console in a browser
    through the federation sign-in flow (getSigninToken + login URL)."""
    session = botocore.session.get_session()
    session.profile = profile
    sts = session.get_service('sts')
    endpoint = sts.get_endpoint()
    assume_role = sts.get_operation('AssumeRole')
    creds = assume_role.call(endpoint, role_arn=role_arn,
                             role_session_name='foobar')[1]['Credentials']
    session_payload = json.dumps({
        'sessionId': creds['AccessKeyId'],
        'sessionKey': creds['SecretAccessKey'],
        'sessionToken': creds['SessionToken'],
    })
    # Exchange the temporary credentials for a federation sign-in token.
    signin = botocore.vendored.requests.get(
        sign_in_url,
        params={'Action': 'getSigninToken', 'Session': session_payload})
    login_params = json.loads(signin.text)
    login_params['Action'] = 'login'
    login_params['Issuer'] = issuer_url
    login_params['Destination'] = console_url
    webbrowser.open(sign_in_url + '?' + urlencode(login_params))
def _create_request_object(self, operation, params):
    """Construct the AWSRequest that carries *params* as the JSON body
    for *operation*."""
    if hasattr(self.service, "json_version"):
        json_version = str(self.service.json_version)
    else:
        json_version = "1.0"
    request_headers = {
        "User-Agent": self.session.user_agent(),
        "X-Amz-Target": "%s.%s" % (self.service.target_prefix,
                                   operation.name),
        "Content-Type": "application/x-amz-json-%s" % json_version,
        "Content-Encoding": "amz-1.0",
    }
    return AWSRequest(
        method="POST",
        url=self.host,
        data=json.dumps(params),
        headers=request_headers,
    )
def add_auth(self, request):
    """Fill in the SigV4 fields and policy for an S3 presigned POST:
    algorithm, credential scope, timestamp, optional security token, then
    the base64-encoded policy document and its signature."""
    # datetime.utcnow() is deprecated since Python 3.12.  An aware UTC
    # datetime formats identically here because SIGV4_TIMESTAMP contains
    # no %z/%Z directive (it is '%Y%m%dT%H%M%SZ' -- confirm against the
    # module constant).
    datetime_now = datetime.datetime.now(datetime.timezone.utc)
    request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
    fields = {}
    if request.context.get('s3-presign-post-fields', None) is not None:
        fields = request.context['s3-presign-post-fields']
    policy = {}
    conditions = []
    if request.context.get('s3-presign-post-policy', None) is not None:
        policy = request.context['s3-presign-post-policy']
    if policy.get('conditions', None) is not None:
        conditions = policy['conditions']
    policy['conditions'] = conditions
    fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
    fields['x-amz-credential'] = self.scope(request)
    fields['x-amz-date'] = request.context['timestamp']
    # Each field the client sends must also appear as a policy condition.
    conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'})
    conditions.append({'x-amz-credential': self.scope(request)})
    conditions.append({'x-amz-date': request.context['timestamp']})
    if self.credentials.token is not None:
        fields['x-amz-security-token'] = self.credentials.token
        conditions.append({'x-amz-security-token': self.credentials.token})
    # Dump the base64 encoded policy into the fields dictionary.
    fields['policy'] = base64.b64encode(
        json.dumps(policy).encode('utf-8')).decode('utf-8')
    fields['x-amz-signature'] = self.signature(fields['policy'], request)
    request.context['s3-presign-post-fields'] = fields
    request.context['s3-presign-post-policy'] = policy
def _serialize_body_params(self, params, shape): return json.dumps(params)
def _serialize_body_params(self, params, shape): serialized_body = self.MAP_TYPE() self._serialize(serialized_body, params, shape) return json.dumps(serialized_body)
def _serialize_body_params(self, params, shape): serialized_body = self.MAP_TYPE() self._serialize(serialized_body, params, shape) return json.dumps(serialized_body).encode(self.DEFAULT_ENCODING)
def _try_json_dump(obj): try: return json.dumps(obj) except (ValueError, TypeError): return str(obj)