def __init__(self):
    super().__init__()
    self.headers = CaseInsensitiveDict()
    self.raw_response = None
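# A minimal standalone sketch (not part of the snippet above) of why requests'
# CaseInsensitiveDict is used for HTTP headers: lookups ignore case, while the
# casing of the last key written is preserved on iteration.
from requests.structures import CaseInsensitiveDict

headers = CaseInsensitiveDict()
headers['Content-Type'] = 'application/json'
assert headers['content-type'] == 'application/json'  # case-insensitive get
assert list(headers) == ['Content-Type']              # original casing kept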
def forward(self, method):
    path = self.path
    if '://' in path:
        path = '/' + path.split('://', 1)[1].split('/', 1)[1]
    proxy_url = 'http://%s%s' % (self.proxy.forward_host, path)
    target_url = self.path
    if '://' not in target_url:
        target_url = 'http://%s%s' % (self.proxy.forward_host, target_url)
    data = None
    if method in ['POST', 'PUT', 'PATCH']:
        data_string = self.data_bytes
        try:
            if not isinstance(data_string, string_types):
                data_string = data_string.decode(DEFAULT_ENCODING)
            data = json.loads(data_string)
        except Exception:
            # unable to parse JSON, fall back to verbatim string/bytes
            data = data_string

    forward_headers = CaseInsensitiveDict(self.headers)
    # update original "Host" header (moto s3 relies on this behavior)
    if not forward_headers.get('Host'):
        forward_headers['host'] = urlparse(target_url).netloc
    if 'localhost.atlassian.io' in forward_headers.get('Host'):
        forward_headers['host'] = 'localhost'

    try:
        response = None
        modified_request = None

        # update listener (pre-invocation)
        if self.proxy.update_listener:
            listener_result = self.proxy.update_listener(
                method=method, path=path, data=data,
                headers=forward_headers, return_forward_info=True)
            if isinstance(listener_result, Response):
                response = listener_result
            elif isinstance(listener_result, Request):
                modified_request = listener_result
                data = modified_request.data
                forward_headers = modified_request.headers
            elif listener_result is not True:
                # get status code from result, or fall back to 503 Service Unavailable
                code = listener_result if isinstance(listener_result, int) else 503
                self.send_response(code)
                self.end_headers()
                return

        if response is None:
            if modified_request:
                response = self.method(proxy_url, data=modified_request.data,
                                       headers=modified_request.headers)
            else:
                response = self.method(proxy_url, data=self.data_bytes,
                                       headers=forward_headers)

        # update listener (post-invocation)
        if self.proxy.update_listener:
            updated_response = self.proxy.update_listener(
                method=method, path=path, data=data,
                headers=forward_headers, response=response)
            if isinstance(updated_response, Response):
                response = updated_response

        # copy headers and return response
        self.send_response(response.status_code)
        for header_key, header_value in iteritems(response.headers):
            if header_key.lower() != 'content-length':
                self.send_header(header_key, header_value)
        self.send_header('Content-Length', '%s' % len(response.content))

        # allow pre-flight CORS headers by default
        if 'Access-Control-Allow-Origin' not in response.headers:
            self.send_header('Access-Control-Allow-Origin', '*')
        if 'Access-Control-Allow-Methods' not in response.headers:
            self.send_header('Access-Control-Allow-Methods', 'HEAD,GET,PUT,POST,DELETE,OPTIONS,PATCH')
        if 'Access-Control-Allow-Headers' not in response.headers:
            self.send_header('Access-Control-Allow-Headers', ','.join([
                'authorization', 'content-type', 'content-md5', 'x-amz-content-sha256',
                'x-amz-date', 'x-amz-security-token', 'x-amz-user-agent']))

        self.end_headers()
        if len(response.content):
            self.wfile.write(bytes_(response.content))
        self.wfile.flush()
    except Exception as e:
        if not self.proxy.quiet or 'ConnectionRefusedError' not in str(traceback.format_exc()):
            LOGGER.error('Error forwarding request: %s %s' % (e, traceback.format_exc()))
        self.send_response(502)  # bad gateway
        self.end_headers()
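# A hypothetical update_listener compatible with the forward() method above.
# The keyword signature is taken from the call sites in the snippet; the
# listener name and its routing behavior are illustrative assumptions.
def example_update_listener(method=None, path=None, data=None, headers=None,
                            return_forward_info=False, response=None, **kwargs):
    if response is not None:
        return response       # post-invocation: pass the response through unchanged
    if path.startswith('/blocked'):
        return 403            # short-circuit the proxy with an HTTP status code
    return True               # True means: forward the request as-is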
import base64
from random import randint, choice
import json
import os
import requests
import time
import uuid
from itertools import count
from requests.structures import CaseInsensitiveDict

headers = CaseInsensitiveDict()
headers["Accept"] = "application/json"

""" Variables to maintain customer ID and header """
cust_id = ""
base64_str = base64.encodebytes(('%s:%s' % ("user", "password")).encode()).decode().strip()
print(base64_str)

""" URL of the API endpoints of the application to send and receive requests """
url = "https://ca-staging-api.ctds-suite.com/api/ApplicationStatus/Get"  #os.environ['URL']
#url = "https://ca-staging-api.ctds-suite.com/api/"

jsonAssessment = {
    "jsonDocumentData": "good",
    "documentId": "1",
    "documentTypeVersion": 0,
    "documentTypeName": "ok",
    "assessmentDateTimeUtc": "2021-12-01T08:14:39.783Z",
class Config(object):
    """
    This class is responsible for:

      1) Building and giving access to `effective_configuration` from:
         * `Config.__DEFAULT_CONFIG` -- some sane default values
         * `dynamic_configuration` -- configuration stored in DCS
         * `local_configuration` -- configuration from `config.yml` or environment

      2) Saving and loading `dynamic_configuration` into 'patroni.dynamic.json' file
         located in local_configuration['postgresql']['data_dir'] directory.
         This is necessary to be able to restore `dynamic_configuration`
         if DCS was accidentally wiped

      3) Loading of configuration file in the old format and converting it into new format

      4) Mimicking some of the `dict` interfaces to make it possible
         to work with it as with the old `config` object.
    """

    PATRONI_ENV_PREFIX = 'PATRONI_'
    PATRONI_CONFIG_VARIABLE = PATRONI_ENV_PREFIX + 'CONFIGURATION'

    __CACHE_FILENAME = 'patroni.dynamic.json'
    __DEFAULT_CONFIG = {
        'ttl': 30,
        'loop_wait': 10,
        'retry_timeout': 10,
        'maximum_lag_on_failover': 1048576,
        'check_timeline': False,
        'master_start_timeout': 300,
        'synchronous_mode': False,
        'synchronous_mode_strict': False,
        'standby_cluster': {
            'create_replica_methods': '',
            'host': '',
            'port': '',
            'primary_slot_name': '',
            'restore_command': '',
            'archive_cleanup_command': '',
            'recovery_min_apply_delay': ''
        },
        'postgresql': {
            'bin_dir': '',
            'use_slots': True,
            'parameters': CaseInsensitiveDict({p: v[0] for p, v in ConfigHandler.CMDLINE_OPTIONS.items()})
        },
        'watchdog': {
            'mode': 'automatic',
        }
    }

    def __init__(self):
        self._modify_index = -1
        self._dynamic_configuration = {}

        self.__environment_configuration = self._build_environment_configuration()

        # Patroni reads the configuration from the command-line argument if it exists,
        # otherwise from the environment
        self._config_file = len(sys.argv) >= 2 and os.path.isfile(sys.argv[1]) and sys.argv[1]
        if self._config_file:
            self._local_configuration = self._load_config_file()
        else:
            config_env = os.environ.pop(self.PATRONI_CONFIG_VARIABLE, None)
            self._local_configuration = config_env and yaml.safe_load(config_env) \
                or self.__environment_configuration
            if not self._local_configuration:
                print('Usage: {0} config.yml'.format(sys.argv[0]))
                print('\tPatroni may also read the configuration from the {0} environment variable'.
                      format(self.PATRONI_CONFIG_VARIABLE))
                sys.exit(1)

        self.__effective_configuration = self._build_effective_configuration({}, self._local_configuration)
        self._data_dir = self.__effective_configuration['postgresql']['data_dir']
        self._cache_file = os.path.join(self._data_dir, self.__CACHE_FILENAME)
        self._load_cache()
        self._cache_needs_saving = False

    @property
    def config_file(self):
        return self._config_file

    @property
    def dynamic_configuration(self):
        return deepcopy(self._dynamic_configuration)

    def check_mode(self, mode):
        return bool(parse_bool(self._dynamic_configuration.get(mode)))

    def _load_config_file(self):
        """Loads config.yaml from filesystem and applies some values which were set via ENV"""
        with open(self._config_file) as f:
            config = yaml.safe_load(f)
            patch_config(config, self.__environment_configuration)
            return config

    def _load_cache(self):
        if os.path.isfile(self._cache_file):
            try:
                with open(self._cache_file) as f:
                    self.set_dynamic_configuration(json.load(f))
            except Exception:
                logger.exception('Exception when loading file: %s', self._cache_file)

    def save_cache(self):
        if self._cache_needs_saving:
            tmpfile = fd = None
            try:
                (fd, tmpfile) = tempfile.mkstemp(prefix=self.__CACHE_FILENAME, dir=self._data_dir)
                with os.fdopen(fd, 'w') as f:
                    fd = None
                    json.dump(self.dynamic_configuration, f)
                tmpfile = shutil.move(tmpfile, self._cache_file)
                self._cache_needs_saving = False
            except Exception:
                logger.exception('Exception when saving file: %s', self._cache_file)
                if fd:
                    try:
                        os.close(fd)
                    except Exception:
                        logger.error('Can not close temporary file %s', tmpfile)
                if tmpfile and os.path.exists(tmpfile):
                    try:
                        os.remove(tmpfile)
                    except Exception:
                        logger.error('Can not remove temporary file %s', tmpfile)

    # configuration could be either ClusterConfig or dict
    def set_dynamic_configuration(self, configuration):
        if isinstance(configuration, ClusterConfig):
            if self._modify_index == configuration.modify_index:
                return False  # if the index didn't change there is nothing to do
            self._modify_index = configuration.modify_index
            configuration = configuration.data

        if not deep_compare(self._dynamic_configuration, configuration):
            try:
                self.__effective_configuration = self._build_effective_configuration(
                    configuration, self._local_configuration)
                self._dynamic_configuration = configuration
                self._cache_needs_saving = True
                return True
            except Exception:
                logger.exception('Exception when setting dynamic_configuration')

    def reload_local_configuration(self, dry_run=False):
        if self.config_file:
            try:
                configuration = self._load_config_file()
                if not deep_compare(self._local_configuration, configuration):
                    new_configuration = self._build_effective_configuration(
                        self._dynamic_configuration, configuration)
                    if dry_run:
                        return not deep_compare(new_configuration, self.__effective_configuration)
                    self._local_configuration = configuration
                    self.__effective_configuration = new_configuration
                    return True
                else:
                    logger.info('No configuration items changed, nothing to reload.')
            except Exception:
                logger.exception('Exception when reloading local configuration from %s', self.config_file)
                if dry_run:
                    raise

    @staticmethod
    def _process_postgresql_parameters(parameters, is_local=False):
        return {name: value for name, value in (parameters or {}).items()
                if name not in ConfigHandler.CMDLINE_OPTIONS or
                not is_local and ConfigHandler.CMDLINE_OPTIONS[name][1](value)}

    def _safe_copy_dynamic_configuration(self, dynamic_configuration):
        config = deepcopy(self.__DEFAULT_CONFIG)

        for name, value in dynamic_configuration.items():
            if name == 'postgresql':
                for name, value in (value or {}).items():
                    if name == 'parameters':
                        config['postgresql'][name].update(self._process_postgresql_parameters(value))
                    elif name not in ('connect_address', 'listen', 'data_dir', 'pgpass', 'authentication'):
                        config['postgresql'][name] = deepcopy(value)
            elif name == 'standby_cluster':
                for name, value in (value or {}).items():
                    if name in self.__DEFAULT_CONFIG['standby_cluster']:
                        config['standby_cluster'][name] = deepcopy(value)
            elif name in config:  # only variables present in __DEFAULT_CONFIG are allowed to be overridden from DCS
                if name in ('synchronous_mode', 'synchronous_mode_strict'):
                    config[name] = value
                else:
                    config[name] = int(value)
        return config

    @staticmethod
    def _build_environment_configuration():
        ret = defaultdict(dict)

        def _popenv(name):
            return os.environ.pop(Config.PATRONI_ENV_PREFIX + name.upper(), None)

        for param in ('name', 'namespace', 'scope'):
            value = _popenv(param)
            if value:
                ret[param] = value

        def _fix_log_env(name, oldname):
            value = _popenv(oldname)
            name = Config.PATRONI_ENV_PREFIX + 'LOG_' + name.upper()
            if value and name not in os.environ:
                os.environ[name] = value

        for name, oldname in (('level', 'loglevel'), ('format', 'logformat'), ('dateformat', 'log_datefmt')):
            _fix_log_env(name, oldname)

        def _set_section_values(section, params):
            for param in params:
                value = _popenv(section + '_' + param)
                if value:
                    ret[section][param] = value

        _set_section_values('restapi', ['listen', 'connect_address', 'certfile', 'keyfile'])
        _set_section_values('postgresql', ['listen', 'connect_address', 'config_dir',
                                           'data_dir', 'pgpass', 'bin_dir'])
        _set_section_values('log', ['level', 'format', 'dateformat', 'max_queue_size',
                                    'dir', 'file_size', 'file_num', 'loggers'])

        def _parse_dict(value):
            if not value.strip().startswith('{'):
                value = '{{{0}}}'.format(value)
            try:
                return yaml.safe_load(value)
            except Exception:
                logger.exception('Exception when parsing dict %s', value)
                return None

        value = ret.get('log', {}).pop('loggers', None)
        if value:
            value = _parse_dict(value)
            if value:
                ret['log']['loggers'] = value

        def _get_auth(name):
            ret = {}
            for param in ('username', 'password'):
                value = _popenv(name + '_' + param)
                if value:
                    ret[param] = value
            return ret

        restapi_auth = _get_auth('restapi')
        if restapi_auth:
            ret['restapi']['authentication'] = restapi_auth

        authentication = {}
        for user_type in ('replication', 'superuser', 'rewind'):
            entry = _get_auth(user_type)
            if entry:
                authentication[user_type] = entry

        if authentication:
            ret['postgresql']['authentication'] = authentication

        def _parse_list(value):
            if not (value.strip().startswith('-') or '[' in value):
                value = '[{0}]'.format(value)
            try:
                return yaml.safe_load(value)
            except Exception:
                logger.exception('Exception when parsing list %s', value)
                return None

        for param in list(os.environ.keys()):
            if param.startswith(Config.PATRONI_ENV_PREFIX):
                # PATRONI_(ETCD|CONSUL|ZOOKEEPER|EXHIBITOR|...)_(HOSTS?|PORT|..)
                name, suffix = (param[8:].split('_', 1) + [''])[:2]
                if suffix in ('HOST', 'HOSTS', 'PORT', 'USE_PROXIES', 'PROTOCOL', 'SRV', 'URL', 'PROXY',
                              'CACERT', 'CERT', 'KEY', 'VERIFY', 'TOKEN', 'CHECKS', 'DC', 'REGISTER_SERVICE',
                              'SERVICE_CHECK_INTERVAL', 'NAMESPACE', 'CONTEXT', 'USE_ENDPOINTS', 'SCOPE_LABEL',
                              'ROLE_LABEL', 'POD_IP', 'PORTS', 'LABELS') and name:
                    value = os.environ.pop(param)
                    if suffix == 'PORT':
                        value = value and parse_int(value)
                    elif suffix in ('HOSTS', 'PORTS', 'CHECKS'):
                        value = value and _parse_list(value)
                    elif suffix == 'LABELS':
                        value = _parse_dict(value)
                    elif suffix in ('USE_PROXIES', 'REGISTER_SERVICE'):
                        value = parse_bool(value)
                    if value:
                        ret[name.lower()][suffix.lower()] = value

        if 'etcd' in ret:
            ret['etcd'].update(_get_auth('etcd'))

        users = {}
        for param in list(os.environ.keys()):
            if param.startswith(Config.PATRONI_ENV_PREFIX):
                name, suffix = (param[8:].rsplit('_', 1) + [''])[:2]
                # PATRONI_<username>_PASSWORD=<password>, PATRONI_<username>_OPTIONS=<option1,option2,...>
                # CREATE USER "<username>" WITH <OPTIONS> PASSWORD '<password>'
                if name and suffix == 'PASSWORD':
                    password = os.environ.pop(param)
                    if password:
                        users[name] = {'password': password}
                        options = os.environ.pop(param[:-9] + '_OPTIONS', None)
                        options = options and _parse_list(options)
                        if options:
                            users[name]['options'] = options
        if users:
            ret['bootstrap']['users'] = users

        return ret

    def _build_effective_configuration(self, dynamic_configuration, local_configuration):
        config = self._safe_copy_dynamic_configuration(dynamic_configuration)
        for name, value in local_configuration.items():
            if name == 'postgresql':
                for name, value in (value or {}).items():
                    if name == 'parameters':
                        config['postgresql'][name].update(self._process_postgresql_parameters(value, True))
                    elif name != 'use_slots':  # replication slots must be enabled/disabled globally
                        config['postgresql'][name] = deepcopy(value)
            elif name not in config or name in ['watchdog']:
                config[name] = deepcopy(value) if value else {}

        # restapi server expects to get restapi.auth = 'username:password'
        if 'authentication' in config['restapi']:
            config['restapi']['auth'] = '{username}:{password}'.format(**config['restapi']['authentication'])

        # special treatment for old config

        # 'exhibitor' inside 'zookeeper':
        if 'zookeeper' in config and 'exhibitor' in config['zookeeper']:
            config['exhibitor'] = config['zookeeper'].pop('exhibitor')
            config.pop('zookeeper')

        pg_config = config['postgresql']
        # no 'authentication' in 'postgresql', but 'replication' and 'superuser'
        if 'authentication' not in pg_config:
            pg_config['use_pg_rewind'] = 'pg_rewind' in pg_config
            pg_config['authentication'] = {u: pg_config[u] for u in ('replication', 'superuser')
                                           if u in pg_config}
        # no 'superuser' in 'postgresql'.'authentication'
        if 'superuser' not in pg_config['authentication'] and 'pg_rewind' in pg_config:
            pg_config['authentication']['superuser'] = pg_config['pg_rewind']

        # no 'name' in config
        if 'name' not in config and 'name' in pg_config:
            config['name'] = pg_config['name']

        updated_fields = (
            'name',
            'scope',
            'retry_timeout',
            'synchronous_mode',
            'synchronous_mode_strict',
        )

        pg_config.update({p: config[p] for p in updated_fields if p in config})

        return config

    def get(self, key, default=None):
        return self.__effective_configuration.get(key, default)

    def __contains__(self, key):
        return key in self.__effective_configuration

    def __getitem__(self, key):
        return self.__effective_configuration[key]

    def copy(self):
        return deepcopy(self.__effective_configuration)
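# A standalone sketch of the YAML trick used by _parse_list / _parse_dict
# above: scalar environment values are wrapped in [] or {} so yaml.safe_load
# can turn comma-separated strings into Python structures. The variable
# values here are illustrative, not taken from Patroni.
import yaml

hosts = '127.0.0.1:2379,127.0.0.2:2379'
assert yaml.safe_load('[{0}]'.format(hosts)) == ['127.0.0.1:2379', '127.0.0.2:2379']

labels = 'application: patroni, cluster: demo'
assert yaml.safe_load('{{{0}}}'.format(labels)) == {'application': 'patroni', 'cluster': 'demo'}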
def setUp(self):
    response.headers = CaseInsensitiveDict({
        "content-type": "application/json",
        "Access-Control-Allow-Origin": "*"
    })
def cache_response(self, request, response):
    """
    Algorithm for caching requests.

    This assumes a requests Response object.
    """
    # From httplib2: Don't cache 206's since we aren't going to
    # handle byte range requests
    if response.status not in [200, 203]:
        return

    # Cache Session Params
    cache_auto = getattr(request, 'cache_auto', False)
    cache_urls = getattr(request, 'cache_urls', [])
    cache_max_age = getattr(request, 'cache_max_age', None)

    response_headers = CaseInsensitiveDict(response.headers)

    # Check whether we want to cache responses from specific URLs only
    cache_url = self.cache_url(request.url)
    if len(cache_urls) > 0 and not any(s in cache_url for s in cache_urls):
        return

    cc_req = self.parse_cache_control(request.headers)
    cc = self.parse_cache_control(response_headers)

    # Delete it from the cache if we happen to have it stored there
    no_store = cc.get('no-store') or cc_req.get('no-store')
    if no_store and self.cache.get(cache_url):
        self.cache.delete(cache_url)

    # If we've been given an etag, then keep the response
    if self.cache_etags and 'etag' in response_headers:
        self.cache.set(cache_url, self.serializer.dumps(request, response))

    # If we want to cache sites not set up with cache headers, then add the
    # proper headers and keep the response
    elif cache_auto and not cc and response_headers:
        headers = {'Cache-Control': 'public,max-age=%d' % int(cache_max_age or 900)}
        response.headers.update(headers)

        if 'expires' not in response_headers:
            if getattr(response_headers, 'expires', None) is None:
                expires = datetime.datetime.utcnow() + datetime.timedelta(days=1)
                expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
                headers = {'Expires': expires}
                response.headers.update(headers)

        self.cache.set(cache_url, self.serializer.dumps(request, response))

    # Add to the cache if the response headers demand it. If there
    # is no date header then we can't do anything about expiring
    # the cache.
    elif 'date' in response_headers:
        # cache when there is a max-age > 0
        if cc and cc.get('max-age'):
            if int(cc['max-age']) > 0:
                if isinstance(cache_max_age, int):
                    cc['max-age'] = int(cache_max_age)
                    # Cache-Control directives are comma-separated
                    response.headers['cache-control'] = ', '.join(
                        '%s=%s' % (key, value) for (key, value) in cc.items())
                self.cache.set(cache_url, self.serializer.dumps(request, response))

        # If the request can expire, it means we should cache it
        # in the meantime.
        elif 'expires' in response_headers:
            if response_headers['expires']:
                self.cache.set(cache_url, self.serializer.dumps(request, response))
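# An illustrative check (not library code) of the Cache-Control rewrite above:
# directives must be joined with commas to remain one valid header value.
cc = {'max-age': 600}
assert ', '.join('%s=%s' % (k, v) for k, v in cc.items()) == 'max-age=600'
# Note: valueless directives such as 'public' would need special-casing; the
# snippet above only rewrites the header when 'max-age' is being overridden.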
def forward(self, method):
    path = self.path
    if '://' in path:
        path = '/' + path.split('://', 1)[1].split('/', 1)[1]
    proxy_url = '%s%s' % (self.proxy.forward_url, path)
    target_url = self.path
    if '://' not in target_url:
        target_url = '%s%s' % (self.proxy.forward_url, target_url)

    data = self.data_bytes
    forward_headers = CaseInsensitiveDict(self.headers)

    # update original "Host" header (moto s3 relies on this behavior)
    if not forward_headers.get('Host'):
        forward_headers['host'] = urlparse(target_url).netloc
    if 'localhost.atlassian.io' in forward_headers.get('Host'):
        forward_headers['host'] = 'localhost'

    try:
        response = None
        modified_request = None

        # update listener (pre-invocation)
        if self.proxy.update_listener:
            listener_result = self.proxy.update_listener.forward_request(
                method=method, path=path, data=data, headers=forward_headers)
            if isinstance(listener_result, Response):
                response = listener_result
            elif isinstance(listener_result, Request):
                modified_request = listener_result
                data = modified_request.data
                forward_headers = modified_request.headers
            elif listener_result is not True:
                # get status code from result, or fall back to 503 Service Unavailable
                code = listener_result if isinstance(listener_result, int) else 503
                self.send_response(code)
                self.end_headers()
                return

        # perform the actual invocation of the backend service
        if response is None:
            if modified_request:
                response = self.method(proxy_url, data=modified_request.data,
                                       headers=modified_request.headers)
            else:
                response = self.method(proxy_url, data=self.data_bytes,
                                       headers=forward_headers)

        # update listener (post-invocation)
        if self.proxy.update_listener:
            kwargs = {
                'method': method,
                'path': path,
                'data': data,
                'headers': forward_headers,
                'response': response
            }
            if 'request_handler' in inspect.getargspec(self.proxy.update_listener.return_response)[0]:
                # some listeners (e.g., sqs_listener.py) require additional details like the original
                # request port, hence we pass in a reference to this request handler as well.
                kwargs['request_handler'] = self
            updated_response = self.proxy.update_listener.return_response(**kwargs)
            if isinstance(updated_response, Response):
                response = updated_response

        # copy headers and return response
        self.send_response(response.status_code)

        content_length_sent = False
        for header_key, header_value in iteritems(response.headers):
            # filter out certain headers that we don't want to transmit
            if header_key.lower() not in ('transfer-encoding', 'date', 'server'):
                self.send_header(header_key, header_value)
                content_length_sent = content_length_sent or header_key.lower() == 'content-length'
        if not content_length_sent:
            self.send_header('Content-Length',
                             '%s' % len(response.content) if response.content else 0)

        # allow pre-flight CORS headers by default
        if 'Access-Control-Allow-Origin' not in response.headers:
            self.send_header('Access-Control-Allow-Origin', '*')
        if 'Access-Control-Allow-Methods' not in response.headers:
            self.send_header('Access-Control-Allow-Methods', ','.join(CORS_ALLOWED_METHODS))
        if 'Access-Control-Allow-Headers' not in response.headers:
            self.send_header('Access-Control-Allow-Headers', ','.join(CORS_ALLOWED_HEADERS))

        self.end_headers()
        if response.content and len(response.content):
            self.wfile.write(to_bytes(response.content))
        self.wfile.flush()
    except Exception as e:
        trace = str(traceback.format_exc())
        conn_errors = ('ConnectionRefusedError', 'NewConnectionError')
        conn_error = any(e in trace for e in conn_errors)
        error_msg = 'Error forwarding request: %s %s' % (e, trace)
        if 'Broken pipe' in trace:
            LOGGER.warn('Connection prematurely closed by client (broken pipe).')
        elif not self.proxy.quiet or not conn_error:
            LOGGER.error(error_msg)
            if os.environ.get(ENV_INTERNAL_TEST_RUN):
                # During a test run, we also want to print error messages, because
                # log messages are delayed until the entire test run is over, and
                # hence we are missing messages if the test hangs for some reason.
                print('ERROR: %s' % error_msg)
        self.send_response(502)  # bad gateway
        self.end_headers()
def test_iter(self):
    cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})
    keys = frozenset(['Spam', 'Eggs'])
    assert frozenset(iter(cid)) == keys
def test_setdefault(self):
    cid = CaseInsensitiveDict({'Spam': 'blueval'})
    assert cid.setdefault('spam', 'notblueval') == 'blueval'
    assert cid.setdefault('notspam', 'notblueval') == 'notblueval'
def main():
    # Token creation:
    # tokenCfast = getTokenCfast()
    # tokenSubs = getTokenSubs()

    # Object declarations
    cf_contrat = M_CF_Contract(tokenCfast)
    cf_services = M_CF_Service(tokenCfast)
    cf_subs = M_CF_Subscription(tokenSubs)
    db_contrat = M_ATH_Contrat()

    # Counter declarations
    nbContratAjoute = 0
    nbContratMaj = 0
    nbFichierIgnore = 0
    tempsExecution = 0

    # Fetch the customers
    url = "https://v2.cfast.fr/api/cfast/company"
    headers = CaseInsensitiveDict()
    headers["Authorization"] = "Bearer %s" % tokenCfast
    resp = requests.get(url, headers=headers)
    companies = resp.json()

    # Create the progress bar
    progress = tqdm(total=len(companies))
    times = 0

    for company in companies:
        ref_company = company["companyReference"]
        ref_interne = company["internalCompanyReference"]
        id_comp = company["id"]
        servicesFromComp = cf_services.get_serviceFromCompany(id_comp)
        json_services = servicesFromComp.json()
        nbServices = len(json_services)

        # Collections of objects
        dict_contrats = []
        dict_contrats_add = []
        dict_contrats_up = []

        #! --- OBJECT CONSTRUCTION ---
        def buildObjects(service):
            service_id = service["id"]
            serviceDetail = cf_services.get_serviceDetail(int(service_id))
            json_serviceDetail = serviceDetail.json()

            #! fetch the subscriptions
            subsOfTheService = cf_subs.get_subsFromService(service_id)
            json_subsOfTheService = subsOfTheService.json()
            dict_subs = []
            for sub in json_subsOfTheService:
                # logger.debug(sub)
                paramsSub = {
                    "id": sub["id"],
                    "activationDate": sub["activationDate"],
                    "endDate": sub["cancellationDate"] if sub["cancellationDate"] is not None else sub["endDate"],
                    "periodicity": sub["periodicity"],
                    "periodicPrice": sub["periodicPrice"],
                    "supplierName": sub["supplierName"],
                    "name": sub["name"],
                    "quantity": sub["quantity"],
                    "strictEndDate": sub["strictEndDate"],
                    "tokenSubs": tokenSubs
                }
                obj_sub = Sub(paramsSub)
                dict_subs.append(obj_sub)

            try:
                if ((json_serviceDetail["dataService"] is not None)
                        and (json_serviceDetail["dataService"]["offerTypeName"] != "")):
                    offerType = json_serviceDetail["dataService"]["offerTypeName"]
                else:
                    offerType = json_serviceDetail["billingServiceReplacementLabel"]
            except KeyError:
                offerType = json_serviceDetail["billingServiceReplacementLabel"]

            paramsServ = {
                "idCfast": json_serviceDetail["id"],
                "libServ": json_serviceDetail["serviceLabel"],
                "libNdi": json_serviceDetail["billingServiceReplacementLabel"]
                          if json_serviceDetail["billingServiceReplacementLabel"] != ""
                          else json_serviceDetail["serviceLabel"],
                "techno": json_serviceDetail["technology"]["name"],
                "offerType": offerType,
                "activity": json_serviceDetail["activity"],
                "whiteLabel": json_serviceDetail["site"]["billingPoint"]["company"]["whiteLabel"]["reference"],
                "subs": dict_subs
            }
            # print(json_serviceDetail)
            obj_service = Service(paramsServ)
            # obj_service.afficheService()

            #! service -> contract conversion
            # prepare the variables used to build the contract
            cod_direction = {"GT": "D01", "EM": "D02", "86": "D03", "AB": "DO4", "MBO": "DO4"}
            cod_period = {1: "M", 3: "T", 4: "Q", 6: "S", 12: "A"}
            nbSubs = len(dict_subs)
            no_societe = ref_interne
            dict_date = obj_service.getBiggerDate()
            techno = obj_service.getTechno()
            if techno == "SIP":
                if "@centile.fr" in obj_service.getLibServ():
                    objContrat = "ABO TELNGO"
                elif "fax" in obj_service.getLibServ():
                    objContrat = "ABO FAX TO MAIL"
                else:
                    objContrat = "ABO SIP %s" % obj_service.getLibNdi()
            elif techno == "MOBILE":
                objContrat = "ABO MOBILE %s" % obj_service.getLibNdi()
            else:
                objContrat = "ABO %s %s" % (obj_service.getTechno(), obj_service.getLibNdi())

            # build the contract
            paramsContrat = {
                "idCfast": obj_service.getIdCfast(),
                "reference": obj_service.getLibNdi(),
                "objet": objContrat,
                "cod_etat": "01" if obj_service.getActivity() == "Actif" else "03",
                "cod_statut": "00" if obj_service.getActivity() == "Actif" else "04",
                "cod_direction": cod_direction.get(obj_service.getWhiteLabel()),
                "dat_debut": dict_date["date_debut"],
                "dat_fin": dict_date["date_fin"],
                "cod_period": cod_period.get(obj_service.getSubs()[0].getPeriodicity()) if nbSubs > 0 else "",
                "sicd": obj_service.getBiggerSicd() if nbSubs > 0 else "",
                "commentaire": obj_service.createCommentaire() if nbSubs > 0 else "",
                "pvPeriodique": obj_service.getPPSum() if nbSubs > 0 else 0,
                "fournisseur": obj_service.getFournisseur() if nbSubs > 0 else "",
                "serviceId": obj_service.getLibServ() if nbSubs > 0 else "",
                "no_societe": no_societe
            }
            contrat = Contrat(paramsContrat)
            dict_contrats.append(contrat)
        #!--- --- --- --- --- --- ---

        dict_services = []
        for service in json_services:  #* iterate over the company's services
            dict_services.append(service)

        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = []
            for service in dict_services:
                futures.append(executor.submit(buildObjects, service))
            for future in concurrent.futures.as_completed(futures):
                if future.result() is not None:
                    print(future.result())
            # futures = []
            # futures.append(executor.submit(convertAndInsertContract, dict_services[3951]))
            # for future in concurrent.futures.as_completed(futures):
            #     if future.result() is not None:
            #         print(future.result())

        for contrat in dict_contrats:
            try:
                db_no_contrat = db_contrat.getNoContrat(contrat)
            except pymysql.err.OperationalError:
                logger.debug("pymysql.err.OperationalError: db_contrat.getNoContrat(contrat)")
            #! check whether the contract is already known to Athénéo
            if db_no_contrat == "":
                dict_contrats_add.append(contrat)
            else:
                contrat.setNoContrat(db_no_contrat)
                dict_contrats_up.append(contrat)

        #! process the contracts to add
        for contrat in dict_contrats_add:
            new_no_contrat = db_contrat.getNewId()
            contrat.setNoContrat(new_no_contrat)
            # contrat.afficheContrat()
            db_contrat.addContrat(contrat)  #! insert into the database
            logger.info("Added contract no. %s to Athénéo." % contrat.getNoContrat())
            nbContratAjoute += 1

        # for contrat in dict_contrats_up:
        #     try:
        #         db_no_contrat = db_contrat.getNoContrat(contrat)
        #     except pymysql.err.ProgrammingError:
        #         logger.error("pymysql error. getNoContrat(): ")
        #     # fetch the contract from Athénéo for comparison
        #     contratFromDb = db_contrat.getContratFromDb(db_no_contrat)
        #     # check whether the contract was modified
        #     isSimilar = contrat.compareContrats(contratFromDb)
        #     if not isSimilar:
        #         newParam = contrat.getParamsAsJson()
        #         oldParam = contratFromDb.getParamsAsJson()
        #         # contrat.afficheContrat()
        #         no_contrat = newParam["no_contrat"]
        #         i = 0
        #         for param in newParam:
        #             if newParam[param] != oldParam[param]:
        #                 # update the fields that were modified
        #                 logger.debug("""
        #                     Field: %s
        #                     -- New: %s, type: %s
        #                     -- Old: %s, type: %s"""
        #                     % (param, newParam[param], type(newParam[param]),
        #                        oldParam[param], type(oldParam[param])))
        #                 db_contrat.updateContrat(param, newParam[param], no_contrat)  #! update in the database
        #                 i += 1
        #         logger.info("Updated contract no. %s on %d field(s)." % (contrat.getNoContrat(), i))
        #         nbContratMaj += 1
        #     else:
        #         logger.info("The data is up to date.")

        # update the progress bar
        progress.update(1)
        times += 1

    # stop the timer
    finish = time.perf_counter()
    if finish - start > 60:
        str_tps_exect = "Finished in %d minute(s)" % round((finish - start) / 60, 2)
        logger.info(str_tps_exect)
        tempsExecution = round((finish - start) / 60, 2)
    else:
        str_tps_exect = "Finished in %d second(s)" % round(finish - start, 2)
        logger.info(str_tps_exect)
        tempsExecution = round(finish - start, 2)
    db_contrat.closeConnection()
    sendRapport(nbContratMaj, nbContratAjoute, nbFichierIgnore,
                str_tps_exect, EMAIL_ADRESS, EMAIL_PASS)
def test_update_retains_unchanged(self):
    cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})
    cid.update({'foo': 'newfoo'})
    assert cid['bar'] == 'bar'
def forward(self, method):
    data = self.data_bytes
    forward_headers = CaseInsensitiveDict(self.headers)

    # force close connection
    if forward_headers.get('Connection', '').lower() != 'keep-alive':
        self.close_connection = 1

    path = self.path
    if '://' in path:
        path = path.split('://', 1)[1]
        path = '/%s' % (path.split('/', 1)[1] if '/' in path else '')
    forward_base_url = self.proxy.forward_base_url
    proxy_url = '%s%s' % (forward_base_url, path)

    for listener in self._listeners():
        if listener:
            proxy_url = listener.get_forward_url(method, path, data, forward_headers) or proxy_url

    target_url = self.path
    if '://' not in target_url:
        target_url = '%s%s' % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not forward_headers.get('Host'):
        forward_headers['host'] = urlparse(target_url).netloc
    if 'localhost.atlassian.io' in forward_headers.get('Host'):
        forward_headers['host'] = 'localhost'
    forward_headers['X-Forwarded-For'] = self.build_x_forwarded_for(forward_headers)

    try:
        response = None
        modified_request = None

        # update listener (pre-invocation)
        for listener in self._listeners():
            if not listener:
                continue
            listener_result = listener.forward_request(
                method=method, path=path, data=data, headers=forward_headers)
            if isinstance(listener_result, Response):
                response = listener_result
                break
            if isinstance(listener_result, dict):
                response = Response()
                response._content = json.dumps(listener_result)
                response.headers['Content-Type'] = APPLICATION_JSON
                response.status_code = 200
                break
            elif isinstance(listener_result, Request):
                modified_request = listener_result
                data = modified_request.data
                forward_headers = modified_request.headers
                break
            elif listener_result is not True:
                # get status code from result, or fall back to 503 Service Unavailable
                code = listener_result if isinstance(listener_result, int) else 503
                self.send_response(code)
                self.send_header('Content-Length', '0')
                # allow pre-flight CORS headers by default
                self._send_cors_headers()
                self.end_headers()
                return

        # perform the actual invocation of the backend service
        if response is None:
            forward_headers['Connection'] = forward_headers.get('Connection') or 'close'
            data_to_send = self.data_bytes
            request_url = proxy_url
            if modified_request:
                if modified_request.url:
                    request_url = '%s%s' % (forward_base_url, modified_request.url)
                data_to_send = modified_request.data

            response = self.method(request_url, data=data_to_send,
                                   headers=forward_headers, stream=True)

            # prevent requests from processing the response body
            if not response._content_consumed and response.raw:
                response._content = response.raw.read()

        # update listener (post-invocation)
        if self.proxy.update_listener:
            kwargs = {
                'method': method,
                'path': path,
                'data': data,
                'headers': forward_headers,
                'response': response
            }
            if 'request_handler' in inspect.getargspec(self.proxy.update_listener.return_response)[0]:
                # some listeners (e.g., sqs_listener.py) require additional details like the original
                # request port, hence we pass in a reference to this request handler as well.
                kwargs['request_handler'] = self
            updated_response = self.proxy.update_listener.return_response(**kwargs)
            if isinstance(updated_response, Response):
                response = updated_response

        # copy headers and return response
        self.send_response(response.status_code)

        content_length_sent = False
        for header_key, header_value in iteritems(response.headers):
            # filter out certain headers that we don't want to transmit
            if header_key.lower() not in ('transfer-encoding', 'date', 'server'):
                self.send_header(header_key, header_value)
                content_length_sent = content_length_sent or header_key.lower() == 'content-length'
        if not content_length_sent:
            self.send_header('Content-Length',
                             '%s' % len(response.content) if response.content else 0)

        # allow pre-flight CORS headers by default
        self._send_cors_headers(response)

        self.end_headers()
        if response.content and len(response.content):
            self.wfile.write(to_bytes(response.content))
    except Exception as e:
        trace = str(traceback.format_exc())
        conn_errors = ('ConnectionRefusedError', 'NewConnectionError',
                       'Connection aborted', 'Unexpected EOF', 'Connection reset by peer')
        conn_error = any(e in trace for e in conn_errors)
        error_msg = 'Error forwarding request: %s %s' % (e, trace)
        if 'Broken pipe' in trace:
            LOG.warn('Connection prematurely closed by client (broken pipe).')
        elif not self.proxy.quiet or not conn_error:
            LOG.error(error_msg)
            if os.environ.get(ENV_INTERNAL_TEST_RUN):
                # During a test run, we also want to print error messages, because
                # log messages are delayed until the entire test run is over, and
                # hence we are missing messages if the test hangs for some reason.
                print('ERROR: %s' % error_msg)
        self.send_response(502)  # bad gateway
        self.end_headers()
        # force close connection
        self.close_connection = 1
    finally:
        try:
            self.wfile.flush()
        except Exception as e:
            LOG.warning('Unable to flush write file: %s' % e)
def __init__(self, url, headers={}, status_code=200):
    self.url = url
    self.headers = CaseInsensitiveDict(headers)
    self.status_code = status_code
    assert select_proxy(url, proxies) == expected


@pytest.mark.parametrize('value, expected', (
    ('foo="is a fish", bar="as well"', {'foo': 'is a fish', 'bar': 'as well'}),
    ('key_without_value', {'key_without_value': None}),
))
def test_parse_dict_header(value, expected):
    assert parse_dict_header(value) == expected


@pytest.mark.parametrize('value, expected', (
    (CaseInsensitiveDict(), None),
    (CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}), 'utf-8'),
    (CaseInsensitiveDict({'content-type': 'text/plain'}), 'ISO-8859-1'),
))
def test_get_encoding_from_headers(value, expected):
    assert get_encoding_from_headers(value) == expected


@pytest.mark.parametrize('value, length', (
    ('', 0),
    ('T', 1),
    ('Test', 4),
    ('Cont', 0),
    ('Other', -5),
    ('Content', None),
def get_rich_item(self, item):
    eitem = {}

    for f in self.RAW_FIELDS_COPY:
        if f in item:
            eitem[f] = item[f]
        else:
            eitem[f] = None

    # The real data
    message = CaseInsensitiveDict(item['data'])

    # Fields that are the same in message and eitem
    copy_fields = ["Date", "Subject", "Message-ID"]
    for f in copy_fields:
        if f in message:
            eitem[f] = message[f]
        else:
            eitem[f] = None

    # Fields whose names are translated
    map_fields = {"Subject": "Subject_analyzed"}
    for fn in map_fields:
        if fn in message:
            eitem[map_fields[fn]] = message[fn]
        else:
            eitem[map_fields[fn]] = None

    # Enrich dates
    eitem["email_date"] = parser.parse(item["metadata__updated_on"]).isoformat()
    eitem["list"] = item["origin"]

    if 'Subject' in message and message['Subject']:
        eitem['Subject'] = eitem['Subject'][:self.KEYWORD_MAX_SIZE]

    # Root message
    if 'In-Reply-To' in message:
        eitem["root"] = False
    else:
        eitem["root"] = True

    # Part of the body is needed in studies like kafka_kip
    eitem["body_extract"] = ""
    # Size of the message
    eitem["size"] = None
    if 'plain' in message['body']:
        eitem["body_extract"] = "\n".join(message['body']['plain'].split("\n")[:MAX_LINES_FOR_VOTE])
        eitem["size"] = len(message['body']['plain'])

    # Time zone
    try:
        message_date = parser.parse(message['Date'])
        eitem["tz"] = int(message_date.strftime("%z")[0:3])
    except Exception:
        eitem["tz"] = None

    identity = self.get_sh_identity(message['from'])
    eitem["mbox_author_domain"] = self.get_identity_domain(identity)

    if self.sortinghat:
        eitem.update(self.get_item_sh(item))

    if self.prjs_map:
        eitem.update(self.get_item_project(eitem))

    self.add_repository_labels(eitem)
    self.add_metadata_filter_raw(eitem)

    eitem.update(self.get_grimoire_fields(message['Date'], "message"))

    return eitem
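# Why the raw message is wrapped in a CaseInsensitiveDict above: RFC 5322
# header names are case-insensitive, and real mailing-list archives mix
# casings such as 'Message-ID' and 'Message-Id'. A small illustrative check:
from requests.structures import CaseInsensitiveDict

message = CaseInsensitiveDict({'subject': 'Re: [VOTE] KIP-1', 'In-Reply-To': '<abc@example.org>'})
assert 'Subject' in message      # lookup works regardless of stored casing
assert 'in-reply-to' in message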
def test_mapping_init(self):
    cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
    assert len(cid) == 2
    assert 'foo' in cid
    assert 'bar' in cid
def headers(self):
    if self._headers is None and self.response:
        self._headers = self.response.headers
    return self._headers or CaseInsensitiveDict()
def test_iterable_init(self):
    cid = CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')])
    assert len(cid) == 2
    assert 'foo' in cid
    assert 'bar' in cid
def cached_request(self, request):
    cache_url = self.cache_url(request.url)
    cc = self.parse_cache_control(request.headers)

    # non-caching states
    no_cache = True if 'no-cache' in cc else False
    if 'max-age' in cc and cc['max-age'] == 0:
        no_cache = True

    # Bail out if no-cache was set
    if no_cache:
        return False

    # It is in the cache, so lets see if it is going to be fresh enough
    resp = self.serializer.loads(request, self.cache.get(cache_url))

    # Check to see if we have a cached object
    if not resp:
        return False

    headers = CaseInsensitiveDict(resp.headers)

    now = time.time()
    date = calendar.timegm(parsedate_tz(headers['date']))
    current_age = max(0, now - date)

    # TODO: There is an assumption that the result will be a
    # urllib3 response object. This may not be best since we
    # could probably avoid instantiating or constructing the
    # response until we know we need it.
    resp_cc = self.parse_cache_control(headers)

    # determine freshness
    freshness_lifetime = 0
    if 'max-age' in resp_cc and resp_cc['max-age'].isdigit():
        freshness_lifetime = int(resp_cc['max-age'])
    elif 'expires' in headers:
        expires = parsedate_tz(headers['expires'])
        if expires is not None:
            expire_time = calendar.timegm(expires) - date
            freshness_lifetime = max(0, expire_time)

    # determine if we are setting freshness limit in the req
    if 'max-age' in cc:
        try:
            freshness_lifetime = int(cc['max-age'])
        except ValueError:
            freshness_lifetime = 0

    if 'min-fresh' in cc:
        try:
            min_fresh = int(cc['min-fresh'])
        except ValueError:
            min_fresh = 0
        # adjust our current age by our min fresh
        current_age += min_fresh

    # see how fresh we actually are
    fresh = (freshness_lifetime > current_age)

    if fresh:
        return resp

    # we're not fresh. If we don't have an Etag, clear it out
    if 'etag' not in headers:
        self.cache.delete(cache_url)

    # return the original handler
    return False
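# A worked, standalone example of the freshness arithmetic above (values are
# illustrative): with a Date header one minute old and max-age=300, the
# cached entry is still fresh and can be served.
import calendar
import time
from email.utils import parsedate_tz

date_header = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(time.time() - 60))
date = calendar.timegm(parsedate_tz(date_header))
current_age = max(0, time.time() - date)  # roughly 60 seconds
freshness_lifetime = 300                  # from Cache-Control: max-age=300
assert freshness_lifetime > current_age   # fresh: serve from cache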
def test_kwargs_init(self):
    cid = CaseInsensitiveDict(FOO='foo', BAr='bar')
    assert len(cid) == 2
    assert 'foo' in cid
    assert 'bar' in cid
def forward(self, method):
    path = self.path
    if '://' in path:
        path = '/' + path.split('://', 1)[1].split('/', 1)[1]
    proxy_url = 'http://%s%s' % (self.proxy.forward_host, path)
    target_url = self.path
    if '://' not in target_url:
        target_url = 'http://%s%s' % (self.proxy.forward_host, target_url)
    data = None
    if method in ['POST', 'PUT', 'PATCH']:
        data_string = self.data_bytes
        try:
            if not isinstance(data_string, string_types):
                data_string = data_string.decode(DEFAULT_ENCODING)
            data = json.loads(data_string)
        except Exception:
            # unable to parse JSON, fall back to verbatim string/bytes
            data = data_string

    forward_headers = CaseInsensitiveDict(self.headers)
    # update original "Host" header
    forward_headers['host'] = urlparse(target_url).netloc
    try:
        response = None
        modified_request = None

        # update listener (pre-invocation)
        if self.proxy.update_listener:
            listener_result = self.proxy.update_listener(
                method=method, path=path, data=data,
                headers=forward_headers, return_forward_info=True)
            if isinstance(listener_result, Response):
                response = listener_result
            elif isinstance(listener_result, Request):
                modified_request = listener_result
                data = modified_request.data
                forward_headers = modified_request.headers
            elif listener_result is not True:
                # get status code from result, or fall back to 503 Service Unavailable
                code = listener_result if isinstance(listener_result, int) else 503
                self.send_response(code)
                self.end_headers()
                return

        if response is None:
            if modified_request:
                response = self.method(proxy_url, data=modified_request.data,
                                       headers=modified_request.headers)
            else:
                response = self.method(proxy_url, data=self.data_bytes,
                                       headers=forward_headers)

        # update listener (post-invocation)
        if self.proxy.update_listener:
            updated_response = self.proxy.update_listener(
                method=method, path=path, data=data,
                headers=forward_headers, response=response)
            if isinstance(updated_response, Response):
                response = updated_response

        # copy headers and return response
        self.send_response(response.status_code)
        for header_key, header_value in iteritems(response.headers):
            # compare case-insensitively so a lowercase 'content-length' is not duplicated
            if header_key.lower() != 'content-length':
                self.send_header(header_key, header_value)
        self.send_header('Content-Length', '%s' % len(response.content))
        self.end_headers()
        self.wfile.write(bytes_(response.content))
        self.wfile.flush()
    except Exception as e:
        if not self.proxy.quiet:
            LOGGER.exception("Error forwarding request: %s" % str(e))
def test_docstring_example(self):
    cid = CaseInsensitiveDict()
    cid['Accept'] = 'application/json'
    assert cid['aCCEPT'] == 'application/json'
    assert list(cid) == ['Accept']
def prepare_request(self,
                    method: str,
                    url: str,
                    *,
                    headers: Optional[dict] = None,
                    params: Optional[dict] = None,
                    data: Optional[Union[str, dict]] = None,
                    files: Optional[Union[Dict[str, Tuple[str]],
                                          List[Tuple[str, Tuple[str, ...]]]]] = None,
                    **kwargs) -> dict:
    """Build a dict that represents an HTTP service request.

    Clean up headers, add default http configuration, convert data
    into json, process files, and merge all into a single request dict.

    Args:
        method: The HTTP method of the request ex. GET, POST, etc.
        url: The origin + pathname according to WHATWG spec.

    Keyword Arguments:
        headers: Headers of the request.
        params: Querystring data to be appended to the url.
        data: The request body. Converted to json if a dict.
        files: 'files' can be a dictionary (i.e. {'<part-name>': (<tuple>)}),
            or a list of tuples [(<part-name>, (<tuple>))...]

    Returns:
        Prepared request dictionary.
    """
    # pylint: disable=unused-argument; necessary for kwargs
    request = {'method': method}

    # validate the service url is set
    if not self.service_url:
        raise ValueError('The service_url is required')

    # Combine the service_url and operation path to form the request url.
    # Note: we have already stripped any trailing slashes from the service_url
    # and we know that the operation path ('url') will start with a slash.
    request['url'] = strip_extra_slashes(self.service_url + url)

    headers = remove_null_values(headers) if headers else {}
    headers = cleanup_values(headers)
    headers = CaseInsensitiveDict(headers)
    if self.default_headers is not None:
        headers.update(self.default_headers)
    if 'user-agent' not in headers:
        headers.update(self.user_agent_header)
    request['headers'] = headers

    params = remove_null_values(params)
    params = cleanup_values(params)
    request['params'] = params

    if isinstance(data, str):
        data = data.encode('utf-8')
    elif isinstance(data, dict) and data:
        data = remove_null_values(data)
        if headers.get('content-type') is None:
            headers.update({'content-type': 'application/json'})
        data = json_import.dumps(data).encode('utf-8')
    request['data'] = data

    self.authenticator.authenticate(request)

    # Compress the request body if applicable
    if (self.get_enable_gzip_compression()
            and 'content-encoding' not in headers
            and request['data'] is not None):
        headers['content-encoding'] = 'gzip'
        uncompressed_data = request['data']
        request_body = gzip.compress(uncompressed_data)
        request['data'] = request_body
        request['headers'] = headers

    # Next, we need to process the 'files' argument to try to fill in
    # any missing filenames where possible.
    # 'files' can be a dictionary (i.e. {'<part-name>': (<tuple>)})
    # or a list of tuples [(<part-name>, (<tuple>))...]
    # If 'files' is a dictionary we'll convert it to a list of tuples.
    new_files = []
    if files is not None:
        # If 'files' is a dictionary, transform it into a list of tuples.
        if isinstance(files, dict):
            files = remove_null_values(files)
            files = files.items()
        # Next, fill in any missing filenames from file tuples.
        for part_name, file_tuple in files:
            if file_tuple and len(file_tuple) == 3 and file_tuple[0] is None:
                file = file_tuple[1]
                if file and hasattr(file, 'name'):
                    filename = basename(file.name)
                    file_tuple = (filename, file_tuple[1], file_tuple[2])
            new_files.append((part_name, file_tuple))
    request['files'] = new_files
    return request
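# A standalone sketch of the filename fill-in performed above: when a file
# tuple is (None, fileobj, content_type) and the file object has a .name,
# the basename is substituted for the missing filename. The helper name
# here is illustrative, not from the SDK.
import tempfile
from os.path import basename

def fill_filename(file_tuple):
    if file_tuple and len(file_tuple) == 3 and file_tuple[0] is None:
        file = file_tuple[1]
        if file and hasattr(file, 'name'):
            return (basename(file.name), file_tuple[1], file_tuple[2])
    return file_tuple

with tempfile.NamedTemporaryFile(suffix='.txt') as f:
    name, _, _ = fill_filename((None, f, 'text/plain'))
    assert name.endswith('.txt')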
def test_len(self):
    cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})
    cid['A'] = 'a'
    assert len(cid) == 2
def request(self, method, url, accept_json=False, headers=None,
            params=None, json=None, data=None, files=None, **kwargs):
    full_url = self.url + url

    input_headers = _remove_null_values(headers) if headers else {}

    headers = CaseInsensitiveDict(
        {'user-agent': 'watson-developer-cloud-python-' + __version__})
    if accept_json:
        headers['accept'] = 'application/json'
    headers.update(input_headers)

    # Remove keys with None values
    params = _remove_null_values(params)
    params = _cleanup_param_values(params)
    json = _remove_null_values(json)
    data = _remove_null_values(data)
    files = _remove_null_values(files)

    # Support versions of requests older than 2.4.2 without the json input
    if not data and json is not None:
        data = json_import.dumps(json)
        headers.update({'content-type': 'application/json'})

    auth = None
    if self.username and self.password:
        auth = (self.username, self.password)
    if self.api_key is not None:
        if params is None:
            params = {}
        if full_url.startswith('https://gateway-a.watsonplatform.net/calls'):
            params['apikey'] = self.api_key
        else:
            params['api_key'] = self.api_key

    if self.x_watson_learning_opt_out:
        headers['x-watson-learning-opt-out'] = 'true'

    response = requests.request(method=method, url=full_url,
                                cookies=self.jar, auth=auth, headers=headers,
                                params=params, data=data, files=files,
                                **kwargs)

    if 200 <= response.status_code <= 299:
        if accept_json:
            response_json = response.json()
            if 'status' in response_json and response_json['status'] == 'ERROR':
                response.status_code = 400
                error_message = 'Unknown error'
                if 'statusInfo' in response_json:
                    error_message = response_json['statusInfo']
                if error_message == 'invalid-api-key':
                    response.status_code = 401
                raise WatsonException('Error: ' + error_message)
            return response_json
        return response
    else:
        if response.status_code == 401:
            error_message = 'Unauthorized: Access is denied due to invalid credentials '
        else:
            error_message = self._get_error_message(response)
        raise WatsonException(error_message)
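# _remove_null_values is referenced above but not defined in this snippet.
# A plausible minimal implementation (an assumption, not necessarily the
# library's exact code) drops dictionary entries whose value is None:
def _remove_null_values(dictionary):
    if isinstance(dictionary, dict):
        return {k: v for k, v in dictionary.items() if v is not None}
    return dictionary

assert _remove_null_values({'a': 1, 'b': None}) == {'a': 1}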
def test_getitem(self):
    cid = CaseInsensitiveDict({'Spam': 'blueval'})
    assert cid['spam'] == 'blueval'
    assert cid['SPAM'] == 'blueval'
async def set_configuration_setting(
    self, configuration_setting, match_condition=MatchConditions.Unconditionally, **kwargs
):
    # type: (ConfigurationSetting, Optional[MatchConditions], **Any) -> ConfigurationSetting
    """Add or update a ConfigurationSetting.

    If the configuration setting identified by key and label does not exist,
    this is a create. Otherwise this is an update.

    :param configuration_setting: the ConfigurationSetting to be added (if not exists) \
        or updated (if exists) to the service
    :type configuration_setting: :class:`ConfigurationSetting`
    :param match_condition: The match condition to use upon the etag
    :type match_condition: :class:`~azure.core.MatchConditions`
    :keyword dict headers: if "headers" exists, its value (a dict) will be added to
        the http request header
    :return: The ConfigurationSetting returned from the service
    :rtype: :class:`~azure.appconfiguration.ConfigurationSetting`
    :raises: :class:`HttpResponseError`, :class:`ClientAuthenticationError`, \
        :class:`ResourceReadOnlyError`, :class:`ResourceModifiedError`, \
        :class:`ResourceNotModifiedError`, :class:`ResourceNotFoundError`, \
        :class:`ResourceExistsError`

    Example

    .. code-block:: python

        # in async function
        config_setting = ConfigurationSetting(
            key="MyKey",
            label="MyLabel",
            value="my set value",
            content_type="my set content type",
            tags={"my set tag": "my set tag value"}
        )
        returned_config_setting = await async_client.set_configuration_setting(config_setting)
    """
    key_value = configuration_setting._to_generated()
    custom_headers = CaseInsensitiveDict(kwargs.get("headers"))  # type: Mapping[str, Any]
    error_map = {401: ClientAuthenticationError, 409: ResourceReadOnlyError}
    if match_condition == MatchConditions.IfNotModified:
        error_map[412] = ResourceModifiedError
    if match_condition == MatchConditions.IfModified:
        error_map[412] = ResourceNotModifiedError
    if match_condition == MatchConditions.IfPresent:
        error_map[412] = ResourceNotFoundError
    if match_condition == MatchConditions.IfMissing:
        error_map[412] = ResourceExistsError
    try:
        key_value_set = await self._impl.put_key_value(
            entity=key_value,
            key=key_value.key,  # type: ignore
            label=key_value.label,
            if_match=prep_if_match(configuration_setting.etag, match_condition),
            if_none_match=prep_if_none_match(configuration_setting.etag, match_condition),
            headers=custom_headers,
            error_map=error_map,
        )
        return ConfigurationSetting._from_generated(key_value_set)
    except HttpResponseError as error:
        e = error_map[error.status_code]
        raise e(message=error.message, response=error.response)
    except binascii.Error:
        raise binascii.Error("Connection string secret has incorrect padding")
def test_delitem(self):
    cid = CaseInsensitiveDict()
    cid['Spam'] = 'someval'
    del cid['sPam']
    assert 'spam' not in cid
    assert len(cid) == 0
def make_header(control, retry=None):
    h = CaseInsensitiveDict({"X-Throttling-Control": control})
    if retry:
        h["Retry-After"] = retry
    return h
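# Example usage of make_header (values are illustrative): the returned
# CaseInsensitiveDict allows lookups under any casing of the header names.
h = make_header("throttled", retry="5")
assert h["retry-after"] == "5"
assert "X-Throttling-Control" in h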
def __init__(self, **kwargs):
    super().__init__()
    self._request = dict(method='GET', body='', headers=CaseInsensitiveDict())
    self._request.update(kwargs)
    self.response = MockResponse()