def fake_urlopen(url_or_request, data=None, timeout=None):
    """A stub urlopen() implementation that loads json responses from
    the filesystem.

    It first strips off the host part of the url and then uses the path info
    together with the post data to find a matching response. If no response
    has been recorded before, it raises an Exception() about the missing file.

    Accepts either a url string or a ``Request`` object; in the latter case
    the POST payload is taken from the request itself and the *data* argument
    is ignored.
    """
    if isinstance(url_or_request, Request):
        data = url_or_request.data

    # Replace any real session id in the payload with a fixed one so the
    # request hash matches what wrap_urlopen() recorded.
    fake_data = fake_session_id(data, data)

    # Fix the key order to get the correct hash. When json can not be
    # decoded, use the original string.
    try:
        fake_data = json.dumps(json.loads(fake_data.decode("utf-8")),
                               sort_keys=True).encode("utf-8")
    except ValueError:
        pass

    rid = request_id(fake_data)
    # Use context managers so the file handles are closed deterministically
    # instead of leaking until garbage collection.
    with open(response_file_path(rid), "rb") as f:
        response = f.read()
    with open(status_file_path(rid), "rb") as f:
        http_status = int(f.read())

    obj = StringIO(response)
    # Mimic the urlopen() response object API that callers rely on.
    obj.getcode = lambda: http_status
    return obj
def fake_urlopen(url, data=None, timeout=None):
    """A stub urlopen() implementation that loads json responses from
    the filesystem.

    It first strips off the host part of the url and then uses the path info
    together with the post data to find a matching response. If no response
    has been recorded before, it raises an Exception() about the missing file.
    """
    # Replace any real session id in the payload with a fixed one so the
    # request hash matches what wrap_urlopen() recorded.
    fake_data = fake_session_id(data, data)

    # Fix the key order to get the correct hash. When json can not be
    # decoded, use the original string.
    try:
        fake_data = json.dumps(json.loads(fake_data.decode("utf-8")),
                               sort_keys=True).encode("utf-8")
    except ValueError:
        pass

    rid = request_id(fake_data)
    # Use context managers so the file handles are closed deterministically
    # instead of leaking until garbage collection.
    with open(response_file_path(rid), "rb") as f:
        response = f.read()
    with open(status_file_path(rid), "rb") as f:
        http_status = int(f.read())

    obj = StringIO(response)
    # Mimic the urlopen() response object API that callers rely on.
    obj.getcode = lambda: http_status
    return obj
def wrap_urlopen(url, data=None, timeout=None):
    """Wraps urlopen to record the response when communicating with a real CCU.

    Performs the real request, persists the (session-id-neutralized) request
    data, response body and HTTP status to the resource files, and returns a
    file-like object mimicking the urlopen() response API.
    """
    assert utils.is_byte_string(data)

    try:
        obj = urlopen(url, data=data, timeout=timeout)
        response = obj.read()
        http_status = obj.getcode()
    except HTTPError as e:
        # Record HTTP errors the same way as successes so they can be
        # replayed by fake_urlopen().
        response = e.reason.encode("utf-8")
        http_status = e.code

    assert utils.is_byte_string(response)

    if not os.path.exists(resources_path):
        os.makedirs(resources_path)

    # FIXME: The ccu is performing wrong encoding at least for output of
    # executed rega scripts. But maybe this is a generic problem. Let's see
    # and only fix the known issues for the moment.
    if b"ReGa.runScript" in data or b"Interface.getParamsetDescription" in data:
        response = pmatic.api.AbstractAPI._replace_wrong_encoded_json(
            response.decode("utf-8")).encode("utf-8")

    # Fake the session id to a fixed one for offline testing. This is needed
    # to make the recorded data change less frequently.
    fake_data = fake_session_id(data, data)
    fake_response = fake_session_id(data, response)

    # Ensure normalized sorting of keys.
    # For hashing we need a constant sorted representation of the data.
    # CCU API has always JSON, but pushover notify has urlencoded data.
    if "pushover.net" not in url:
        fake_data = json.dumps(json.loads(fake_data.decode("utf-8")),
                               sort_keys=True).encode("utf-8")

    # When json can not be parsed, write the original response to the file
    try:
        fake_response = json.dumps(json.loads(fake_response.decode("utf-8")),
                                   sort_keys=True).encode("utf-8")
    except ValueError:
        pass

    rid = request_id(fake_data)
    # Use context managers so the written files are flushed and closed
    # deterministically instead of relying on garbage collection.
    with open(response_file_path(rid), "wb") as f:
        f.write(fake_response)
    with open(status_file_path(rid), "wb") as f:
        f.write(str(http_status).encode("utf-8"))
    with open(data_file_path(rid), "wb") as f:
        f.write(fake_data)

    obj = StringIO(response)
    # Mimic the urlopen() response object API that callers rely on.
    obj.getcode = lambda: http_status
    return obj