def followTime(user):
    print(user)
    # Look up both logins in one Helix call so both user IDs come back together.
    alturl = 'https://api.twitch.tv/helix/users?login=' + user + '&login=' + CHANNEL
    r = Request(alturl)
    r.add_header('Client-ID', CLIENTID)
    try:
        a = urlopen(r)
        streamjson = json.loads(a.read())['data']
        print(streamjson)
        uid1 = streamjson[0]['id']
        uid2 = streamjson[1]['id']
        print(uid1, uid2)
        followurl = 'https://api.twitch.tv/helix/users/follows?from_id=' + uid1 + '&to_id=' + uid2
        s = Request(followurl)
        s.add_header('Client-ID', CLIENTID)
        try:
            b = urlopen(s)
            stuff = json.loads(b.read())
            stuff = stuff['data'][0]['followed_at']
            followed_at = datetime.strptime(stuff, '%Y-%m-%dT%H:%M:%SZ')
            now = datetime.utcnow()
            timedelta = now - followed_at
            timedelta = str(timedelta)[:-7]
            return timedelta
        except urllib.error.HTTPError as error:
            data = error.read()
            print(data)
            return
    except urllib.error.HTTPError as error:
        data = error.read()
        print(data)
        print(user + " doesn't follow the channel.")
        return

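# A minimal standalone sketch of the timestamp arithmetic used in followTime()
# above; the follow date below is a made-up example, not real data.
# str(timedelta) renders as "D days, H:MM:SS.ffffff", so slicing off the last
# seven characters drops the microseconds.
def _follow_age_sketch():
    from datetime import datetime
    followed_at = datetime.strptime('2019-03-01T12:30:00Z', '%Y-%m-%dT%H:%M:%SZ')
    elapsed = datetime.utcnow() - followed_at
    return str(elapsed)[:-7]
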
def send_request(which, url, post_data=None):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    full_url = "%s/%s" % (config.get(which, 'url'), url)
    req = urllib.request.Request(full_url, post_data)

    username = config.get(which, 'username')
    password = config.get(which, 'password')
    req.add_header("Authorization", b"Basic " + base64.urlsafe_b64encode(
        username.encode("utf-8") + b":" + password.encode("utf-8")))

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    req.add_header("User-Agent", "IQAndreas/github-issues-import")

    try:
        response = urllib.request.urlopen(req)
        json_data = response.read()
    except urllib.error.HTTPError as error:
        error_details = error.read()
        error_details = json.loads(error_details.decode("utf-8"))

        if error.code in http_error_messages:
            sys.exit(http_error_messages[error.code])
        else:
            error_message = "ERROR: There was a problem importing the issues.\n%s %s" % (
                error.code, error.reason)
            if 'message' in error_details:
                error_message += "\nDETAILS: " + error_details['message']
            sys.exit(error_message)

    return json.loads(json_data.decode("utf-8"))

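# The Authorization header built in send_request() above is standard HTTP
# Basic auth: base64 of "username:password". A sketch with throwaway
# credentials, not values from any real config:
def _basic_auth_header_sketch():
    import base64
    username, password = "octocat", "secret"
    # Produces b'Basic b2N0b2NhdDpzZWNyZXQ='
    return b"Basic " + base64.urlsafe_b64encode(
        username.encode("utf-8") + b":" + password.encode("utf-8"))
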
def send_request(self, post_data=None):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    currdate = datetime.datetime.utcnow()
    date_range = (currdate - timedelta(days=self.date_range))
    date_range_str = date_range.strftime("%Y-%m-%d") + 'T00:00:00Z'
    full_url = self.git_url + "/repos/%s/%s/commits?since=%s" % (
        self.git_org, self.git_repo, date_range_str)
    req = urllib.request.Request(full_url, post_data)

    req.add_header("Authorization", b"Basic " + base64.urlsafe_b64encode(
        self.git_user.encode("utf-8") + b":" + self.git_passwd.encode("utf-8")))
    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")

    try:
        response = urllib.request.urlopen(req)
        json_data = response.read()
    except urllib.error.HTTPError as error:
        error_details = error.read()
        error_details = json.loads(error_details.decode("utf-8"))

        if error.code in http_error_messages:
            sys.exit(http_error_messages[error.code])
        else:
            error_message = "ERROR: There was a problem with git query.\n%s %s" % (
                error.code, error.reason)
            if 'message' in error_details:
                error_message += "\nDETAILS: " + error_details['message']
            sys.exit(error_message)

    # Also write to git_debug.json
    with open('git_debug.json', 'w') as fl:
        fl.write(json_data.decode("utf-8"))
    return json.loads(json_data.decode("utf-8"))

def get_blob(self, thread_id, blob_id):
    """Returns a file-like object with the contents of the given blob from
    the given thread.

    The object is described in detail here:
    https://docs.python.org/2/library/urllib2.html#urllib2.urlopen
    """
    request = urllib.request.Request(
        url=self._url("blob/%s/%s" % (thread_id, blob_id)))
    if self.access_token:
        request.add_header("Authorization", "Bearer " + self.access_token)
    try:
        return urllib.request.urlopen(request, timeout=self.request_timeout)
    except urllib.error.HTTPError as error:
        try:
            # Extract the developer-friendly error message from the response
            message = json.loads(error.read())["error_description"]
        except Exception:
            raise error
        if (self.retry_rate_limit and error.code == 503 and
                message == "Over Rate Limit"):
            # Retry later.
            reset_time = float(error.headers.get("X-RateLimit-Reset"))
            delay = max(2, reset_time - time.time() + 1)
            logging.warning("Rate Limit, delaying for %d seconds" % delay)
            time.sleep(delay)
            return self.get_blob(thread_id, blob_id)
        else:
            raise QuipError(error.code, message, error)

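# The 503 "Over Rate Limit" branch above sleeps until the time advertised in
# the X-RateLimit-Reset header. The same back-off computation in isolation;
# the helper name is illustrative only:
def _rate_limit_delay_sketch(reset_header_value):
    import time
    # At least 2 seconds, otherwise one second past the advertised reset time.
    return max(2, float(reset_header_value) - time.time() + 1)
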
def _solve_http_errors(url, error):
    global _cookie_jar
    data = error.read()
    # error.read() returns bytes, so the marker must be compared as bytes.
    if error.code == 503 and b'cf-browser-verification' in data:
        data = cloudflare.solve(url, _cookie_jar, UA)
    error.close()
    return data

def send_request(which, url, post_data=None, method=None, content_length=None,
                 custom_media_type=None, can_retry=True):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    full_url = "%s/%s" % (config.get(which, 'url'), url)
    req = urllib.request.Request(full_url, data=post_data, method=method)

    username = config.get(which, 'username')
    password = config.get(which, 'password')
    req.add_header("Authorization", b"Basic " + base64.urlsafe_b64encode(
        username.encode("utf-8") + b":" + password.encode("utf-8")))

    req.add_header("Content-Type", "application/json")
    if content_length is not None:
        req.add_header("Content-Length", content_length)
    req.add_header("Accept", "application/json")
    if custom_media_type is not None:
        req.add_header("Accept", custom_media_type)
    req.add_header("User-Agent", "zadarastorage")

    while True:
        try:
            response = urllib.request.urlopen(req)
            json_data = response.read()
            break
        except urllib.error.HTTPError as error:
            error_details = error.read()
            error_details = json.loads(error_details.decode("utf-8"))

            # Parenthesized so a response without a 'message' key cannot raise
            # a KeyError on the second startswith() check.
            if 'message' in error_details and \
               (error_details['message'].startswith('You have triggered an abuse detection mechanism and have been temporarily blocked from content creation') or
                    error_details['message'].startswith('API rate limit exceeded')):
                progress_msg(' .... GITHUB RATE LIMITING HIT, SLEEP ...')
                time.sleep(60)
                continue

            error_msg('HTTP ERROR: {0} {1}'.format(error.code, error.reason))
            error_msg('Request: {0}, data: {1}'.format(url, post_data))
            error_msg('ERROR DETAILS:')
            for detail in error_details:
                error_msg('==={0}===:'.format(detail))
                error_msg(error_details[detail])
            raise
        except Exception as exc:
            error_msg('EXCEPTION: {0}'.format(str(exc)))
            if can_retry:
                progress_msg(' .... SLEEP AND RETRY ....')
                time.sleep(60)
                continue
            raise

    if json_data is None or len(json_data.strip()) == 0:
        return None
    return json.loads(json_data.decode("utf-8"))

def direct_auth(client_id, client_secret, username, password, number=None,
                https=False, scope='all'):
    if not https and not scope.count('nohttps'):
        scope += ',nohttps'
    parameters = {
        'grant_type': 'password',
        'client_id': client_id,
        'client_secret': client_secret,
        'username': username,
        'password': password,
        'v': API_v,
        'scope': scope
    }
    parameters = urllib.parse.urlencode(parameters)
    try:
        response = urllib.request.urlopen(
            'https://oauth.vk.com/token?{}'.format(parameters)
        )
    except urllib.error.HTTPError as error:
        response = error.read().decode('utf-8')
    else:
        response = response.read().decode('utf-8')
    result = json.loads(response)
    if 'error' in result:
        if result['error'] == 'need_validation' and number:
            return validate_number(number, result['redirect_uri'])
        raise VKAPIError(result['error'], -1, result)
    return result

def downloadModel(destination):
    modelUrl = "https://github.com/coosto/dutch-word-embeddings/releases/download/v1.0/model.bin"

    # Optional token to speed up the download
    username = getpass.getpass(
        "(Optional, but increases download speed)\nGitHub username:")
    token = getpass.getpass("GitHub Token:")
    if username and token:
        password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, "https://github.com/coosto/", username, token)
        handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
        opener = urllib.request.build_opener(handler)
        urllib.request.install_opener(opener)

    # Request model data
    try:
        request = urllib.request.Request(modelUrl)
        with urllib.request.urlopen(request) as response, open(
                destination, 'wb') as fp:
            shutil.copyfileobj(response, fp)
            sys.stderr.write("Model saved: {}\n".format(fp.name))
    except urllib.error.HTTPError as error:
        sys.stderr.write(
            "Error fetching model from github.com - {}\n".format(error))
        sys.stderr.write("{}".format(error.read()))
        exit(1)

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-q', '--term', dest='term', default=DEFAULT_TERM,
                        type=str, help='Search term (default: %(default)s)')
    parser.add_argument('-l', '--location', dest='location',
                        default=DEFAULT_LOCATION, type=str,
                        help='Search location (default: %(default)s)')

    input_values = parser.parse_args()

    try:
        query_api(input_values.term, input_values.location)
    except HTTPError as error:
        sys.exit(
            'Encountered HTTP error {0} on {1}:\n {2}\nAbort program.'.format(
                error.code,
                error.url,
                error.read(),
            )
        )

def f(port, doc, silent=False):
    try:
        req = opener.open("http://localhost:" + str(port) + "/",
                          json.dumps(doc).encode("utf-8"), 1)
        return json.loads(req.read().decode("utf-8"))
    except urllib.error.URLError as error:
        if not silent:
            sublime.error_message(error.read().decode("utf-8"))
        return None

def execute(self, operation, headers={}):
    operation_json = to_json(operation)
    json_body = bytes(json.dumps(operation_json), "ascii")
    headers["Content-Type"] = "application/json;charset=utf-8"
    request = urllib.request.Request(self._host + "/graph/operations/execute",
                                     headers=headers, data=json_body)
    try:
        response = self._opener.open(request)
    except urllib.error.HTTPError as error:
        error_body = error.read().decode('utf-8')
        new_error_string = ('HTTP error ' + str(error.code) + ' '
                            + error.reason + ': ' + error_body)
        raise ConnectionError(new_error_string)
    response_text = response.read().decode('utf-8')
    # An empty body carries no JSON payload; compare with != rather than "is",
    # since identity checks against string literals are unreliable.
    if response_text is not None and response_text != '':
        return json.loads(response_text)
    else:
        return None

def f(port, doc):
    try:
        req = opener.open("http://" + localhost + ":" + str(port) + "/",
                          json.dumps(doc).encode("utf-8"), 1)
        return json.loads(req.read().decode("utf-8"))
    except urllib.error.URLError as error:
        raise Req_Error(error.read().decode("utf-8"))

def _fetch_json(self, path, post_data=None, **args):
    request = urllib.request.Request(url=self._url(path, **args))
    if post_data:
        post_data = dict((k, v) for k, v in list(post_data.items())
                         if v or isinstance(v, int))
        request.data = urllib.parse.urlencode(self._clean(**post_data)).encode("utf8")
    if self.access_token:
        request.add_header("Authorization", "Bearer " + self.access_token)
    try:
        return json.loads(
            urllib.request.urlopen(
                request, timeout=self.request_timeout).read().decode("utf8"))
    except urllib.error.HTTPError as error:
        try:
            # Extract the developer-friendly error message from the response
            message = json.loads(error.read())["error_description"]
        except Exception:
            raise error
        if (self.retry_rate_limit and error.code == 503 and
                message == "Over Rate Limit"):
            # Retry later.
            reset_time = float(error.headers.get("X-RateLimit-Reset"))
            delay = max(2, reset_time - time.time() + 1)
            logging.warning("Rate Limit, delaying for %d seconds" % delay)
            time.sleep(delay)
            return self._fetch_json(path, post_data, **args)
        else:
            raise QuipError(error.code, message, error)

def send_query(self, sql, output_format='csv', output_file=None,
               delete_job=True):
    """
    Send an SQL query `sql`. If `output_file` is ``None``, a preview of
    the results is shown. Otherwise, results are saved in a file with name
    `output_file` and in the format defined by `output_format`.

    Parameters
    ----------
    sql : str
        SQL query.
    output_format : str, optional
        Available formats: 'csv', 'csv.gz', 'sqlite3', or 'fits'.
    output_file : str or ``None``
        Name of the file for storing the query results. If ``None``,
        a preview of the results is shown.
    delete_job : bool
        Delete job and results from the user space. By default is ``True``.
    """
    formats = ['csv', 'csv.gz', 'sqlite3', 'fits']
    job = None  # so the KeyboardInterrupt handler below cannot hit a NameError
    try:
        if output_file is None:
            self.__preview(self.credential, sql, sys.stdout)
        else:
            if output_format not in formats:
                error_message = 'Unknown output format: {}'
                raise ValueError(error_message.format(output_format))
            job = self.__submit_job(self.credential, sql, output_format)
            self.__block_until_job_finishes(self.credential, job['id'])
            with open(output_file, 'wb') as output:
                self.__download(self.credential, job['id'], output)
            if delete_job:
                self.__delete_job(self.credential, job['id'])
    except urllib.error.HTTPError as error:
        if error.code == 401:
            print('invalid id or password.', file=sys.stderr)
        elif error.code == 406:
            print(error.read(), file=sys.stderr)
        else:
            print(error, file=sys.stderr)
    except QueryError as error:
        print(error, file=sys.stderr)
    except KeyboardInterrupt:
        if job is not None:
            self.__job_cancel(self.credential, job['id'])
        raise

def make_request_py3(port, doc, silent):
    import urllib.request, urllib.error
    try:
        req = urllib.request.urlopen("http://localhost:" + str(port) + "/",
                                     json.dumps(doc).encode("utf-8"), 1)
        return json.loads(req.read().decode("utf-8"))
    except urllib.error.URLError as error:
        if not silent:
            sublime.error_message(error.read().decode("utf-8"))
        return None

def send_request(self, doc, url="/"):
    import urllib.request, urllib.error
    opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
    try:
        req = opener.open("http://localhost:" + str(self.port) + url,
                          json.dumps(doc).encode("utf-8"), 1)
        return json.loads(req.read().decode("utf-8"))
    except urllib.error.URLError as error:
        raise Req_Error(error.read().decode("utf-8"))

def f(port, doc):
    try:
        req = opener.open("http://" + localhost + ":" + str(port) + "/",
                          json.dumps(doc).encode("utf-8"), 1)
        return json.loads(req.read().decode("utf-8"))
    except urllib.error.URLError as error:
        if hasattr(error, "read"):
            raise Req_Error(error.read().decode("utf-8"))
        else:
            raise error

def lookUpUser(id):
    userURL = 'http://www.speedrun.com/api/v1/users/' + id
    try:
        userRequest = Request(userURL)
        userOpen = urlopen(userRequest)
        userJson = json.loads(userOpen.read())
        return userJson['data']['names']['international']
    except urllib.error.HTTPError as error:
        data = error.read()
        print(data)

def _populate_cache(self) -> bool:
    request = urllib.request.Request(url=self.list_url)
    if (self.cache_time):
        request.add_header(
            'If-Modified-Since',
            self.cache_time.strftime('%a, %d %b %Y %H:%M:%S GMT'))
    try:
        if (self.log):
            self.log.detail('Fetching public suffix list from:', self.list_url)
        with urllib.request.urlopen(request) as response:
            list_bytes = response.read()
        try:
            cache_dir = os.path.dirname(self.cache_path)
            self._make_dirs(cache_dir)
            temp_path = os.path.join(cache_dir, 'public_suffix_list.part')
            with open(temp_path, 'wb') as public_suffix_list_file:
                public_suffix_list_file.write(list_bytes)
            os.replace(temp_path, self.cache_path)
            if (self.log):
                self.log.detail('Downloaded', len(list_bytes), 'bytes')
            self.cache_time = self._file_time(self.cache_path)
            return True
        except Exception as error:
            if (self.log):
                self.log.error(
                    'Unable to write public suffix list to:',
                    self.cache_path, '-', error)
            else:
                raise
    except urllib.error.HTTPError as error:
        if ((400 <= error.code) and (error.code < 500)):
            if (self.log):
                self.log.warning(
                    'Unable to retrieve public suffix list from:',
                    self.list_url,
                    'HTTP error:', error.code, error.reason, error.read())
        elif (304 == error.code):
            now_timestamp = time.time()
            os.utime(self.cache_path, (now_timestamp, now_timestamp))
            self.cache_time = self._file_time(self.cache_path)
            if (self.log):
                self.log.detail('Public suffix list not modified')
        elif (self.log):
            self.log.warning('Unable to retrieve public suffix list from:',
                             self.list_url,
                             'HTTP error:', error.code, error.reason)
    except Exception as error:
        if (self.log):
            self.log.warning('Unable to retrieve public suffix list from:',
                             self.list_url, '-', error)
    return False

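# _populate_cache() above leans on HTTP conditional GET: when If-Modified-Since
# is sent and the resource has not changed, the server answers 304, which
# urlopen surfaces as an HTTPError. A minimal sketch of that round trip, with
# a hypothetical URL and date:
def _conditional_get_sketch():
    import urllib.request
    import urllib.error
    req = urllib.request.Request('https://example.com/public_suffix_list.dat')
    req.add_header('If-Modified-Since', 'Mon, 01 Jan 2024 00:00:00 GMT')
    try:
        with urllib.request.urlopen(req) as response:
            return response.read()   # changed: caller should refresh the cache
    except urllib.error.HTTPError as error:
        if error.code == 304:
            return None              # not modified: keep the cached copy
        raise
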
def get(self, path):
    request = urllib.request.Request(self._host + path)
    try:
        response = self._opener.open(request)
    except urllib.error.HTTPError as error:
        error_body = error.read().decode('utf-8')
        new_error_string = ('HTTP error ' + str(error.code) + ' '
                            + error.reason + ': ' + error_body)
        raise ConnectionError(new_error_string)
    return json.loads(response.read().decode('utf-8'))

def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volumes?q={}&key={}&country=US'.format(title, key)
    request = urllib.request.Request(host)
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as error:
        contents = error.read()
        print('Received error from Books API {}'.format(contents))
        return str(contents)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

def submitToServer(score, uid, pwd):
    t1 = 'http://pcred.'
    urlSudoku = t1 + 'yonsei.ac.kr' + '/~lkj0509/pacman/'
    try:
        url = urlSudoku + 'pacman_test.php?'
        url += 'uid=' + str(uid) + '&pw=' + str(pwd) + '&cnt=' + str(score)
        resp = urllib.request.urlopen(url)
        contents = resp.read()
    except urllib.error.HTTPError as error:
        contents = error.read()
    return contents

def send_request(repo, url, post_data=None, method=None):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    repo_url = get_repository_option(repo, 'url')
    full_url = "%s/%s" % (repo_url, url)
    req = urllib.request.Request(full_url, post_data)
    if method is not None:
        req.method = method

    username = get_repository_option(repo, 'username')
    password = get_repository_option(repo, 'password')
    auth = base64.urlsafe_b64encode(
        ('%s:%s' % (username, password)).encode('utf-8'))
    req.add_header("Authorization", b'Basic ' + auth)

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    req.add_header("User-Agent", "jareguo/github-issues-import")

    retry = True
    while retry:
        retry = False
        try:
            response = urllib.request.urlopen(req)
            json_data = response.read()
        except urllib.error.HTTPError as error:
            error_details = json.loads(error.read().decode("utf-8"))

            def getErrorDetails(error):
                message = "CODE: %s\nREASON: %s" % (error.code, error.reason)
                message += "\nREQUEST: %s %s" % (url, post_data)
                if 'message' in error_details:
                    message += "\nDETAILS: " + error_details['message']
                if 'errors' in error_details:
                    message += "\n " + str(error_details['errors'])
                return message

            if error.code == 403:
                print("Got 403, assuming rate limit error and waiting for 1 minute...")
                print(getErrorDetails(error))
                time.sleep(60)
                retry = True
            elif error.code in HTTP_ERROR_MESSAGES:
                sys.exit(HTTP_ERROR_MESSAGES[error.code])
            else:
                error_message = "ERROR: There was a problem importing the issues.\n"
                error_message += getErrorDetails(error)
                sys.exit(error_message)
        except urllib.error.URLError as error:
            retry = True

    return json.loads(json_data.decode("utf-8"))

def _request(self, controller, method, **parameters):
    '''
    Get a response from the specified controller using the given parameters.
    '''
    log.info('REQUEST: %s %s' % (controller, method))
    salt, b64signature = self._generate_signature()

    if method == 'GET':
        url = '%s?e=%s&apikey=%s&salt=%s&signature=%s' % (
            self.api_url,
            urllib.parse.quote(controller),
            urllib.parse.quote(self.api_key),
            salt,
            urllib.parse.quote(b64signature))
        # Append additional query args if necessary
        data = self._post_data(**self._sanitize_parameters(**parameters)) if parameters else None
        if data:
            url = '%s&%s' % (url, data)
        request = urllib.request.Request(url)
    elif method == 'POST' or method == 'PUT':
        url = '%s?e=%s' % (self.api_url, urllib.parse.quote(controller))
        # Auth parameters go in the body for these methods
        parameters['apikey'] = self.api_key
        parameters['salt'] = salt
        parameters['signature'] = b64signature
        data = parameters
        # request = urllib.request.Request(url, data=data.encode(), headers={'Content-length' : len(data) if data else 0})
        # request.get_method = lambda: method
    elif method == 'DELETE':  # DELETE
        url = '%s?e=%s&apikey=%s&salt=%s&signature=%s' % (
            self.api_url,
            urllib.parse.quote(controller),
            urllib.parse.quote(self.api_key),
            salt,
            urllib.parse.quote(b64signature))
        data = parameters
        # request = urllib.request.Request(url, data=data.encode(), headers={'Content-length' : len(data) if data else 0})
        # request.get_method = lambda: method
    else:
        raise KayakoRequestError('Invalid request method: %s not supported.' % method)

    log.debug('REQUEST URL: %s' % url)
    log.debug('REQUEST DATA: %s' % data)

    if method == 'GET':
        try:
            response = urllib.request.urlopen(request, timeout=None)
        except urllib.error.HTTPError as error:
            response_error = KayakoResponseError('%s: %s' % (error, error.read()))
            log.error(response_error)
            raise response_error
        except urllib.error.URLError as error:
            request_error = KayakoRequestError(error)
            log.error(request_error)
            raise request_error
    else:
        response = requests.post(url, data=data)
        if response.status_code == 200:
            response = response.content

    return response

def requestB(opener, url, headers, data, method='POST'):
    answer = ''
    retcode = None
    additionalInfo = '[None]'
    contentLength = None
    if ImportCookie:
        headers = processingCookies(headers)
    data = urllib.parse.urlencode(data)
    if method == 'GET':
        if data:
            url = url + '?' + data
        data = None
    elif method == 'POST':
        headers['Content-Length'] = len(data)
        data = data.encode()
    request = urllib.request.Request(url, data, headers)
    try:
        printRequest(request.get_method(), request.get_full_url())
        if data:
            printData(data)
        f = opener.open(request)
        headers = f.getheaders()
        code = f.code
        retcode = code
        answer = f.read()
        m = hashlib.md5()
        m.update(answer)
        for h in headers:
            if h[0].lower() == 'content-length':
                contentLength = h[1]
                additionalInfo = '[' + str(h[1]) + ']'
        printAnswer(code, additionalInfo)
        printHeaders(headers, 'Set-Cookie')
    except urllib.error.HTTPError as error:
        # Report the Content-Length of the error body if the server sent one;
        # the for/else falls back to -1 when no such header exists.
        for h in error.headers:
            if h.lower() == 'content-length':
                printAnswer(str(error.code), ' [' + str(error.headers[h]) + ']')
                break
        else:
            printAnswer(str(error.code), ' [-1]')
        retcode = error.code
        answer = error.read()
    except urllib.error.URLError as error:
        printAnswer(str(error))
    return answer, retcode

def send_request(which, url, post_data=None, method=None):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    full_url = "%s/%s" % (config.get(which, 'url'), url)
    # Pass method= through only when an explicit one was given.
    if method:
        req = urllib.request.Request(full_url, post_data, method=method)
    else:
        req = urllib.request.Request(full_url, post_data)

    username = config.get(which, 'username')
    password = config.get(which, 'password')
    req.add_header("Authorization", b"Basic " + base64.urlsafe_b64encode(
        username.encode("utf-8") + b":" + password.encode("utf-8")))

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    req.add_header("User-Agent", "github-issues-import-ng")

    try:
        response = urllib.request.urlopen(req)
        json_data = response.read()
    except urllib.error.HTTPError as error:
        error_details = error.read()
        error_details = json.loads(error_details.decode("utf-8"))
        print("DEBUG: '%s' could not execute a webhook request with json. Type is '%s'."
              % (which, url))
        print("fullurl: '%s'" % (full_url))
        if error.code in http_error_messages:
            print("code: '%i'" % (error.code))
            print(error_details)
            sys.exit(http_error_messages[error.code])
        else:
            error_message = "ERROR: There was a problem importing the issues.\n%s %s" % (
                error.code, error.reason)
            print("DEBUG: full_url '%s'." % (full_url))
            print(error_details)
            if 'message' in error_details:
                error_message += "\nDETAILS: " + error_details['message']
            sys.exit(error_message)
    except urllib.error.URLError as error:
        print("Encountered a fatal error requesting URL {0}".format(
            req.get_full_url()), file=sys.stderr)
        sys.exit(error)

    return json.loads(json_data.decode("utf-8"))

def request_last_update(self, url):
    util.debug('request: %s' % url)
    lastmod = None
    req = urllib.request.Request(url)
    req.add_header('User-Agent', util.UA)
    try:
        response = urllib.request.urlopen(req)
        lastmod = response.headers['last-modified'][:16]
        response.close()
    except urllib.error.HTTPError as error:
        util.debug(error.read())
        error.close()
    return lastmod

def checkLive():
    # Helix addresses streams by query parameter and answers with a 'data'
    # list that is empty when the channel is offline.
    url = 'https://api.twitch.tv/helix/streams?user_login=' + CHANNEL
    r = Request(url)
    r.add_header('Client-ID', CLIENTID)
    try:
        a = urlopen(r)
        streamjson = json.loads(a.read())
        return len(streamjson['data']) > 0
    except urllib.error.HTTPError as error:
        data = error.read()
        print(data)
        return False

def execute_get(self, operation, headers={}):
    url = self._host + operation.get_url()
    headers['Content-Type'] = 'application/json;charset=utf-8'
    request = urllib.request.Request(url, headers=headers)
    try:
        response = self._opener.open(request)
    except urllib.error.HTTPError as error:
        error_body = error.read().decode('utf-8')
        new_error_string = ('HTTP error ' + str(error.code) + ' '
                            + error.reason + ': ' + error_body)
        raise ConnectionError(new_error_string)
    return response.read().decode('utf-8')

def handle_docgen(self, suite):
    logging.info('Generating build results for suite [%s]' % suite.directory)
    url = '%s/review/api' % self.__server_url__
    host, port = self.__server__.split(':')
    loginToken = getToken(host, port, getpass.getuser())
    query = {
        'user': getpass.getuser(),
        'action': 'search',
        'ltoken': loginToken
    }

    # Build the query
    project_name = self.get_project_name(suite)
    query['project'] = project_name
    query['query'] = 'severity:1-3'
    data = urllib.parse.urlencode(query)
    request = urllib.request.Request(url, data.encode('utf-8'))

    # Execute the query
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as error:
        logging.error('ERROR: %s' % error.read().decode('UTF-8'))
        sys.exit(1)

    data = response.read().decode('UTF-8').split('\n')[:-1]
    for record in data:
        json_obj = json.loads(record, object_hook=from_json)
        filename = os.path.split(json_obj.file)[-1]
        function = json_obj.method
        line = 0  # We don't get line numbers from the Klocwork API
        checker = json_obj.code
        if filename not in suite.files:
            suite[filename] = File(filename)
        if function not in suite[filename].functions:
            suite[filename][function] = Function(function)
        if line not in suite[filename][function].lines:
            suite[filename][function][line] = Line(line)
        suite[filename][function][line].add_Bug(Bug(checker))
    logging.info('Found [%s] bugs for suite [%s]'
                 % (len(suite.get_Bugs()), suite.directory))

def get_data(limit=1000, offset=0, days=1):
    days = int(days)
    endpoint = 'https://data.cityofchicago.org/resource/r5kz-chrr.csv?'
    # Honour the days parameter rather than a hard-coded single day.
    yesterday = (date.today() - timedelta(days=days)).strftime('%Y-%m-%dT%H:%M:%S')
    today = date.today().strftime('%Y-%m-%dT%H:%M:%S')
    query = "$where=date_issued between '{}' and '{}'".format(yesterday, today)
    qquery = urllib.parse.quote(query, '=&?$')
    url = endpoint + qquery
    try:
        licenses = pd.read_csv(url)
        return licenses
    except urllib.error.HTTPError as error:
        print(error.read())

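# The quoting trick used in get_data() above: passing '=&?$' as the safe set
# keeps the SoQL operators intact while spaces and quotes are still escaped.
# A sketch with illustrative dates:
def _soql_quote_sketch():
    import urllib.parse
    q = "$where=date_issued between '2024-01-01' and '2024-01-02'"
    # -> "$where=date_issued%20between%20%272024-01-01%27%20and%20%272024-01-02%27"
    return urllib.parse.quote(q, '=&?$')
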
def query(self, rics, topics, start_date, end_date):
    params = {
        'startTime': start_date,
        'endTime': end_date,
        'ric': ','.join(map(lambda ric: 'RIC_' + ric, rics)),
        'topicCode': ','.join(topics)
    }
    encoded = []
    for key, value in params.items():
        encoded.append(key + '=' + urllib.parse.quote(value))
    url = 'http://138.68.255.10/api/newsdata?' + '&'.join(encoded)
    try:
        with urllib.request.urlopen(url) as conn:
            raw = conn.read()
    except urllib.error.HTTPError as error:
        with error:
            raw = error.read().decode()
    data = json.loads(raw)
    if not data['logfile']['success']:
        # something failed
        return Articles(False, error=data['error'])
    article_list = data['NewsDataSet']
    articles = []
    for article in article_list:
        rics = article['InstrumentIDs']
        topics = article['TopicCode']
        timestamp = article['TimeStamp']
        headline = article['Headline']
        body = article['NewsText']
        # Patch their results to match the universal format we're testing against:
        rics = list(map(lambda ric: ric[4:], rics))
        topics = list(map(lambda topic: topic[3:], topics))
        millis = timestamp[timestamp.rfind('.') + 1:-1]
        while len(millis) < 3:
            millis += '0'
        timestamp = timestamp[:timestamp.rfind('.') + 1] + millis + 'Z'
        # Add article to results
        articles.append(Article(rics, topics, timestamp, headline, body))
    return Articles(True, articles=articles,
                    time=float(data['logfile']['info']['elapsed'][:-8]))

def apicall():
    id = "asus-geforce-gtx750ti-oc-2gd5-performance-graphics-gddr5-2gb"
    chk = []
    data = {
        "Inputs": {
            "input1": {
                "ColumnNames": ["month", "price", "item"],
                "Values": []
            },
        },
        "GlobalParameters": {}
    }
    for i in range(1, 13):
        data["Inputs"]["input1"]["Values"].append([str(i), "0", id])
    body = str.encode(json.dumps(data))

    url = 'https://ussouthcentral.services.azureml.net/workspaces/24af1e57b2794253957d65b08d00a725/services/dca5e0834086488e986f1efd0375ae2f/execute?api-version=2.0&details=true'
    api_key = 'IQW3L6TA21jChGgRgm8Z2w8xGeYtpUISi3b7fXY+/DKw1O7Mxrklwd3PKxUTHBuu43HybpCOtLfN+AuPOkYmbw=='  # Replace this with the API key for the web service
    headers = {
        'Content-Type': 'application/json',
        'Authorization': ('Bearer ' + api_key)
    }
    req = urllib.request.Request(url, body, headers)

    try:
        response = urllib.request.urlopen(req)
        result = response.read().decode("utf-8")
        chk.append(json.JSONDecoder().decode(result))
    except urllib.error.HTTPError as error:
        print("The request failed with status code: " + str(error.code))
        # Print the headers - they include the request ID and the timestamp,
        # which are useful for debugging the failure
        print(error.info())
        print(json.loads(error.read()))

    ref = chk[0]["Results"]["output1"]["value"]["Values"]
    minN = ref[0][3]
    res = ref[0][0]
    for i in range(1, len(ref)):
        # Track the running minimum, remembering the month it occurred in.
        if ref[i][3] < minN:
            minN = ref[i][3]
            res = ref[i][0]
    return render_template('index.html', result=calendar.month_name[int(res)])

def http_request(url, values=None, headers={}, timeout=20, method=None,
                 ssl_context=None):
    """
    Send an HTTP request (GET if no values, POST if values provided or method
    if provided) and attempt to return the response data.
    Values are encoded as utf-8.
    Response content is decoded based on content type in response header.
    Returns a tuple containing content, content type
    """
    if values is not None:
        data = json.dumps(values)
        data = data.encode('utf-8')
    else:
        data = None
    req = urllib.request.Request(url, data, headers, method=method)
    try:
        if ssl_context:
            resp = urllib.request.urlopen(req, timeout=timeout, context=ssl_context)
        else:
            resp = urllib.request.urlopen(req, timeout=timeout)
        response_data = resp.read()
        # Check the encoding type of the content and decode
        charset = resp.headers.get_content_charset()
        if charset:
            response_data = response_data.decode(charset)
        content_type = resp.headers.get_content_type()
    # Exceptions raised for non 2XX response codes
    except urllib.error.HTTPError as error:
        response_data = error.read()
        charset = error.headers.get_content_charset()
        if charset:
            response_data = response_data.decode(charset)
        content_type = error.headers.get_content_type()
        if error.code == 401:
            # Unauthorized
            raise OpenAMError('401 Unauthorised to access {0}'.format(url),
                              401, response_data, content_type) from error
        if 300 <= error.code <= 399:
            # Redirection errors
            raise OpenAMError(
                '{0} Redirection response ({1}) from {2}'.format(
                    error.code, error.reason, url),
                error.code, response_data, content_type) from error
        if 400 <= error.code <= 499:
            # Client errors
            raise OpenAMError(
                '{0} Client error ({1}) from {2}.'.format(
                    error.code, error.reason, url),
                error.code, response_data, content_type) from error
        if 500 <= error.code <= 599:
            # Server errors
            raise OpenAMError(
                '{0} Server error ({1}) from {2}'.format(
                    error.code, error.reason, url),
                error.code, response_data, content_type) from error
    except urllib.error.URLError as error:
        # Could not contact server
        raise OpenAMError('Communication error when trying {0}. {1}'.format(
            url, error.reason)) from error
    return (response_data, content_type)

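# A hypothetical call of http_request() above: POST a JSON body and let the
# helper decode the response using the charset the server advertises. The URL
# and payload are illustrative, not a real OpenAM endpoint.
def _http_request_usage_sketch():
    content, content_type = http_request(
        'https://openam.example.com/json/authenticate',
        values={'user': 'demo'},
        headers={'Content-Type': 'application/json'},
        timeout=10)
    return content, content_type
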
def compile_code(base_url, uml_code, output_format,
                 use_post=False, timeout=None,
                 user_agent=None, ssl_context=None):
    # pylint: disable=too-many-arguments
    """Compile PlantUML code into the specified format data by PlantUML server.

    Args:
        base_url (str): URL of PlantUML server.
        uml_code (str): PlantUML code.
        output_format (str): The target format.
        use_post (bool): Whether or not to use HTTP POST method for compiling.
            PlantUML server supports POST method from version 1.2018.5.
        timeout (int): The server communication timeout in seconds.
        ssl_context (ssl.SSLContext): SSL Context for server communication.
    Returns:
        bytes: output data with the specified format.
    Raises:
        CompileError: If PlantUML code can not be compiled.
        urllib.error.HTTPError: If some server error occurs.
        urllib.error.URLError: If some protocol error occurs.
    """
    headers = sabacan.utils.make_headers(user_agent)
    pattern = _FORMAT_TO_URL_PATTERN_TABLE.get(output_format, output_format)
    if use_post:
        url = base_url + '/' + pattern + '/'
        data = uml_code.encode('utf-8')
        headers['Content-Type'] = 'text/plain;charset="UTF-8"'
        request = urllib.request.Request(url, data, headers)
    else:
        encoded_uml = encode_code(uml_code)
        url = base_url + '/' + pattern + '/' + encoded_uml
        request = urllib.request.Request(url, headers=headers)
    try:
        with urllib.request.urlopen(request,
                                    timeout=timeout,
                                    context=ssl_context) as response:
            return response.read()
    except urllib.error.HTTPError as error:
        with error:
            if error.code != 400:
                raise
            raise CompileError(error.reason, error.read())

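# Hypothetical usage of compile_code() above against a locally running
# PlantUML server; the base URL and diagram source are illustrative.
def _compile_code_usage_sketch():
    uml = '@startuml\nAlice -> Bob: hello\n@enduml'
    return compile_code('http://localhost:8080/plantuml', uml, 'png',
                        use_post=True, timeout=30)
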
def send_request(which, url, verb='get', post_data=None):
    if post_data is None:
        json_data = None
    else:
        json_data = json.dumps(post_data).encode("utf-8")

    full_url = "%s/%s" % (config.get(which, 'url'), url)
    req = urllib.request.Request(full_url, json_data)

    if config.has_option(which, 'oauthtoken'):
        req.add_header("Authorization",
                       b"bearer " + config.get(which, 'oauthtoken').encode('utf-8'))
    if config.has_option(which, 'username'):
        username = config.get(which, 'username')
        password = config.get(which, 'password')
        req.add_header("Authorization", b"Basic " + base64.urlsafe_b64encode(
            username.encode("utf-8") + b":" + password.encode("utf-8")))

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    req.add_header("User-Agent", "IQAndreas/github-issues-import")
    req.get_method = lambda: verb.upper()

    if verb.upper() not in ('HEAD', 'GET') and config.getboolean('settings', 'dry-run'):
        post_data['number'] = post_data.get('number', 0)
        post_data['url'] = '<dry-run>'
        post_data['html_url'] = '<dry-run>'
        print("dry-run:", verb.upper(), full_url)
        return post_data

    try:
        response = urllib.request.urlopen(req)
        json_data = response.read()
    except urllib.error.HTTPError as error:
        error_details = error.read()
        error_details = json.loads(error_details.decode("utf-8"))

        if error.code in http_error_messages:
            sys.exit(http_error_messages[error.code])
        else:
            error_message = "ERROR: There was a problem importing the issues.\n%s %s" % (
                error.code, error.reason)
            if 'message' in error_details:
                error_message += "\nDETAILS: " + error_details['message']
            sys.exit(error_message)

    return json.loads(json_data.decode("utf-8"))

def is_operation_supported(self, operation=None, headers={}):
    url = self._host + '/graph/operations/' + operation.get_operation()
    headers['Content-Type'] = 'application/json;charset=utf-8'
    request = urllib.request.Request(url, headers=headers)
    try:
        response = self._opener.open(request)
    except urllib.error.HTTPError as error:
        error_body = error.read().decode('utf-8')
        new_error_string = ('HTTP error ' + str(error.code) + ' '
                            + error.reason + ': ' + error_body)
        raise ConnectionError(new_error_string)
    response_text = response.read().decode('utf-8')
    return response_text

def get_data(limit=1000, offset=0, days=1):
    days = int(days)
    endpoint = 'http://data.cityofchicago.org/resource/ydr8-5enu.json?'
    yesterday = (date.today() - timedelta(days=days)).isoformat()
    # yesterday = date.today().isoformat()
    query = "$where=_issue_date>'%s' &$limit=%s&$offset=%s" % (yesterday, limit, offset)
    qquery = urllib.parse.quote(query, '=&?$')
    url = endpoint + qquery
    try:
        resp = urllib.request.urlopen(url)
        contents = resp.read()
        return contents
    except urllib.error.HTTPError as error:
        print(error.read())

def is_import_needed(check_endpoint):
    request = urllib.request.Request(url=check_endpoint,
                                     headers={'Accept': 'application/json'})
    try:
        with urllib.request.urlopen(request) as response:
            content = response.read().decode('utf-8').strip().lower()
            print('Import check API endpoint "%s" returned: [%d %s] %s'
                  % (check_endpoint, response.status, response.msg, content))
            return content == 'true'
    except urllib.error.HTTPError as error:
        print('Import check API endpoint "%s" has failed: [%d %s] %s'
              % (check_endpoint, error.code, error.msg,
                 error.read().decode('utf-8')))
        raise error

def connect(self, server, ts, key):
    parameters = {
        'act': 'a_check',
        'key': key,
        'ts': ts,
        'wait': self.wait,
        'mode': self.mode
    }
    parameters = urllib.parse.urlencode(parameters)
    try:
        response = urllib.request.urlopen(
            'http://{}?{}'.format(server, parameters)
        )
    except urllib.error.HTTPError as error:
        response = error.read().decode('utf-8')
    else:
        response = response.read().decode('utf-8')
    result = json.loads(response)
    return result

def get_blob(self, thread_id, blob_id):
    """Returns a file-like object with the contents of the given blob from
    the given thread.

    The object is described in detail here:
    https://docs.python.org/2/library/urllib2.html#urllib2.urlopen
    """
    request = Request(
        url=self._url("blob/%s/%s" % (thread_id, blob_id)))
    if self.access_token:
        request.add_header("Authorization", "Bearer " + self.access_token)
    try:
        return urlopen(request, timeout=self.request_timeout)
    except HTTPError as error:
        try:
            # Extract the developer-friendly error message from the response
            message = json.loads(error.read().decode())["error_description"]
        except Exception:
            raise error
        raise QuipError(error.code, message, error)

def call(self, method, **parameters):
    if 'v' not in parameters:
        parameters['v'] = API_v
    parameters['access_token'] = self.access_token
    # Encode into a separate variable so the original dict stays intact for
    # the retry via self.call() below.
    encoded = urllib.parse.urlencode(parameters)
    request = '/method/%s?%s' % (method, encoded)
    if not self.use_https:
        url = 'http'
    else:
        url = 'https'
    url += '://api.vk.com' + request
    if self.secret:
        sig_param = urllib.parse.urlencode({'sig': self.get_sig(request)})
        url += '&{}'.format(sig_param)
    try:
        response = urllib.request.urlopen(url)
    except urllib.error.HTTPError as error:
        response = error.read().decode('utf-8')
    else:
        response = response.read().decode('utf-8')
    result = json.loads(response)
    if 'response' in result:
        return result['response']
    elif 'error' in result:
        if result['error']['error_code'] == 17:
            if self.number:
                validate_number(
                    self.number,
                    result['error']['redirect_uri']
                )
                return self.call(method, **parameters)
            print(
                'Redirect URI: {}'.format(result['error']['redirect_uri'])
            )
        raise VKAPIError(
            result['error']['error_msg'],
            result['error']['error_code'],
            result['error']
        )
    else:
        raise Exception()

def fetch(self, location, method="GET", postdata=None, headers=None):
    """ This provides a convenience function for making requests. This
    interfaces with urllib2 and provides the ability to make GET, POST, PUT
    and DELETE requests. The return data from this function is headers,
    content, http status, and the timedelta from a successful request"""

    # Checks to ensure that header values and postdata are in the
    # appropriate format
    if type(headers) != dict and headers is not None:
        raise TypeError("headers are not a valid Python dictionary")
    if type(postdata) != str and postdata is not None:
        raise TypeError("postdata is not a valid Python string")

    # Request's second positional argument is the body, so the post data is
    # encoded and passed there; urllib.request.Request has no add_data().
    body = postdata.encode("utf-8") if postdata is not None else None
    if headers:
        req = urllib.request.Request(location, body, headers=headers)
    else:
        req = urllib.request.Request(location, body)
    req.get_method = lambda: method.upper()

    # Anticipate errors from either unavailable content or nonexistent resources
    try:
        start = datetime.datetime.now()
        response = urllib.request.urlopen(req)
        end = datetime.datetime.now()
    except urllib.error.HTTPError as error:
        return (error.headers, error.read(), error.code, None)
    except urllib.error.URLError as error:
        # Nonexistent resources won't have headers or status codes
        return (None, error.reason, None, None)
    else:
        headers = response.info()
        content = response.read()
        # Grab the HTTP Status Code
        code = response.getcode()
        # Compute timedelta from a successful request
        time = end - start
        return (headers, content, code, time)

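# A hypothetical call of fetch() above (client stands in for an instance of
# whatever class defines it). On an HTTP error the body still comes back via
# error.read(), with the timing slot left as None.
def _fetch_usage_sketch(client):
    headers, content, status, elapsed = client.fetch(
        'https://example.com/missing', method='GET')
    if status == 404:
        print('got 404, body was:', content)
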
def send_request(repo, url, post_data=None, method=None):
    if post_data is not None:
        post_data = json.dumps(post_data).encode("utf-8")

    repo_url = get_repository_option(repo, 'url')
    full_url = "%s/%s" % (repo_url, url)
    req = urllib.request.Request(full_url, post_data)
    if method is not None:
        req.method = method

    username = get_repository_option(repo, 'username')
    password = get_repository_option(repo, 'password')
    auth = base64.urlsafe_b64encode(
        ('%s:%s' % (username, password)).encode('utf-8'))
    req.add_header("Authorization", b'Basic ' + auth)

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    req.add_header("User-Agent", "spacetelescope/github-issues-import")

    try:
        response = urllib.request.urlopen(req)
        json_data = response.read()
    except urllib.error.HTTPError as error:
        error_details = error.read()
        error_details = json.loads(error_details.decode("utf-8"))

        if error.code in HTTP_ERROR_MESSAGES:
            sys.exit(HTTP_ERROR_MESSAGES[error.code])
        else:
            error_message = ("ERROR: There was a problem importing the "
                             "issues.\n%s %s" % (error.code, error.reason))
            if 'message' in error_details:
                error_message += "\nDETAILS: " + error_details['message']
            sys.exit(error_message)

    return json.loads(json_data.decode("utf-8"))

def execute_operation_chain(self, operation_chain):
    """
    This method queries Gaffer with the provided operation chain.
    """
    # Construct the full URL path to the Gaffer server
    url = self._host + '/graph/doOperation'

    # Query Gaffer
    if self._verbose:
        print('\nQuery operations:\n'
              + json.dumps(operation_chain.toJson(), indent=4) + '\n')

    # Convert the query dictionary into JSON and post the query to Gaffer
    json_body = bytes(json.dumps(operation_chain.toJson()), 'ascii')
    request = urllib.request.Request(
        url,
        headers={'Content-Type': 'application/json;charset=utf-8'},
        data=json_body)

    try:
        response = self._opener.open(request)
    except urllib.error.HTTPError as error:
        error_body = error.read().decode('utf-8')
        new_error_string = ('HTTP error ' + str(error.code) + ' '
                            + error.reason + ': ' + error_body)
        raise ConnectionError(new_error_string)

    response_text = response.read().decode('utf-8')
    if self._verbose:
        print('Query response: ' + response_text)

    # An empty body carries no JSON payload; compare with != rather than "is".
    if response_text is not None and response_text != '':
        result = json.loads(response_text)
    else:
        result = None

    return operation_chain.operations[-1].convert_result(result)

def get_travis_status(self, repo):
    status = None
    try:
        request = urllib.request.urlopen('https://api.travis-ci.org/repos/'
                                         + repo + '.json')
        with request as travis_json:
            travis_json = json.loads(travis_json.read().decode('utf-8'))
            status = travis_json["last_build_status"]
    except urllib.error.HTTPError as error:
        if settings.get('debug_enable', False):
            print('[Travis-CI API Error] ' + str(error.code) + ': ' + error.reason)
    except urllib.error.URLError as error:
        # URLError carries no status code, only a reason.
        if settings.get('debug_enable', False):
            print('[Travis-CI API Error] ' + str(error.reason))
    except Exception as error:
        if settings.get('debug_enable', False):
            print('[Travis-CI API Error] ' + str(error))
    if status is None and settings.get('debug_enable', False):
        print('[Travis-CI API Error] ' + repo + ' is not an active repository on Travis')
    return status

def _fetch_json(self, path, post_data=None, **args):
    request = Request(url=self._url(path, **args))
    if post_data:
        post_data = dict((k, v) for k, v in post_data.items()
                         if v or isinstance(v, int))
        request_data = urlencode(self._clean(**post_data))
        if PY3:
            request.data = request_data.encode()
        else:
            request.data = request_data
    if self.access_token:
        request.add_header("Authorization", "Bearer " + self.access_token)
    try:
        return json.loads(
            urlopen(request, timeout=self.request_timeout).read().decode())
    except HTTPError as error:
        try:
            # Extract the developer-friendly error message from the response
            message = json.loads(error.read().decode())["error_description"]
        except Exception:
            raise error
        raise QuipError(error.code, message, error)

def encodeASP(text):
    """ base64 encode function for (ASP).NET """
    isbytes = True
    if not isinstance(text, bytes):
        text = base64.urlsafe_b64encode(text.encode())
        isbytes = False
    else:
        text = base64.urlsafe_b64encode(text)
    # Replace the trailing '=' padding with a digit holding the padding count.
    count = len(re.findall(b'=', text))
    for i in range(count):
        text = text[:-1]
    text = text + str(count).encode()
    if isbytes:
        return text
    else:
        return text.decode()


class SmartRedirectHandler(urllib.request.HTTPRedirectHandler):

    def http_error_301(self, req, fp, code, msg, headers):
        self.preProcessingRedirection(req, fp, code, msg, headers)
        result = super(SmartRedirectHandler, self).http_error_301(
            req, fp, code, msg, headers)
        self.postProcessingRedirection(result)
        return result

    def http_error_302(self, req, fp, code, msg, headers):
        self.preProcessingRedirection(req, fp, code, msg, headers)
        result = super(SmartRedirectHandler, self).http_error_302(
            req, fp, code, msg, headers)
        self.postProcessingRedirection(result)
        return result

    def preProcessingRedirection(self, req, fp, code, msg, headers):
        location = ''
        for i in headers._headers:
            if i[0] == 'Location':
                location = i[1].strip()
        req.add_header('Host', urlparse(location).netloc)
        printAnswer(code, str(msg) + " " + location)
        printHeaders(headers._headers, 'Set-Cookie')

    def postProcessingRedirection(self, result):
        printRequest("GET", result.geturl())


def stringToHexCSV(s):
    # bytes.hex() replaces the Python 2-only str.encode('hex').
    hexs = s.encode().hex()
    ret = ' '.join(hexs[i:i + 2] for i in range(0, len(hexs), 2))
    return ret


def defaultCreateOpener(withCookieJar=True, withBurpProxy=True):
    global cookieJar
    if withCookieJar:
        cookieJar = urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar())
    proxy_handler = None
    if withBurpProxy:
        proxy_handler = urllib.request.ProxyHandler(
            {'https': 'https://127.0.0.1:8080/', 'http': 'http://127.0.0.1:8080/'})
    ret = None
    if withCookieJar and withBurpProxy:
        ret = urllib.request.build_opener(proxy_handler, SmartRedirectHandler(), cookieJar)
    elif withCookieJar:
        ret = urllib.request.build_opener(SmartRedirectHandler(), cookieJar)
    elif withBurpProxy:
        ret = urllib.request.build_opener(proxy_handler, SmartRedirectHandler())
    return ret


def processingCookies(headers):
    cookies = headers['Cookie']
    final = ''
    if type(cookies) == type(""):
        return
    for c in cookies.keys():
        final += " " + c + "=" + cookies[c] + ";"
    headers['Cookie'] = final
    return headers


createOpener = defaultCreateOpener


def requestC(opener, url, headers, data, method='POST'):
    [answer, code] = requestB(opener, url, headers, data, method)
    return answer


def requestB(opener, url, headers, data, method='POST'):
    answer = ''
    retcode = None
    additionalInfo = '[None]'
    contentLength = None
    if ImportCookie:
        headers = processingCookies(headers)
    data = urllib.parse.urlencode(data)
    if method == 'GET':
        if data:
            url = url + '?' + data
        data = None
    elif method == 'POST':
        headers['Content-Length'] = len(data)
        data = data.encode()
    request = urllib.request.Request(url, data, headers)
    try:
        printRequest(request.get_method(), request.get_full_url())
        if data:
            printData(data)
        f = opener.open(request)
        headers = f.getheaders()
        code = f.code
        retcode = code
        answer = f.read()
        m = hashlib.md5()
        m.update(answer)
        for h in headers:
            if h[0].lower() == 'content-length':
                contentLength = h[1]
                additionalInfo = '[' + str(h[1]) + ']'
        printAnswer(code, additionalInfo)
        printHeaders(headers, 'Set-Cookie')
    except urllib.error.HTTPError as error:
        # Report the Content-Length of the error body if the server sent one;
        # the for/else falls back to -1 when no such header exists.
        for h in error.headers:
            if h.lower() == 'content-length':
                printAnswer(str(error.code), ' [' + str(error.headers[h]) + ']')
                break
        else:
            printAnswer(str(error.code), ' [-1]')
        retcode = error.code
        answer = error.read()
    except urllib.error.URLError as error:
        printAnswer(str(error))
    return answer, retcode


def parseBurpData(fileName):
    global Protocol
    url = ''
    host = ''
    data = None
    contentType = 'None'
    headers = {}
    indata = None
    try:
        indata = open(fileName, "r")
    except IOError as error:
        print(str(error))
        sys.exit(1)
    line = indata.readline()
    res = line.partition(' ')
    method = res[0]
    printDebug('method ' + method)
    res = res[2].rpartition(' ')
    uri = res[0]
    printDebug('URI: ' + str(uri))
    if Protocol is None:
        rulo = urlparse(uri)
        printOut('Scheme not given, trying to guess it from burp request.')
        # Exit only when the scheme is neither http nor https.
        if rulo.scheme != 'http' and rulo.scheme != 'https':
            printOut('** Could not determine the scheme from the HTTP request, please configure one **')
            sys.exit(1)
        else:
            Protocol = rulo.scheme
    line = indata.readline()
    while line.strip():
        res = line.partition(':')
        if res[0] == 'Host':
            host = res[2].strip()
        if res[0] == 'Content-Type':
            contentType = res[2].strip()
        if res[0] == 'Cookie':
            if ImportCookie:
                cookies = res[2].split(';')
                for c in cookies:
                    tm = c.strip().partition('=')
                    if res[0] not in headers:
                        headers[res[0]] = {tm[0]: tm[2]}
                    else:
                        headers[res[0]][tm[0]] = tm[2]
                line = indata.readline()
                continue
        headers[res[0]] = res[2].strip()
        line = indata.readline()
    if method == 'POST':
        url = Protocol + '://' + host + uri
        data = indata.read().strip()
        if len(data) == 0:
            data = None
        else:
            urlencodedcontenttype = re.compile(r'application/x-www-form-urlencoded')
            if urlencodedcontenttype.match(contentType):
                data = urllib.parse.parse_qs(data)
                for d in list(data.keys()):
                    if len(data[d]) > 1:
                        printOut("Multiple value for the same field. Odd... taking the first one")
                    data[d] = data[d][0]
            elif contentType == 'text/xml; charset=UTF-8':
                data = parseString(data)
            else:
                printOut('Unknown Content type: ' + str(contentType))
    elif method == 'GET':
        res = uri.rpartition('?')
        uri = res[0]
        if len(res) == 3:
            if uri == '':
                uri = res[2]
            data = urllib.parse.parse_qs(res[2])
            for d in list(data.keys()):
                if len(data[d]) > 1:
                    printOut("Multiple value for the same field. Odd... taking the first one")
                data[d] = data[d][0]
        url = Protocol + '://' + host + uri
    indata.close()
    return url, headers, data, method