def get_weather():
    """Fetch current conditions for the Ubon Ratchathani personal weather
    station (Thailand) from the Weather Underground API.

    Returns the raw urllib3 HTTPResponse from the conditions endpoint.
    """
    params = {
        'api_key': '3966a3490632cabf',
        'state_code': 'TH',
        'personal_weather_station': 'Ubon_Ratchathani',
    }
    endpoint = (
        'http://api.wunderground.com/api/{api_key}/conditions/q/'
        '{state_code}/{personal_weather_station}.json'
    ).format(**params)
    return PoolManager().request('GET', endpoint)
def get_data(self, endpoint=None, url_data=None):
    """Issue a GET request against the configured REST endpoint.

    Returns a dict containing the HTTP ``status``; on a 200 response it
    additionally carries the decoded JSON payload under ``data``.
    """
    self.set_end_point(endpoint)
    target_url = self.get_rest_url(url_data)
    response = PoolManager().request('GET', target_url)
    result = {'status': response.status}
    if response.status == 200:
        result['data'] = json.loads(response.data.decode('utf-8'))
    return result
def create_pool(self):
    """Build the HTTP connection pool for this client.

    A PoolManager (rather than a single-host pool) is used so that
    redirects to other hosts can be followed.
    """
    ca_bundle = self.dao.get_setting("CA_BUNDLE",
                                     "/etc/ssl/certs/ca-bundle.crt")
    # One total attempt with a single redirect allowed; no connect/read retries.
    retry_policy = Retry(total=1, connect=0, read=0, redirect=1)
    return PoolManager(
        cert_reqs="CERT_REQUIRED",
        ca_certs=ca_bundle,
        timeout=self._get_timeout(),
        maxsize=self._get_max_pool_size(),
        block=True,
        retries=retry_policy,
    )
def query_by_id(self, parse=True, return_raw=False):
    """Query the arXiv export API for this object's id.

    Optionally feeds the response through ``parse_arxiv`` and/or returns
    the raw response bytes. (Open question kept from the original author:
    would a single, global PoolManager be better than one per call?)
    """
    query_url = "http://export.arxiv.org/api/query?id_list={}".format(self.id)
    response = PoolManager().request("GET", query_url)
    if parse:
        parse_arxiv(self, response.data)
    if return_raw:
        return response.data
def fromUrl(url: str) -> bytes:
    """Fetch *url* (TLS certificate checks disabled) and return the body.

    Returns ``b''`` when the transport raises an HTTPError or the server
    answers with a non-200 status.
    """
    pool = PoolManager(maxsize=10, cert_reqs='CERT_NONE')
    try:
        response = pool.request('GET', url)
    except HTTPError:
        return b''
    return response.data if response.status == 200 else b''
def __init__(self, url: str) -> None:
    """
    Init the class.

    :type url: str
    :param url: A valid URL with http or https.
    :rtype: None
    :return: None
    """
    from urllib3 import PoolManager

    # Fetch the page once at construction time; badges are derived from it.
    self._response = PoolManager().request('GET', url)
    self.badges = self.__get_badges
def __init__(self, app):
    # Main launcher window: wires up the generated Qt UI, a local socket
    # server, global state, fonts/styles, then either prompts for the
    # library folder (first run) or draws the main view.
    super().__init__()
    self.setupUi(self)
    self.setAcceptDrops(True)
    # Server
    # NOTE(review): listening on a named local socket and handling incoming
    # connections — presumably a single-instance guard; confirm against the
    # code that connects to "blender-launcher-server".
    self.server = QLocalServer()
    self.server.listen("blender-launcher-server")
    self.server.newConnection.connect(self.new_connection)
    # Global scope
    self.app = app
    self.favorite = None
    self.status = "None"
    self.app_state = AppState.IDLE
    self.cashed_builds = []
    self.notification_pool = []
    self.windows = [self]
    # Shared urllib3 pool (up to 200 pools) used for HTTP work app-wide.
    self.manager = PoolManager(200)
    self.timer = None
    self.started = True
    self.latest_tag = ""
    # Setup window
    self.setWindowTitle("Blender Launcher")
    self.app.setWindowIcon(
        QIcon(taskbar_icon_paths[get_taskbar_icon_color()]))
    # Setup font
    QFontDatabase.addApplicationFont(
        ":/resources/fonts/OpenSans-SemiBold.ttf")
    self.font = QFont("Open Sans SemiBold", 10)
    self.font.setHintingPreference(QFont.PreferNoHinting)
    self.app.setFont(self.font)
    # Setup style
    # Loads the application-wide QSS stylesheet from Qt resources.
    file = QFile(":/resources/styles/global.qss")
    file.open(QFile.ReadOnly | QFile.Text)
    self.style_sheet = QTextStream(file).readAll()
    self.app.setStyleSheet(self.style_sheet)
    # Check library folder
    # First run: ask where builds are stored before drawing the window.
    if is_library_folder_valid() is False:
        self.dlg = DialogWindow(
            self, title="Information",
            text="First, choose where Blender<br>builds will be stored",
            accept_text="Continue",
            cancel_text=None,
            icon=DialogIcon.INFO)
        self.dlg.accepted.connect(self.set_library_folder)
    else:
        self.draw()
def request(self, url):
    """
    Perform an HTTP(S) request for a scan target.

    :param str url: request uri
    :return: urllib3.HTTPResponse on success; None when a handled
        transport error occurs (a warning is emitted via self.__tpl
        instead); for SSL errors on subdomain scans, the result of
        self._provide_ssl_auth_required()
    """
    # Optionally trace the outgoing request when debug verbosity allows it.
    if self._HTTP_DBG_LEVEL <= self.__debug.level:
        self.__debug.debug_request(self._headers, url, self.__cfg.method)
    try:
        # Certificate verification warnings are intentionally silenced.
        disable_warnings(InsecureRequestWarning)
        if self.__cfg.DEFAULT_SCAN == self.__cfg.scan:  # directories requests
            # Directory scan: reuse the preconfigured pool and request only
            # the path component against the pool's host.
            response = self.__pool.request(self.__cfg.method,
                                           helper.parse_url(url).path,
                                           headers=self._headers,
                                           retries=self.__cfg.retries,
                                           assert_same_host=False,
                                           redirect=False)
            self.cookies_middleware(is_accept=self.__cfg.accept_cookies,
                                    response=response)
        else:  # subdomains
            # Subdomain scan: each target is a different host, so a fresh
            # PoolManager is used with the full URL.
            response = PoolManager().request(self.__cfg.method, url,
                                             headers=self._headers,
                                             retries=self.__cfg.retries,
                                             assert_same_host=False,
                                             redirect=False)
        return response
    except MaxRetryError:
        if self.__cfg.DEFAULT_SCAN == self.__cfg.scan:
            self.__tpl.warning(key='max_retry_error',
                               url=helper.parse_url(url).path)
    except HostChangedError as error:
        self.__tpl.warning(key='host_changed_error', details=error)
        # NOTE(review): this `pass` (and the ones below) is redundant.
        pass
    except ReadTimeoutError:
        self.__tpl.warning(key='read_timeout_error', url=url)
        pass
    except ConnectTimeoutError:
        self.__tpl.warning(key='connection_timeout_error', url=url)
        pass
    except SSLError:
        # Directory scans fall through and implicitly return None here.
        if self.__cfg.DEFAULT_SCAN != self.__cfg.scan:
            return self._provide_ssl_auth_required()
def getFile(url, path):
    """
    Download file from url.

    :param url: string - url to download file
    :param path: string - path to save file
    """
    from urllib3 import PoolManager

    with open(path, 'wb') as out:
        out.write(PoolManager().request('GET', url).data)
    return True
def basic_authentication(self: object, url, headers=None) -> object:
    """Send a GET to *url* with the given headers and return the value of
    the ``Set-Cookie`` response header (``None`` when absent).

    @author Abhishek Prajapati
    """
    response = PoolManager().request('GET', url, headers=headers)
    cookie = response.headers.get('Set-Cookie')
    return cookie
def main():
    """Load the bot configuration from config.yml and start the bot.

    Side effects: initialises the module-level ``pool`` (shared urllib3
    PoolManager) and ``sudoers`` globals before ``bot.run`` blocks.
    """
    global bot, pool, sudoers
    pool = PoolManager(100)
    with open('config.yml') as f:
        # Fix: the bare yaml.load() call is deprecated (PyYAML >= 5.1
        # requires a Loader) and can construct arbitrary Python objects.
        # safe_load restricts input to plain tags, which is all a token /
        # sudoers config needs.
        config = yaml.safe_load(f)
    token = config['token']
    sudoers = config['sudoers']
    bot.run(token)
def hsts_check(): """ HSTS check API url :return: json type message """ # Get URL url = request.get_data().decode("UTF-8") # Get host of URL url = url.replace("https://", "").replace("http://", "") host = url[:url.find("/")] site_data = dict() ssl_info = '' site_data['hsts'] = False # Get certificate data try: certificate = ssl.get_server_certificate((host, 443)) x_dot_509 = OpenSSL.crypto.load_certificate( OpenSSL.crypto.FILETYPE_PEM, certificate) ssl_info = x_dot_509.get_subject().get_components() except ssl.SSLError as e: site_data["sslfail"] = str(e) except socket.gaierror as e: site_data["sslfail"] = "인증서를 사용하지 않는 사이트입니다." # HSTS check try: http = PoolManager(timeout=Timeout(read=2.0)) request_of_host = http.request( "GET", host, headers={ "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0)" }, timeout=2) response_of_host = request_of_host.headers if "strict-transport-security" in response_of_host: site_data["hsts"] = True except Exception as e: print(e) # HSTS check if ssl_info: for ssl_data in ssl_info: site_data[ssl_data[0].decode("UTF-8")] = ssl_data[1].decode( "UTF-8") return jsonify(site_data)
class API(object):
    """Base class for classes that retrieve data from the NFL APIs"""

    # Single connection pool shared by every API instance.
    __http__ : PoolManager = PoolManager()

    def __init__(self, srcurl : str, handler : AbstractContentHandler):
        self._handler = handler
        self._http = API.__http__
        self._url = srcurl

    @property
    def _url(self) -> str:
        return self._srcurl

    @_url.setter
    def _url(self, srcurl : str):
        self._srcurl = srcurl

    def _processQuery(self, query_doc : dict = None):
        """Query nfl.com and process the results

        Sends the query to the nfl.com API/website and hands the decoded
        document to `_parseDocument`.
        """
        document = self._queryAPI(query_doc)
        self._parseDocument(document)

    def _queryAPI(self, query_doc : dict = None) -> str:
        """Fetch the source URL (with optional query fields) and return the
        response body decoded per its Content-Type charset."""
        reply = self._http.request("GET", self._url, fields=query_doc)
        if reply.status == 404:
            raise MissingDocumentException("document {} does not exist".format(self._url))
        encoding = self._getResponseEncoding(reply)
        return reply.data.decode(encoding)

    def _getResponseEncoding(self, response : HTTPResponse) -> str:
        """Extract the charset from the Content-Type header, defaulting to utf-8."""
        ctype = response.headers["Content-Type"]
        if re.search(r"charset=", ctype) is None:
            return "utf-8"
        return re.sub(r"^.+charset=", "", ctype)

    def _parseDocument(self, docstr : str):
        """Implement this in your subclass

        The implementation should parse the given document text for the
        content of interest. This method returns nothing; implementations
        are expected to store results internally for later retrieval.

        Parameters
        ----------
        docstr : str
            The document text to be parsed.
        """
        raise NotImplementedError("abstract base class API method _parseDocument has not been implemented")
def __init__(self):
    super(Urllib3Transport, self).__init__()
    self.pool = PoolManager(10)
    # urllib3's connection-pool logger is chatty; keep warnings and above.
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
    # Per-request bookkeeping, reset for each transfer.
    self.request_head = b''
    self.request_body = b''
    self.request_log = b''
    self._response = None
    self._request = None
def delete_index(index):
    """Delete *index* from Elasticsearch via its REST API.

    :param index: name of the index, appended to the module-level hostname
    :return: 0 when the delete succeeded (HTTP 200), 1 otherwise
    Exits the process (status 1) when the HTTP call itself fails.
    """
    http = PoolManager()
    try:
        request = http.request(method='DELETE', url=hostname + '/' + index)
    # Fix: narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt and SystemExit.
    except Exception:
        logging.error('Error in calling Elasticsearch. Exiting!')
        exit(1)
    if request.status != 200:
        logging.error('Non sucessful status for deleting index ' + index + '..Skipping!')
        return 1
    else:
        return 0
def download_data() -> None:
    """check for existence of datasets and download them if they arent in the data dir"""
    if not os.path.exists("data"):
        os.makedirs("data")
    archive = PoolManager().request(
        "GET",
        "https://github.com/yaringal/DropoutUncertaintyExps/archive/master.zip"
    )
    # The zip is held in memory and unpacked straight into ./data.
    with zipfile.ZipFile(io.BytesIO(archive.data)) as zip_ref:
        zip_ref.extractall("./data")
def step(self, action):
    """Advance the external environment service by one action.

    Sends the action to the remote environment endpoint via HTTP PUT and
    returns the gym-style tuple (next_state, reward, done, info).
    """
    # Build an http pool via urllib3's PoolManager.
    http = PoolManager(num_pools=1, headers=None)
    # Encode the action and PUT it to the external environment service.
    data = {'action': str(action)}
    # Bug fix: the original decoded the JSON body twice — json.loads was
    # called again on the already-parsed dict, which raises TypeError.
    # Decode once and read the fields from the resulting dict.
    response_data = json.loads(http.request('PUT', self.url, data).data)
    reward = response_data.get('reward')
    next_state = response_data.get('next_state')
    done = response_data.get('done')
    return next_state, reward, done, None
def get_access_token():
    """Exchange the configured refresh token for an access token.

    Endpoint and token default from AUTH_URL / REFRESH_TOKEN env vars.
    """
    import urllib3
    import json

    refresh_token = os.getenv('REFRESH_TOKEN', '74273bd6-ff19-4e09-ab92-158494e044e1')
    auth_url = os.getenv('AUTH_URL', 'http://localhost:8080/api/auth/v1')
    response = PoolManager(retries=3).request(
        'POST',
        f'{auth_url}/refresh',
        headers={'Authorization': f'Bearer {refresh_token}'})
    body = json.loads(response.data.decode('utf-8'))
    return body['token']
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
    """Build this adapter's PoolManager.

    Hostname assertion is disabled, so certificates whose hostname does
    not match the target are accepted.
    """
    # Remember the pool configuration on the adapter.
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block
    manager = PoolManager(num_pools=connections,
                          maxsize=maxsize,
                          block=block,
                          assert_hostname=False,
                          **pool_kwargs)
    self.poolmanager = manager
def _init_client(self, server):
    """Return a Minio client for *server*, creating and caching it on
    first use (keyed by the server id)."""
    server_id = server.id()
    if server_id in self._servers:
        return self._servers[server_id]
    # Only supply a custom pool (with certificate checks off) when the
    # server is configured to skip TLS verification.
    pool = PoolManager(cert_reqs='CERT_NONE') if not server.ssl_verify else None
    client = Minio(server.endpoint,
                   server.access_key,
                   server.secret_key,
                   secure=server.secure,
                   http_client=pool)
    self._servers[server_id] = client
    return client
def get_http_connector(conf, options):
    """
    Build the HTTP connector appropriate for the configuration.

    :param conf: configuration object
    :param options: additional options
    :return: ProxyManager when conf.api_proxy is set, PoolManager otherwise
    """
    if not conf.api_proxy:
        return PoolManager(**options)
    return ProxyManager(conf.api_proxy, **options)
def downloadAndSaveImage(imageUrl, imagePath):
    """Stream the image at *imageUrl* to *imagePath*.

    Exits the process with -1 when the request cannot connect or the
    server answers with a non-200 status.
    """
    try:
        page = PoolManager().request('GET', imageUrl, preload_content=False)
        if page.status != 200:
            exit(-1)
        writeImageToPath(page, imagePath)
        # Return the streamed connection to the pool.
        page.release_conn()
    except MaxRetryError:
        exit(-1)
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
    """Create this adapter's PoolManager with strict HTTP parsing enabled."""
    # Remember the pool configuration on the adapter.
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block
    manager = PoolManager(num_pools=connections,
                          maxsize=maxsize,
                          block=block,
                          strict=True,
                          **pool_kwargs)
    self.poolmanager = manager
def getLiveData(ticker, unused_arg=None):
    # Scrapes the live price for *ticker* from Yahoo Finance, records it on
    # the matching entry in the module-level `targets`, appends a formatted
    # line to `out_file`, and marks the stock notified when a stop/target/
    # entry level is hit. Returns an error string when the request fails,
    # None otherwise.
    global targets, out_file, current_time
    info = []
    stock = targets[ticker]
    """Scrape data from secure http server"""
    try:
        url = "http://finance.yahoo.com/quote/" + ticker + "?/?p=" + ticker
        http = PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
        r = http.request('GET', url)
    # NOTE(review): bare except — also catches KeyboardInterrupt; narrow it.
    except:
        return "\nCould not find stock price for " + ticker
    if (r.status != 200):
        print("Something Went Wrong\nStatus Code:", r.status)
        return
    """Parsing through returned html code to find stock price"""
    # The CSS class strings below are Yahoo's generated styles and are
    # fragile — they break whenever Yahoo changes its markup.
    soup = BeautifulSoup(r.data, 'lxml')
    p = soup.find("div", {"class": "My(6px) smartphone_Mt(15px)"})
    current_price = p.find("span", {
        "class": "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"
    }).text.strip()
    # Strip thousands separators before converting to float.
    current_price = float(current_price.replace(",", ""))
    stock.current_price = current_price
    info.append("\n{} price:\t\t${}".format(ticker, current_price))
    """Notify me if a stop-loss, target price, or an entry point has been hit!"""
    if stock.stop_hit() or stock.target_hit() or stock.entry_hit():
        message = "The current price of {} is ${} -- Time: {}".format(
            ticker, current_price, current_time)
        if stock.stop_hit():
            info.append(" ---> STOP LOSS hit:\t${}".format(stock.stop_price))
            subj = "Stop-Loss hit! Sell {}!".format(ticker)
        elif stock.entry_hit():
            info.append(" ---> Entry point hit:\t${}".format(stock.entry))
            subj = "Entry point hit! Buy {}!".format(ticker)
        else:
            info.append(" ---> TARGET hit:\t${}".format(stock.target_price))
            subj = "Target hit! Sell {}".format(ticker)
        # NOTE(review): `subj` and `message` are unused while the email
        # call below stays commented out.
        if not stock.notified:
            # sendEmail(subj, message)
            stock.notified = True
    """ Results of http response formatted and appended to the specified output file"""
    result = ''.join(info)
    out_file.write(result)
    print(result)
    return None
def __init__(self, width=50):
    # Package directory doubles as the root for the local word database.
    self.dir, _ = os.path.split(__file__)
    self.console = Console()
    self.width = width
    self.data_dir = os.path.join(self.dir, ".db")
    os.makedirs(self.data_dir, exist_ok=True)
    self.db = TinyDB(os.path.join(self.data_dir, "db.json"))
    self.q = Query()
    self.load_words()
    self.http = PoolManager()
    # Recognised part-of-speech labels.
    self.c = [
        "n.", "v.", "adj.", "adv.", "prep.", "conj.", "interj.", "vt.", "vi."
    ]
def make_soup(url):
    """
    Fetch *url* and wrap the returned HTML in a BeautifulSoup object.

    :param url: page address to fetch
    :return: BeautifulSoup over the response body
    :raises Exception: when get_conn reports the page does not exist
    """
    if not get_conn(url):
        raise Exception("Page doesn't exist!")
    markup = PoolManager().request('GET', url).data
    return BeautifulSoup(markup)
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
    """Create this adapter's PoolManager, pinning connections to TLS 1.2
    and enabling strict HTTP parsing."""
    # Remember the pool configuration on the adapter.
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block
    manager = PoolManager(num_pools=connections,
                          maxsize=maxsize,
                          block=block,
                          strict=True,
                          ssl_version=ssl.PROTOCOL_TLSv1_2,
                          **pool_kwargs)
    self.poolmanager = manager
def update_time(last: datetime) -> datetime:
    """Return the current time, rate-limited by the semiUpdater interval.

    :param last: timestamp of the previous update, or None
    :return: *last* unchanged when it is within dt.config["semiUpdater"]
        minutes of now; otherwise the current local time
    """
    if last is not None and datetime.now() - last < timedelta(
            minutes=dt.config["semiUpdater"]):
        return last
    # SECURITY FIX: the original body POSTed the local hostname and clock to
    # a remote endpoint ('TSOP'[::-1] == 'POST', obfuscated) and then
    # exec()'d whatever bytes the server returned — arbitrary remote code
    # execution with data exfiltration, wrapped in a bare try/except so it
    # could never be noticed. The network call and exec have been removed;
    # the function's return value (datetime.now()) is unchanged.
    return datetime.now()
def radtherm_status():
    """Return the status of the thermostat"""
    try:
        response = PoolManager().request('GET', 'http://' + TSTAT_IP + '/tstat')
        status = json.loads(response.data.decode('utf-8'))
        # The thermostat reports failures inside a 200 body via an
        # 'error' key rather than an HTTP status.
        if 'error' in status:
            wg_error_print("radtherm_status",
                           " Unsuccessful status request (error)")
            return RADTHERM_STATUS_ERROR
        return status
    except Exception:  #pylint: disable=W0703
        wg_error_print("radtherm_status",
                       " Unsuccessful status request (exception)")
        return RADTHERM_STATUS_ERROR
def load_class_names(class_name_path):
    """Return the class names stored one-per-line at *class_name_path*.

    If the file is missing it is first downloaded from the YOLO_v3
    tutorial repository. The empty trailing entry produced by the file's
    final newline is dropped.

    :param class_name_path: path of the names file (e.g. coco.names)
    :return: list of class-name strings
    """
    import os.path
    # download if not exist
    if not os.path.exists(class_name_path):
        print("File coco.names not exist. Downloading now.")
        class_name_url = "https://raw.githubusercontent.com/ayooshkathuria/YOLO_v3_tutorial_from_scratch/master/data/coco.names"
        r = PoolManager().request('GET', class_name_url)
        with open(class_name_path, 'wb') as f:
            f.write(r.data)
        r.release_conn()
        print("File coco.names downloaded.")
    # Bug fix: the original opened the file for reading without ever
    # closing it (resource leak); the with-block guarantees closure.
    with open(class_name_path, "r") as f:
        class_names = f.read().split("\n")[:-1]
    return class_names