import json

from urllib3 import PoolManager


def send_request(endpoint, data, pool_manager=None):
    # Lazily create a PoolManager so callers can inject a shared one.
    if pool_manager is None:
        pool_manager = PoolManager()
    return pool_manager.urlopen(
        'POST',
        endpoint,
        headers={'Content-Type': 'application/json'},
        body=json.dumps(data),
    )
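# Usage sketch for send_request() above. The endpoint URL is a placeholder;
# the point is that a single shared PoolManager can be injected so repeated
# calls reuse pooled connections instead of building a new pool per request.
shared_pool = PoolManager(num_pools=4)
resp = send_request('https://example.com/api/items', {'name': 'demo'},
                    pool_manager=shared_pool)
print(resp.status, resp.data[:100])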
import json
import os
import ssl
from urllib.parse import urlparse

from requests.adapters import HTTPAdapter
from requests.models import PreparedRequest
from urllib3 import PoolManager


def call_salesforce(url, method, session, headers, **kwargs):
    """Issue a request to Salesforce using urllib3 instead of requests.

    This is necessary for connections that use mutual authentication with
    encrypted client certificates, which the requests package cannot handle.
    PreparedRequest and HTTPAdapter are used so that a regular
    <requests.Response> is returned, as expected by the rest of the process.
    """
    additional_headers = kwargs.pop('additional_headers', dict())
    headers.update(additional_headers or dict())
    request_args = {'method': method.upper(), 'headers': headers}

    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # Try to load the client certificate and passphrase from the environment.
    cert_file = os.environ.get('SIMPLE_SALESFORCE_CERT_FILE', None)
    cert_pass = os.environ.get('SIMPLE_SALESFORCE_PASSWORD', None)
    if cert_file and cert_pass:
        context.load_cert_chain(certfile=cert_file, password=cert_pass)

    request = PreparedRequest()
    parsed = urlparse(url)
    # Mutual-auth endpoints listen on port 8443.
    parsed = parsed._replace(netloc="{}:{}".format(parsed.hostname, 8443))
    request.prepare(url=parsed.geturl(), data=kwargs.get('data') or {},
                    **request_args)

    http = PoolManager(ssl_context=context, cert_reqs='CERT_REQUIRED')
    result = http.urlopen(url=request.url, body=request.body, redirect=False,
                          assert_same_host=False, preload_content=False,
                          decode_content=False, **request_args)

    adapter = HTTPAdapter()
    response = adapter.build_response(request, result)
    if response.status_code >= 300:
        from simple_salesforce.util import exception_handler
        exception_handler(response)
    adapter.close()
    return response
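# Usage sketch for call_salesforce() above. The org URL and bearer token are
# placeholders; the environment variable names come from the function itself.
# Note that the function rewrites the netloc to port 8443 (the mutual-TLS
# endpoint) before issuing the request.
# os.environ['SIMPLE_SALESFORCE_CERT_FILE'] = '/path/to/client-cert.pem'
# os.environ['SIMPLE_SALESFORCE_PASSWORD'] = 'certificate-passphrase'
response = call_salesforce(
    'https://example.my.salesforce.com/services/data/v52.0/limits/',
    'get',
    session=None,
    headers={'Authorization': 'Bearer <access token>',
             'Content-Type': 'application/json'})
print(response.status_code)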
def closed(self, reason):
    self.log("\n\n *** finder spider completed ***\n ", level=log.INFO)
    self.log("number of unique onions queued: " + str(len(self.onions)),
             level=log.INFO)
    pool = PoolManager()
    for onion in self.onions:
        onion = onion.replace("https:", "http:")
        onion = onion.replace(".onion", ".onion/")
        # "http://" + 16-char v2 address + ".onion/" is exactly 30 chars,
        # so drop anything past the bare onion URL.
        onion = onion[:30]
        # Name the body "payload" rather than "json" to avoid shadowing
        # the json module.
        payload = '{"url":"' + onion + '"}'
        self.log("POSTing onion to ahmia: " + payload)
        post_url = "https://ahmia.fi/address/"
        content_type = {"Content-Type": "application/json"}
        req = pool.urlopen("POST", post_url, headers=content_type,
                           body=payload)
        if req.status != 200 and req.status != 403:
            self.log("Failed to POST " + onion +
                     ", server responded with HTTP " + str(req.status),
                     level=log.ERROR)
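# A possible hardening of the POST body built above: serializing with
# json.dumps() instead of string concatenation keeps the JSON valid even if
# an onion string ever contains quotes or backslashes. A minimal sketch
# (make_ahmia_body is a hypothetical helper, not part of the spider):
import json

def make_ahmia_body(onion_url):
    # e.g. 'http://expyuzz4wqqyqhjn.onion/' -> '{"url": "http://..."}'
    return json.dumps({"url": onion_url})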
def crawl_for_honey(url):
    try:
        print(f'\nStarting crawl on {url}\n')
        to_search = ['div', '---', '!', 'h1']
        pool = PoolManager(num_pools=5)
        response = pool.urlopen('GET', url, timeout=10, headers=header)
        soup = BeautifulSoup(response.data, "html.parser")
        for selector in to_search:
            print(f'Selecting: {selector}')
            # Use a separate loop variable so the selector is not shadowed.
            for tag in soup.findAll(selector):
                cprint(f"{tag}", "blue", "on_white", attrs=["bold"])
        x.execute(alive_check, ('Alive', url))
        c.commit()
        response.close()
    except (exceptions.ConnectionError, exceptions.ConnectTimeoutError,
            exceptions.MaxRetryError, exceptions.ReadTimeoutError,
            timeout) as e:
        cprint(f"\n{e}", "red", "on_white",
               attrs=["bold", "underline", "dark"])
        cprint(f"\nAppears to be a dead url: {url}", "red", "on_white",
               attrs=["bold", "underline", "dark"])
        x.execute(alive_check, ('Dead', url))
        c.commit()
class __Settings(QtCore.QObject):

    sendError = QtCore.pyqtSignal(str)
    providerLoading = QtCore.pyqtSignal(int, str)

    def __init__(self):
        QtCore.QObject.__init__(self)
        self.__pm = PoolManager(timeout=Timeout(10),
                                headers={'Accept-Encoding': 'gzip,deflate'})
        self.__settings = None
        self.providersListDefaultValue = "https://raw.githubusercontent.com/cr0wbar/fishnet-providers/master/providers.json"
        self.downloadProvidersListAtStartupDefaultValue = True
        self.providersDefaultValue = {"remote": [], "local": []}
        self.pagesDefaultValue = 3

    def __checkSettingsSanity(self):
        if not self.__settings:
            self.__settings = {}
        if "downloadProvidersListAtStartup" not in self.__settings:
            self.__settings["downloadProvidersListAtStartup"] = \
                self.downloadProvidersListAtStartupDefaultValue
        if "providersList" not in self.__settings:
            self.__settings["providersList"] = self.providersListDefaultValue
        if "providers" not in self.__settings:
            self.__settings["providers"] = self.providersDefaultValue
        if "pages" not in self.__settings:
            self.__settings["pages"] = self.pagesDefaultValue

    def getSettings(self):
        return self.__settings

    def loadConfiguration(self, path):
        if not isfile(path):
            self.__checkSettingsSanity()
        else:
            try:
                with open(path, "r") as sfile:
                    self.__settings = loads(sfile.read())
            except IOError as e:
                self.sendError.emit("Cannot open configuration at '" + path +
                                    "'<br/><b>Reason:</b> " + str(e))
            finally:
                self.__checkSettingsSanity()

    def loadProviders(self):
        # Check if there is a url for the list of providers.
        # If there is and we are supposed to download the list,
        # then proceed to download the list.
        # Then download each provider in the downloaded list.
        providerManager = ProviderManager().instance
        providerManager.reset()
        if (self.__settings
                and "downloadProvidersListAtStartup" in self.__settings
                and self.__settings["downloadProvidersListAtStartup"]):
            try:  # Load remote list of providers
                r = self.__pm.urlopen("GET", self.__settings["providersList"])
                self.providerLoading.emit(10, "Loading list of providers")
                providersList = loads(r.data.decode("utf-8"))
                if "list" in providersList:
                    self.__settings["providers"]["remote"].clear()
                    for providerUrl in providersList["list"]:
                        self.__settings["providers"]["remote"].append(providerUrl)
            except Exception as e:
                self.sendError.emit("cannot retrieve the list of providers at '"
                                    + self.__settings["providersList"]
                                    + "'<br/><b>Reason:</b> " + str(e))
        # Load stored remote providers
        totalNumberOfProviders = 0
        providersLoaded = 0
        if self.__settings and "remote" in self.__settings["providers"]:
            totalNumberOfProviders += len(self.__settings["providers"]["remote"])
        if self.__settings and "local" in self.__settings["providers"]:
            totalNumberOfProviders += len(self.__settings["providers"]["local"])
        if self.__settings and "remote" in self.__settings["providers"]:
            for remoteProvider in self.__settings["providers"]["remote"]:
                providersLoaded += 1
                self.providerLoading.emit(
                    int(10. + float(providersLoaded) / float(totalNumberOfProviders) * 90.),
                    "Loading provider at " + remoteProvider)
                providerManager.loadProviderFromUrl(remoteProvider)
        # Load stored local providers
        if self.__settings and "local" in self.__settings["providers"]:
            for localProvider in self.__settings["providers"]["local"]:
                providersLoaded += 1
                self.providerLoading.emit(
                    int(10. + float(providersLoaded) / float(totalNumberOfProviders) * 90.),
                    "Loading provider at " + localProvider)
                providerManager.loadProviderFromFile(localProvider)

    def setDefaultProvider(self, provider):
        self.__settings["defaultProvider"] = provider

    def writeConfiguration(self, path):
        try:
            with open(path, "w") as sfile:
                sfile.write(dumps(self.__settings))
        except IOError as e:
            self.sendError.emit("cannot open configuration at '" + path +
                                "'<br/><b>Reason:</b> " + str(e))
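# Usage sketch for the settings object above. The configuration path is a
# placeholder, the handlers are stand-ins for real Qt slots, and the class is
# instantiated directly here only for illustration (its dunder name suggests
# it is normally hidden behind a singleton wrapper).
settings = __Settings()
settings.sendError.connect(print)
settings.providerLoading.connect(lambda pct, msg: print(pct, msg))
settings.loadConfiguration("fishnet-settings.json")
settings.loadProviders()
settings.writeConfiguration("fishnet-settings.json")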
def push_opentsdb(cluster):
    pm = PoolManager()
    ts = str(time.time()).split(".")[0]
    port = get_port()
    hostname = get_hostname()
    # hostname = "haoziyu-worker-dev003-bjdxt9.qiyi.virtual"
    ma = MesosJunkman(cluster, hostname, port)

    path = "/page/cpu/usage"
    cpu_idle = ma.host_cpu(path)
    data = []
    for cpu in cpu_idle:
        cpu["timestamp"] = int(ts)
        cpu["tags"] = {"hostname": hostname, "cluster": cluster}
        data.append(cpu)

    path = "/page/memory"
    mem = ma.host_memory(path)
    for m in mem:
        m["timestamp"] = int(ts)
        m["tags"] = {"hostname": hostname, "cluster": cluster}
        data.append(m)

    path = "/page/system/loadavg"
    loadavg = ma.host_loadavg(path)
    for ld in loadavg:
        ld["timestamp"] = int(ts)
        ld["tags"] = {"hostname": hostname, "cluster": cluster}
        data.append(ld)

    # path = "/page/df"
    # df = ma.host_disk_df(path)
    # for d in df:
    #     d["timestamp"] = int(ts)
    #     tags = d["tags"].split(",")
    #     mount = tags[2].split("=")[1]
    #     fstype = tags[3].split("=")[1]
    #     d["tags"] = {"hostname": hostname, "cluster": cluster,
    #                  "mount": mount, "fstype": fstype}
    #     data.append(d)

    # path = "/page/diskio"
    # dio = ma.host_disk_io(path)
    # for io in dio:
    #     io["timestamp"] = int(ts)
    #     tags = io["tags"].split(",")
    #     device = tags[2].split("=")[1]
    #     io["tags"] = {"hostname": hostname, "cluster": cluster,
    #                   "device": device}
    #     data.append(io)

    # print(data)
    logger.info(data)
    pm.urlopen("POST", "http://10.15.230.1:4242/api/put",
               headers={"Content-Type": "application/json"},
               body=json.dumps(data))
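# For reference, each dict pushed by push_opentsdb() follows OpenTSDB's
# /api/put datapoint shape. A minimal sketch with made-up values:
datapoint = {
    "metric": "sys.cpu.idle",                       # hypothetical metric name
    "timestamp": 1530000000,                        # Unix seconds
    "value": 97.3,
    "tags": {"hostname": "worker-001", "cluster": "demo"},
}
# POSTing a JSON list of such dicts to http://<tsd-host>:4242/api/put is
# exactly what the last line of push_opentsdb() does.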
class SteemNodeRPC(object):
    """This class allows one to call API methods synchronously, without
    callbacks. It logs in and registers to the APIs:

    * database
    * history

    :param str urls: Either a single WebSocket URL, or a list of URLs
    :param str user: Username for authentication
    :param str password: Password for authentication
    :param Array apis: List of APIs to register to
        (default: ["database", "network_broadcast"])

    Available APIs

    * database
    * network_node
    * network_broadcast
    * history
    """
    call_id = 0
    api_id = {}

    def __init__(self, urls, user="", password="", **kwargs):
        self.apis = kwargs.pop("apis", ["database", "network_broadcast"])
        self.api_id = {}
        self._request_id = 0
        if isinstance(urls, list):
            self.urls = cycle(urls)
        else:
            self.urls = cycle([urls])
        self.user = user
        self.password = password
        self.num_retries = kwargs.get("num_retries", -1)
        self.is_http = False
        self.connect()
        self.register_apis()
        self.chain_params = self.get_network()

    def get_request_id(self):
        self._request_id += 1
        return self._request_id

    def connect(self):
        cnt = 0
        while True:
            cnt += 1
            self.url = next(self.urls)
            log.debug("Trying to connect to node %s" % self.url)
            if self.url[:4] == "http":
                if not self.is_http:
                    # Keep one pool with TCP keep-alive for all HTTP calls.
                    self.http = PoolManager(
                        timeout=60,
                        retries=20,
                        socket_options=HTTPConnection.default_socket_options +
                        [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
                        headers={'Content-Type': 'application/json'},
                        cert_reqs='CERT_REQUIRED',
                        ca_certs=certifi.where())
                self.is_http = True
                break
            if self.url[:3] == "wss":
                sslopt_ca_certs = {'cert_reqs': ssl.CERT_NONE}
                self.ws = websocket.WebSocket(sslopt=sslopt_ca_certs)
            else:
                self.ws = websocket.WebSocket()
            try:
                self.is_http = False
                self.ws.connect(self.url)
                break
            except KeyboardInterrupt:
                raise
            except Exception:
                if self.num_retries >= 0 and cnt > self.num_retries:
                    raise NumRetriesReached()
                sleeptime = (cnt - 1) * 2 if cnt < 10 else 10
                if sleeptime:
                    log.warning(
                        "Lost connection to node during connect(): %s (%d/%d) "
                        % (self.url, cnt, self.num_retries) +
                        "Retrying in %d seconds" % sleeptime)
                    time.sleep(sleeptime)
        if not self.is_http:
            self.login(self.user, self.password, api_id=1)

    def register_apis(self, apis=None):
        if self.is_http:
            return
        for api in (apis or self.apis):
            api = api.replace("_api", "")
            self.api_id[api] = self.get_api_by_name("%s_api" % api, api_id=1)
            if not self.api_id[api] and not isinstance(self.api_id[api], int):
                raise NoAccessApi("No permission to access %s API." % api)

    def get_account(self, name):
        account = self.get_accounts([name])
        if account:
            return account[0]

    def account_history(self, account, first=99999999999, limit=-1,
                        only_ops=[], exclude_ops=[]):
        warnings.warn(
            "The account_history() call has been moved to "
            "`steem.account.Account.rawhistory()`",
            DeprecationWarning)
        from piston.account import Account
        return Account(account, steem_instance=self.steem).rawhistory(
            first=first, limit=limit, only_ops=only_ops,
            exclude_ops=exclude_ops)

    def block_stream(self, start=None, stop=None, mode="irreversible"):
        warnings.warn(
            "The block_stream() call has been moved to "
            "`steem.blockchain.Blockchain.blocks()`",
            DeprecationWarning)
        from piston.blockchain import Blockchain
        return Blockchain(mode=mode).blocks(start, stop)

    def stream(self, opNames, *args, **kwargs):
        warnings.warn(
            "The stream() call has been moved to "
            "`steem.blockchain.Blockchain.stream()`",
            DeprecationWarning)
        from piston.blockchain import Blockchain
        return Blockchain(mode=kwargs.get("mode", "irreversible")).stream(
            opNames, *args, **kwargs)

    def list_accounts(self, start=None, step=1000, limit=None, **kwargs):
        warnings.warn(
            "The list_accounts() call has been moved to "
            "`steem.blockchain.Blockchain.get_all_accounts()`",
            DeprecationWarning)
        from piston.blockchain import Blockchain
        return Blockchain(
            mode=kwargs.get("mode", "irreversible")).get_all_accounts(
                start=start, steps=step, **kwargs)

    def get_network(self):
        """Identify the connected network. This call returns a dictionary
        with keys chain_id, prefix, and other chain-specific settings.
        """
        props = self.get_dynamic_global_properties()
        chain = props["current_supply"].split(" ")[1]
        assert chain in known_chains, \
            "The chain you are connecting to is not supported"
        return known_chains.get(chain)
    def rpcexec(self, payload):
        """Execute a call by sending the payload. Here we mostly deal with
        Steem-specific error handling.

        :param json payload: Payload data
        :raises ValueError: if the server does not respond in proper JSON
            format
        :raises RPCError: if the server returns an error
        """
        try:
            return self._rpcexec(payload)
        except RPCError as e:
            msg = exceptions.decodeRPCErrorMsg(e).strip()
            if msg == "Account already transacted this block.":
                raise exceptions.AlreadyTransactedThisBlock(msg)
            elif msg == "missing required posting authority":
                raise exceptions.MissingRequiredPostingAuthority
            elif msg == ("Voting weight is too small, please accumulate "
                         "more voting power or steem power."):
                raise exceptions.VoteWeightTooSmall(msg)
            elif msg == "Can only vote once every 3 seconds.":
                raise exceptions.OnlyVoteOnceEvery3Seconds(msg)
            elif msg == "You have already voted in a similar way.":
                raise exceptions.AlreadyVotedSimilarily(msg)
            elif msg == "You may only post once every 5 minutes.":
                raise exceptions.PostOnlyEvery5Min(msg)
            elif msg == "Duplicate transaction check failed":
                raise exceptions.DuplicateTransaction(msg)
            elif msg == ("Account exceeded maximum allowed bandwidth per "
                         "vesting share."):
                raise exceptions.ExceededAllowedBandwidth(msg)
            elif re.match("^no method with name.*", msg):
                raise exceptions.NoMethodWithName(msg)
            elif msg:
                raise exceptions.UnhandledRPCError(msg)
            else:
                raise e
        except Exception as e:
            raise e

    """ RPC Calls """

    def _rpcexec(self, payload):
        """Execute a call by sending the payload.

        :param json payload: Payload data
        :raises ValueError: if the server does not respond in proper JSON
            format
        :raises RPCError: if the server returns an error
        """
        log.debug(json.dumps(payload))
        cnt = 0
        while True:
            cnt += 1
            try:
                if self.is_http:
                    _body = json.dumps(payload)
                    response = self.http.urlopen('POST', self.url, body=_body)
                    reply = response.data.decode('utf-8')
                else:
                    self.ws.send(
                        json.dumps(payload, ensure_ascii=False).encode('utf8'))
                    reply = self.ws.recv()
                break
            except KeyboardInterrupt:
                raise
            except Exception:
                log.warning(str(sys.exc_info()))
                if self.num_retries > -1 and cnt > self.num_retries:
                    raise NumRetriesReached()
                sleeptime = (cnt - 1) * 2 if cnt < 10 else 10
                if sleeptime:
                    log.warning(
                        "Lost connection to node during rpcexec(): %s (%d/%d) "
                        % (self.url, cnt, self.num_retries) +
                        "Retrying in %d seconds" % sleeptime)
                    time.sleep(sleeptime)
                # retry
                try:
                    if not self.is_http:
                        self.ws.close()
                        time.sleep(sleeptime)
                        self.connect()
                        self.register_apis()
                except Exception:
                    pass
        ret = {}
        try:
            ret = json.loads(reply, strict=False)
        except ValueError:
            raise ValueError("Client returned invalid format. Expected JSON!")
        log.debug(json.dumps(reply))
        if 'error' in ret:
            print(ret)
            if 'detail' in ret['error']:
                raise RPCError(ret['error']['detail'])
            else:
                raise RPCError(ret['error']['message'])
        else:
            return ret["result"]

    def __getattr__(self, name):
        """Map all methods to RPC calls and pass through the arguments."""
        def method(*args, **kwargs):
            # Specify the api to talk to
            if "api_id" not in kwargs:
                if "api" in kwargs:
                    if (kwargs["api"] in self.api_id and
                            self.api_id[kwargs["api"]]):
                        api_id = self.api_id[kwargs["api"]]
                    elif self.is_http and kwargs["api"]:
                        api_id = kwargs["api"] + "_api"
                    else:
                        raise ValueError(
                            "Unknown API! "
                            "Verify that you have registered to %s"
                            % kwargs["api"])
                elif self.is_http:
                    api_id = "database_api"
                else:
                    api_id = 0
            else:
                api_id = kwargs["api_id"]
            # allow num_retries to be defined per query
            self.num_retries = kwargs.get("num_retries", self.num_retries)
            query = {
                "method": "call",
                "params": [api_id, name, list(args)],
                "jsonrpc": "2.0",
                "id": self.get_request_id()
            }
            r = self.rpcexec(query)
            return r
        return method
from io import BytesIO

from urllib3 import PoolManager
from pydub import AudioSegment
from pydub.playback import play

# recognizer = sr.Recognizer()

mp3_url = "https://pdst.fm/e/aphid.fireside.fm/d/1437767933/8658dd0c-baa7-4412-9466-918650a0013d/6ba07dd7-ebc5-4686-b1eb-f25bcd83821a.mp3"

http = PoolManager()
mp3 = http.urlopen("GET", mp3_url)
# print(mp3.data)

mp3_as = AudioSegment.from_mp3(BytesIO(mp3.data))
print(type(mp3_as))
print(mp3_as.channels)
print(mp3_as.frame_rate)
print(mp3_as.sample_width)
print(mp3_as.max)
print(mp3_as.duration_seconds)

episode_wav = mp3_as.export("episode.wav", format="wav")
# play(mp3_as)
# created for Py2.7
# updated for Python3 - July 19 2018

# import libraries
from urllib3 import PoolManager
from bs4 import BeautifulSoup
import html5lib  # parser backend used by BeautifulSoup below

# specify the url
music_page = 'https://www.kalx.berkeley.edu/playlists'

# query the website and return the html to the variable 'page'
manager = PoolManager(10)
page = manager.request('GET', music_page)
print(page.status)

# with open('http://www.kalx.berkeley.edu/playlists/index.html') as fp:
#     soup = BeautifulSoup(fp)
# page = open("http://www.kalx.berkeley.edu/playlists/")

# parse the html using beautiful soup and store in variable 'soup';
# page is an HTTPResponse, so hand its .data bytes to the parser
soup = BeautifulSoup(page.data, 'html5lib')

# take out the <td> of name and get its value
name_box = soup.find('td', attrs={'class': 'views-field views-field-nothing'})
# name = name_box.text.strip()  # strip() removes leading and trailing whitespace
print(name_box)
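# A sketch of extending the scrape above to every matching cell rather than
# just the first; the class name is the one assumed from the page markup.
for cell in soup.find_all('td', attrs={'class': 'views-field views-field-nothing'}):
    print(cell.get_text(strip=True))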