def resolve_conflicts(self) -> bool:
    """Consensus algorithm: adopt the longest valid chain in the network.

    Polls every registered peer node for its chain and replaces ours when a
    strictly longer chain that passes ``valid_chain`` is found.

    :return: True if our chain was replaced, False otherwise.
    """
    best_length = len(self.chain)
    best_chain = None

    # Grab and verify the chains from all the nodes in our network.
    for node in self.nodes:
        response = requests.get(f'http://{node}/chain')
        if response.status_code != 200:
            continue
        payload = response.json()
        # Keep only a strictly longer chain that also validates.
        if payload['length'] > best_length and self.valid_chain(payload['chain']):
            best_length = payload['length']
            best_chain = payload['chain']

    if best_chain is None:
        return False
    self.chain = best_chain
    return True
def get_package_version(name, version):
    # type: (str, str) -> Package
    """Fetch metadata for a pinned *name*==*version* from the PyPI JSON API.

    Raises ``requests.HTTPError`` for non-success HTTP statuses.
    """
    url = "https://pypi.org/pypi/{0}/{1}/json".format(name, version)
    with requests.get(url) as response:
        response.raise_for_status()
        payload = response.json()
    return Package.from_json(payload)
def check_internet():
    """Return True when httpbin.org is reachable, else warn and return False."""
    try:
        # Kenneth represents the Internet LGTM.
        probe = requests.get('http://httpbin.org/ip', timeout=1.0)
        probe.raise_for_status()
    except Exception:
        for message in ('Cannot connect to HTTPBin...',
                        'Will skip tests requiring Internet'):
            warnings.warn(message, RuntimeWarning)
        return False
    return True
def check_internet():
    """Return True when httpbin.org is reachable, else warn and return False.

    Used as a test-suite gate: a failed probe emits warnings and signals that
    Internet-dependent tests should be skipped.
    """
    try:
        # Kenneth represents the Internet LGTM.
        resp = requests.get('http://httpbin.org/ip', timeout=1.0)
        resp.raise_for_status()
    except Exception:
        # Fix: RuntimeWarning, not ResourceWarning. ResourceWarning is reserved
        # for resource-cleanup problems and is ignored by the default warning
        # filters, so these notices would never be displayed; this also matches
        # the sibling copy of this function elsewhere in the file.
        warnings.warn('Cannot connect to HTTPBin...', RuntimeWarning)
        warnings.warn('Will skip tests requiring Internet', RuntimeWarning)
        return False
    return True
def _get(pypi_server):
    """Query the PyPI RSS feed and return the list of ``<item>`` XML elements.

    Raises ``HTTPError`` for any response status of 300 or above.
    """
    response = requests.get(pypi_server)
    if response.status_code >= 300:
        raise HTTPError(status_code=response.status_code,
                        reason=response.reason)
    body = response.content
    # Older requests versions may already hand back str; only bytes decode.
    if hasattr(body, 'decode'):
        body = body.decode()
    tree = xml.etree.ElementTree.fromstring(body)
    return tree.find('channel').findall('item')
def resolve_conflicts(self) -> bool:
    """Consensus: replace our chain with the longest valid chain among peers.

    Bug fixes relative to the original:
    * ``new_chain`` was reset to ``None`` on every loop iteration, so a longer
      valid chain found at an earlier node was discarded as soon as any later
      node responded; it is now initialised once, before the loop.
    * ``max_lenght`` typo corrected to ``max_length``.

    :return: True if our chain was replaced, False otherwise.
    """
    max_length = len(self.chain)
    new_chain = None  # initialised once so earlier candidates survive the loop

    for node in self.nodes:
        response = requests.get(f'http://{node}/chain')
        if response.status_code == 200:
            body = response.json()
            length = body['length']
            chain = body['chain']
            # Adopt only a strictly longer chain that also validates.
            if length > max_length and self.valid_chain(chain):
                max_length = length
                new_chain = chain

    if new_chain:
        self.chain = new_chain
        return True
    return False
def fetch_database_url(mirror, db_name, key, cached, proxy):
    """Download a vulnerability database, honouring cache and API key.

    :param mirror: base URL of the database mirror.
    :param db_name: database file name, appended to *mirror*.
    :param key: optional API key sent as the ``X-Api-Key`` header.
    :param cached: when truthy, try the local cache first and write back hits.
    :param proxy: proxies mapping forwarded to requests.
    :raises InvalidKeyError: on HTTP 403.
    :raises TooManyRequestsError: on HTTP 429.
    :return: parsed JSON data, or None for any other non-200 status.
    """
    headers = {"X-Api-Key": key} if key else {}

    if cached:
        hit = get_from_cache(db_name=db_name)
        if hit:
            return hit

    response = requests.get(url=mirror + db_name, timeout=REQUEST_TIMEOUT,
                            headers=headers, proxies=proxy)
    status = response.status_code
    if status == 200:
        data = response.json()
        if cached:
            write_to_cache(db_name, data)
        return data
    if status == 403:
        raise InvalidKeyError()
    if status == 429:
        raise TooManyRequestsError()
def get_imports_info(imports, pypi_server="https://pypi.python.org/pypi/", proxy=None):
    """Look up the latest release of each imported package on a PyPI server.

    Bug fix: the original appended to ``result`` outside the 200-status branch,
    so a 2xx-but-not-200 response (e.g. 204) reused ``data`` left over from a
    previous iteration — or raised ``NameError`` on the first one. The append
    now happens only when fresh data was actually parsed.

    :param imports: iterable of package names to query.
    :param pypi_server: base URL of the PyPI JSON API.
    :param proxy: optional proxies mapping forwarded to requests.
    :return: list of ``{'name': ..., 'version': ...}`` dicts; packages that
        fail to resolve are logged at debug level and skipped.
    """
    result = []
    for item in imports:
        try:
            response = requests.get(
                "{0}{1}/json".format(pypi_server, item), proxies=proxy)
            if response.status_code >= 300:
                raise HTTPError(status_code=response.status_code,
                                reason=response.reason)
        except HTTPError:
            logging.debug(
                'Package %s does not exist or network problems', item)
            continue
        if response.status_code == 200:
            content = response.content
            # Older requests versions may return str instead of bytes.
            if hasattr(content, 'decode'):
                content = content.decode()
            data = json2package(content)
            result.append({'name': item, 'version': data.latest_release_id})
    return result
def get(package_name, pypi_server="https://pypi.python.org/pypi/"):
    """
    Constructs a request to the PyPI server and returns a
    :class:`yarg.package.Package`.

    :param package_name: case sensitive name of the package on the PyPI server.
    :param pypi_server: (option) URL to the PyPI server.

        >>> import yarg
        >>> package = yarg.get('yarg')
        <Package yarg>
    """
    server = pypi_server if pypi_server.endswith("/") else pypi_server + "/"
    response = requests.get("{0}{1}/json".format(server, package_name))
    if response.status_code >= 300:
        raise HTTPError(status_code=response.status_code,
                        reason=response.reason)
    content = response.content
    # Older requests versions may return str instead of bytes.
    if hasattr(content, 'decode'):
        content = content.decode()
    return json2package(content)
# Scrape worldometers.info for COVID-19 statistics of a user-chosen country.
from pipenv.vendor import requests
from bs4 import BeautifulSoup

print('*' * 40)
print('CORONAVIRUS'.center(40, '*'))
print('*' * 40)
print('_' * 40)

pais = input('Pais[em inglẽs]:').strip()
url = f'https://www.worldometers.info/coronavirus/country/{pais}/'
soup = BeautifulSoup(requests.get(url).text, 'html.parser')

SEM_DADOS = '0 ou não identificado.'

# Hoist each repeated find_all() call; results are identical for the same soup.
main_numbers = soup.find_all('div', 'number-table-main')
if main_numbers:
    casos = main_numbers[0].text.strip()
    casos_com_resultado = main_numbers[1].text.strip()
else:
    casos = casos_com_resultado = SEM_DADOS

detail_numbers = soup.find_all('span', 'number-table')
if detail_numbers:
    condicoes = detail_numbers[0].text.strip()
    casos_serios = detail_numbers[1].text.strip()
    recuperados = detail_numbers[2].text.strip()
    mortes = detail_numbers[3].text.strip()
else:
    condicoes = casos_serios = recuperados = mortes = SEM_DADOS

counters = soup.find_all('div', 'maincounter-number')
casos_total = counters[0].text.strip() if counters else SEM_DADOS

print('_' * 40)
# Minimal demo: fetch our public IP through pipenv's vendored requests.
import pipenv.vendor.requests as requests

ip_response = requests.get('https://httpbin.org/ip')
origin = ip_response.json()['origin']
print('Your IP is {0}'.format(origin))
def try_internet(url="http://httpbin.org/ip", timeout=1.5):
    """Raise if *url* is unreachable or answers with an error HTTP status.

    Propagates ``requests`` exceptions (connection errors, timeouts,
    ``HTTPError`` from ``raise_for_status``); returns None on success.
    """
    requests.get(url, timeout=timeout).raise_for_status()