def validate(self, url: URL, params: Union[str, bytes, Dict[str, str]], signature: str) -> bool: """ Validate a request from Twilio. Args: url: Full URI that Twilio requested on your server. params: Dictionary of POST variables or string of POST body for JSON requests. signature: The signature in the X-Twilio-Signature header. Returns: True if the request passes validation, False if not. """ url = url.with_scheme("https").with_port(None) try: decoded_signature = base64.b64decode(signature) except Exception: return False if "bodySHA256" in url.query and isinstance(params, (str, bytes)): valid_body_hash = hmac.compare_digest(self._compute_hash(params), url.query["bodySHA256"]) valid_signature = hmac.compare_digest(self._compute_signature(url, {}), decoded_signature) return valid_body_hash and valid_signature else: return hmac.compare_digest(self._compute_signature(url, params or {}), decoded_signature)
async def load_config(path: str) -> None:
    """Load the Matrix config from *path*, prompting and writing it if missing.

    Sets the module-level ``access_token``, ``homeserver_url`` and
    ``upload_url`` globals.  When the file does not exist, the user is
    asked for the homeserver and token, the token is verified via the
    whoami endpoint, and the resulting config is written back to *path*.
    """
    global access_token, homeserver_url, upload_url
    try:
        with open(path) as config_file:
            config = json.load(config_file)
            homeserver_url = config["homeserver"]
            access_token = config["access_token"]
    except FileNotFoundError:
        print(
            "Matrix config file not found. Please enter your homeserver and access token."
        )
        homeserver_url = input("Homeserver URL: ")
        access_token = input("Access token: ")
        # FIX: yarl cannot apply with_scheme() to a relative URL, so a bare
        # hostname (e.g. "matrix.org") previously raised ValueError.
        # Normalise it to an absolute https URL before building anything.
        if "://" not in homeserver_url:
            homeserver_url = f"https://{homeserver_url}"
        whoami_url = URL(homeserver_url) / "_matrix" / "client" / "r0" / "account" / "whoami"
        if whoami_url.scheme not in ("https", "http"):
            whoami_url = whoami_url.with_scheme("https")
        # Verify the token actually works before persisting it.
        user_id = await whoami(whoami_url, access_token)
        with open(path, "w") as config_file:
            json.dump(
                {
                    "homeserver": homeserver_url,
                    "user_id": user_id,
                    "access_token": access_token
                },
                config_file)
        print(f"Wrote config to {path}")
    upload_url = URL(homeserver_url) / "_matrix" / "media" / "r0" / "upload"
def reroute_local(url):
    """Reroute *url* to the local mock web server when its host is mocked.

    The original host is preserved as the leading segment of the new path
    so the mock server can tell requests apart.  URLs whose path matches
    one of the host's pass-through fragments are returned unchanged.
    """
    rerouted = URL(url)
    host = rerouted.host
    if host in HummingWebApp._hosts_to_mock:
        passthrough = HummingWebApp._hosts_to_mock[host]
        if not any(fragment in rerouted.path for fragment in passthrough):
            # with_query must come after with_path, since with_path
            # clears any existing query string.
            rerouted = (
                rerouted.with_scheme("http")
                .with_host(HummingWebApp.host)
                .with_port(HummingWebApp._port)
                .with_path(f"/{host}{rerouted.path}")
                .with_query(rerouted.query)
            )
    return rerouted
def reroute_local(url):
    """
    reroute a url if it is one of the hosts we handle
    :param url: the original url
    :return: the rerouted url
    """
    original = URL(url)
    if original.host not in MockWebServer._hosts_to_mock:
        return original
    skip_fragments = MockWebServer._hosts_to_mock[original.host]
    if any(fragment in original.path for fragment in skip_fragments):
        return original
    # Prefix the path with the original host so the mock server can
    # dispatch on it; re-apply the query last because with_path drops it.
    return (
        original.with_scheme("http")
        .with_host(MockWebServer.host)
        .with_port(MockWebServer._port)
        .with_path(f"/{original.host}{original.path}")
        .with_query(original.query)
    )
async def _connect(cls, url, rest_url, application_id, client_id, auth=None, *, reconnect=True, session=None, loop=None, tags=None, **kwargs):
    """Create a client, open the gateway websocket and REST session.

    Args:
        url: Base gateway URL; "/gateway/websocket" is appended.
        rest_url: Base URL for the REST API client.
        application_id / client_id: Identifiers stored on the client.
        auth: Optional auth passed to both websocket and REST clients.
        reconnect: Whether the websocket should auto-reconnect.
        session / loop / tags: Optional session, event loop and tags.
        **kwargs: Must contain ``bot``.

    Returns:
        The fully-connected client instance.

    Raises:
        TypeError: If ``bot`` is not supplied in kwargs.
        asyncio.TimeoutError: If the websocket doesn't connect in 20s.
    """
    self = cls()
    try:
        self.bot = kwargs.pop("bot")
    except KeyError:
        raise TypeError("bot must always be provided when using BotClient")
    self.loop = loop or self.bot.loop
    self.auth = auth
    self.id = client_id
    self.app_id = application_id
    self.tags = tags
    # FIX: yarl's `/` operator strips any existing query string, so the
    # original order (with_query first, then appending path segments)
    # silently discarded "encoding=json".  Append the path first.
    ws_url = (URL(url) / "gateway" / "websocket").with_query("encoding=json")
    # Map http(s) -> ws(s) where the schema map provides a translation.
    scheme = self.schema_map.get(ws_url.scheme, ws_url.scheme)
    self.ws_url = ws_url.with_scheme(scheme)
    self.reconnect = reconnect
    coro = BotWSClient.create(self, self.ws_url, reconnect=self.reconnect)
    self._ws = await asyncio.wait_for(coro, timeout=20)
    self._rest = await APIClient.create(rest_url, session=session, auth=auth, loop=self.loop)
    self.version = self._rest.version
    self._task = self.loop.create_task(self._poll_data())
    return self
class Server:
    """aiohttp-based web server driven by an asyncio event loop.

    Wires up the application (websocket middleware, routes, static files,
    jinja2 templates) and runs it on the host/port from config["listen"].
    """

    def __init__(self, config):
        # config["listen"] is parsed as a URL; its host/port are used by
        # TCPSite in _setup, and its ws variant is exposed to templates.
        self._listen = URL(config["listen"])
        self._loop = asyncio.get_event_loop()
        self._app = web.Application(middlewares=(ws_conn,))
        self._runner = web.AppRunner(self._app, handle_signals=True)

    def start(self):
        """Set up the app and run the event loop until interrupted."""
        try:
            self._loop.run_until_complete(self._setup())
            logging.info("Running server on: %r" % str(self._listen))
            self._loop.run_forever()
        # KeyboardInterrupt is a BaseException, so this clause does NOT
        # swallow Ctrl-C; only genuine errors are printed here.
        except Exception:
            traceback.print_exc()
        except KeyboardInterrupt:
            pass
        finally:
            # Always release sockets/handlers, even on error or Ctrl-C.
            logging.info("Stopping app")
            self._loop.run_until_complete(self._runner.cleanup())

    async def _setup(self):
        """Populate app state, register routes, and start the TCP site."""
        self._app["ws_conn"] = {}
        self._app["loop"] = self._loop
        self._app["is_shutdowning"] = False
        # Websocket endpoint advertised to clients (same host, ws scheme).
        self._app["ws_url"] = str(self._listen.with_scheme("ws").with_path("ws"))
        self._app.on_cleanup.append(close_tasks)
        self._app.on_shutdown.append(shutdown_ws_connect)
        self._app.router.add_view("/factorial/", FactorialHandler)
        self._app.router.add_static(
            prefix="/static",
            path="app/static",
            name="static",
        )
        setup_jinja2(app=self._app, loader=FileSystemLoader("app/static/"))
        # Runner must be set up before a TCPSite can be created from it.
        await self._runner.setup()
        self.site = web.TCPSite(
            self._runner,
            self._listen.host,
            self._listen.port,
        )
        await self.site.start()
def create_start_urls(self, urls: List[Union[URL, str]]) -> List[URL]:
    """
    Create the start URLs for the crawl from an initial URL.

    May be overridden.

    :param urls: Initial URLs
    """
    collected: Set[URL] = set()
    for candidate in urls + self.start_urls:
        if isinstance(candidate, str):
            # A bare hostname has no "//", so yarl would treat it as a
            # path; prefix it to force host parsing.
            if "//" not in candidate:
                candidate = f"//{candidate}"
            candidate = URL(candidate)
        # Default anything that isn't http(s) to plain http.
        if candidate.scheme.lower() not in ["http", "https"]:
            candidate = candidate.with_scheme("http")
        collected.add(candidate)
    # The set de-duplicates; order is not significant.
    return list(collected)
def main(verify_ssl, ca_path, token, project, own_name, base_url, ltimezone):
    """Chaos-monkey loop: at a random weekday time before 16:00 local,
    run chaos() against the configured OpenShift project, then sleep
    until the next morning.

    :param verify_ssl: whether to verify TLS certificates
    :param ca_path: optional CA bundle path added to settings
    :param token: API token
    :param project: namespace to target
    :param own_name: this pod's name (so it can spare itself)
    :param base_url: cluster API URL (forced to https)
    :param ltimezone: local timezone name; defaults to Europe/Athens
    """
    # Make sure we've got https
    url = URL(base_url)
    if url.scheme != 'https':
        url = url.with_scheme('https')
    settings = {
        'verify_ssl': verify_ssl,
        # FIX: previously this stored the original base_url, discarding
        # the https-forced `url` computed above.
        'base_url': str(url),
        'token': token,
        'project': project,
        'own_name': own_name
    }
    if ca_path:
        settings.update({'ca_path': ca_path})
    settings['list_pods_url'] = '{}/api/v1/namespaces/{}/pods/'.format(
        settings['base_url'], settings['project'])
    settings[
        'list_dcs_url'] = '{}/oapi/v1/namespaces/{}/deploymentconfigs/'.format(
            settings['base_url'], settings['project'])
    loop = asyncio.get_event_loop()
    if not ltimezone:
        ltimezone = 'Europe/Athens'
    localtz = timezone(ltimezone)
    while True:
        # Sleep random time from the morning (up to a full day)
        t = localtz.localize(datetime.today())
        delta = random.randint(0, 86400)
        if t.weekday() >= 5:
            # Skip weekends; It would be too cruel
            print("Not on a weekend, I won't!")
        elif (t + timedelta(seconds=delta)).hour < 16:
            # Give them 1h to solve
            time.sleep(delta)
            # Spread chaos and fear in the heart of sysadmins :p
            loop.run_until_complete(chaos(settings))
        else:
            # Not afterhours ... we're not maniacs
            print("Lucky you ... Saved by the clock!")
        # Sleep until the next morning
        t = localtz.localize(datetime.today())
        future = localtz.localize(datetime(t.year, t.month, t.day, 9, 30))
        if t.hour >= 9:
            future += timedelta(days=1)
        time.sleep((future - t).total_seconds())
def __init__(
    self,
    url: URL,
    upgrade_table: UpgradeTable,
    db_args: dict[str, Any] = None,
    log: logging.Logger | None = None,
    owner_name: str | None = None,
    ignore_foreign_tables: bool = True,
) -> None:
    """Initialise the database wrapper.

    CockroachDB URLs are recorded as such but rewritten to the postgres
    scheme before being handed to asyncpg, which only understands that.
    """
    if url.scheme in ("cockroach", "cockroachdb"):
        self.scheme = Scheme.COCKROACH
        # Send postgres scheme to asyncpg
        url = url.with_scheme("postgres")
    # Pop our private flag out of db_args (mutating the caller's dict on
    # purpose) so it never reaches the underlying pool; default True.
    args = db_args or {}
    self._exit_on_ice = args.pop("meow_exit_on_ice", True)
    super().__init__(
        url,
        db_args=db_args,
        upgrade_table=upgrade_table,
        log=log,
        owner_name=owner_name,
        ignore_foreign_tables=ignore_foreign_tables,
    )
    self._pool = None
    self._pool_override = False
def test_with_scheme_uppercased():
    """An upper-case scheme argument is normalised to lower case."""
    base = URL("http://example.com")
    upgraded = base.with_scheme("HTTPS")
    assert str(upgraded) == "https://example.com"
def _hotel_parse(self, response):
    """Parse a hotel detail page into a Hotels item.

    Handles three side flows before loading the item:
      * drops "no results" pages,
      * follows /search.do 302 redirects back into this parser,
      * schedules review and room-rate requests when enabled via meta.

    Yields Requests for the follow-up pages and finally the loaded item.
    """
    url = URL(response.url)
    # "Narrow results" header means a search landing page, not a hotel.
    if response.xpath('//h2[@class="narrow-results-header"]/text()'):
        self.logger.debug('Drop response(%s)', response.url)
        return
    # Search redirects: follow the Location target with this same parser.
    if url.path == '/search.do' and response.status == 302:
        location = URL(response.headers.get('Location').decode())
        yield Request(
            str(url.with_path(location.path).with_query(location.query)),
            self._hotel_parse,
            meta=response.meta
        )
        return
    # Hotel id: prefer the explicit query param, fall back to the path
    # segment with its 2-char prefix stripped.
    hotels_id = url.path.strip('/').split('/')[0][2:]
    hotels_id = url.query.get('hotel-id') or hotels_id
    loader = ItemLoader(item=Hotels(), response=response)
    en_url = response.xpath(hotels_xp.EN_US_URL).extract_first()
    # Locale prefix derived from the TLD (e.g. "cn" -> 'CN').
    PREFIX = self.site_prefix.get(url.host.split('.')[-1])
    if PREFIX == 'CN':
        en_url = url.with_host('www.hotels.com') \
            .with_query({'pos': 'HCOM_US', 'locale': 'en_US'})
        en_url = str(en_url)
    if response.meta.get('crawl_reviews', True):
        # FIX: dropped the no-op with_scheme(url.scheme).with_host(url.host)
        # chain — joining the reviews path onto url is equivalent.
        review_url = url.join(
            URL('/'.join(['', 'hotel', hotels_id, 'reviews', '']))
        )
        yield Request(
            str(review_url),
            self._review_parse,
            meta={'hotels_id': hotels_id}
        )
    loader.add_value('url', response.url)
    loader.add_value('us_url', en_url)
    if response.meta.get('crawl_rooms', True):
        # Query rates for a 2-night stay starting tomorrow.
        now = datetime.now()
        checkin = now + timedelta(days=1)
        checkout = now + timedelta(days=3)
        details_query = {
            'q-check-in': checkin.strftime('%Y-%m-%d'),
            'q-check-out': checkout.strftime('%Y-%m-%d'),
            'q-room-0-adults': 2,
            'q-room-0-children': 0,
            'tab': 'description',
        }
        u = url.with_scheme('https').with_query(details_query)
        yield Request(
            str(u),
            self._rooms_parse,
            meta={
                'hotels_id': hotels_id,
                'locale': PREFIX.lower(),
                'dont_redirect': True,
            }
        )
        yield Request(
            str(u.with_host('www.hotels.cn')),
            self._rooms_parse,
            meta={
                'hotels_id': hotels_id,
                'locale': 'en',
                'dont_redirect': True,
            }
        )
    supplier_obj_id = response.meta.get('statics.hotels.id')
    supplier_name = response.meta.get('statics.hotels.supplier')
    if supplier_obj_id:
        loader.add_value('statics_hotels_id', supplier_obj_id)
        loader.add_value('statics_hotels_supplier', supplier_name)
    loader.add_value('hotels_id', hotels_id)
    loader.add_xpath('title', hotels_xp.TITLE)
    loader.add_xpath('name', hotels_xp.NAME)
    position = take_first(response, hotels_xp.POSITION)
    latitude, longitude = position.split(',')
    loader.add_value('latitude', latitude)
    # FIX: longitude is a plain value, not an XPath expression — the
    # original add_xpath() call could never extract anything.
    loader.add_value('longitude', longitude)
    # FIX: raw string for the regex so \d is not an escape sequence.
    loader.add_xpath('star', hotels_xp.STAR, re=r'(\d+)')
    loader.add_value('address', self._address(response))
    loader.add_xpath('address_text', hotels_xp.ADDRESS)
    loader.add_xpath('price', hotels_xp.PRICE)
    loader.add_xpath('city', hotels_xp.ADDRESS_LOCALITY)
    loader.add_xpath('country', hotels_xp.ADDRESS_COUNTRY)
    loader.add_xpath('telephone', hotels_xp.TELEPHONE)
    loader.add_xpath('landmarks', hotels_xp.LANDMARKS)
    loader.add_xpath('traffic_tips', hotels_xp.TRAFFIC_TIPS)
    loader.add_value('notice', self._notice(response, PREFIX))
    # Locale-specific facility XPaths, e.g. CN_IN_STORE_SERVICE_FACT.
    IN_STORE_SERVICES = getattr(
        hotels_xp, PREFIX + '_IN_STORE_SERVICE_FACT'
    )
    loader.add_value(
        'in_store_service_facilities',
        service_to_dict(response.xpath(IN_STORE_SERVICES), hotels_xp)
    )
    ROOM_SERVICES = getattr(hotels_xp, PREFIX + '_ROOM_SERVICE_FACT')
    loader.add_value(
        'room_service_facilities',
        service_to_dict(response.xpath(ROOM_SERVICES), hotels_xp)
    )
    loader.add_xpath('around', hotels_xp.AROUND)
    pictures = self._pictures(response.xpath(hotels_xp.PICTURES))
    loader.add_value('pictures', pictures)
    loader.add_xpath('short_introduction', hotels_xp.SHORT_INTRODUCTION)
    loader.add_xpath('amenities', hotels_xp.AMENITIES)
    loader.add_xpath('for_families', hotels_xp.FOR_FAMILIES)
    loader.add_value('locale', PREFIX.lower())
    loader.add_value('summary', self._summary(response))
    yield loader.load_item()
"certs", *args, ) AMQP_URL = URL(os.getenv("AMQP_URL", "amqp://*****:*****@localhost/")) amqp_urls = { "amqp": AMQP_URL, "amqp-named": AMQP_URL.update_query(name="pytest"), "amqps": AMQP_URL.with_scheme("amqps").with_query( { "cafile": cert_path("ca.pem"), "no_verify_ssl": 1 }, ), "amqps-client": AMQP_URL.with_scheme("amqps").with_query( { "cafile": cert_path("ca.pem"), "keyfile": cert_path("client.key"), "certfile": cert_path("client.pem"), "no_verify_ssl": 1, }, ), } amqp_url_list, amqp_url_ids = [], [] for name, url in amqp_urls.items():
def test_with_scheme_uppercased():
    """Scheme comparison/storage is case-insensitive: 'HTTPS' -> 'https'."""
    result = URL('http://example.com').with_scheme('HTTPS')
    assert str(result) == 'https://example.com'
def test_with_scheme_invalid_type():
    """A non-string scheme argument must raise TypeError."""
    target = URL('http://example.com')
    with pytest.raises(TypeError):
        target.with_scheme(123)
def test_with_scheme():
    """with_scheme swaps http for https, leaving the host untouched."""
    original = URL("http://example.com")
    assert str(original.with_scheme("https")) == "https://example.com"
def test_with_scheme():
    """Replacing the scheme yields a new URL with the rest unchanged."""
    secure = URL('http://example.com').with_scheme('https')
    assert str(secure) == 'https://example.com'
def test_with_scheme_invalid_type():
    """Passing an int as the scheme is rejected with TypeError."""
    base = URL("http://example.com")
    with pytest.raises(TypeError):
        base.with_scheme(123)