Example 1
async def get_book(id):
    detail_url = lib_detail_url % id
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(unsafe=True),
                                     headers=headers) as session:
        async with session.get(detail_url) as resp:
            thehtml = await resp.text()
            soup = BeautifulSoup(thehtml, 'lxml')
            alldd = soup.find_all('dd')
            book = alldd[0].text.split("/")[0]
            author = alldd[0].text.split("/")[1]

            # Fetch the book summary from Douban
            isbn = alldd[2].text.split("/")[0]
            douban = douban_url % isbn
            async with aiohttp.ClientSession(
                    cookie_jar=aiohttp.CookieJar(unsafe=True),
                    headers=headers) as dsession:
                async with dsession.get(douban) as dresp:
                    rd = await dresp.json()
                    intro = rd.get("summary")

            #Booklist
            booklist = []
            _booklist = soup.find(id='tab_item').find_all('tr',
                                                          class_='whitetext')
            for _book in _booklist:
                bid = _book.td.text
                tid = _book.td.next_sibling.next_sibling.string
                lit = _book.text.split()
                if '-' in lit[-1]:
                    date = lit[-1][-10:]
                    status = lit[-1][:2]
                    booklist.append({
                        "status": status,
                        "room": lit[-2],
                        "bid": bid,
                        "tid": tid,
                        "date": date
                    })
                else:
                    booklist.append({
                        "status": lit[-1],
                        "room": lit[-2],
                        "tid": tid
                    })
            #return({'bid', 'book', 'author' ...})
            return ({
                'bid': bid,
                'book': book,
                'author': author,
                'intro': intro,
                'books': booklist
            })
Example 2
    async def connect(self):
        # Access implementation detail to clear cookies.
        self.client.session._cookie_jar = aiohttp.CookieJar(unsafe=True)

        response = await self.client.request("POST",
                                             "/anonymous_login",
                                             data={"name": self.name})
        response.raise_for_status()
        self.game = response.url.query.get("g")

        self.ws = await self.client.ws_connect("/websocket?g=" + self.game)

        # Access implementation detail to store cookies.
        self.cookie_jar = self.client.session._cookie_jar
        self.client.session._cookie_jar = aiohttp.CookieJar(unsafe=True)
Example 3
async def get_left_electricity(data):
    "根据提供的房间数据访问能源管理中心以查询电费"
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(
            unsafe=True)) as s:
        # An unsafe cookie jar is needed to accept cookies served from an IP address
        async with s.get(url) as resp:
            text = await resp.text()
        soup = BeautifulSoup(text, 'lxml')
        form = getForm(soup, 'form1')

        keys = ['drlouming', 'drceng', 'dr_ceng', 'drfangjian']
        for k, v in zip(keys, data):
            soup = await updateForm(s, form, soup, k, v)

        form.update({
            'radio': 'usedR',
            'ImageButton1.x': 51,
            'ImageButton1.y': 37
        })
        async with s.post(url, data=form, raise_for_status=True) as resp:
            # With allow_redirects=False the redirect would have to be followed manually:
            # async with s.get(url+'/usedRecord1.aspx') as resp:
            text = await resp.text()
            try:
                pattern = r'(剩余电量|剩余金额).+?([+-]?\d+\.?\d*).*?([元度])'
                return re.search(pattern, text).group(1, 2, 3)
            except Exception as e:
                raise ValueError(e)
Example 4
async def get_grade_perpage(s, sid, ip, xnm, xqm, payload):
    grade_url = grade_index_url % sid
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(unsafe=True),
                                     cookies=s,
                                     headers=headers) as session:
        async with session.post(grade_url, data=payload) as resp:
            try:
                json_data = await resp.json()
                gradeList = []
                _gradeList = json_data.get('items')
                for _ in _gradeList:
                    grade = {
                        'course': _.get('kcmc'),
                        'credit': _.get('xf'),
                        'grade': _.get('cj'),
                        'category': _.get('kclbmc'),
                        'type': _.get('kcgsmc'),
                        'jxb_id': _.get('jxb_id'),
                        'kcxzmc': _.get('kcxzmc')
                    }
                    if xqm == "":
                        _xqm = _.get('xqm')
                    else:
                        _xqm = xqm
                    await get_grade_detail(session, sid, xnm, _xqm, grade)
                    gradeList.append(grade)
                return gradeList
            except aiohttp.client_exceptions.ClientResponseError:
                return None
Example 5
    def __init__(self, hass, config):
        """Initialize the scanner."""
        self.last_results = []

        self.username = config[CONF_USERNAME]
        self.password = config[CONF_PASSWORD]

        # The Tado device tracker can work with or without a home_id
        self.home_id = config[CONF_HOME_ID] if CONF_HOME_ID in config else None

        # If there's a home_id, we need a different API URL
        if self.home_id is None:
            self.tadoapiurl = 'https://my.tado.com/api/v2/me'
        else:
            self.tadoapiurl = 'https://my.tado.com/api/v2' \
                              '/homes/{home_id}/mobileDevices'

        # The API URL always needs a username and password
        self.tadoapiurl += '?username={username}&password={password}'

        self.websession = async_create_clientsession(
            hass, cookie_jar=aiohttp.CookieJar(unsafe=True, loop=hass.loop))

        self.success_init = self._update_info()
        _LOGGER.info("Tado scanner initialized")
Example 6
async def get_information():
    """Example of printing the current upstream."""
    jar = aiohttp.CookieJar(unsafe=True)
    websession = aiohttp.ClientSession(cookie_jar=jar)

    try:
        modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
        await modem.login(password=sys.argv[2])

        result = await modem.information()
        print("serial_number: {}".format(result.serial_number))
        print("usage: {}".format(result.usage))
        print("upstream: {}".format(result.upstream))
        print("wire_connected: {}".format(result.wire_connected))
        print("mobile_connected: {}".format(result.mobile_connected))
        print("connection_text: {}".format(result.connection_text))
        print("connection_type: {}".format(result.connection_type))
        print("current_nw_service_type: {}".format(result.current_nw_service_type))
        print("current_ps_service_type: {}".format(result.current_ps_service_type))
        print("register_network_display: {}".format(result.register_network_display))
        print("roaming: {}".format(result.roaming))
        print("radio_quality: {}".format(result.radio_quality))
        print("rx_level: {}".format(result.rx_level))
        print("tx_level: {}".format(result.tx_level))
        print("current_band: {}".format(result.current_band))
        print("cell_id: {}".format(result.cell_id))

        await modem.logout()
    except eternalegypt.Error:
        print("Could not login")

    await websession.close()
Example 7
async def renew_book(s, captcha, bar_code, check):
    renew_url = lib_renew_url
    now = int(time.time() * 1000)
    payload = {
        'bar_code': bar_code,
        'check': check,
        'time': now,
        'captcha': captcha
    }
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(unsafe=True),
                                     cookies=s,
                                     headers=headers) as session:
        async with session.post(renew_url, data=payload) as resp:
            html = await resp.text()
            res_font = BeautifulSoup(html, 'lxml').find('font')
            res_color = res_font['color']
            res_string = res_font.get_text().strip()
            if res_color == 'green':
                res_code = 200
            else:
                early = '不到续借时间,不得续借!'
                unavailable = '超过最大续借次数,不得续借!'
                if res_string == early:
                    res_code = 406
                elif res_string == unavailable:
                    res_code = 403
                else:
                    res_code = 400
            print(res_string + ' -> ' + str(res_code))
            return res_code
Example 8
	def __init__(self, server_login=None):
		self.server_login = server_login
		self.cookie_jar = aiohttp.CookieJar()
		self.session = None
		self.site = None
		self.key = None
		self.map_info_page_size = 1
Example 9
 def __init__(self, service_url: str) -> None:
     self.service_url = service_url
     jar = aiohttp.CookieJar(unsafe=True)
     self.session = aiohttp.ClientSession(
         timeout=ClientTimeout(total=10),
         headers={"User-Agent": get_agent()},
         cookie_jar=jar)
Example 10
 def __init__(self, app):
     self.api_url = 'https://karma.mania-exchange.com/api2'
     self.app = app
     self.cookie_jar = aiohttp.CookieJar()
     self.session = None
     self.key = None
     self.activated = False
Example 11
async def main():
    """Pull activities from the last week."""

    load_dotenv()

    today = date.today().strftime('%m/%d/%Y')
    seven_days_ago = (date.today() - timedelta(days=7)).strftime('%m/%d/%Y')

    jar = aiohttp.CookieJar(quote_cookie=False)
    async with aiohttp.ClientSession(cookie_jar=jar) as s:
        await calam_voyager.login(s, os.environ['VOYAGER_USERNAME'],
                                  os.environ['VOYAGER_PASSWORD'])
        response = await calam_voyager.pull_report(
            s,
            'rs_CalAm_CRM_Pull_Activity.txt',
            dtfrom=seven_days_ago,
            dtto=today)
        html = await response.text()

    # convert html table to `pandas.DataFrame`
    df = pd.read_html(html)[0]
    df.columns = df.iloc[2]
    df = df.iloc[3:-1]
    df.reset_index(inplace=True, drop=True)
    df.rename_axis(None, axis=1, inplace=True)
    print(df.head())
Example 12
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Amplify from a config entry."""

    # Create jar for storing session cookies
    jar = aiohttp.CookieJar(unsafe=True)

    # Amplifi uses a session cookie, so we need a web client with a cookie jar
    client_session = async_create_clientsession(hass, False, True, cookie_jar=jar)

    coordinator = AmplifiDataUpdateCoordinator(
        hass, client_session, entry.data[CONF_HOST], entry.data[CONF_PASSWORD]
    )
    await coordinator.async_refresh()

    if not coordinator.last_update_success:
        raise ConfigEntryNotReady

    hass.data[DOMAIN][entry.entry_id] = {
        COORDINATOR: coordinator,
        ENTITIES: {},
        COORDINATOR_LISTENER: None,
    }

    # Set up the platforms for the amplifi integration
    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True
Example 13
    def __init__(self, hass, config):
        """Initialize the scanner."""
        self.hass = hass
        self.host = config[CONF_HOST]
        self.password = config[CONF_PASSWORD]

        self.data = {}
        self.token = None

        self.headers = {
            'X-Requested-With': 'XMLHttpRequest',
            'Referer': "http://{}/index.html".format(self.host),
            'User-Agent': ("Mozilla/5.0 (Windows NT 10.0; WOW64) "
                           "AppleWebKit/537.36 (KHTML, like Gecko) "
                           "Chrome/47.0.2526.106 Safari/537.36")
        }

        self.websession = async_create_clientsession(
            hass,
            auto_cleanup=False,
            cookie_jar=aiohttp.CookieJar(unsafe=True, loop=hass.loop))

        @asyncio.coroutine
        def async_logout(event):
            """Logout from upc connect box."""
            try:
                yield from self._async_ws_function(CMD_LOGOUT)
                self.token = None
            finally:
                self.websession.detach()

        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_logout)
Example 14
    def __init__(self, client, connector=None):
        self.client = client
        self.connector = connector
        self._jar = aiohttp.CookieJar()
        self.headers = {}

        self.create_connection()
Example 15
 def makeAiohttpCookiesJar(self, cookies):
     # aiohttp.CookieJar only accepts cookie name/value pairs
     cookies = [{cookie['name']: cookie['value']} for cookie in cookies]
     cookiejar = aiohttp.CookieJar()
     for item in cookies:
         cookiejar.update_cookies(item)
     return cookiejar
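
 # Hedged usage sketch (not part of the original snippet): assuming `cookies`
 # came from something like Selenium's driver.get_cookies(), the jar built
 # above can be handed straight to a ClientSession (`client` and `page_url`
 # are placeholder names):
 #
 #   jar = client.makeAiohttpCookiesJar(driver.get_cookies())
 #   async with aiohttp.ClientSession(cookie_jar=jar) as session:
 #       async with session.get(page_url) as resp:
 #           html = await resp.text()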
Example 16
async def parse_main(account_name, ws):
    async with aiohttp.ClientSession(
            cookie_jar=aiohttp.CookieJar()) as session:
        try:
            # Get media urls (and their amount) for further download
            urls = await scraper(account_name, session, ws)
            # Create directory for certain account if it doesn't exist
            path = os.path.join(ARCHIVES_DIR, account_name)
            if not os.path.exists(path):
                os.makedirs(path)
            # Download media
            await download_media(urls, account_name, session, ws)
            # Archive the fetched media and delete temporary files
            # Notify the client that archive preparation has started
            await ws.send_json({'state': 'prepared-archive'})
            loop = asyncio.get_event_loop()
            executor = concurrent.futures.ProcessPoolExecutor()
            file_name = await loop.run_in_executor(executor, prepare_archive,
                                                   account_name)
            # Notify the client that the archive is ready
            await ws.send_json({'state': 'prepared-archive-completed'})

        except aiohttp.InvalidURL:
            # if the account wasn't found, notify the user
            await ws.send_json({'state': 'not-found'})
            return
        except Exception:
            await ws.send_json({'state': 'error'})
            return
        else:
            await ws.send_json({'info': ('file-name', file_name)})
            # Remove zip file 20 minutes after creation
            await asyncio.sleep(20 * 60)
            if os.path.exists(path + '.zip'):
                os.remove(path + '.zip')
Example 17
async def main():
    try:
        jar = aiohttp.CookieJar(unsafe=True)
        nodes_ssh_dict: dict = {}

        async for apic in FABRICS:
            async with ClientSession(cookie_jar=jar) as session:
                tasks = []
                url = API % apic
                await apic_login(session, url)
                await verify_apic_faults(session, url)
                impacted_devices = await verify_devices(session, url)
                async for node in AsyncIterator(impacted_devices):
                    ssh_info = await get_ssh_ip(session, url, node)
                    nodes_ssh_dict.update(ssh_info)

        async for node in AsyncIterator(nodes_ssh_dict):
            if 'mgmtRsOoBStNode' in nodes_ssh_dict[node].keys():
                node_ip = nodes_ssh_dict[node]['mgmtRsOoBStNode']
            elif 'mgmtRsOoBStNode' not in nodes_ssh_dict[node].keys():
                node_ip = nodes_ssh_dict[node]['mgmtRsInBStNode']
            host = ping(node_ip, count=5, interval=0.2, privileged=False)
            if host.is_alive:
                log.info(f'Attempting SSH connection to {node} '
                         f'at oobMgmt {host.address}')
                power_on_hrs = await query_nxos_switches(node, host.address)
                # print(power_on_hrs)
            else:
                log.error(f'{node} is unreachable via SSH '
                          f'on {host.address}. Unable to verify.')

    except Exception as e:
        print(str(e), 'Exception happened on main')
Example 18
 def __init__(self, hostname):
     self.hostname = hostname
     self.url = f'http://{hostname}:{PORT}'
     self.session = aiohttp.ClientSession(
         timeout=ClientTimeout(total=100),
         headers={"User-Agent": get_agent()},
         cookie_jar=aiohttp.CookieJar(unsafe=True))
Example 19
    async def run(self) -> None:
        self._request_count = 0
        self._cookie_jar = aiohttp.CookieJar()
        self._load_cookies()

        async with aiohttp.ClientSession(
                headers={"User-Agent": f"{NAME}/{VERSION}"},
                cookie_jar=self._cookie_jar,
                connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(
                    cafile=certifi.where())),
                timeout=ClientTimeout(
                    # 30 minutes. No download in the history of downloads was longer than 30 minutes.
                    # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
                    # Allowing an arbitrary value could be annoying for overnight batch jobs
                    total=15 * 60,
                    connect=self._http_timeout,
                    sock_connect=self._http_timeout,
                    sock_read=self._http_timeout,
                )) as session:
            self.session = session
            try:
                await super().run()
            finally:
                del self.session
        log.explain_topic(
            f"Total amount of HTTP requests: {self._request_count}")

        # They are saved in authenticate, but a final save won't hurt
        self._save_cookies()
Example 20
async def qimai_login_async(username: str = "17123547402",
                            password: str = "linhanqiu1123.") -> ClientSession:
    headers = {
        'Referer': 'https://www.qimai.cn/rank',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'),
    }
    cookies = aiohttp.CookieJar()
    cookie_path = Path(__file__).absolute().parent / f"cookies_{username}"
    if cookie_path.exists():
        cookies.load(cookie_path)
    Session = aiohttp.ClientSession(headers=headers, cookie_jar=cookies)
    if not await if_need_login(session=Session):
        return Session
    # step_1
    url = 'https://www.qimai.cn/account/signin/r/%2F'
    response = await Session.get(url)
    # step_2
    url = 'https://api.qimai.cn/account/pageCheck/type/signin'
    full_url = f'https://api.qimai.cn/account/pageCheck/type/signin?analysis={urllib.parse.quote(get_analysis(url=url))}'
    response = await Session.get(full_url)
    print(await response.text())
    # step_3
    url = 'https://api.qimai.cn/account/userinfo'
    full_url = f'https://api.qimai.cn/account/userinfo?analysis={urllib.parse.quote(get_analysis(url=url))}'
    response = await Session.get(full_url)
    print(await response.text())
    # step_4
    url = 'https://api.qimai.cn/index/index'
    full_url = f'https://api.qimai.cn/index/index?analysis={urllib.parse.quote(get_analysis(url=url))}'
    response = await Session.get(full_url)
    print(await response.text())
    # step_5
    url = f'https://api.qimai.cn/account/getVerifyCodeImage?{str(int(time.time() * 1000))}'
    response = await Session.get(url)
    async with aiofiles.open(f'captcha_{username}.jpg', 'wb') as f:
        await f.write(await response.read())
    # step_6
    captcha = input('input code:')
    url = 'https://api.qimai.cn/account/signinForm'
    login_url = f'https://api.qimai.cn/account/signinForm?analysis={urllib.parse.quote(get_analysis(url=url))}'
    data = {
        'username': username,
        'password': password,
        'code': captcha,  # captcha code
    }
    response = await Session.post(login_url, data=data)
    data = await response.json()
    if data.get('msg') == '登录成功':
        print('登录成功!用户名为:' + data.get('userinfo').get('username'))
        # USERINFO is the cookie returned on a successful login
        await write_session(session=Session, cookie_path=cookie_path)
        return Session
    elif data.get('msg') == '验证码错误,请重试':
        print('验证码错误,请重试!')
        return None
    elif data.get('msg') == '用户名或密码错误':
        print('用户名或密码错误')
        return None
Example 21
async def login_xxmh(sid, pswd, _proxy):
    _cookie_jar = None
    #GET PROXY
    myproxy = await get_proxy()
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(unsafe=True),
                                     headers=headers) as session:
        async with session.get(accounturl, timeout=15,
                               proxy=myproxy) as response:
            ltid, execution = await getltid(await response.text())
            jid = await getjid(response.headers['set-cookie'])
            payload = {
                "username": sid,
                "password": pswd,
                "lt": ltid,
                "execution": execution,
                "_eventId": "submit",
                "submit": "LOGIN"
            }
            async with session.post(account_jurl + jid,
                                    data=payload,
                                    timeout=8) as res2:
                if "CASTGC" in res2.cookies:
                    return True
                else:
                    return False
Example 22
async def main():
    global s, url

    networks = get_nets()

    url = f'https://{ibx_grid_master}/wapi/{ibx_wapi_version}'

    t1_start = perf_counter()

    auth = aiohttp.BasicAuth(login=ibx_username, password=ibx_password)
    async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(
            unsafe=True)) as session:
        async with session.get(f'{url}/grid', auth=auth, ssl=False) as res:
            logger.debug(res.status)
            tasks = []
            sem = asyncio.Semaphore(8)
            for network in networks:
                task = asyncio.ensure_future(
                    load_network(sem, session, network))
                tasks.append(task)

            responses = asyncio.gather(*tasks)
            await responses

    t1_stop = perf_counter()

    logger.info('finished!')
    logger.info(f'elapsed time for execution {t1_stop - t1_start}')

    sys.exit()
Example 23
 def __init__(self, loop, **attrs):
     self.vathuodi = ''.join(
         random.SystemRandom().choice(string.ascii_uppercase + string.digits)
         for _ in range(50))
     username = attrs.get('username', None)
     password = attrs.get('password', None)
     self.logger = attrs.get('logger', None)
     self.base_url = 'https://www.bankofmaldives.com.mv/internetbanking/api/'
     if username and password:
         self.login_params = {"username": ''.join(chr(ord(c) + ord(self.vathuodi[i % len(self.vathuodi)]) - ord('0')) for i, c in enumerate(username)), "password": ''.join(chr(ord(c) + ord(self.vathuodi[i % len(self.vathuodi)]) - ord('0')) for i, c in enumerate(password))}
     else:
         self.login_params = None
     self.accounts = []
     self.contacts = None
     self.acc_populate = True
     if not self.login_params:
         self.logger.error("Unable to login, username and/or password not given")
         raise Unauthorized('A username and password is needed to use this API.')
     jar = aiohttp.CookieJar(unsafe=True)
     header = {
         'Accept': '/',
         'Connection': 'keep-alive',
         'User-Agent': (
             'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) '
             'AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19')
     }
     self._session = aiohttp.ClientSession(loop=loop, cookie_jar=jar, headers=header)
     self.back_off = type('Back_off',(), {'__init__': self.back_off_init, 'delay': self.delay})()
Example 24
 async def download(url):
     async with aiohttp.ClientSession(cookie_jar=aiohttp.CookieJar(),
                                      headers=utils.head,
                                      cookies=utils.cookies) as session:
         html = await fetch(session, url)
         pure_comments = await parser(html)
         return pure_comments
Example 25
    async def get(self, url):
        link = self.api_url + url
        # Not a login request, so print the link (keeps login credentials out of the output)
        if '/login' not in url:
            print(link)
        result = None
        try:
            global COOKIES
            jar = aiohttp.CookieJar(unsafe=True)
            async with aiohttp.ClientSession(headers=HEADERS,
                                             cookies=COOKIES,
                                             cookie_jar=jar) as session:
                async with session.get(link) as resp:
                    # For login requests, save the session's cookies
                    if '/login' in url:
                        _dict = {}
                        cookies = session.cookie_jar.filter_cookies(
                            self.api_url)
                        for key, cookie in cookies.items():
                            _dict[key] = cookie.value
                            # print(key)
                            # print(cookie.value)
                        # Update the global COOKIES value
                        COOKIES = _dict

                    result = await resp.json()
        except Exception as e:
            self.log('【接口出现异常】' + link, e)
        return result
Example 26
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up Netgear LTE component."""
    if DATA_KEY not in hass.data:
        websession = async_create_clientsession(
            hass, cookie_jar=aiohttp.CookieJar(unsafe=True)
        )
        hass.data[DATA_KEY] = LTEData(websession)

        async def service_handler(service: ServiceCall) -> None:
            """Apply a service."""
            host = service.data.get(ATTR_HOST)
            conf = {CONF_HOST: host}
            modem_data = hass.data[DATA_KEY].get_modem_data(conf)

            if not modem_data:
                _LOGGER.error("%s: host %s unavailable", service.service, host)
                return

            if service.service == SERVICE_DELETE_SMS:
                for sms_id in service.data[ATTR_SMS_ID]:
                    await modem_data.modem.delete_sms(sms_id)
            elif service.service == SERVICE_SET_OPTION:
                if failover := service.data.get(ATTR_FAILOVER):
                    await modem_data.modem.set_failover_mode(failover)
                if autoconnect := service.data.get(ATTR_AUTOCONNECT):
                    await modem_data.modem.set_autoconnect_mode(autoconnect)
            elif service.service == SERVICE_CONNECT_LTE:
                await modem_data.modem.connect_lte()
            elif service.service == SERVICE_DISCONNECT_LTE:
                await modem_data.modem.disconnect_lte()
Example 27
async def async_setup_platform(hass,
                               config,
                               async_add_entities,
                               discovery_info=None):
    """Set up the AfterShip sensor platform."""
    for device_config in config[CONF_DEVICES]:
        host = device_config[CONF_HOST]
        username = device_config[CONF_USERNAME]
        password = device_config[CONF_PASSWORD]
        interval = device_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)

        session = async_create_clientsession(
            hass,
            cookie_jar=aiohttp.CookieJar(unsafe=True),
            response_class=FixClientResponse)

        poe_data = ZyxelPoeData(host, username, password, interval, session)

        await poe_data.async_update()

        switches = list()
        for port, data in poe_data.ports.items():
            switches.append(ZyxelPoeSwitch(poe_data, host, port))

        async_add_entities(switches, False)
Example 28
    async def async_websocket(self):
        """
        By default ClientSession uses the strict version of aiohttp.CookieJar.
        RFC 2109 explicitly forbids accepting cookies from URLs with an IP
        address instead of a DNS name (e.g. http://127.0.0.1:80/cookie).
        That is usually what you want, but sometimes (for testing, or when the
        device is only reachable by IP) such cookies must be supported, which
        is done by passing unsafe=True to the aiohttp.CookieJar constructor:
        """

        # enable support for unsafe cookies
        jar = aiohttp.CookieJar(unsafe=True)

        logger.info('login() %s as %s' % (self.url, self.username))

        json_request = {
            'username': self.username,
            'password': self.password,
            'strict': True
        }

        try:
            async with aiohttp.ClientSession(cookie_jar=jar) as session:
                async with session.post(self.login_url,
                                        json=json_request,
                                        ssl=self.ssl_verify) as response:
                    assert response.status == 200
                    json_response = await response.json()
                    logger.debug('Received json response to login:')
                    # (code elided in the source)
                    logger.debug('Received json response to initial data:')
                    # logger.debug(json.dumps(json_response, indent=2))
                    self.process_unifi_message(json_response)

                async with session.ws_connect(self.ws_url,
                                              ssl=self.ssl_verify) as ws:
                    async for msg in ws:
                        if msg.type == aiohttp.WSMsgType.TEXT:
                            self.last_received_event = time.time()
                            # logger.debug('received: %s' % json.dumps(json.loads(msg.data), indent=2))
                            self.process_unifi_message(
                                msg.json(loads=json.loads))
                        elif msg.type == aiohttp.WSMsgType.CLOSED:
                            logger.info('WS closed')
                            self.running = False
                            break
                        elif msg.type == aiohttp.WSMsgType.ERROR:
                            logger.error('WS closed with Error')
                            self.running = False
                            break

        except AssertionError as e:
            logger.error('failed to connect: %s' % e)
            self.running = False

        logger.info('async_websocket: Exited')
Example 29
 def __init__(self,
              app_or_server,
              *,
              scheme=sentinel,
              host=sentinel,
              cookie_jar=None,
              **kwargs):
     if isinstance(app_or_server, BaseTestServer):
         if scheme is not sentinel or host is not sentinel:
             raise ValueError("scheme and host are mutable exclusive "
                              "with TestServer parameter")
         self._server = app_or_server
     elif isinstance(app_or_server, Application):
         scheme = "http" if scheme is sentinel else scheme
         host = '127.0.0.1' if host is sentinel else host
         self._server = TestServer(app_or_server, scheme=scheme, host=host)
     else:
         raise TypeError("app_or_server should be either web.Application "
                         "or TestServer instance")
     self._loop = self._server._loop
     if cookie_jar is None:
         cookie_jar = aiohttp.CookieJar(unsafe=True, loop=self._loop)
     self._session = ClientSession(loop=self._loop,
                                   cookie_jar=cookie_jar,
                                   **kwargs)
     self._closed = False
     self._responses = []
     self._websockets = []
Example 30
 def get_cookie_jar(self) -> aiohttp.CookieJar:
     loop = aiohttp.helpers.get_running_loop()
     jar = aiohttp.CookieJar(loop=loop, unsafe=True, quote_cookie=self._quote_cookies)
     cookies: Optional[Dict] = self.get_cookies()
     if cookies is not None:
         jar.update_cookies(cookies)
     return jar
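
 # Hedged usage sketch (not part of the original snippet): the returned jar
 # would typically be handed to a ClientSession, e.g.
 #
 #   session = aiohttp.ClientSession(cookie_jar=self.get_cookie_jar())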