def _get_session_from_console(self,
                              console_page_response,
                              csrf_token_data,
                              extra_cookies=None):
    """Build an authenticated requests session from a console page.

    Args:
        console_page_response: Response whose HTML contains the CSRF token.
        csrf_token_data: Descriptor with entity_type, attributes,
            attribute_value and headers_name for locating/sending the token.
        extra_cookies: Optional extra cookie names to keep besides the
            standard ones.

    Returns:
        requests.Session: Session primed with filtered cookies and the
        CSRF token header.

    Raises:
        ValueError: If the CSRF token cannot be found in the page.

    """
    soup = Bfs(console_page_response.text, features='html.parser')
    try:
        csrf_token = soup.find(csrf_token_data.entity_type,
                               csrf_token_data.attributes).attrs.get(
                                   csrf_token_data.attribute_value)
    except AttributeError:
        raise ValueError('Response received: %s' %
                         console_page_response.text)
    session = requests.Session()
    # BUG FIX: the original `self._standard_cookies + extra_cookies if
    # extra_cookies else []` parsed as `(a + b) if b else []`, so the
    # standard cookies were silently dropped whenever extra_cookies was
    # falsy.  Always keep the standard cookies (matches _get_response).
    cookies_to_filter = self._standard_cookies + (extra_cookies or [])
    cookies = self._filter_cookies(self._session.cookies,
                                   cookies_to_filter)
    session.headers.update(self._default_headers)
    session.headers.update({
        'Cookie':
        self._header_cookie_from_cookies(cookies),
        csrf_token_data.headers_name:
        csrf_token
    })
    for cookie in cookies:
        session.cookies.set_cookie(cookie)
    return session
Ejemplo n.º 2
0
 def _get_tokens(response):
     """Extract the Drupal form build id and theme token from the page."""
     page = Bfs(response.text, "html.parser")
     tracking_form = page.find('form',
                               {'id': 'custom-geniki-tracking-page-form'})
     build_id_input = tracking_form.find('input', {'name': 'form_build_id'})
     form_build_id = build_id_input.attrs.get('value')
     # The theme token is not in a form field; slice it out of the raw
     # markup right after its first occurrence.
     marker = response.text.find('theme_token')
     theme_token = response.text[marker:marker + 80].split('"')[1]
     return form_build_id, theme_token
Ejemplo n.º 3
0
 def _get_required_form(self, response, action):
     """Return the form whose action matches, else raise NoExpectedFormOption."""
     page = Bfs(response.text, 'html.parser')
     matching = [candidate for candidate in page.findAll('form')
                 if candidate.get('action', '') == action]
     if matching:
         return matching[0]
     self._logger.debug('Response : {}'.format(response.text))
     raise NoExpectedFormOption(response.text)
Ejemplo n.º 4
0
 def _submit_email(self, payload):
     """Post the account email to the lookup endpoint, return hidden fields."""
     lookup_url = '{login_url}/signin/v1/lookup'.format(login_url=self._login_url)
     payload['Email'] = self.email
     lookup_response = self._session.post(lookup_url, data=payload)
     if INVALID_EMAIL_MESSAGE in lookup_response.text:
         raise InvalidUser(self.email)
     page = Bfs(lookup_response.text, 'html.parser')
     return self._get_hidden_form_fields(page.find('form'))
Ejemplo n.º 5
0
 def _parse_xsrf_token(response):
     """Pull the xsrf token text out of the page, raising on failure."""
     page = Bfs(response.text, features='html.parser')
     token_span = page.find('span', id='_xsrfToken')
     if token_span:
         return token_span.string.strip()
     # No token on the page: either the login expired or the response is
     # something unexpected entirely.
     if LOGIN_ERROR_TEXT in response.text:
         raise AuthenticationExpired()
     raise ResponseError(
         f'Unable to get xsrf token from page, response was :{response.text}'
     )
Ejemplo n.º 6
0
    def _handle_prompt(self, response):
        """Drive Google's phone-prompt 2FA challenge to completion.

        Waits on the cryptauth awaittx endpoint for the user to approve the
        prompt on their phone, then posts the challenge form back.
        """
        # Borrowed with slight modification from https://git.io/vxu1A
        soup = Bfs(response.text, 'html.parser')
        challenge_url = response.url.split("?")[0]

        def hidden_value(name):
            # Hidden inputs on the challenge page carry the state the
            # final POST needs.
            return soup.find('input', {'name': name}).get('value')

        try:
            api_key_div = soup.find('div', {'data-api-key': True})
            data_key = api_key_div.get('data-api-key')
            tx_div = soup.find('div', {'data-tx-id': True})
            data_tx_id = tx_div.get('data-tx-id')
        except AttributeError:
            message = 'Unexpected response received :{}'.format(response.text)
            raise Unexpected2FAResponse(message)
        await_url = ('https://content.googleapis.com/cryptauth/v1/'
                     'authzen/awaittx?alt=json&key={}').format(data_key)
        await_body = {'txId': data_tx_id}

        print(
            "Open the Google App, and tap 'Yes' on the prompt to sign in ...")

        self._session.headers['Referer'] = response.url
        # Long-polls until the prompt is answered on the device.
        await_response = self._session.post(await_url, json=await_body)
        parsed = json.loads(await_response.text)

        payload = {
            'challengeId': hidden_value('challengeId'),
            'challengeType': hidden_value('challengeType'),
            'TL': hidden_value('TL'),
            'gxf': hidden_value('gxf'),
            'token': parsed['txToken'],
            'action': hidden_value('action'),
            'TrustDevice': 'on',
        }
        final_response = self._session.post(challenge_url, data=payload)
        final_response.raise_for_status()
        return final_response
Ejemplo n.º 7
0
    def _get_table(self, section_attr):
        """
        Gets the section tag with the given id, parsing the page lazily.

        This is used for both the teams and the matches tables.
        :param section_attr: value of the section's id attribute
        :return: BFS tag for the matching section
        """
        if not self._soup:
            # Fetch and parse the competition page only once.
            page = self.session.get(self.url)
            self._soup = Bfs(page.text, "html.parser")
        section_id = '{}'.format(section_attr)
        return self._soup.find('section', {'id': section_id})
Ejemplo n.º 8
0
    def _handle_prompt(self, resp):
        """Handle Google's phone-prompt 2FA flow and return the final response."""
        # Borrowed with slight modification from https://git.io/vxu1A
        page = Bfs(resp.text, 'html.parser')
        challenge_url = resp.url.split("?")[0]

        def hidden_value(name):
            # The challenge form state lives in hidden input fields.
            return page.find('input', {'name': name}).get('value')

        data_key = page.find('div', {'data-api-key': True}).get('data-api-key')
        data_tx_id = page.find('div', {'data-tx-id': True}).get('data-tx-id')

        await_url = ("https://content.googleapis.com/cryptauth/v1/authzen/"
                     "awaittx?alt=json&key=%s" % data_key)
        await_body = {'txId': data_tx_id}

        print(
            "Open the Google App, and tap 'Yes' on the prompt to sign in ...")

        self._session.headers['Referer'] = resp.url
        # Long-polls until the user answers the prompt on the device.
        await_response = self._session.post(await_url, json=await_body)
        parsed = json.loads(await_response.text)

        payload = {
            'challengeId': hidden_value('challengeId'),
            'challengeType': hidden_value('challengeType'),
            'TL': hidden_value('TL'),
            'gxf': hidden_value('gxf'),
            'token': parsed['txToken'],
            'action': hidden_value('action'),
            'TrustDevice': 'on',
        }

        final_response = self._session.post(challenge_url, data=payload)
        final_response.raise_for_status()
        return final_response
Ejemplo n.º 9
0
 def market_place_id(self):
     """Marketplace id of the account, fetched lazily from the billing page."""
     if self._marketplace_id is None:
         billing_url = 'https://console.aws.amazon.com/billing/home?'
         billing_response = self.session.get(billing_url)
         # A 401 specifically means the credentials are no longer valid.
         if billing_response.status_code == 401:
             raise InvalidCredentials
         if not billing_response.ok:
             self.logger.error(f'Could not retrieve market place id, response: {billing_response.text}')
             raise ServerError
         page = Bfs(billing_response.text, features="html.parser")
         marketplace_input = page.find('input', {'id': 'marketPlace'})
         self._marketplace_id = marketplace_input.attrs.get('value')
     return self._marketplace_id
Ejemplo n.º 10
0
 def directories(self):
     """Return list of directories."""
     directories_url = f'https://{self._admin_host}/admin/people/directories'
     directories_response = self.session.get(directories_url)
     if not directories_response.ok:
         # Best effort: log the failure and report no directories.
         self._logger.error(directories_response.text)
         return []
     page = Bfs(directories_response.text, features='html.parser')
     xsrf_token = self._parse_xsrf_token(directories_response)
     entries = page.find_all('p', {'class': 'active_directory'})
     return [ActiveDirectory(self, entry, xsrf_token) for entry in entries]
Ejemplo n.º 11
0
    def __league_page(self):
        """
        Gets Footy.eu competitions page and scrapes its HTML

        :return: footy front page as BFS object
        """
        if not self._front_page:
            page = self.session.get(self._site)
            try:
                self._front_page = Bfs(page.text, 'html.parser')
            except Exception:
                # BUG FIX: the original caught Bfs.HTMLParser.HTMLParseError,
                # which does not exist — BeautifulSoup exposes no HTMLParser
                # attribute and HTMLParseError was removed in Python 3.5 — so
                # evaluating the except clause itself raised AttributeError.
                # Catch parse failures broadly to keep the original
                # best-effort semantics (log and return None).
                self.logger.exception("Error while parsing Footy front page")
        return self._front_page
Ejemplo n.º 12
0
    def _get_csrf_token(html_page):
        """
        Gets the CSRF value from the HTML login page.

        Args:
            html_page: HTML page text.

        Returns:
            string: Content of the csrf-token meta tag.

        """
        page = Bfs(html_page, 'html.parser')
        token_meta = page.find('meta', {'name': 'csrf-token'})
        return token_meta.attrs.get('content')
 def _initialize(self):
     """Fetch the ServiceLogin page and extract the sign-in request id.

     Parses the 'data-initial-sign-in-data' blob and stores index 30 of it
     as self.req_id.

     Raises:
         InvalidData: If the blob is missing or cannot be parsed.

     """
     url = '{login_url}/ServiceLogin'.format(login_url=self._login_url)
     response = self._session.get(url, verify=not DEBUG)
     soap = Bfs(response.text, 'html.parser')
     try:
         data = soap.find('div', {'data-initial-sign-in-data': True})\
             .get('data-initial-sign-in-data').replace('%.@.', '[')
         self.req_id = json.loads(data)[30]
     except (AttributeError, TypeError, KeyError):
         # BUG FIX: when the div is missing, find() returns None and
         # None.get(...) raises AttributeError, which the original did not
         # catch — callers saw a raw AttributeError instead of InvalidData.
         self._logger.exception('Unable to parse response :%s', response.text)
         raise InvalidData
     except (ValueError, IndexError):
         self._logger.exception('Unable to parse response :%s', data)
         raise InvalidData
Ejemplo n.º 14
0
    def stdout(self):
        """The stdout of the job execution.

        Returns:
            basestring: The stdout of the job execution.

        """
        stdout_path = self._data.get('related', {}).get('stdout')
        stdout_response = self._tower.session.get(f"{self._tower.host}{stdout_path}")
        page = Bfs(stdout_response.text, 'html.parser')
        # The output lives inside an ANSI-styled <div>; remove the embedded
        # <style> tag so only the text content remains.
        output_div = page.find('div', {'class': 'nocode ansi_fore ansi_back'})
        output_div.find('style').extract()
        return output_div.text
Ejemplo n.º 15
0
    def stdout(self):
        """The stdout of the project update.

        Returns:
            basestring: The stdout of the project update.

        """
        related = self._data.get('related', {})
        full_url = '{host}{url}'.format(host=self._tower.host,
                                        url=related.get('stdout'))
        page = Bfs(self._tower.session.get(full_url).text, 'html.parser')
        # Strip the inline <style> tag before extracting the plain text.
        content_div = page.find('div', {'class': 'nocode ansi_fore ansi_back'})
        content_div.find('style').extract()
        return content_div.text
 def _get_response(self,
                   url,
                   params=None,
                   extra_cookies=None,
                   headers=None):
     """Perform a GET carrying the session's filtered cookies.

     Redirects are not followed; successful response cookies are merged
     back into the session.

     Raises:
         ExpiredCredentials: On a 400 response titled 'Credentials expired'.
         ValueError: On any other non-ok or unparsable error response.

     """
     extra_cookies = extra_cookies or []
     headers = headers or {}
     cookies_to_filter = self._standard_cookies + extra_cookies
     headers.update(self._default_headers)
     cookies = self._filter_cookies(self._session.cookies, cookies_to_filter)
     headers['Cookie'] = self._header_cookie_from_cookies(cookies)
     arguments = {'url': url,
                  'headers': headers,
                  'cookies': self._cookies_to_dict(cookies),
                  'allow_redirects': False}
     if params:
         arguments['params'] = params
     self.logger.debug('Getting url :%s with arguments : %s', url, arguments)
     response = requests.get(**arguments)
     if not response.ok:
         try:
             error_page = Bfs(response.text, features='html.parser')
             error_title = error_page.title.string.strip()
             content_div = error_page.find('div', {'id': 'content'})
             err_msg = content_div.find('p').string
         except AttributeError:
             raise ValueError('Response received: %s' % response.text)
         if response.status_code == 400 and error_title == 'Credentials expired':
             raise ExpiredCredentials(response.status_code, err_msg)
         raise ValueError('Response received: %s' % response.text)
     self._debug_response(response, cookies)
     self._session.cookies.update(response.cookies)
     return response
Ejemplo n.º 17
0
    def get_aws_provisioning_data(self, application_id):
        """Gets the provisioning data for okta aws application.

        Args:
            application_id: Okta application id

        Returns:
            account_ids: Account ids associated with the application
            xsrfToken: xsrf token associated with the application

        """
        url = f'{self._admin_aws_application_url}/{application_id}/settings/user-mgmt'
        page = Bfs(self.session.get(url).text, 'html.parser')
        # Either input may be absent; fall back to an empty string.
        accounts_input = page.find('input', {'id': 'userMgmtSettings.accountsIds'})
        xsrf_input = page.find('input', {'name': '_xsrfToken'})
        account_ids = accounts_input.get('value', '') if accounts_input else ''
        xsrftoken = xsrf_input.get('value', '') if xsrf_input else ''
        return account_ids, xsrftoken
Ejemplo n.º 18
0
 def _initialize(self):
     """Fetch the ServiceLogin page and return its hidden form fields."""
     login_page_url = '{login_url}/ServiceLogin'.format(login_url=self._login_url)
     login_page = self._session.get(login_page_url)
     parsed = Bfs(login_page.text, 'html.parser')
     return self._get_hidden_form_fields(parsed.find('form'))
Ejemplo n.º 19
0
    def search(self, tracking_number):
        """Searches for packages.

        Posts the Drupal AJAX form for the tracking page and parses the
        returned checkpoint markup.

        Args:
            tracking_number: The tracking number to look for.

        Returns:
            result (TrackingState): The tracking state of the package.

        """
        form_build_id, theme_token = self._initialize()
        url = '{base}/system/ajax'.format(base=self._base_url)
        # The endpoint expects classic form-encoded XHR headers.
        headers = self._headers.copy()
        headers.update({'Host': 'www.taxydromiki.com',
                        'Accept': ('application/json, text/javascript, */*; '
                                   'q=0.01'),
                        'Content-Type': 'application/x-www-form-urlencoded',
                        'X-Requested-With': 'XMLHttpRequest',
                        'Referer': '{base}/track'.format(base=self._base_url),
                        'Origin': self._base_url})
        css = 'ajax_page_state[css]'
        js = 'ajax_page_state[js]'  # pylint: disable=invalid-name
        # Mirror the Drupal ajax_page_state a real browser would send;
        # the ids/css/js entries replicate the rendered page's assets.
        payload = {'_triggering_element_name': 'op',
                   '_triggering_element_value': u'Αναζήτηση',
                   'ajax_html_ids[]': [
                       'mp-pusher',
                       'mp-menu',
                       'block-custom-custom-time-date',
                       'block-custom-custom_time_date-ajax-content',
                       'block-custom-custom-account-menu',
                       'block-lang-dropdown-language-content',
                       'lang_dropdown_form_language_content',
                       'lang-dropdown-select-language_content',
                       'trigger',
                       'block-system-main-menu',
                       'block-custom-geniki-tracking-form',
                       'custom-geniki-tracking-form',
                       'edit-tracking-number--2',
                       'edit-submit--3',
                       'block-block-10',
                       'main-content',
                       'custom-geniki-tracking-page-form',
                       'edit-tracking-searchbox',
                       'edit-tracking-number',
                       'edit-submit',
                       'tracking-result',
                       'block-menu-block-1',
                       'block-user-login',
                       'user-login-form',
                       'edit-name',
                       'edit-pass',
                       'edit-actions',
                       'edit-submit--4',
                       'block-block-3',
                       'block-block-4',
                       'block-block-9',
                       'block-block-2',
                       'block-menu-menu-footer-menu',
                       'block-menu-menu-footer-second',
                       'block-block-8'],
                   f'{css}[modules/node/node.css]': '1',
                   f'{css}[sites/all/modules/ckeditor/css/ckeditor.css]': '1',
                   f'{css}[sites/all/modules/ctools/css/ctools.css]': '1',
                   f'{css}[sites/all/modules/date/date_api/date.css]': '1',
                   f'{css}[sites/all/modules/date/date_popup/themes/datepicker.1.7.css]': '1',
                   f'{css}[sites/all/modules/lang_dropdown/lang_dropdown.css]': '1',
                   f'{css}[sites/all/modules/logintoboggan/logintoboggan.css]': '1',
                   f'{css}[sites/all/modules/views/css/views.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/field/field.theme.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/search/search.theme.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/system/system.base.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/system/system.menus.theme.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/system/system.messages.theme.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/system/system.theme.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/user/user.base.css]': '1',
                   f'{css}[sites/all/themes/omega/omega/omega/css/modules/user/user.theme.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/after.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/taxydromiki.hacks.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/taxydromiki.no-query.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/taxydromiki.normalize.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/taxydromiki.print.css]': '1',
                   f'{css}[sites/all/themes/taxydromiki/css/taxydromiki.styles.css]': '1',
                   f'{js}[0]': '1',
                   f'{js}[misc/ajax.js]': '1',
                   f'{js}[misc/drupal.js]': '1',
                   f'{js}[misc/form.js]': '1',
                   f'{js}[misc/jquery.cookie.js]': '1',
                   f'{js}[misc/jquery.js]': '1',
                   f'{js}[misc/jquery.once.js]': '1',
                   f'{js}[misc/progress.js]': '1',
                   f'{js}[public://languages/el_j43hC-GL4y98fNlJzVRM9SL0SKGIvZ5zF9vHNPqFpS8.js]': '1',
                   f'{js}[sites/all/modules/ajaxblocks/ajaxblocks.js]': '1',
                   f'{js}[sites/all/modules/browserclass/browserclass.js]': '1',
                   f'{js}[sites/all/modules/clientside_validation/clientside_validation.ie8.js]': '1',
                   f'{js}[sites/all/modules/clientside_validation/clientside_validation.js]': '1',
                   f'{js}[sites/all/modules/clientside_validation/jquery-validate/jquery.validate.min.js]': '1',
                   f'{js}[sites/all/modules/clientside_validation/jquery.form.js]': '1',
                   f'{js}[sites/all/modules/custom/custom_geniki/custom_geniki.tracking.js]': '1',
                   f'{js}[sites/all/modules/google_analytics/googleanalytics.js]': '1',
                   f'{js}[sites/all/modules/lang_dropdown/lang_dropdown.js]': '1',
                   f'{js}[sites/all/themes/omega/omega/omega/js/no-js.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/classie.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/jquery.hoverIntent.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/jquery.uniform.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/jquery.wookmark.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/mlpushmenu.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/modernizr.custom.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/taxydromiki.behaviors.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/js/taxydromiki.responsive.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/libraries/html5shiv/html5shiv-printshiv.min.js]': '1',
                   f'{js}[sites/all/themes/taxydromiki/libraries/html5shiv/html5shiv.min.js]': '1',
                   'ajax_page_state[theme]': 'taxydromiki',
                   'ajax_page_state[theme_token]': theme_token,
                   'form_build_id': form_build_id,
                   'form_id': 'custom_geniki_tracking_page_form',
                   'tracking_number': tracking_number}
        response = self._session.post(url, headers=headers, data=payload)
        response_data = next((entry for entry in response.json()
                              if entry.get('command') == 'insert'), None)
        # BUG FIX: the original dereferenced response_data unconditionally,
        # so a response with no 'insert' command crashed with
        # AttributeError on None.  Treat that case as "no checkpoints".
        if response_data is None:
            return []
        soup = Bfs(response_data.get('data'), 'html.parser')
        entries = soup.find_all('div', {'class': 'tracking-checkpoint'})
        return [TrackingState(entry) for entry in entries]