Example #1
 def list_child_projects(self, project):
     """List the direct child projects of a project."""
     url = self.gerrit.url('LIST_CHILD_PROJECTS', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
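All of the Gerrit wrappers in this collection follow the same pattern: build an unprepared requests.Request and hand it to gerrit.dispatch, which presumably prepares and sends it through a shared session. A minimal, hypothetical sketch of such a dispatch helper (the endpoint table and attribute names are assumptions, not the library's actual code):

from requests import Session

class Gerrit:
    """Hypothetical client illustrating the dispatch pattern used above."""

    ENDPOINTS = {'LIST_PROJECTS': '/projects/'}   # assumed endpoint table

    def __init__(self, base_url, auth=None):
        self.base_url = base_url.rstrip('/')
        self.auth = auth
        self.session = Session()

    def url(self, name, **kwargs):
        return self.base_url + self.ENDPOINTS[name].format(**kwargs)

    def dispatch(self, request):
        # Prepare the unprepared Request with session defaults and send it.
        prepared = self.session.prepare_request(request)
        response = self.session.send(prepared, timeout=30)
        response.raise_for_status()
        return response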
Example #2
    def perform_request(self, nest_request, verbose_errors=True):

        url = self._make_nest_api_url(nest_request)
        data_payload = None
        http_code = None
        exception = None
        headers = None

        num_tries = nest_request.get_num_tries()
        no_success = True
        while num_tries > 0 and no_success:

            #if not our first try, wait for the delay period
            if num_tries != nest_request.get_num_tries():
                time.sleep(nest_request.get_retry_delay_secs())

            num_tries = num_tries - 1
            try:
                headers = nest_request.get_headers()
                # set the optional auth_token
                if self.auth_token is not None:
                    headers = headers.copy()  # don't modify caller's object
                    headers['Authorization'] = 'Bearer ' + self.auth_token

                ##see http://docs.python-requests.org/en/latest/user/advanced/#request-and-response-objects
                request = Request(
                    nest_request.get_http_op(),
                    url,
                    data=nest_request.get_data_payload(),
                    files=nest_request.get_files_payload(),
                    headers=headers,
                    params=nest_request.get_query_params(),
                )
                prepped_request = self.session.prepare_request(request)
                op = nest_request.get_http_op()
                full_url = prepped_request.url
                if VERBOSE:
                    log(op + ': ' + str(full_url))
                    log('request body: ')
                    log(str(nest_request.get_data_payload()))
                resp = self.session.send(
                    prepped_request,
                    timeout=nest_request.get_timeout_secs(),
                    allow_redirects=True,
                    verify=False)
                data_payload = resp.text
                if VERBOSE:
                    log('response code: ' + str(resp.status_code))
                    if resp.headers[
                            'Content-Type'] == 'text/html; charset=utf-8':
                        log('response body:  <<suppressing html response>>')
                    else:
                        log('response body: ' + resp.text)
                http_code = resp.status_code
                headers = resp.headers
            except Timeout as te:
                exception = te
            except ConnectionError as te:
                exception = te
            except Exception as e:
                traceback.print_exc()
                exception = e

            nest_response = NestHttpResponse(nest_request,
                                             http_code=http_code,
                                             exception=exception,
                                             data_payload=data_payload,
                                             headers=headers)

            if nest_response.did_succeed():
                no_success = False
            else:
                if verbose_errors:
                    log(nest_response.get_error_message())
                    log("num tries remaining: " + str(num_tries))

        return nest_response
Example #3
 def test_encode_request(self):
     r = Request('GET', "http://www.example.com/lala")
     rs = self.encoder.encode(r)
     self.assertIn(r.method, rs)
     self.assertIn(r.url, rs)
Example #4
    def extract_page(self, parameters):
        from requests import Request, Session
        from random import randint
        from time import sleep

        # Check if we have a URL and a filename
        if 'url' not in parameters:
            return {
                'error': True,
                'error_message': 'No URL in the parameters',
            }

        if 'filename' not in parameters:
            return {
                'error': True,
                'error_message': 'No FILENAME in the parameters',
            }

        # print('   Url: ' + parameters['url'])  # TODO Test output to debug the code

        if 'requestType' not in parameters:
            parameters['requestType'] = 'GET'

        # Set the request Headers
        headers = parameters['headers'] if 'headers' in parameters else None

        # GET Parameters
        params = parameters['params'] if 'params' in parameters else None

        # Request Body for 'x-url-encoded' parameters
        data = parameters['data'] if 'data' in parameters else None

        # Set the request Cookies
        cookies = parameters['cookies'] if 'cookies' in parameters else None

        # Set the request proxies
        proxy = parameters['proxy'] if 'proxy' in parameters else None

        # Set the max retries number
        max_retries = parameters['retries'] if 'retries' in parameters else 1
        retries_wait_range = parameters['retries_wait_range'] if 'retries_wait_range' in parameters else [3, 5]
        current_retry = 1
        html_source = None

        # Prepare the desired Request and make the download
        session = Session()

        # html_source = requests.request('GET', url, headers=headers, proxies=self.proxies, cookies=self.cookieJar)
        desired_request = Request(parameters['requestType'], parameters['url'], headers=headers, params=params,
                                  data=data, cookies=cookies)

        prepared_request = desired_request.prepare()

        error = {
            'error': True,
            'error_message': 'First request',
        }

        while error['error'] and current_retry <= max_retries:
            html_source = session.send(prepared_request, proxies=proxy)

            # Check for error after the data download
            error = self.extraction_error_check(html_source)
            if error['error']:
                if current_retry == max_retries:
                    filename = 'error-' + parameters['filename']
                else:
                    sleep(randint(retries_wait_range[0], retries_wait_range[1]))
                    filename = 'retry-' + str(current_retry) + '-' + parameters['filename']
            else:
                filename = parameters['filename']

            # Save the downloaded data into the HDD
            self.save_source_to_file(html_source, os.path.dirname(self.tempDir) + '/' + filename)

            # Increase the request number and sleep a random time from the range
            current_retry = current_retry + 1

        return html_source
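For reference, a hedged usage sketch of the parameters dict this method expects; the keys are inferred from the checks above, while the scraper instance and URL are purely illustrative:

# Hypothetical call, with keys inferred from the parameter checks above.
parameters = {
    'url': 'https://example.com/page',
    'filename': 'page.html',
    'requestType': 'GET',               # defaults to 'GET' when omitted
    'headers': {'User-Agent': 'Mozilla/5.0'},
    'params': {'q': 'test'},
    'retries': 3,                       # max attempts before giving up
    'retries_wait_range': [3, 5],       # random sleep (seconds) between retries
}
response = scraper.extract_page(parameters)   # `scraper` is an assumed instance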
Example #5
def _prepare_request(reddit_session,
                     url,
                     params,
                     data,
                     auth,
                     files,
                     method=None):
    """Return a requests Request object that can be "prepared"."""
    # Requests using OAuth for authorization must switch to using the oauth
    # domain.
    if getattr(reddit_session, '_use_oauth', False):
        headers = {'Authorization': 'bearer %s' % reddit_session.access_token}
        config = reddit_session.config
        for prefix in (config.api_url, config.permalink_url):
            if url.startswith(prefix):
                if config.log_requests >= 1:
                    sys.stderr.write('substituting {} for {} in url\n'.format(
                        config.oauth_url, prefix))
                url = config.oauth_url + url[len(prefix):]
                break
    else:
        headers = {}
    headers.update(reddit_session.http.headers)

    if method:
        pass
    elif data or files:
        method = 'POST'
    else:
        method = 'GET'

    # Log the request if logging is enabled
    if reddit_session.config.log_requests >= 1:
        sys.stderr.write('{0}: {1}\n'.format(method, url))
    if reddit_session.config.log_requests >= 2:
        if params:
            sys.stderr.write('params: {0}\n'.format(params))
        if data:
            sys.stderr.write('data: {0}\n'.format(data))
        if auth:
            sys.stderr.write('auth: {0}\n'.format(auth))
    # Prepare request
    request = Request(method=method,
                      url=url,
                      headers=headers,
                      params=params,
                      auth=auth,
                      cookies=reddit_session.http.cookies)
    if method == 'GET':
        return request
    # Most POST requests require adding `api_type` and `uh` to the data.
    if data is True:
        data = {}

    if isinstance(data, dict):
        if not auth:
            data.setdefault('api_type', 'json')
            if reddit_session.modhash:
                data.setdefault('uh', reddit_session.modhash)
    else:
        request.headers.setdefault('Content-Type', 'application/json')

    request.data = data
    request.files = files
    return request
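The function returns an unprepared Request with data and files set as plain attributes; a caller would presumably prepare and send it through the session stored on reddit_session.http (which exposes .headers and .cookies above). A rough sketch under that assumption, not PRAW's actual sending code:

# Hedged sketch of sending the returned Request; assumes reddit_session.http
# is a requests.Session.
request = _prepare_request(reddit_session, url, params, data, auth, files)
prepared = reddit_session.http.prepare_request(request)
response = reddit_session.http.send(prepared, timeout=45)
response.raise_for_status()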
Example #6
 def get_statuses(self, timeout=None):
     request = Request('GET', self.__create_url(None, 'statuses'))
     response = self.__send_request(request, timeout)
     return response.json()
Example #7
def login_broker_sso_form(settings, pytestconfig):
    """
    Fixture to perform the log in when we have a broker and an external IDP
    :param settings: settings of the IDP and SP
    :param pytestconfig: fixture that provides the standard used for log in: WSFED or SAML
    :return:
    """
    standard = pytestconfig.getoption('standard')

    s = Session()

    # Standard
    if standard == "WSFED":
        client = "sps_wsfed"
        idp_broker = settings["idp"]["saml_broker"]
    elif standard == "SAML":
        client = "sps_saml"
        idp_broker = settings["idp"]["wsfed_broker"]

    # Service provider settings
    sp = settings[client][0]
    sp_ip = sp["ip"]
    sp_port = sp["port"]
    sp_scheme = sp["http_scheme"]
    sp_path = sp["path"]

    # Identity provider settings
    idp_ip = settings["idp"]["ip"]
    idp_port = settings["idp"]["port"]
    idp_scheme = settings["idp"]["http_scheme"]

    idp2_ip = settings["idp_external"]["ip"]
    idp2_port = settings["idp_external"]["port"]
    idp2_scheme = settings["idp_external"]["http_scheme"]

    idp_username = settings["idp_external"]["test_realm"]["username"]
    idp_password = settings["idp_external"]["test_realm"]["password"]

    keycloak_login_form_id = settings["idp"]["login_form_id"]

    # Common header for all the requests
    header = req.get_header()

    (session_cookie, response) = req.access_sp_saml(logger, s, header, sp_ip,
                                                    sp_port, sp_scheme,
                                                    sp_path, idp_ip, idp_port)

    # store the cookie received from keycloak
    keycloak_cookie = response.cookies

    redirect_url = response.headers['Location']

    header_redirect_idp = {
        **header, 'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
        'Referer': "{ip}:{port}".format(ip=sp_ip, port=sp_port)
    }

    response = req.redirect_to_idp(logger, s, redirect_url,
                                   header_redirect_idp, keycloak_cookie)

    # On the login page we can choose to log in with the external IDP
    soup = BeautifulSoup(response.content, 'html.parser')

    div = soup.find("div", {"id": "kc-social-providers"})

    # we can have several external IDPs; choose the one needed for the test
    all_li = div.find_all('li')
    for li in all_li:
        if li.span.text == idp_broker:
            external_idp_url = "{scheme}://{ip}:{port}".format(
                scheme=idp_scheme, ip=idp_ip, port=idp_port) + li.a['href']

    # Select to login with the external IDP
    req_choose_external_idp = Request(method='GET',
                                      url="{url}".format(url=external_idp_url),
                                      headers=header,
                                      cookies=keycloak_cookie)

    prepared_request = req_choose_external_idp.prepare()

    log_request(logger, req_choose_external_idp)

    response = s.send(prepared_request, verify=False, allow_redirects=False)

    logger.debug(response.status_code)

    # get the HTTP binding response with the url to the external IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form

    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    params = {}
    for input in inputs:
        params[input.get('name')] = input.get('value')

    header_redirect_external_idp = {
        **header, 'Host': "{ip}:{port}".format(ip=idp2_ip, port=idp2_port),
        'Referer': "{ip}:{port}".format(ip=idp_ip, port=idp_port)
    }

    # Redirect to external IDP
    if idp_broker == "cloudtrust_saml":
        req_redirect_external_idp = Request(
            method=method_form,
            url="{url}".format(url=url_form),
            data=params,
            headers=header_redirect_external_idp)
    else:
        req_redirect_external_idp = Request(
            method=method_form,
            url="{url}".format(url=url_form),
            params=params,
            headers=header_redirect_external_idp)

    referer_url = url_form

    prepared_request = req_redirect_external_idp.prepare()

    log_request(logger, req_redirect_external_idp)

    response = s.send(prepared_request, verify=False, allow_redirects=False)

    logger.debug(response.status_code)

    # if we have an identity provider saml, we do an extra redirect
    if idp_broker == "cloudtrust_saml":
        redirect_url = response.headers['Location']
        keycloak_cookie2 = response.cookies
        response = req.redirect_to_idp(logger, s, redirect_url, header,
                                       keycloak_cookie2)
    else:
        keycloak_cookie2 = response.cookies

    soup = BeautifulSoup(response.content, 'html.parser')

    form = soup.find("form", {"id": keycloak_login_form_id})

    url_form = form.get('action')
    method_form = form.get('method')
    inputs = form.find_all('input')

    input_name = []
    for input in inputs:
        input_name.append(input.get('name'))

    credentials_data = {}
    credentials_data["username"] = idp_username
    credentials_data["password"] = idp_password

    # Authenticate to the external IDP
    response = req.send_credentials_to_idp(logger, s, header, idp2_ip,
                                           idp2_port, referer_url, url_form,
                                           credentials_data, {
                                               **keycloak_cookie2,
                                               **session_cookie
                                           }, method_form)

    keycloak_cookie3 = response.cookies

    # get the HTTP binding response with the url to the broker IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form

    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    token = {}
    for input in inputs:
        token[input.get('name')] = input.get('value')

    req_token_from_external_idp = Request(method=method_form,
                                          url="{url}".format(url=url_form),
                                          data=token,
                                          cookies=keycloak_cookie,
                                          headers=header)

    prepared_request = req_token_from_external_idp.prepare()

    log_request(logger, req_token_from_external_idp)

    response = s.send(prepared_request, verify=False, allow_redirects=False)

    logger.debug(response.status_code)

    if response.status_code == HTTPStatus.FOUND:  # user logs in for the first time and has to fill in a form
        response = req.broker_fill_in_form(logger, s, response, header,
                                           keycloak_cookie, idp_broker,
                                           settings)

    # Get the token (SAML response) from the broker IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form

    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    token = {}
    for input in inputs:
        token[input.get('name')] = input.get('value')

    # Access SP with the token
    (response,
     sp_cookie) = req.access_sp_with_token(logger, s, header, sp_ip, sp_port,
                                           sp_scheme, idp_scheme, idp_ip,
                                           idp_port, method_form, url_form,
                                           token, session_cookie,
                                           keycloak_cookie)

    return sp_cookie, keycloak_cookie3, response.status_code
Example #8
 def get_project_description(self, project):
     """Retrieves the description of a project."""
     url = self.gerrit.url('PROJECT_DESCRIPTION', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #9
 def delete_project_description(self, project):
     """Deletes the description of a project."""
     url = self.gerrit.url('PROJECT_DESCRIPTION', project_name=project)
     r = Request(method='DELETE', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #10
 def list_projects(self):
     """Lists the projects accessible by the caller. """
     url = self.gerrit.url('LIST_PROJECTS')
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #11
 def get_project(self, project):
     """Retrieves a project."""
     url = self.gerrit.url('PROJECT', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #12
 def list_dashboards(self, project):
     """List custom dashboards for a project."""
     url = self.gerrit.url('LIST_DASHBOARDS', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #13
 def get_tag(self, project, tag):
     """Retrieves a tag of a project."""
     url = self.gerrit.url('GET_TAG', project_name=project, tag=tag)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #14
 def list_tags(self, project):
     """List the tags of a project.
     Only includes tags under the refs/tags/ namespace."""
     url = self.gerrit.url('LIST_TAGS', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #15
 def __execute(self, client=None, timeout=None, **kwargs):
     if not kwargs.get('type'):
         kwargs['type'] = 'LIMIT'
     request = Request('POST', self.__create_order_url(client), json=kwargs)
     response = self.__send_request(request)
     return response.json()
Example #16
 def get_project_parent(self, project):
     """Retrieves the name of a project’s parent project.
     For the All-Projects root project an empty string is returned."""
     url = self.gerrit.url('PROJECT_PARENT', project_name=project)
     r = Request(method='GET', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
Example #17
 def __change_clients_status(self, status, timeout=None):
     request = Request('PATCH',
                       self.__create_url(None, 'clients'),
                       json={'status': status})
     self.__send_request(request, timeout)
Example #18
    def test_multiple_settings_webhook(self, requests_mock):
        def handle_get(url, auth):
            with open(
                    os.path.join(os.path.dirname(__file__), 'test_data',
                                 'shipments.json')) as shipment_json:
                shipment_json = json.load(shipment_json)
                shipment = shipment_json.get("shipments")[0]
                shipment["orderKey"] = self.pick_list
                shipment["orderNumber"] = self.pick_list
            response_mock = Mock()
            response_mock.status_code = 200
            response_mock.json.return_value = shipment_json
            return response_mock

        def handle_delete(url, auth):
            #if called with the webhook authentication raise an error
            webhook_auth = (self.settingId.api_key,
                            self.settingId.get_password('api_secret'))
            if auth == webhook_auth:
                raise RuntimeError(
                    "Requests delete called with webhook settings")
            else:
                response_mock = Mock()
                response_mock.status_code = 200
                return response_mock

        #Add second setting
        shipstation_settings = frappe.new_doc("Shipstation Settings")
        shipstation_settings.update({
            "api_key": '_Test_98980898989890',
            "api_secret": '98989898989898',
        })

        #Map lead source
        shipstation_settings.append("store_mapping", {
            "source": self.lead_source.name,
            "store_id": "_Test_Store_ID"
        })

        #Map transporter
        shipstation_settings.append("transporter_mapping", {
            "transporter": "_Test Supplier",
            "carrier_code": "canada_post"
        })
        second_setting = shipstation_settings.insert(ignore_permissions=True)

        #Add the two settings (orderIds) to the delivery note
        delivery_note = frappe.get_doc('Delivery Note',
                                       self.delivery_note.name)
        order_table = frappe.new_doc('Shipstation Order ID', delivery_note,
                                     'ais_shipstation_order_ids')
        order_table.update({
            'settings_id': self.settingId.name,
            'shipstation_order_id': '_Test_orderId'
        })
        order_table.save()
        order_table = frappe.new_doc('Shipstation Order ID', delivery_note,
                                     'ais_shipstation_order_ids')
        order_table.update({
            'settings_id': second_setting.name,
            'shipstation_order_id': '_Test_orderId2'
        })
        order_table.save()

        #Mock the request URL
        url = "http://deverp.metactical.com/api/method/metactical.api.shipstation.orders_shipped_webhook?settingid=" + self.settingId.name
        data = '{"resource_url": "https://test.shipstationurl.com", "resource_type": "SHIP_NOTIFY"}'
        #frappe.request.url = "http://deverp.metactical.com/api/method/metactical.api.shipstation.orders_shipped_webhook?settingid=" + self.settingId.name
        frappe.request = Request('Post', url, data=data)

        requests_mock.get = Mock(side_effect=handle_get)
        requests_mock.delete = Mock(side_effect=handle_delete)
        orders_shipped_webhook()

        #Assert delete called with second authentication settings
        auth = (second_setting.api_key,
                second_setting.get_password('api_secret'))
        requests_mock.delete.assert_called_with(
            'https://ssapi.shipstation.com/orders/_Test_orderId2', auth=auth)

        #Verify the delivery note is submitted and the shipment data has been saved
        delivery_note_name = frappe.db.get_value("Delivery Note",
                                                 {"pick_list": self.pick_list})
        delivery_note = frappe.get_doc("Delivery Note", delivery_note_name)

        self.assertEqual(delivery_note.docstatus, 1)
        self.assertEqual(delivery_note.transporter, "_Test Supplier")
        self.assertEqual(delivery_note.lr_no, "7302361059843272")
        self.assertEqual(delivery_note.ais_package_weight, "192.0 ounces")
        self.assertEqual(delivery_note.lr_date,
                         datetime.date(datetime(2021, 9, 27)))
        self.assertEqual(delivery_note.ais_shipment_cost, 22.25)
        self.assertEqual(delivery_note.ais_package_size,
                         "30.0l x 10.0w x 10.0h")
        self.assertEqual(delivery_note.ais_updated_by_shipstation, 1)
Example #19
def tile_url(layer, z, x, y, style=None, internal=True):
    """Construct actual tile request to QGIS Server.

    Different than tile_url_format, this method will return url for requesting
    a tile, with all parameters filled out.

    :param layer: Layer to use
    :type layer: Layer

    :param z: TMS coordinate, zoom parameter
    :type z: int, str

    :param x: TMS coordinate, longitude parameter
    :type x: int, str

    :param y: TMS coordinate, latitude parameter
    :type y: int, str

    :param style: Layer style to choose
    :type style: str

    :param internal: Flag to switch between public url and internal url.
        Public url will be served by Django Geonode (proxified).
    :type internal: bool

    :return: Tile url
    :rtype: str
    """
    try:
        qgis_layer = QGISServerLayer.objects.get(layer=layer)
    except QGISServerLayer.DoesNotExist:
        msg = 'No QGIS Server Layer for existing layer {0}'.format(layer.name)
        logger.debug(msg)
        raise

    x = int(x)
    y = int(y)
    z = int(z)

    # Call the WMS
    top, left = num2deg(x, y, z)
    bottom, right = num2deg(x + 1, y + 1, z)

    transform = CoordTransform(SpatialReference(4326), SpatialReference(3857))
    top_left_corner = Point(left, top, srid=4326)
    bottom_right_corner = Point(right, bottom, srid=4326)
    top_left_corner.transform(transform)
    bottom_right_corner.transform(transform)

    bottom = bottom_right_corner.y
    right = bottom_right_corner.x
    top = top_left_corner.y
    left = top_left_corner.x

    bbox = ','.join(str(val) for val in [left, bottom, right, top])

    if not style:
        style = 'default'

    if style not in [s.name for s in qgis_layer.styles.all()]:
        if qgis_layer.default_style:
            style = qgis_layer.default_style.name

    query_string = {
        'SERVICE': 'WMS',
        'VERSION': '1.3.0',
        'REQUEST': 'GetMap',
        'BBOX': bbox,
        'CRS': 'EPSG:3857',
        'WIDTH': '256',
        'HEIGHT': '256',
        'MAP': qgis_layer.qgis_project_path,
        'LAYERS': layer.name,
        'STYLE': style,
        'FORMAT': 'image/png',
        'TRANSPARENT': 'true',
        'DPI': '96',
        'MAP_RESOLUTION': '96',
        'FORMAT_OPTIONS': 'dpi:96'
    }
    qgis_server_url = qgis_server_endpoint(internal)
    url = Request('GET', qgis_server_url, params=query_string).prepare().url

    return url
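Note the trick on the last line above: the unprepared Request is never sent; prepare() is used only to produce a URL with properly encoded query parameters. The same idea in isolation, with hypothetical values:

from requests import Request

# Build a fully encoded URL without issuing any HTTP request (example values).
url = Request('GET', 'http://example.com/qgis-server',
              params={'SERVICE': 'WMS', 'BBOX': '0,0,10,10'}).prepare().url
print(url)  # http://example.com/qgis-server?SERVICE=WMS&BBOX=0%2C0%2C10%2C10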
Example #20
def get_metadata(
        eqlat=None,
        eqlon=None,
        eqtime=None,
        eqradius=10,
        abandoned=False,
        station_type='Ground',
        eqtimewindow=10,  # seconds
        station_radius=200):
    """Retrieve station metadata JSON from CESMD web service.

    Args:
        eqlat (float):
            Earthquake latitude.
        eqlon (float):
            Earthquake longitude.
        eqtime (datetime):
            Earthquake origin time.
        eqradius (float):
            Earthquake search radius (km).
        abandoned (bool):
            Whether or not to include abandoned stations in the search.
        station_type (str):
            One of the following station types: [%s]
        eqtimewindow (float):
            Earthquake time search window in sec.
        station_radius (str):
            Radius (km) to search for stations from epicenter.

    Returns:
        dict: Dictionary of event/station information.

    Raises:
        ValueError
        ConnectionError

    """ % (','.join(STATION_TYPES))
    params = {
        'rettype': 'metadata',
        'groupby': 'event',
        'format': 'json',
        'nodata': 404,
        'sttype': STATION_TYPES[station_type],
        'abandoned': abandoned
    }
    has_event_info = (eqlat is not None) and (eqlon
                                              is not None) and (eqtime
                                                                is not None)

    if not has_event_info:
        raise ValueError(
            'get_metadata must get either event id or event information.')
    else:
        starttime = eqtime - timedelta(seconds=eqtimewindow // 2)
        endtime = eqtime + timedelta(seconds=eqtimewindow // 2)
        params['elat'] = eqlat
        params['elon'] = eqlon
        params['erad'] = eqradius
        params['startdate'] = starttime.strftime('%Y-%m-%dT%H:%M:%S')
        params['enddate'] = endtime.strftime('%Y-%m-%dT%H:%M:%S')
        params['maxepidist'] = station_radius

    session = Session()
    request = Request('GET', URL_TEMPLATE, params=params).prepare()
    response = session.get(request.url)
    if response.status_code != 200:
        fmt = 'Could not retrieve data from url "%s": Server response %i'
        raise ConnectionError(fmt % (request.url, response.status_code))
    metadata = response.json()

    return metadata
Example #21
def delete_realm(settings):
    """
    Fixture to perform the deletion of a realm from Keycloak
    :param settings:
    :return:
    """
    # Identity provider settings
    idp_ip = settings["idp"]["ip"]
    idp_port = settings["idp"]["port"]
    idp_scheme = settings["idp"]["http_scheme"]

    idp_username = settings["idp"]["master_realm"]["username"]
    idp_password = settings["idp"]["master_realm"]["password"]
    idp_client_id = settings["idp"]["master_realm"]["client_id"]

    idp_realm_id = settings["idp"]["master_realm"]["name"]

    idp_realm_test = settings["idp"]["test_realm"]["name"]

    s = Session()

    access_token_data = {
        "client_id": idp_client_id,
        "username": idp_username,
        "password": idp_password,
        "grant_type": "password"
    }

    access_token = req.get_access_token(logger, s, access_token_data,
                                        idp_scheme, idp_port, idp_ip,
                                        idp_realm_id)

    header = {
        'Accept':
        "application/json,text/plain, */*",
        'Accept-Encoding':
        "gzip, deflate",
        'Accept-Language':
        "en-US,en;q=0.5",
        'User-Agent':
        "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0",
        'Connection':
        "keep-alive",
        'Content-Type':
        "application/json",
        'Referer':
        "{scheme}://{ip}:{port}/auth/admin/master/console/".format(
            scheme=idp_scheme, ip=idp_ip, port=idp_port),
        'Host':
        "{ip}:{port}".format(ip=idp_ip, port=idp_port),
        "DNT":
        "1",
        "Keep-Alive":
        "timeout=15, max=3",
        'Authorization':
        'Bearer ' + access_token
    }

    req_delete_realm = Request(
        method='DELETE',
        url="{scheme}://{ip}:{port}/auth/admin/realms/{realm}".format(
            scheme=idp_scheme, ip=idp_ip, port=idp_port, realm=idp_realm_test),
        headers=header,
    )

    prepared_request = req_delete_realm.prepare()

    log_request(logger, req_delete_realm)

    response = s.send(prepared_request, verify=False)

    logger.debug(response.status_code)

    return response
Example #22
def get_records(
    output,
    email,
    unpack=False,
    process_level='raw',
    group_by='event',
    minpga=None,
    maxpga=None,
    min_station_dist=None,
    max_station_dist=None,
    network=None,
    station_type='Ground',
    include_inactive=False,
    station_name=None,
    min_station_latitude=None,
    max_station_latitude=None,
    min_station_longitude=None,
    max_station_longitude=None,
    station_latitude=None,
    station_longitude=None,
    radius_km=None,
    station_code=None,
    event_name=None,
    minmag=None,
    maxmag=None,
    fault_type=None,
    startdate=None,
    enddate=None,
    min_event_latitude=None,
    max_event_latitude=None,
    min_event_longitude=None,
    max_event_longitude=None,
    event_latitude=None,
    event_longitude=None,
    event_radius=None,
    eventid=None,
):
    """Retrieve strong motion waveform records from CESMD website.

    Args:
        output (str):
            Filename or directory where downloaded zip data will be written.
        unpack (bool):
            If True, all zipped files will be unpacked (output will become a
            directory name.)
        email (str):
            Email address of requesting user.
        process_level (str):
            One of 'raw','processed','plots'.
        group_by (str):
            One of 'event', 'station'
        minpga (float):
            Minimum PGA value.
        maxpga (float):
            Maximum PGA value.
        min_station_dist (float):
            Minimum station distance from epicenter.
        max_station_dist (float):
            Maximum station distance from epicenter.
        network (str):
            Source network of strong motion data.
        station_type (str):
            Type of strong motion station (array, dam, etc.)
        include_inactive (bool):
            Include results from stations that are no longer active.
        station_name (str):
            Search only for station matching input name.
        min_station_latitude (float):
            Latitude station min when using a box search.
        max_station_latitude (float):
            Latitude station max when using a box search.
        min_station_longitude (float):
            Longitude station min when using a box search.
        max_station_longitude (float):
            Longitude station max when using a box search.
        station_latitude (float):
            Center latitude for station search.
        station_longitude (float):
            Center longitude for station search.
        radius_km (float):
            Radius (km) for station search.
        station_code (str):
            Particular station code to search for.
        event_name (str):
            Earthquake name to search for.
        minmag (float):
            Magnitude minimum when using a magnitude search.
        maxmag (float):
            Magnitude maximum when using a magnitude search.
        fault_type (str):
            Fault type.
        startdate (str):
            Start date/time in YYYY-MM-DD HH:MM:SS format.
        enddate (str):
            End date/time in YYYY-MM-DD HH:MM:SS format.
        min_event_latitude (float):
            Latitude event min when using a box search.
        max_event_latitude (float):
            Latitude event max when using a box search.
        min_event_longitude (float):
            Longitude event min when using a box search.
        max_event_longitude (float):
            Longitude event max when using a box search.
        event_latitude (float):
            Center earthquake latitude for radius search.
        event_longitude (float):
            Center earthquake longitude for radius search.
        event_radius (float):
            Earthquake search radius (km).
        eventid (str):
            NEIC or other ANSS event ID.

    Returns:
        tuple: (Top level output directory, list of data files)

    Raises:
        KeyError
    """
    # getting the inputargs must be the first line of the method!
    inputargs = locals().copy()
    del inputargs['output']
    del inputargs['unpack']

    # note: this only supports one of the options or all of them,
    # no other combinations. ??
    if process_level not in PROCESS_LEVELS:
        fmt = 'Only process levels of %s are supported (%s was input)'
        tpl = (','.join(PROCESS_LEVELS), process_level)
        raise KeyError(fmt % tpl)

    if group_by not in GROUP_OPTIONS:
        fmt = 'Only group-by options of %s are supported (%s was input)'
        tpl = (','.join(GROUP_OPTIONS), group_by)
        raise KeyError(fmt % tpl)

    # determine which network user wanted
    if network is not None and network not in NETWORKS:
        fmt = 'Network with ID %s not found in list of supported networks.'
        tpl = network
        raise KeyError(fmt % tpl)

    if station_type is not None and station_type not in STATION_TYPES:
        fmt = 'Station type %s not found in list of supported types.'
        tpl = station_type
        raise KeyError(fmt % tpl)

    # convert 'Ground' to 'G' for example
    inputargs['station_type'] = STATION_TYPES[inputargs['station_type']]

    # check against list of fault types
    if fault_type is not None and fault_type not in FAULT_TYPES:
        fmt = 'Fault type %s not found in supported fault types %s.'
        tpl = (fault_type, ','.join(FAULT_TYPES))
        raise KeyError(fmt % tpl)

    # make sure there is only one method being used to select station
    # geographically
    if min_station_latitude is not None and station_latitude is not None:
        raise Exception(
            'Select stations either by bounding box or by radius, not both.')

    # make sure there is only one method being used to select events
    # geographically
    if min_event_latitude is not None and event_latitude is not None:
        raise Exception(
            'Select events either by bounding box or by radius, not both.')

    # now convert process levels to string webservice expects
    levels = {'processed': 'P', 'raw': 'R', 'plots': 'T', 'all': 'P,R,T'}
    inputargs['process_level'] = levels[process_level]

    # now convert input args to keys of parameters expected by
    params = {}
    for key, value in inputargs.items():
        if key in KEY_TABLE:
            params[KEY_TABLE[key]] = value
        else:
            params[key] = value

    # convert all booleans to strings that are 'true' and 'false'
    for key, value in params.items():
        if isinstance(value, bool):
            if value:
                params[key] = 'true'
            else:
                params[key] = 'false'

    # add in a couple of parameters that seem to be required
    params['orderby'] = 'epidist-asc'
    params['nodata'] = '404'
    params['rettype'] = 'dataset'

    session = Session()
    request = Request('GET', URL_TEMPLATE, params=params).prepare()
    url = request.url
    response = session.get(request.url)

    if not response.status_code == 200:
        fmt = 'Your url "%s" returned a status code of %i with message: "%s"'
        raise ConnectionError(fmt %
                              (url, response.status_code, response.reason))

    if unpack:
        if not os.path.exists(output):
            os.makedirs(output)
        fbytes = io.BytesIO(response.content)
        myzip = zipfile.ZipFile(fbytes, mode='r')
        members = myzip.namelist()
        for member in members:
            finfo = myzip.getinfo(member)
            if finfo.is_dir():
                continue
            if not member.lower().endswith('.zip'):
                fin = myzip.open(member)
                flatfile = member.replace('/', '_')
                outfile = os.path.join(output, flatfile)
                with open(outfile, 'wb') as fout:
                    fout.write(fin.read())
                fin.close()
            else:
                zfiledata = io.BytesIO(myzip.read(member))
                try:
                    tmpzip = zipfile.ZipFile(zfiledata, mode='r')
                    tmp_members = tmpzip.namelist()
                    for tmp_member in tmp_members:
                        tfinfo = tmpzip.getinfo(tmp_member)
                        if not tfinfo.is_dir():
                            fin = tmpzip.open(tmp_member)
                            flatfile = tmp_member.replace('/', '_')
                            parent, _ = os.path.splitext(member)
                            parent = parent.replace('/', '_')
                            # sometimes the member ends with .zip.zip (??)
                            parent = parent.replace('.zip', '')
                            datadir = os.path.join(output, parent)
                            if not os.path.exists(datadir):
                                os.makedirs(datadir)
                            outfile = os.path.join(datadir, flatfile)
                            with open(outfile, 'wb') as fout:
                                fout.write(fin.read())
                            fin.close()
                    tmpzip.close()
                    zfiledata.close()
                except BaseException as e:
                    fmt = ('Could not unpack sub-zip file "%s" due to error '
                           '"%s". Skipping.')
                    print(fmt % (member, str(e)))
                    continue

        myzip.close()

        datafiles = []
        for root, fdir, files in os.walk(output):
            for tfile in files:
                if not tfile.endswith('.json'):
                    datafile = os.path.join(root, tfile)
                    datafiles.append(datafile)

        return (os.path.abspath(output), datafiles)
    else:
        if not output.endswith('.zip'):
            output += '.zip'
        with open(output, 'wb') as f:
            f.write(response.content)
        return (output, [])
Example #23
    def test_CT_TC_WS_FED_IDP_LOGOUT_PERIMETRIC(self, settings,
                                                login_sso_form):
        """
        Scenario: user is logged in on several SPs.
        The user logs out of one SP. Access to all the other SPs should require a new log in.
        :param settings:
        :return:
        """

        s = Session()

        # Service provider settings
        sp_ip = settings["service_provider"]["ip"]
        sp_port = settings["service_provider"]["port"]
        sp_scheme = settings["service_provider"]["http_scheme"]
        sp_logout_path = settings["service_provider"]["logout_path"]
        sp_message = settings["service_provider"]["logged_out_message"]
        sp_path = settings["service_provider"]["path"]

        # Service provider 2 settings
        sp2_ip = settings["service_provider2"]["ip"]
        sp2_port = settings["service_provider2"]["port"]
        sp2_scheme = settings["service_provider2"]["http_scheme"]
        sp2_logout_path = settings["service_provider2"]["logout_path"]
        sp2_path = settings["service_provider2"]["path"]
        sp2_message = settings["service_provider2"]["logged_in_message"]

        # Identity provider settings
        idp_ip = settings["identity_provider"]["ip"]
        idp_port = settings["identity_provider"]["port"]
        idp_scheme = settings["identity_provider"]["http_scheme"]

        # Common header for all the requests
        header = {
            'Accept':
            "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            'Accept-Encoding': "gzip, deflate",
            'Accept-Language': "en-US,en;q=0.5",
            'User-Agent':
            "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0",
            'Connection': "keep-alive",
            'Upgrade-Insecure-Requests': "1",
        }

        # Perform login using the fixture login_sso_form
        sp_cookie, keycloak_cookie = login_sso_form

        # User is logged in on SP1

        # Perform login on SP2

        response = req.access_sp_ws_fed(s, header, sp2_ip, sp2_port,
                                        sp2_scheme, sp2_path)

        session_cookie = response.cookies

        redirect_url = response.headers['Location']

        header_redirect_idp = {
            **header, 'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
            'Referer': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port)
        }

        response = req.redirect_to_idp(s, redirect_url, header_redirect_idp,
                                       {**keycloak_cookie})

        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.body.form

        url_form = form.get('action')
        inputs = form.find_all('input')
        method_form = form.get('method')

        ws_fed_response = {}
        for input in inputs:
            ws_fed_response[input.get('name')] = input.get('value')

        (response, sp2_cookie) = req.access_sp_with_token(
            s, header, sp2_ip, sp2_port, idp_scheme, idp_ip, idp_port,
            method_form, url_form, ws_fed_response, session_cookie,
            keycloak_cookie)

        # req_get_sp_login_reload_page = Request(
        #     method='GET',
        #     url="{scheme}://{ip}:{port}/{path}".format(
        #         scheme=sp2_scheme,
        #         port=sp2_port,
        #         ip=sp2_ip,
        #         path=sp2_path
        #     ),
        #     headers=header_sp2_reload_page,
        #     cookies={**session_cookie}
        # )
        #
        # prepared_request = req_get_sp_login_reload_page.prepare()
        #
        # logger.debug(
        #     json.dumps(
        #         prepared_request_to_json(req_get_sp_login_reload_page),
        #         sort_keys=True,
        #         indent=4,
        #         separators=(',', ': ')
        #     )
        # )
        #
        # response = s.send(prepared_request, verify=False, allow_redirects=False)
        #
        # logger.debug(response.status_code)
        #
        # # the user is logged in and refreshing the page will return an OK
        # assert response.status_code == 200

        # User is now logged in on both applications: SP1 and SP2

        # Logout from the first applications
        header_sp_logout_page = {
            **header, 'Host':
            "{ip}:{port}".format(ip=sp_ip, port=sp_port),
            'Referer':
            "{scheme}://{ip}:{port}".format(scheme=sp_scheme,
                                            ip=sp_ip,
                                            port=sp_port)
        }

        req_get_sp_logout_page = Request(
            method='GET',
            url="{scheme}://{ip}:{port}/{path}".format(scheme=sp_scheme,
                                                       port=sp_port,
                                                       ip=sp_ip,
                                                       path=sp_logout_path),
            headers=header_sp_logout_page,
            cookies={**sp_cookie})

        prepared_request = req_get_sp_logout_page.prepare()

        logger.debug(
            json.dumps(prepared_request_to_json(req_get_sp_logout_page),
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))

        response = s.send(prepared_request,
                          verify=False,
                          allow_redirects=False)

        logger.debug(response.status_code)

        # new session cookie
        session_cookie2 = response.cookies

        redirect_url = response.headers['Location']

        req_sp_logout_redirect = Request(method='GET',
                                         url=redirect_url,
                                         headers=header_sp_logout_page,
                                         cookies={**sp_cookie})

        prepared_request = req_sp_logout_redirect.prepare()

        logger.debug(
            json.dumps(prepared_request_to_json(req_sp_logout_redirect),
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))

        response = s.send(prepared_request,
                          verify=False,
                          allow_redirects=False)

        logger.debug(response.status_code)

        redirect_url = response.headers['Location']

        response = req.redirect_to_idp(s, redirect_url, header, sp_cookie)

        assert response.status_code == 200

        soup = BeautifulSoup(response.content, 'html.parser')

        form = soup.body.form
        url_form = form.get('action')
        method_form = form.get('method')
        inputs = form.find_all('input')

        # Send ws fed response
        token = {}
        for input in inputs:
            token[input.get('name')] = input.get('value')

        (response,
         cookie) = req.access_sp_with_token(s, header, sp_ip, sp_port,
                                            idp_scheme, idp_ip, idp_port,
                                            method_form, url_form, token,
                                            sp_cookie, sp_cookie)

        assert response.status_code == 200

        assert re.search(sp_message, response.text) is not None

        # Check that when the user accesses the secured page of SP1 with the old session cookie,
        # he is redirected to log in

        req_get_sp_login_reload_page = Request(
            method='GET',
            url="{scheme}://{ip}:{port}/{path}".format(scheme=sp_scheme,
                                                       port=sp_port,
                                                       ip=sp_ip,
                                                       path=sp_path),
            headers=header,
            cookies={**session_cookie})

        prepared_request = req_get_sp_login_reload_page.prepare()

        logger.debug(
            json.dumps(prepared_request_to_json(req_get_sp_login_reload_page),
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))

        response = s.send(prepared_request,
                          verify=False,
                          allow_redirects=False)

        logger.debug(response.status_code)

        # Assert that the refresh page gives a 302 which signals that the user is logged out of SP
        assert response.status_code == 302

        # Check if the user is logged out from SP2: perform a refresh of the page; we expect to get a redirect

        header_sp2_reload_page = {
            **header,
            'Host': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port),
        }

        req_get_sp_login_reload_page = Request(
            method='GET',
            url="{scheme}://{ip}:{port}/{path}".format(scheme=sp2_scheme,
                                                       port=sp2_port,
                                                       ip=sp2_ip,
                                                       path=sp2_path),
            headers=header_sp2_reload_page,
            cookies={**session_cookie})

        prepared_request = req_get_sp_login_reload_page.prepare()

        logger.debug(
            json.dumps(prepared_request_to_json(req_get_sp_login_reload_page),
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))

        response = s.send(prepared_request,
                          verify=False,
                          allow_redirects=False)

        logger.debug(response.status_code)

        # Assert that the refresh page gives a 302 which signals that the user is logged out of SP2
        assert response.status_code == 302
Example #24
    def get_time_series_for_fields(
        self,
        fields: List[Field],
        start_time: datetime = None,
        window: Window = Window.RAW,
        end_time: Optional[datetime] = None,
    ) -> List[FieldTimeSeries]:
        """Get complete (non-paginated) time series data for each field in `fields`"""
        assert (start_time is None) == (
            end_time is None
        ), "Either both start and end time should be provided, or both should be missing"
        # Build requests queue
        params = {
            "timeStart": int(start_time.timestamp()) if start_time else None,
            "timeEnd": int(end_time.timestamp()) if end_time else None,
            "window": window.value,
            "limit": 5000,
        }
        queue: List[Tuple[str, BatchRequest]] = [
            (
                f.field_human_name,  # type: ignore
                BatchRequest.from_request(
                    Request(
                        method="GET",
                        url=self._url(f"outputs/{f.output_id}/fields/{f.field_human_name}/data"),
                        params=params,
                    )
                ),
            )
            for f in fields
        ]

        # Make all requests in queue
        MAX_BATCH_REQUESTS = 200
        records: Dict[str, Dict[datetime, str]] = defaultdict(dict)
        while queue:
            # Make next batch of requests
            requests = dict(queue[:MAX_BATCH_REQUESTS])
            del queue[:MAX_BATCH_REQUESTS]
            logger.info(f"Making {len(requests)} batched requests to IOT API")
            responses = self._batch_request(requests)
            any_success = False

            # Process responses
            for name, resp in responses.items():
                if resp.ok:
                    any_success = True
                    series = {
                        Parsers.datetime(r["event_time"]): Parsers.unknown(r["value"])
                        for r in resp.body["records"]
                    }
                    records[name].update(series)
                    # Add request for next page
                    next_page_url = resp.body["meta"]["next_page_url"]
                    if next_page_url:
                        queue.append((name, BatchRequest(method="GET", uri=next_page_url)))
                else:
                    # Retry request
                    logger.warning(f"Got bad response from IOT API ({resp.body}). Retrying...")
                    queue.append((name, requests[name]))

            if not any_success:
                raise IOError(f"All {len(requests)} batched requests to IOT API failed")

        fields_by_name = {f.field_human_name: f for f in fields}
        return [
            FieldTimeSeries(field=fields_by_name[name], time_series=series)
            for name, series in records.items()
        ]
Example #25
from requests import Request, Session

url = 'http://httpbin.org/post'
data = {
    'name': 'germey',
}
headers = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537'
    '.36 (KHTML, like Gecko) Chrome/84.0.4115.0 Safari/537.36 Edg/84.0.488.1'
}
s = Session()
req = Request('POST', url, data=data, headers=headers)
prepped = s.prepare_request(req)
r = s.send(prepped)
print(r.text)
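Because the request is prepared before it is sent, it can be inspected or adjusted first. A small variation of the example above, purely illustrative:

from requests import Request, Session

s = Session()
req = Request('POST', 'http://httpbin.org/post', data={'name': 'germey'})
prepped = s.prepare_request(req)

# The prepared object exposes the final method, URL, headers and encoded body.
print(prepped.method, prepped.url)
print(prepped.body)                      # name=germey
prepped.headers['X-Debug'] = '1'         # headers can still be tweaked here

r = s.send(prepped, timeout=10)
print(r.status_code)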
Example #26
 def get_account(self, client=None, timeout=None):
     request = Request('GET', self.__create_url(client, 'accounts'))
     response = self.__send_request(request, timeout)
     return response.json()
Example #27
 def _request(self, method: str, path: str, **kwargs) -> Any:
     request = Request(method, self._ENDPOINT + path, **kwargs)
     self._sign_request(request)
     response = self._session.send(request.prepare())
     return self._process_response(response)
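_sign_request is not shown here; in clients of this shape it typically mutates the unprepared Request, adding authentication headers before the request is prepared and sent. A hypothetical HMAC-SHA256 signer, purely illustrative (the header names and secret attributes are assumptions, not this client's actual scheme):

import hashlib
import hmac
import time

from requests import Request

def _sign_request(self, request: Request) -> None:
    # Hypothetical signer: HMAC-SHA256 over timestamp + method + URL.
    ts = str(int(time.time() * 1000))
    payload = (ts + request.method + request.url).encode()
    signature = hmac.new(self._api_secret.encode(), payload,
                         hashlib.sha256).hexdigest()
    request.headers['API-KEY'] = self._api_key       # assumed header name
    request.headers['API-SIGN'] = signature          # assumed header name
    request.headers['API-TIMESTAMP'] = ts            # assumed header name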
Example #28
 def cancel_all(self, client=None, timeout=None):
     request = Request('DELETE', self.__create_order_url(client))
     self.__send_request(request, timeout)
Example #29
    def test_CT_TC_WS_FED_BROKER_ACCESS_CONTROL_RBAC_OK_SP_initiated(
            self, settings):
        """
        Scenario: User logs in to SP1 where he has the appropriate role.
        Same user tries to log in to SP2, SP that he is authorized to access. He should
        be able to access SP2 without authenticating again.
        :param settings:
        :return:
        """

        s = Session()

        # Service provider settings
        sp = settings["sps_wsfed"][0]
        sp_ip = sp["ip"]
        sp_port = sp["port"]
        sp_scheme = sp["http_scheme"]
        sp_path = sp["path"]
        sp_message = sp["logged_in_message"]

        # Service provider 2 settings
        sp2 = settings["sps_wsfed"][1]
        sp2_ip = sp2["ip"]
        sp2_port = sp2["port"]
        sp2_scheme = sp2["http_scheme"]
        sp2_path = sp2["path"]
        sp2_message = sp2["logged_in_message"]

        # Identity provider settings
        idp_ip = settings["idp"]["ip"]
        idp_port = settings["idp"]["port"]
        idp_scheme = settings["idp"]["http_scheme"]
        idp_broker = settings["idp"]["wsfed_broker"]
        idp_form_id = settings["idp"]["login_form_update"]

        idp_username = settings["idp_external"]["test_realm"]["username"]
        idp_password = settings["idp_external"]["test_realm"]["password"]

        idp2_ip = settings["idp_external"]["ip"]
        idp2_port = settings["idp_external"]["port"]
        idp2_scheme = settings["idp_external"]["http_scheme"]

        keycloak_login_form_id = settings["idp"]["login_form_id"]

        # Common header for all the requests
        header = req.get_header()

        # We check that test works for both types of identity provider
        idp_brokers = [
            settings["idp"]["saml_broker"], settings["idp"]["wsfed_broker"]
        ]

        for idp_broker in idp_brokers:

            response = req.access_sp_ws_fed(logger, s, header, sp_ip, sp_port,
                                            sp_scheme, sp_path)

            session_cookie = response.cookies

            redirect_url = response.headers['Location']

            header_redirect_idp = {
                **header, 'Host': "{ip}:{port}".format(ip=idp_ip,
                                                       port=idp_port),
                'Referer': "{ip}:{port}".format(ip=sp_ip, port=sp_port)
            }

            response = req.redirect_to_idp(logger, s, redirect_url,
                                           header_redirect_idp, session_cookie)

            keycloak_cookie = response.cookies

            if response.status_code == HTTPStatus.UNAUTHORIZED and response.headers[
                    'WWW-Authenticate'] == 'Negotiate':
                response = req.kerberos_form_fallback(logger, s, response,
                                                      header, {
                                                          **keycloak_cookie,
                                                          **session_cookie
                                                      })

            # On the login page we can choose to log in with the external IDP
            soup = BeautifulSoup(response.content, 'html.parser')

            div = soup.find("div", {"id": "kc-social-providers"})

            assert div is not None

            # There may be several external IDPs; pick the one needed for this test
            external_idp_url = None
            all_li = div.find_all('li')
            for li in all_li:
                if li.span.text == idp_broker:
                    external_idp_url = "{scheme}://{ip}:{port}".format(
                        scheme=idp_scheme, ip=idp_ip,
                        port=idp_port) + li.a['href']

            assert external_idp_url is not None

            # Select to login with the external IDP
            req_choose_external_idp = Request(
                method='GET',
                url="{url}".format(url=external_idp_url),
                headers=header,
                cookies=keycloak_cookie)

            prepared_request = req_choose_external_idp.prepare()

            log_request(logger, req_choose_external_idp)

            response = s.send(prepared_request,
                              verify=False,
                              allow_redirects=False)

            logger.debug(response.status_code)

            assert response.status_code == HTTPStatus.OK or response.status_code == HTTPStatus.FOUND

            # get the HTTP binding response with the url to the external IDP
            soup = BeautifulSoup(response.content, 'html.parser')
            form = soup.body.form

            url_form = form.get('action')
            inputs = form.find_all('input')
            method_form = form.get('method')

            params = {}
            for input in inputs:
                params[input.get('name')] = input.get('value')

            header_redirect_external_idp = {
                **header, 'Host': "{ip}:{port}".format(ip=idp2_ip,
                                                       port=idp2_port),
                'Referer': "{ip}:{port}".format(ip=idp_ip, port=idp_port)
            }

            # Redirect to external IDP
            if idp_broker == "cloudtrust_saml":
                req_redirect_external_idp = Request(
                    method=method_form,
                    url="{url}".format(url=url_form),
                    data=params,
                    headers=header_redirect_external_idp)
            else:
                req_redirect_external_idp = Request(
                    method=method_form,
                    url="{url}".format(url=url_form),
                    params=params,
                    headers=header_redirect_external_idp)

            # url_parts = list(urlparse.urlparse(url_form))
            # query = dict(urlparse.parse_qsl(url_parts[4]))
            # query.update(params)
            # url_parts[4] = urlencode(query)
            # referer_url = urlparse.urlunparse(url_parts)
            referer_url = url_form

            prepared_request = req_redirect_external_idp.prepare()

            log_request(logger, req_redirect_external_idp)

            response = s.send(prepared_request,
                              verify=False,
                              allow_redirects=False)

            logger.debug(response.status_code)

            # if the identity provider is SAML, we do an extra redirect
            if idp_broker == "cloudtrust_saml":
                redirect_url = response.headers['Location']
                keycloak_cookie2 = response.cookies
                response = req.redirect_to_idp(logger, s, redirect_url, header,
                                               keycloak_cookie2)
            else:
                keycloak_cookie2 = response.cookies

            soup = BeautifulSoup(response.content, 'html.parser')

            form = soup.find("form", {"id": keycloak_login_form_id})

            assert form is not None

            url_form = form.get('action')
            method_form = form.get('method')
            inputs = form.find_all('input')

            input_name = []
            for input in inputs:
                input_name.append(input.get('name'))

            assert "username" in input_name
            assert "password" in input_name

            credentials_data = {}
            credentials_data["username"] = idp_username
            credentials_data["password"] = idp_password

            # Authenticate to the external IDP
            response = req.send_credentials_to_idp(logger, s, header, idp2_ip,
                                                   idp2_port, referer_url,
                                                   url_form, credentials_data,
                                                   {
                                                       **keycloak_cookie2,
                                                       **session_cookie
                                                   }, method_form)

            assert response.status_code == HTTPStatus.OK or response.status_code == HTTPStatus.FOUND

            # get the HTTP binding response with the url to the broker IDP
            soup = BeautifulSoup(response.content, 'html.parser')
            form = soup.body.form

            url_form = form.get('action')
            inputs = form.find_all('input')
            method_form = form.get('method')

            token = {}
            for input in inputs:
                token[input.get('name')] = input.get('value')

            req_token_from_external_idp = Request(
                method=method_form,
                url="{url}".format(url=url_form),
                data=token,
                cookies=keycloak_cookie,
                headers=header)

            prepared_request = req_token_from_external_idp.prepare()

            log_request(logger, req_token_from_external_idp)

            response = s.send(prepared_request,
                              verify=False,
                              allow_redirects=False)

            if response.status_code == HTTPStatus.FOUND:
                new_cookie = response.cookies
                redirect_url = response.headers['Location']
                response = req.redirect_to_idp(logger, s, redirect_url, header,
                                               {
                                                   **keycloak_cookie,
                                                   **new_cookie
                                               })
                response = req.broker_fill_in_form(logger, s, response, header,
                                                   keycloak_cookie, new_cookie,
                                                   idp_broker, idp_form_id)

            keycloak_cookie3 = response.cookies

            logger.debug(response.status_code)

            # Get the token from the broker IDP
            soup = BeautifulSoup(response.content, 'html.parser')
            form = soup.body.form

            url_form = form.get('action')
            inputs = form.find_all('input')
            method_form = form.get('method')

            token = {}
            for input in inputs:
                token[input.get('name')] = input.get('value')

            # Access SP with the token
            (response, sp_cookie) = req.access_sp_with_token(
                logger, s, header, sp_ip, sp_port, sp_scheme, idp_scheme,
                idp_ip, idp_port, method_form, url_form, token, session_cookie,
                keycloak_cookie2)

            assert response.status_code == HTTPStatus.OK

            # assert that we are logged in
            assert re.search(sp_message, response.text) is not None

            # User is logged in on SP1

            # Attempt to perform login on SP2

            response = req.access_sp_ws_fed(logger, s, header, sp2_ip,
                                            sp2_port, sp2_scheme, sp2_path)

            session_cookie2 = response.cookies

            redirect_url = response.headers['Location']

            header_redirect_idp = {
                **header, 'Host': "{ip}:{port}".format(ip=idp_ip,
                                                       port=idp_port),
                'Referer': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port)
            }

            response = req.redirect_to_idp(logger, s, redirect_url,
                                           header_redirect_idp,
                                           {**keycloak_cookie3})

            soup = BeautifulSoup(response.content, 'html.parser')
            form = soup.body.form

            url_form = form.get('action')
            inputs = form.find_all('input')
            method_form = form.get('method')

            token = {}
            for input in inputs:
                token[input.get('name')] = input.get('value')

            (response, sp2_cookie) = req.access_sp_with_token(
                logger, s, header, sp2_ip, sp2_port, sp2_scheme, idp_scheme,
                idp_ip, idp_port, method_form, url_form, token,
                session_cookie2, keycloak_cookie2)

            assert response.status_code == HTTPStatus.OK

            assert re.search(sp2_message, response.text) is not None
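
This test scrapes the same kind of HTML form several times: read the form's action and method, then collect every input name/value pair into a dict before replaying it. A small helper along these lines could factor that pattern out (a hypothetical refactoring, not part of the original suite):

from bs4 import BeautifulSoup


def extract_form(html, form_id=None):
    """Return (action_url, method, fields) for the first matching form."""
    soup = BeautifulSoup(html, 'html.parser')
    form = soup.find('form', {'id': form_id}) if form_id else soup.body.form
    assert form is not None
    fields = {i.get('name'): i.get('value') for i in form.find_all('input')}
    return form.get('action'), form.get('method'), fields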
Example #30
0
 def delete_project_branch(self, project, branch):
     """Deletes a branch."""
     url = self.gerrit.url('BRANCH', project_name=project, branch_id=branch)
     r = Request(method='DELETE', url=url, auth=self.gerrit.auth)
     return self.gerrit.dispatch(r)
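
For comparison, roughly the same call made directly with requests against Gerrit's REST endpoint for branches (host and credentials are placeholders; authenticated Gerrit REST paths are prefixed with /a/):

import requests
from requests.auth import HTTPBasicAuth

gerrit = 'https://gerrit.example.com'  # placeholder host
resp = requests.delete(
    f'{gerrit}/a/projects/my-project/branches/my-branch',
    auth=HTTPBasicAuth('user', 'http-password'),
)
resp.raise_for_status()  # Gerrit answers 204 No Content on success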