def generate_dashboard_url(dashboard):
    """Generate a dashboard URL from a given definition."""
    try:
        title = dashboard.get('dashboard', 'title')
    except configparser.NoOptionError:
        raise ValueError("option 'title' in section 'dashboard' not set")

    try:
        foreach = dashboard.get('dashboard', 'foreach')
    except configparser.NoOptionError:
        raise ValueError("option 'foreach' in section 'dashboard' not set")

    try:
        baseurl = dashboard.get('dashboard', 'baseurl')
    except configparser.NoOptionError:
        baseurl = 'https://review.openstack.org/#/dashboard/?'

    url = baseurl
    url += escape(urllib_parse.urlencode({'title': title,
                                          'foreach': foreach}))
    for section in dashboard.sections():
        if not section.startswith('section'):
            continue

        try:
            query = dashboard.get(section, 'query')
        except configparser.NoOptionError:
            raise ValueError("option 'query' in '%s' not set" % section)

        title = section[9:-1]
        encoded = escape(urllib_parse.urlencode({title: query}))
        url += "&%s" % encoded
    return url
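A minimal sketch of the dashboard definition this function expects, assuming the standard library configparser and section names of the form section "Title" (implied by the section[9:-1] slice):

import configparser

dashboard = configparser.ConfigParser()
dashboard.read_string("""
[dashboard]
title = My Reviews
foreach = owner:self

[section "Open"]
query = status:open
""")
# generate_dashboard_url(dashboard) would return the default base URL followed
# by the escaped title/foreach pair and one &Open=status%3Aopen per section.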
Example #2
    def list(self, response, res):
        params = dict(response.request.params)
        params.pop("marker", None)
        query = urlparse.urlencode(params)
        type_name = response.request.urlvars.get("type_name")
        type_version = response.request.urlvars.get("type_version")
        if response.request.urlvars.get("state") == "creating":
            drafts = "/drafts"
        else:
            drafts = ""

        artifacts_list = [
            serialization.serialize_for_client(a, show_level=Showlevel.NONE)
            for a in res["artifacts"]]
        url = "/v3/artifacts"
        if type_name:
            url += "/" + type_name
        if type_version:
            url += "/v" + type_version
        url += drafts
        if query:
            first_url = url + "?" + query
        else:
            first_url = url
        body = {"artifacts": artifacts_list, "first": first_url}
        if "next_marker" in res:
            params["marker"] = res["next_marker"]
            next_query = urlparse.urlencode(params)
            body["next"] = url + "?" + next_query
        content = json.dumps(body, ensure_ascii=False)
        response.unicode_body = six.text_type(content)
        response.content_type = "application/json"
Example #3
def url_concat(url, args, fragments=None):
    """Concatenate url and argument dictionary regardless of whether
    url has existing query parameters.

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'
    """

    if not args and not fragments:
        return url

    # Strip off hashes
    while url[-1] == '#':
        url = url[:-1]

    fragment_tail = ''
    if fragments:
        fragment_tail = '#' + urlencode(fragments)

    args_tail = ''
    if args:
        if url[-1] not in ('?', '&'):
            args_tail += '&' if ('?' in url) else '?'
        args_tail += urlencode(args)

    return url + args_tail + fragment_tail
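A few hedged usage examples (urlencode is assumed to come from urllib.parse or six.moves.urllib.parse):

url_concat("http://example.com/foo", dict(c="d"))
# 'http://example.com/foo?c=d'
url_concat("http://example.com/foo?a=b&", dict(c="d"))
# 'http://example.com/foo?a=b&c=d' -- a trailing '?' or '&' suppresses the separator
url_concat("http://example.com/foo", dict(c="d"), fragments=dict(x="y"))
# 'http://example.com/foo?c=d#x=y'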
Example #4
 def __init__(self, base, relative=None):
     self._has_token = False
     self._url = None
     self._url_parts = None
     self._loaded = False
     self._xml = None
     self._headers = None
     self._config = None
     if isinstance(base, six.string_types):
         base_url = base
         self._url_parts = list(parse.urlsplit(base_url))
     elif isinstance(base, RequestBase):
         base_url = base.url
         self._has_token = base.has_token
         self._url_parts = base._url_parts[:]
         self._headers = base._headers
         self._config = base.config
     if relative:
         scheme, netloc, path, qs, fragment = parse.urlsplit(relative)
         if path:
             self._url_parts[2] = _join_plex(self._url_parts[2], path)
         if qs:
             data = parse.parse_qsl(self._url_parts[3]) + parse.parse_qsl(qs)
             self._url_parts[3] = parse.urlencode(data)
         else:
             # Strip off all non-token parts
             data = parse.parse_qsl(self._url_parts[3])
             self._url_parts[3] = parse.urlencode([(x, y) for x, y in data if x == 'X-Plex-Token'])
     if not self._has_token:
         self._has_token = 'X-Plex-Token' in parse.parse_qs(self._url_parts[3])
     self._url = parse.urlunsplit(self._url_parts)
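A self-contained sketch of the token-preserving branch using only urllib.parse (RequestBase and _join_plex are project-specific and assumed from context):

from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit

parts = list(urlsplit('https://plex.local/library?X-Plex-Token=abc&sort=title'))
# with no new query string, everything except the token is dropped
data = parse_qsl(parts[3])
parts[3] = urlencode([(k, v) for k, v in data if k == 'X-Plex-Token'])
print(urlunsplit(parts))  # https://plex.local/library?X-Plex-Token=abc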
Example #5
def test_group_request(session, users, groups, http_client, base_url):  # noqa: F811
    user = users["*****@*****.**"]
    group = groups["sad-team"]

    # Request to join

    fe_url = url(base_url, "/groups/{}/join".format(group.groupname))
    resp = yield http_client.fetch(
        fe_url,
        method="POST",
        headers={"X-Grouper-User": user.username},
        body=urlencode({"reason": "Test Request", "member": "User: [email protected]"}),
    )
    assert resp.code == 200

    request = Request.get(session, requester_id=user.id, requesting_id=group.id)
    assert request.status == "pending"

    # Approve request

    fe_url = url(base_url, "/groups/{}/requests/{}".format(group.groupname, request.id))
    resp = yield http_client.fetch(
        fe_url,
        method="POST",
        headers={"X-Grouper-User": "******"},
        body=urlencode({"reason": "Test Request", "status": "actioned"}),
    )
    assert resp.code == 200

    request = Request.get(session, requester_id=user.id, requesting_id=group.id)
    assert request.status == "actioned"
Example #6
    def execute(cls, uri, http_verb, extra_headers=None, batch=False, _body=None, **kw):
        """
        If batch is False, execute a command with the given parameters and
        return the response JSON.
        If batch is True, return the dictionary that would be used in a batch
        command.
        """
        if batch:
            urlsplitter = urlparse(API_ROOT).netloc
            ret = {"method": http_verb, "path": uri.split(urlsplitter, 1)[1]}
            if kw:
                ret["body"] = kw
            return ret

        if not ('app_id' in ACCESS_KEYS and 'rest_key' in ACCESS_KEYS):
            raise core.ParseError('Missing connection credentials')

        app_id = ACCESS_KEYS.get('app_id')
        rest_key = ACCESS_KEYS.get('rest_key')
        master_key = ACCESS_KEYS.get('master_key')

        url = uri if uri.startswith(API_ROOT) else cls.ENDPOINT_ROOT + uri
        if _body is None:
            data = kw and json.dumps(kw, default=date_handler) or "{}"
        else:
            data = _body
        if http_verb == 'GET' and data and len(urlencode(kw)) > 0:
            url += '?%s' % urlencode(kw)
            data = None
        else:
            data = data.encode('utf-8')

        headers = {
            'Content-type': 'application/json',
            'X-Parse-Application-Id': app_id,
            'X-Parse-REST-API-Key': rest_key
        }
        headers.update(extra_headers or {})
        url = url.replace('classes/User', 'users') # Jarrel edit
        request = Request(url, data, headers)

        if ACCESS_KEYS.get('session_token'):
            request.add_header('X-Parse-Session-Token', ACCESS_KEYS.get('session_token'))
        elif master_key:
            request.add_header('X-Parse-Master-Key', master_key)

        request.get_method = lambda: http_verb

        try:
            response = urlopen(request, timeout=CONNECTION_TIMEOUT)
        except HTTPError as e:
            exc = {
                400: core.ResourceRequestBadRequest,
                401: core.ResourceRequestLoginRequired,
                403: core.ResourceRequestForbidden,
                404: core.ResourceRequestNotFound
                }.get(e.code, core.ParseError)
            raise exc(e.read())

        return json.loads(response.read().decode('utf-8'))
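A hedged sketch of how the batch branch derives the relative path, assuming a Parse-style API_ROOT:

from urllib.parse import urlparse

API_ROOT = 'https://api.parse.com/1'  # assumed value
uri = API_ROOT + '/classes/GameScore'
urlsplitter = urlparse(API_ROOT).netloc
print(uri.split(urlsplitter, 1)[1])  # '/1/classes/GameScore'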
Example #7
 def _list_messages_page(self, marker, retry=False):
     url = self._get_files_url()
     parsed_url = urlsplit(url, 'http')
     conn = self.get_connection(parsed_url, self.tls)
     headers = [('Host', parsed_url.hostname),
                ('X-Auth-Token', self.auth.token_id)]
     query = urlencode({'limit': '1000'})
     if marker:
         query += '&{0}'.format(urlencode({'marker': marker}))
     selector = '{0}?{1}'.format(parsed_url.path, query)
     with gevent.Timeout(self.timeout):
         log.request(conn, 'GET', selector, headers)
         conn.putrequest('GET', selector)
         for name, value in headers:
             conn.putheader(name, value)
         conn.endheaders()
         res = conn.getresponse()
         status = '{0!s} {1}'.format(res.status, res.reason)
         log.response(conn, status, res.getheaders())
         data = res.read()
     if res.status == 401 and not retry:
         self.auth.create_token()
         return self._list_messages_page(marker, retry=True)
     if res.status == 200:
         lines = data.splitlines()
         return [line for line in lines
                 if line.startswith(self.prefix)], lines[-1]
     elif res.status == 204:
         return [], None
     else:
         raise RackspaceError(res)
Example #8
File: cas.py Project: 10sr/hue
 def cas_application(environ, start_response):
     username = environ.get('REMOTE_USER','')
     if username:
         return application(environ, start_response)
     qs = environ.get('QUERY_STRING','').split("&")
     if qs and qs[-1].startswith("ticket="):
         # assume a response from the authority
         ticket = qs.pop().split("=", 1)[1]
         environ['QUERY_STRING'] = "&".join(qs)
         service = construct_url(environ)
         args = urlencode(
                 {'service': service,'ticket': ticket})
         requrl = authority + "validate?" + args
         result = urlopen(requrl).read().split("\n")
         if 'yes' == result[0]:
             environ['REMOTE_USER'] = result[1]
             environ['AUTH_TYPE'] = 'cas'
             return application(environ, start_response)
         exce = CASLoginFailure()
     else:
         service = construct_url(environ)
         args = urlencode({'service': service})
         location = authority + "login?" + args
         exce = CASAuthenticate(location)
     return exce.wsgi_application(environ, start_response)
Example #9
    def export(self, ds, requestor, notify):
        """
        This function exports data as FITS files. To do this, the function binds metadata (keywords) to images (arrays) to create FITS files and then serves the FITS files at jsoc.stanford.edu.
        Written by Monica Bobra and Art Amezcua
        19 July 2016

        Parameters
        ----------
        requestor: string
            Username of requestor.
        notify   : string
            E-mail address of requestor.
        ds       : string
            Name of the data series.

        Returns
        -------
        supath : list
            List containing paths to all the requested FITS files.
		"""
		# test to see if the user's e-mail address is registered with jsoc.stanford.edu
        test_email_query = 'http://jsoc.stanford.edu/cgi-bin/ajax/checkAddress.sh?address='+quote_plus(notify)+'&checkonly=1'
        response = urlopen(test_email_query)
        data = json.loads(response.read())
        if (data['status'] == 4):
		    raise RuntimeError('User e-mail address is not registered with jsoc.stanford.edu')
        query = '?' + urlencode({'op': 'exp_request', 'protocol': 'fits', 'format': 'json', 'method': 'url', 'requestor': requestor, 'notify': notify, 'ds': ds})
        req = self._json_request(self._url_jsoc_fetch + query)
        # waiting for the request to be ready
        if (int(req.data['status']) == 1 or int(req.data['status']) == 2):
            if 'requestid' in req.data:
                query = '?' + urlencode({'op': 'exp_status', 'requestid': req.data['requestid']})
                supath = []
                print('Waiting for the request to be ready. Please allow at least 20 seconds.')
                time.sleep(15)
                while True:
                    req = self._json_request(self._url_jsoc_fetch + query)
                    if (int(req.data['status']) == 1 or int(req.data['status']) == 2 or int(req.data['status']) == 6):
                        time.sleep(5)
                    elif (int(req.data['status']) == 0):
                        dir = req.data['dir']
                        for dataobj in (req.data['data']):
                            supath.append(urljoin(self.baseurl,os.path.join(req.data['dir'],dataobj['filename'])))
                        break
                    else:
                        print(type(req.data['status']))
                        if (req.data['status'] == 3):
                            raise RuntimeError('DRMS Query failed, request size is too large, status=%s' % req.data['status'])
                        if (req.data['status'] == 4):
                            raise RuntimeError('DRMS Query failed, request not formed correctly, status=%s' % req.data['status'])
                        if (req.data['status'] == 5):
                            raise RuntimeError('DRMS Query failed, export request expired, status=%s' % req.data['status'])
                            
            else:
                raise RuntimeError('DRMS Query failed, there is no requestid, status=%s' % req.data['status'])
        else:
            raise RuntimeError('DRMS Query failed, series is not a valid series, status=%s' % req.data['status'])
        print("All the data are available at:")
        print(str(urljoin(self.baseurl,req.data['dir'])))
        return supath
Example #10
    def list(self, response, res):
        params = dict(response.request.params)
        params.pop('marker', None)
        query = urlparse.urlencode(params)
        type_name = response.request.urlvars.get('type_name')
        type_version = response.request.urlvars.get('type_version')
        if response.request.urlvars.get('state') == 'creating':
            drafts = "/drafts"
        else:
            drafts = ""

        artifacts_list = [
            serialization.serialize_for_client(a, show_level=Showlevel.NONE)
            for a in res['artifacts']]
        url = "/v0.1/artifacts"
        if type_name:
            url += "/" + type_name
        if type_version:
            url += "/v" + type_version
        url += drafts
        if query:
            first_url = url + "?" + query
        else:
            first_url = url
        body = {
            "artifacts": artifacts_list,
            "first": first_url
        }
        if 'next_marker' in res:
            params['marker'] = res['next_marker']
            next_query = urlparse.urlencode(params)
            body['next'] = url + '?' + next_query
        content = json.dumps(body, ensure_ascii=False)
        response.unicode_body = six.text_type(content)
        response.content_type = 'application/json'
Example #11
    def use_http_uri(message, typ, destination="", relay_state=""):
        if "\n" in message:
            data = message.split("\n")[1]
        else:
            data = message.strip()
        if typ == "SAMLResponse":
            info = {
                "data": data,
                "headers": [
                    ("Content-Type", "application/samlassertion+xml"),
                    ("Cache-Control", "no-cache, no-store"),
                    ("Pragma", "no-cache")
                ]
            }
        elif typ == "SAMLRequest":
            # msg should be an identifier
            if relay_state:
                query = urlencode({"ID": message,
                                   "RelayState": relay_state})
            else:
                query = urlencode({"ID": message})
            info = {
                "data": "",
                "url": "%s?%s" % (destination, query)
            }
        else:
            raise NotImplementedError

        return info
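A hedged illustration of the SAMLRequest branch (hypothetical destination and ID):

from urllib.parse import urlencode

destination = 'https://idp.example.org/artifact'
query = urlencode({'ID': 'id-abc123', 'RelayState': '/home'})
print('%s?%s' % (destination, query))
# https://idp.example.org/artifact?ID=id-abc123&RelayState=%2Fhome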
Example #12
def test_user_tok_acls(
    session, graph, users, user_admin_perm_to_auditors, http_client, base_url  # noqa: F811
):
    role_user = "******"
    admin = "*****@*****.**"
    pleb = "*****@*****.**"

    # admin creating token for role user
    fe_url = url(base_url, "/users/{}/tokens/add".format(role_user))
    resp = yield http_client.fetch(
        fe_url, method="POST", headers={"X-Grouper-User": admin}, body=urlencode({"name": "foo"})
    )
    assert resp.code == 200

    with pytest.raises(HTTPError):
        # non-admin creating token for role user
        resp = yield http_client.fetch(
            fe_url,
            method="POST",
            headers={"X-Grouper-User": pleb},
            body=urlencode({"name": "foo2"}),
        )

    fe_url = url(base_url, "/users/{}/tokens/add".format(pleb))
    with pytest.raises(HTTPError):
        # admin creating token for normal (non-role) user
        resp = yield http_client.fetch(
            fe_url,
            method="POST",
            headers={"X-Grouper-User": admin},
            body=urlencode({"name": "foo3"}),
        )
Example #13
    def list(self, **kwargs):
        """Retrieve a listing of Image objects

        :param page_size: Number of images to request in each paginated request
        :returns generator over list of Images
        """

        ori_validate_fun = self.model.validate
        empty_fun = lambda *args, **kwargs: None

        def paginate(url):
            resp, body = self.http_client.get(url)
            for image in body['images']:
                # NOTE(bcwaldon): remove 'self' for now until we have
                # an elegant way to pass it into the model constructor
                # without conflict.
                image.pop('self', None)
                yield self.model(**image)
                # NOTE(zhiyan): In order to resolve the performance issue
                # of JSON schema validation for image listing case, we
                # don't validate each image entry but do it only on first
                # image entry for each page.
                self.model.validate = empty_fun

            # NOTE(zhiyan); Reset validation function.
            self.model.validate = ori_validate_fun

            try:
                next_url = body['next']
            except KeyError:
                return
            else:
                for image in paginate(next_url):
                    yield image

        filters = kwargs.get('filters', {})

        if not kwargs.get('page_size'):
            filters['limit'] = DEFAULT_PAGE_SIZE
        else:
            filters['limit'] = kwargs['page_size']

        tags = filters.pop('tag', [])
        tags_url_params = []

        for tag in tags:
            if isinstance(tag, six.string_types):
                tags_url_params.append({'tag': strutils.safe_encode(tag)})

        for param, value in six.iteritems(filters):
            if isinstance(value, six.string_types):
                filters[param] = strutils.safe_encode(value)

        url = '/v2/images?%s' % parse.urlencode(filters)

        for param in tags_url_params:
            url = '%s&%s' % (url, parse.urlencode(param))

        for image in paginate(url):
            yield image
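A hedged sketch of how the listing URL is assembled; each tag is appended as its own pair so repeated tag= keys survive:

from urllib.parse import urlencode

filters = {'limit': 20, 'visibility': 'public'}
url = '/v2/images?%s' % urlencode(filters)
for param in ({'tag': 'ci'}, {'tag': 'prod'}):
    url = '%s&%s' % (url, urlencode(param))
print(url)  # /v2/images?limit=20&visibility=public&tag=ci&tag=prod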
Example #14
	def finalize_request(self):
		redirect_to = self.data.get('redirect_to') or None
		redirect_message = self.data.get('redirect_message') or None
		status = self.integration_request.status

		if self.flags.status_changed_to == "Completed":
			if self.data.reference_doctype and self.data.reference_docname:
				custom_redirect_to = None
				try:
					custom_redirect_to = frappe.get_doc(self.data.reference_doctype,
						self.data.reference_docname).run_method("on_payment_authorized", self.flags.status_changed_to)
				except Exception:
					frappe.log_error(frappe.get_traceback())

				if custom_redirect_to:
					redirect_to = custom_redirect_to

			redirect_url = 'payment-success'

			if self.redirect_url:
				redirect_url = self.redirect_url
				redirect_to = None
		else:
			redirect_url = 'payment-failed'

		if redirect_to:
			redirect_url += '?' + urlencode({'redirect_to': redirect_to})
		if redirect_message:
			# use '&' only once a query string has been started
			redirect_url += ('&' if '?' in redirect_url else '?') + urlencode({'redirect_message': redirect_message})

		return {
			"redirect_to": redirect_url,
			"status": status
		}
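A minimal sketch of the redirect URL assembly shared by these payment handlers (hypothetical values):

from urllib.parse import urlencode

redirect_url = 'payment-success'
redirect_url += '?' + urlencode({'redirect_to': '/orders/ORD-0001'})
redirect_url += '&' + urlencode({'redirect_message': 'Thank you'})
print(redirect_url)
# payment-success?redirect_to=%2Forders%2FORD-0001&redirect_message=Thank+you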
Example #15
    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        url = GITHUB_USER_DATA_URL + '?' + urlencode({
            'access_token': access_token
        })

        try:
            data = simplejson.load(dsa_urlopen(url))
        except ValueError:
            data = None

        # if we have a github organization defined, test that the current user
        # is a member of that organization.
        if data and self.GITHUB_ORGANIZATION:
            member_url = GITHUB_ORGANIZATION_MEMBER_OF_URL.format(
                org=self.GITHUB_ORGANIZATION,
                username=data.get('login')
            ) + '?' + urlencode({
                'access_token': access_token
            })

            try:
                response = dsa_urlopen(member_url)
            except HTTPError:
                data = None
            else:
                # if the user is a member of the organization, response code
                # will be 204, see http://bit.ly/ZS6vFl
                if response.code != 204:
                    raise AuthFailed('User doesn\'t belong to the '
                                     'organization')
        return data
Example #16
	def create_charge_on_braintree(self):
		self.configure_braintree()

		redirect_to = self.data.get('redirect_to') or None
		redirect_message = self.data.get('redirect_message') or None

		result = braintree.Transaction.sale({
			"amount": self.data.amount,
			"payment_method_nonce": self.data.payload_nonce,
			"options": {
				"submit_for_settlement": True
			}
		})

		if result.is_success:
			self.integration_request.db_set('status', 'Completed', update_modified=False)
			self.flags.status_changed_to = "Completed"
			self.integration_request.db_set('output', result.transaction.status, update_modified=False)

		elif result.transaction:
			self.integration_request.db_set('status', 'Failed', update_modified=False)
			error_log = frappe.log_error("code: " + str(result.transaction.processor_response_code) + " | text: " + str(result.transaction.processor_response_text), "Braintree Payment Error")
			self.integration_request.db_set('error', error_log.error, update_modified=False)
		else:
			self.integration_request.db_set('status', 'Failed', update_modified=False)
			for error in result.errors.deep_errors:
				error_log = frappe.log_error("code: " + str(error.code) + " | message: " + str(error.message), "Braintree Payment Error")
				self.integration_request.db_set('error', error_log.error, update_modified=False)

		if self.flags.status_changed_to == "Completed":
			status = 'Completed'
			if self.data.reference_doctype and self.data.reference_docname:
				custom_redirect_to = None
				try:
					custom_redirect_to = frappe.get_doc(self.data.reference_doctype,
						self.data.reference_docname).run_method("on_payment_authorized", self.flags.status_changed_to)
					braintree_success_page = frappe.get_hooks('braintree_success_page')
					if braintree_success_page:
						custom_redirect_to = frappe.get_attr(braintree_success_page[-1])(self.data)
				except Exception:
					frappe.log_error(frappe.get_traceback())

				if custom_redirect_to:
					redirect_to = custom_redirect_to

			redirect_url = 'payment-success'
		else:
			status = 'Error'
			redirect_url = 'payment-failed'

		if redirect_to:
			redirect_url += '?' + urlencode({'redirect_to': redirect_to})
		if redirect_message:
			redirect_url += ('&' if '?' in redirect_url else '?') + urlencode({'redirect_message': redirect_message})

		return {
			"redirect_to": redirect_url,
			"status": status
		}
Example #17
def http_redirect_message(message, location, relay_state="", typ="SAMLRequest",
                          sigalg=None, key=None, **kwargs):
    """The HTTP Redirect binding defines a mechanism by which SAML protocol
    messages can be transmitted within URL parameters.
    Messages are encoded for use with this binding using a URL encoding
    technique, and transmitted using the HTTP GET method.

    The DEFLATE Encoding is used in this function.

    :param message: The message
    :param location: Where the message should be posted to
    :param relay_state: for preserving and conveying state information
    :param typ: What type of message it is SAMLRequest/SAMLResponse/SAMLart
    :param sigalg: The signature algorithm to use.
    :param key: Key to use for signing
    :return: A dictionary with the Location header and an empty body.
    """

    if not isinstance(message, six.string_types):
        message = "%s" % (message,)

    _order = None
    if typ in ["SAMLRequest", "SAMLResponse"]:
        if typ == "SAMLRequest":
            _order = REQ_ORDER
        else:
            _order = RESP_ORDER
        args = {typ: deflate_and_base64_encode(message)}
    elif typ == "SAMLart":
        args = {typ: message}
    else:
        raise Exception("Unknown message type: %s" % typ)

    if relay_state:
        args["RelayState"] = relay_state

    if sigalg:
        # sigalgs, one of the ones defined in xmldsig

        args["SigAlg"] = sigalg

        try:
            signer = SIGNER_ALGS[sigalg]
        except KeyError:
            raise Unsupported("Signing algorithm")
        else:
            string = "&".join([urlencode({k: args[k]})
                               for k in _order if k in args]).encode('ascii')
            args["Signature"] = base64.b64encode(signer.sign(string, key))
            string = urlencode(args)
    else:
        string = urlencode(args)

    glue_char = "&" if urlparse(location).query else "?"
    login_url = glue_char.join([location, string])
    headers = [('Location', str(login_url))]
    body = []

    return {"headers": headers, "data": body}
Example #18
	def create_charge_on_stripe(self):
		headers = {"Authorization":
			"Bearer {0}".format(self.get_password(fieldname="secret_key", raise_exception=False))}
		
		data = {
			"amount": cint(flt(self.data.amount)*100),
			"currency": self.data.currency,
			"source": self.data.stripe_token_id,
			"description": self.data.description
		}
		
		redirect_to = self.data.get('redirect_to') or None
		redirect_message = self.data.get('redirect_message') or None

		try:
			resp = make_post_request(url="https://api.stripe.com/v1/charges", headers=headers, data=data)
			
			if resp.get("captured") == True:
				self.integration_request.db_set('status', 'Completed', update_modified=False)
				self.flags.status_changed_to = "Completed"

			else:
				frappe.log_error(str(resp), 'Stripe Payment not completed')

		except:
			frappe.log_error(frappe.get_traceback())
			# failed
			pass

		status = frappe.flags.integration_request.status_code

		if self.flags.status_changed_to == "Completed":
			if self.data.reference_doctype and self.data.reference_docname:
				custom_redirect_to = None
				try:
					custom_redirect_to = frappe.get_doc(self.data.reference_doctype,
						self.data.reference_docname).run_method("on_payment_authorized", self.flags.status_changed_to)
				except Exception:
					frappe.log_error(frappe.get_traceback())

				if custom_redirect_to:
					redirect_to = custom_redirect_to

			redirect_url = 'payment-success'
		else:
			redirect_url = 'payment-failed'

		if redirect_to:
			redirect_url += '?' + urlencode({'redirect_to': redirect_to})
		if redirect_message:
			redirect_url += ('&' if '?' in redirect_url else '?') + urlencode({'redirect_message': redirect_message})

		return {
			"redirect_to": redirect_url,
			"status": status
		}
Example #19
	def authorize_payment(self):
		"""
		An authorization is performed when user’s payment details are successfully authenticated by the bank.
		The money is deducted from the customer’s account, but will not be transferred to the merchant’s account
		until it is explicitly captured by merchant.
		"""
		data = json.loads(self.integration_request.data)
		settings = self.get_settings(data)

		try:
			resp = make_get_request("https://api.razorpay.com/v1/payments/{0}"
				.format(self.data.razorpay_payment_id), auth=(settings.api_key,
					settings.api_secret))

			if resp.get("status") == "authorized":
				self.integration_request.update_status(data, 'Authorized')
				self.flags.status_changed_to = "Authorized"

			else:
				frappe.log_error(str(resp), 'Razorpay Payment not authorized')

		except:
			frappe.log_error(frappe.get_traceback())
			# failed
			pass

		status = frappe.flags.integration_request.status_code

		redirect_to = data.get('notes', {}).get('redirect_to') or None
		redirect_message = data.get('notes', {}).get('redirect_message') or None

		if self.flags.status_changed_to == "Authorized":
			if self.data.reference_doctype and self.data.reference_docname:
				custom_redirect_to = None
				try:
					custom_redirect_to = frappe.get_doc(self.data.reference_doctype,
						self.data.reference_docname).run_method("on_payment_authorized", self.flags.status_changed_to)
				except Exception:
					frappe.log_error(frappe.get_traceback())

				if custom_redirect_to:
					redirect_to = custom_redirect_to

			redirect_url = 'payment-success'
		else:
			redirect_url = 'payment-failed'

		if redirect_to:
			redirect_url += '?' + urlencode({'redirect_to': redirect_to})
		if redirect_message:
			redirect_url += ('&' if '?' in redirect_url else '?') + urlencode({'redirect_message': redirect_message})

		return {
			"redirect_to": redirect_url,
			"status": status
		}
Example #20
def confirm_payment(token):
	try:
		redirect = True
		status_changed_to, redirect_to = None, None

		doc = frappe.get_doc("PayPal Settings")
		doc.setup_sandbox_env(token)

		integration_request = frappe.get_doc("Integration Request", token)
		data = json.loads(integration_request.data)

		redirect_to = data.get('redirect_to') or None
		redirect_message = data.get('redirect_message') or None

		params, url = doc.get_paypal_params_and_url()
		params.update({
			"METHOD": "DoExpressCheckoutPayment",
			"PAYERID": data.get("payerid"),
			"TOKEN": token,
			"PAYMENTREQUEST_0_PAYMENTACTION": "SALE",
			"PAYMENTREQUEST_0_AMT": data.get("amount"),
			"PAYMENTREQUEST_0_CURRENCYCODE": data.get("currency").upper()
		})

		response = make_post_request(url, data=params)

		if response.get("ACK")[0] == "Success":
			update_integration_request_status(token, {
				"transaction_id": response.get("PAYMENTINFO_0_TRANSACTIONID")[0],
				"correlation_id": response.get("CORRELATIONID")[0]
			}, "Completed")

			if data.get("reference_doctype") and data.get("reference_docname"):
				custom_redirect_to = frappe.get_doc(data.get("reference_doctype"),
					data.get("reference_docname")).run_method("on_payment_authorized", "Completed")
				frappe.db.commit()

				if custom_redirect_to:
					redirect_to = custom_redirect_to

			redirect_url = '/integrations/payment-success'
		else:
			redirect_url = "/integrations/payment-failed"

		if redirect_to:
			redirect_url += '?' + urlencode({'redirect_to': redirect_to})
		if redirect_message:
			redirect_url += ('&' if '?' in redirect_url else '?') + urlencode({'redirect_message': redirect_message})

		# this is done so that functions called via hooks can update flags.redirect_to
		if redirect:
			frappe.local.response["type"] = "redirect"
			frappe.local.response["location"] = get_url(redirect_url)

	except Exception:
		frappe.log_error(frappe.get_traceback())
Example #21
def get_plugin_url(queries):
    try:
        query = urlencode(queries)
    except UnicodeEncodeError:
        for k in queries:
            if is_unicode(queries[k]):
                queries[k] = queries[k].encode('utf-8')
        query = urlencode(queries)

    return sys.argv[0] + '?' + query
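A hedged usage note: the UnicodeEncodeError fallback matters on Python 2; Python 3's urlencode accepts bytes values directly (plugin URL is hypothetical):

from urllib.parse import urlencode

print('plugin://plugin.video.example/?' + urlencode({'title': u'café'.encode('utf-8')}))
# plugin://plugin.video.example/?title=caf%C3%A9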
Example #22
    def _finish_auth_url_param(self, params):
        """
        Make the next=... URL parameter that indicates where the user should go next.

        >>> _finish_auth_url_param([('next', '/dashboard')])
        '/account/finish_auth?next=%2Fdashboard'
        """
        return urlencode({
            'next': '/account/finish_auth?{}'.format(urlencode(params))
        })
Example #23
    def assert_setup_flow(self):
        resp = self.client.get(self.init_path)
        assert resp.status_code == 302
        redirect = urlparse(resp['Location'])
        assert redirect.scheme == 'https'
        assert redirect.netloc == 'github.com'
        assert redirect.path == '/apps/sentry-test-app'

        # App installation ID is provided
        resp = self.client.get(u'{}?{}'.format(
            self.setup_path,
            urlencode({'installation_id': self.installation_id})
        ))

        redirect = urlparse(resp['Location'])

        assert resp.status_code == 302
        assert redirect.scheme == 'https'
        assert redirect.netloc == 'github.com'
        assert redirect.path == '/login/oauth/authorize'

        params = parse_qs(redirect.query)

        assert params['state']
        assert params['redirect_uri'] == ['http://testserver/extensions/github/setup/']
        assert params['response_type'] == ['code']
        assert params['client_id'] == ['github-client-id']

        # Compact list values into singular values, since there's only ever one.
        authorize_params = {k: v[0] for k, v in six.iteritems(params)}

        resp = self.client.get(u'{}?{}'.format(
            self.setup_path,
            urlencode({
                'code': 'oauth-code',
                'state': authorize_params['state'],
            })
        ))

        oauth_exchange = responses.calls[0]
        req_params = parse_qs(oauth_exchange.request.body)

        assert req_params['grant_type'] == ['authorization_code']
        assert req_params['code'] == ['oauth-code']
        assert req_params['redirect_uri'] == ['http://testserver/extensions/github/setup/']
        assert req_params['client_id'] == ['github-client-id']
        assert req_params['client_secret'] == ['github-client-secret']

        assert oauth_exchange.response.status_code == 200

        auth_header = responses.calls[2].request.headers['Authorization']
        assert auth_header == 'Bearer jwt_token_1'

        self.assertDialogSuccess(resp)
        return resp
Example #24
 def use_http_artifact(message, destination="", relay_state=""):
     if relay_state:
         query = urlencode({"SAMLart": message,
                            "RelayState": relay_state})
     else:
         query = urlencode({"SAMLart": message})
     info = {
         "data": "",
         "url": "%s?%s" % (destination, query)
     }
     return info
Example #25
def test_user_enable_disable(
    session,  # noqa: F811
    graph,  # noqa: F811
    users,  # noqa: F811
    user_admin_perm_to_auditors,
    user_enable_perm_to_sre,
    http_client,
    base_url,
):
    username = u"*****@*****.**"
    old_groups = sorted(get_groups(graph, username))
    headers_admin = {"X-Grouper-User": "******"}
    headers_enable = {"X-Grouper-User": "******"}
    body_preserve = urlencode({"preserve_membership": "true"})
    body_base = urlencode({})

    # disable user
    fe_url = url(base_url, "/users/{}/disable".format(username))
    resp = yield http_client.fetch(fe_url, method="POST", headers=headers_admin, body=body_base)
    assert resp.code == 200

    # Attempt to enable user, preserving groups, as user with `grouper.user.enable`.
    # Should fail due to lack of admin perm.
    fe_url = url(base_url, "/users/{}/enable".format(username))
    with pytest.raises(HTTPError):
        resp = yield http_client.fetch(
            fe_url, method="POST", headers=headers_enable, body=body_preserve
        )

    # enable user, PRESERVE groups, as a user with the correct admin permission
    fe_url = url(base_url, "/users/{}/enable".format(username))
    resp = yield http_client.fetch(
        fe_url, method="POST", headers=headers_admin, body=body_preserve
    )
    assert resp.code == 200
    graph.update_from_db(session)
    assert old_groups == sorted(get_groups(graph, username)), "nothing should be removed"

    # disable user again
    fe_url = url(base_url, "/users/{}/disable".format(username))
    resp = yield http_client.fetch(fe_url, method="POST", headers=headers_admin, body=body_base)
    assert resp.code == 200

    # Attempt to enable user, PURGE groups. Should now succeed even with
    # only the `grouper.user.enable` perm.
    fe_url = url(base_url, "/users/{}/enable".format(username))
    resp = yield http_client.fetch(fe_url, method="POST", headers=headers_enable, body=body_base)
    assert resp.code == 200

    graph.update_from_db(session)
    assert len(get_groups(graph, username)) == 0, "all group membership should be removed"
Example #26
File: rest.py Project: HMP1/bumps
def _request(http, verb, location, fields=None,
             body=None, mimetype=None, files=None):

    headers = {'User-Agent': 'Basic Agent'}
    if files:
        if body:
            raise TypeError("Use fields instead of body with file upload")
        # Note: this section is public domain; the old code wasn't working
        boundary = uuid.uuid4().hex
        buf = StringIO()
        for key,value in fields.items():
            buf.write(u'--%s\r\n'%boundary)
            buf.write(u'Content-Disposition: form-data; name="%s"' % key)
            buf.write(u'\r\n\r\n%s\r\n'%value)
        for filename in files:
            content_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
            buf.write(u'--%s\r\n'%boundary)
            buf.write(u'Content-Disposition: form-data; name="file"; filename="%s"\r\n' % filename)
            buf.write(u'Content-Type: %s\r\n\r\n' % content_type)
            buf.write(open(filename,'rb').read())
            buf.write(u'\r\n')
        buf.write(u'--%s--\r\n'%boundary)
        body = buf.getvalue()
        headers['Content-Type'] = 'multipart/form-data; boundary='+boundary
        headers['Content-Length'] = str(len(body))
        #print "===== body =====\n",body
    elif body:
        if fields:
            raise TypeError("Body, if included, should encode fields directly.")
        headers['Content-Type']=mimetype
        headers['Content-Length'] = str(len(body))
    elif fields:
        if verb == "GET":
            location += u'?' + parse.urlencode(fields)
            body = u''
        else:
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
            body = parse.urlencode(fields)

    #print "uri",location
    #print "body",body
    #print "headers",headers
    #print "method",verb
    try:
        response, content = http.request(location, verb,
                                         body=body, headers=headers)
    except AttributeError:
        raise IOError("Could not open "+location)

    return response, content.decode('UTF-8')
Example #27
def _build_request(method, path, params=None, headers=None, body=None):
    """
    Takes request parameters and returns a byte string of a valid HTTP/1.1 request.

    We really shouldn't manually build an HTTP request, and instead try to capture
    what e.g. urllib or requests would do. But that is non-trivial, so meanwhile
    we hope that our request building doesn't mask any errors.

    This code is messy, because urllib behaves rather differently between Python 2
    and 3. Readability is further obstructed by the fact that Python 3.4 doesn't
    support % formatting for bytes, so we need to concatenate everything.
    If we run into more issues with this, the python-future library has a backport
    of Python 3's urllib.

    :param method: ASCII string of HTTP method.
    :param path: unicode string of URL path.
    :param params: List of two-tuples of bytestrings, ready for consumption for
                   urlencode. Encode to utf8 if necessary.
    :param headers: List of two-tuples ASCII strings of HTTP header, value.
    :param body: ASCII string of request body.

    ASCII string is short for a unicode string containing only ASCII characters,
    or a byte string with ASCII encoding.
    """
    if headers is None:
        headers = []
    else:
        headers = headers[:]

    if six.PY3:
        quoted_path = parse.quote(path)
        if params:
            quoted_path += '?' + parse.urlencode(params)
        quoted_path = quoted_path.encode('ascii')
    else:
        quoted_path = parse.quote(path.encode('utf8'))
        if params:
            quoted_path += b'?' + parse.urlencode(params)

    request = method.encode('ascii') + b' ' + quoted_path + b" HTTP/1.1\r\n"
    for name, value in headers:
        request += header_line(name, value)

    request += b'\r\n'

    if body:
        request += body.encode('ascii')

    return request
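A hedged sketch of the Python 3 path in isolation:

from urllib import parse

quoted_path = parse.quote(u'/søk')
quoted_path += '?' + parse.urlencode([(b'q', b'tests')])
print(quoted_path.encode('ascii'))  # b'/s%C3%B8k?q=tests'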
Example #28
def test_available_itags_parsing(input_itags):
    '''
    Test that the itag -> url map is successfully parsed from an API response.
    '''
    # The YouTube get_video_info API provides its output as a urlencoded
    # string. Individual keys and values inside a urlencoded string are always
    # strings.
    #
    # As such, if we didn't convert these to strings, we'd still get strings
    # back from parse_qsl (which is called inside yturl.itags_for_video). This
    # means that the return value of itags_for_video is always a string to
    # string OrderedDict, so we must convert to strings to be able to do the
    # final equality test.
    input_itags = list(map(str, input_itags))

    # In real life, the URL will obviously not be the itag as a string, but the
    # actual URL we retrieve is inconsequential to this test. We just want to
    # check that they are parsed and linked together properly as tuples.
    itag_to_url_map = collections.OrderedDict(
        (itag, itag) for itag in input_itags
    )

    # This is missing a lot of "real" keys that are returned by the YouTube API
    # inside url_encoded_fmt_stream_map, but we don't check those keys inside
    # itags_for_video, so we don't need them here.
    api_itag_map = ','.join([
        urlencode({
            'itag': itag,
            'url': itag_to_url_map[itag],
        }) for itag in input_itags
    ])

    # This is also missing a lot of keys which are, in reality, returned by the
    # YouTube API. If key references are added inside itags_for_video, the
    # relevant keys will need to be added here.
    fake_api_output = urlencode({
        'url_encoded_fmt_stream_map': api_itag_map,
        'status': 'ok',
    })

    _test_utils.mock_get_video_info_api_response(fake_api_output)
    got_itags_for_video = yturl.itags_for_video(_test_utils.VIDEO_ID)

    # dict to OrderedDict comparisons don't care about order, so if we
    # accidentally started returning a dict from itags_for_video, it's going to
    # return True even though the order actually isn't respected. As such, we
    # need to make sure the return type of itags_for_video is OrderedDict.
    assert_true(isinstance(got_itags_for_video, collections.OrderedDict))
    eq(got_itags_for_video, itag_to_url_map)
Example #29
    def index(self, response, result):
        params = dict(response.request.params)
        params.pop('marker', None)
        query = urlparse.urlencode(params)
        result.first = "/v2/metadefs/namespaces"
        result.schema = "/v2/schemas/metadefs/namespaces"
        if query:
            result.first = '%s?%s' % (result.first, query)
        if result.next:
            params['marker'] = result.next
            next_query = urlparse.urlencode(params)
            result.next = '/v2/metadefs/namespaces?%s' % next_query

        ns_json = json.tojson(Namespaces, result)
        response = self.__render(ns_json, response)
Example #30
    def save(self):
        if self._original_attrs:
            out = {}
            for k in self.attrs:
                if self.attrs[k] != self._original_attrs[k]:
                    out[k] = self.attrs[k]
            params_str = urlencode(out)
            resp, data = self._query('PUT', body=params_str)

        else:
            params_str = urlencode(self.attrs)
            resp, data = self._collection_query('POST', body=params_str)

        self._original_attrs = data
        self.attrs = data
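A minimal sketch of the diff-then-encode step (hypothetical attributes):

from urllib.parse import urlencode

original = {'name': 'db1', 'size': '10'}
current = {'name': 'db1', 'size': '20'}
changed = {k: v for k, v in current.items() if v != original[k]}
print(urlencode(changed))  # size=20 -- only modified attributes go in the PUT body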
Example #31
def pushover(message, **kwds):
    """
    Send a push notification with ``message`` to ``user`` using https://pushover.net/.

    Pushover is a platform for sending and receiving push notifications. On the server side, it
    provides an HTTP API for queueing messages to deliver to devices. On the device side, iOS and
    Android clients receive those push notifications, show them to the user, and store them for
    offline viewing.

    An account on https://pushover.net is required and the Pushover app must be installed on your
    phone for this function to be able to deliver messages to you.

    INPUT:

      - ``message`` - your message

      - ``user`` - the user key (not e-mail address) of your user (or you), viewable when logged
        into the Pushover dashboard. (default: ``None``)

      - ``device`` - your user's device identifier to send the message directly to that device,
        rather than all of the user's devices (default: ``None``)

      - ``title`` - your message's title, otherwise uses your app's name (default: ``None``)

      - ``url`` - a supplementary URL to show with your message (default: ``None``)

      - ``url_title`` - a title for your supplementary URL (default: ``None``)

      - ``priority`` - set to 1 to display as high-priority and bypass quiet hours, or -1 to always
        send as a quiet notification (default: ``0``)

      - ``timestamp`` - set to a unix timestamp to have your message show with a particular time,
        rather than now (default: ``None``)

      - ``sound`` - set to the name of one of the sounds supported by device clients to override the
        user's default sound choice (default: ``None``)

      - ``token`` - your application's API token (default: Sage's default App token)

    EXAMPLES::

        sage: import sage.misc.messaging
        sage: sage.misc.messaging.pushover("Hi, how are you?", user="******") # not tested

    To set default values populate ``pushover_defaults``::

        sage: sage.misc.messaging.pushover_defaults["user"] = "******"
        sage: sage.misc.messaging.pushover("Hi, how are you?") # not tested

    .. note::

        You may want to populate ``sage.misc.messaging.pushover_defaults`` with default values such
        as the default user in ``$HOME/.sage/init.sage``.
    """
    # import compatible with py2 and py3
    from six.moves import http_client as httplib
    from six.moves.urllib.parse import urlencode

    request = {"message": message}
    request.update(pushover_defaults)
    request.update(kwds)

    conn = httplib.HTTPSConnection("api.pushover.net:443")
    conn.request("POST", "/1/messages.json",
                 urlencode(request),
                 {"Content-type": "application/x-www-form-urlencoded"})
    return conn.getresponse().status == 200
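A hedged look at the form-encoded body that POST sends (placeholder credentials):

from urllib.parse import urlencode

print(urlencode({"message": "Hi, how are you?",
                 "user": "USER_KEY", "token": "APP_TOKEN"}))
# message=Hi%2C+how+are+you%3F&user=USER_KEY&token=APP_TOKEN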
Example #32
def github(args=None):
    port = _find_available_port()
    if not port:
        print("Github sign in requires an open port, please open port 3000.")

    # Signal when the HTTP server has started
    server_started_queue = Queue()
    # Signal when we have the access token
    access_token_queue = Queue()

    # Server that we will run in the background to accept a post-OAuth redirect from
    # the Hyperdash server which will contain the user's access token
    def start_server():
        class OAuthRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
            def do_GET(self):
                parsed_path = urlparse(self.path)
                query = parse_qs(parsed_path.query)
                access_token = query["access_token"][0] if "access_token" in query else None
                if not access_token:
                    print("Something went wrong! Please try again.")
                    sys.exit()
                print("Access token auto-detected!")
                access_token_queue.put(access_token)
                # Redirect user's browser
                self.send_response(301)
                self.send_header("Location","{}/{}".format(get_base_http_url(), "/oauth/github/success"))
                self.end_headers()
            # Silence logs
            def log_message(self, _format, *args):
                return

        server = BaseHTTPServer.HTTPServer((LOOPBACK, port), OAuthRedirectHandler)
        server_started_queue.put(True)
        server.handle_request()
    
    server_thread = Thread(target=start_server)
    # Prevent server_thread from preventing program shutdown
    server_thread.setDaemon(True)
    server_thread.start()

    url = "{}/{}".format(get_base_http_url(), GITHUB_OAUTH_START)
    auto_login_query_args = {
        "state": "client_cli_auto:{}".format(port),
    }
    auto_login_url = "{}?{}".format(url, urlencode(auto_login_query_args))
    
    # Copy
    manual_login_query_args = dict(auto_login_query_args)
    manual_login_query_args["state"] = "client_cli_manual"
    manual_login_url = "{}?{}".format(url, urlencode(manual_login_query_args))

    print("Opening browser, please wait. If something goes wrong, press CTRL+C to cancel.")
    print("\033[1m SSH'd into a remote machine, or just don't have access to a browser? Open this link in any browser and then copy/paste the provided access token: \033[4m{}\033[0m \033[0m".format(manual_login_url))

    # If the user doesn't have programmatic access to a browser, then we need to give them
    # the option of opening a URL manually and copy-pasting the access token into the CLI.
    # We spin this up in a separate thread so that it doesn't block the happy path where
    # the browser is available and we're able to auto-detect the access token
    manual_entry_thread_started_queue = Queue()
    def manual_entry():
        print("Waiting for Github OAuth to complete.")
        print("If something goes wrong, press CTRL+C to cancel.")        
        manual_entry_thread_started_queue.put(True)
        access_token = get_input("Access token: ")
        access_token_queue.put(access_token)
            
    manual_entry_thread = Thread(target=manual_entry)
    # Prevent manual_entry_thread from preventing program shutdown
    manual_entry_thread.setDaemon(True)
    manual_entry_thread.start()

    # Wait until the server and manual entry threads have started before opening the
    # user's browser to prevent a race condition where the Hyperdash server
    # redirects with an access token but the Python server isn't ready yet.
    # 
    # Also, we set the timeout to ONE_YEAR_IN_SECONDS because without a timeout,
    # the .get() call on the queue can not be interrupted with CTRL+C.
    server_started_queue.get(block=True, timeout=ONE_YEAR_IN_SECONDS)
    manual_entry_thread_started_queue.get(block=True, timeout=ONE_YEAR_IN_SECONDS)
    # Blocks until browser opens, but doesn't wait for user to close it
    webbrowser.open_new_tab(auto_login_url)


    # Wait for the Hyperdash server to redirect with the access token to our embedded
    # server, or for the user to manually enter an access token. Whichever happens
    # first.
    access_token = access_token_queue.get(block=True, timeout=ONE_YEAR_IN_SECONDS)
    # Use the access token to retrieve the user's API key and store a valid
    # hyperdash.json file
    success, default_api_key = _after_access_token_login(access_token)
    if success:
        print("Successfully logged in! We also installed: {} as your default API key".format(
            default_api_key))
Example #33
    async def request(self,
                      method,
                      url,
                      query_params=None,
                      headers=None,
                      body=None,
                      post_params=None,
                      _preload_content=True,
                      _request_timeout=None):
        """Execute request

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: this is a non-applicable field for
                                 the AiohttpClient.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        assert method in [
            'GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS'
        ]

        if post_params and body:
            raise ValueError(
                "body parameter cannot be used with post_params parameter.")

        post_params = post_params or {}
        headers = headers or {}
        timeout = _request_timeout or 5 * 60

        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'

        args = {
            "method": method,
            "url": url,
            "timeout": timeout,
            "headers": headers
        }

        if query_params:
            args["url"] += '?' + urlencode(query_params)

        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            if re.search('json', headers['Content-Type'], re.IGNORECASE):
                if body is not None:
                    body = json.dumps(body)
                args["data"] = body
            elif headers[
                    'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                args["data"] = aiohttp.FormData(post_params)
            elif headers['Content-Type'] == 'multipart/form-data':
                # must del headers['Content-Type'], or the correct
                # Content-Type which generated by aiohttp
                del headers['Content-Type']
                data = aiohttp.FormData()
                for param in post_params:
                    k, v = param
                    if isinstance(v, tuple) and len(v) == 3:
                        data.add_field(k,
                                       value=v[1],
                                       filename=v[0],
                                       content_type=v[2])
                    else:
                        data.add_field(k, v)
                args["data"] = data

            # Pass a `bytes` parameter directly in the body to support
            # other content types than Json when `body` argument is provided
            # in serialized form
            elif isinstance(body, bytes):
                args["data"] = body
            else:
                # Cannot generate the request from given parameters
                msg = """Cannot prepare a request message for provided
                         arguments. Please check that your arguments match
                         declared content type."""
                raise ApiException(status=0, reason=msg)

        r = await self.pool_manager.request(**args)
        if _preload_content:

            data = await r.text()
            r = RESTResponse(r, data)

            # log response body
            logger.debug("response body: %s", r.data)

            if not 200 <= r.status <= 299:
                raise ApiException(http_resp=r)

        return r
Example #34
 def url_encode(self, data):
     return urlencode(data)
Example #35
def test_audit_end_to_end(session, users, groups, http_client, base_url,
                          graph):  # noqa: F811
    """ Tests an end-to-end audit cycle. """
    groupname = "audited-team"

    gary_id = users["*****@*****.**"].id

    # make everyone an auditor or global audit will have issues
    add_member(groups["auditors"], users["*****@*****.**"])
    add_member(groups["auditors"], users["*****@*****.**"])
    add_member(groups["auditors"], users["*****@*****.**"])
    add_member(groups["auditors"], users["*****@*****.**"])

    # add some users to test removal
    add_member(groups[groupname], users["*****@*****.**"])
    add_member(groups[groupname], users["*****@*****.**"])

    graph.update_from_db(session)

    # start the audit
    end_at_str = (datetime.now() + timedelta(days=10)).strftime("%m/%d/%Y")
    fe_url = url(base_url, "/audits/create")
    resp = yield http_client.fetch(
        fe_url,
        method="POST",
        body=urlencode({"ends_at": end_at_str}),
        headers={"X-Grouper-User": "******"},
    )
    assert resp.code == 200

    open_audits = get_audits(session, only_open=True).all()
    assert len(open_audits) == 4, "audits created"

    assert groupname in [x.group.name for x in open_audits
                         ], "group we expect also gets audit"

    # pull all the info we need to resolve audits, avoids detached sqlalchemy sessions
    # (DetachedInstanceError)
    MyAuditMemberInfo = NamedTuple("MyAuditMemberInfo", [("am_id", int),
                                                         ("edge_type", int),
                                                         ("edge_id", int)])
    Audit = NamedTuple(
        "Audit",
        [
            ("audit_id", int),
            ("owner_name", str),
            ("group_name", str),
            ("audit_members_infos", List[MyAuditMemberInfo]),
        ],
    )
    all_group_ids = [x.group.id for x in open_audits]
    open_audits = [
        Audit(
            x.id,
            next(iter(x.group.my_owners())),
            x.group.name,
            [
                MyAuditMemberInfo(
                    ami.audit_member_obj.id,
                    ami.audit_member_obj.edge.member_type,
                    ami.audit_member_obj.edge_id,
                ) for ami in get_group_audit_members_infos(session, x.group)
            ],
        ) for x in open_audits
    ]

    # approve everything but the one we added members to
    for one_audit in open_audits:
        fe_url = url(base_url,
                     "/audits/{}/complete".format(one_audit.audit_id))

        if one_audit.group_name == groupname:
            continue

        # blanket approval
        body = urlencode({
            "audit_{}".format(ami.am_id): "approved"
            for ami in one_audit.audit_members_infos
        })

        resp = yield http_client.fetch(
            fe_url,
            method="POST",
            body=body,
            headers={"X-Grouper-User": one_audit.owner_name})
        assert resp.code == 200

    open_audits = get_audits(session, only_open=True).all()
    assert len(open_audits) == 1, "only our test group remaining"

    one_audit = open_audits[0]
    # touch the id attribute so it is loaded before the session detaches
    one_audit.id

    body_dict = {}
    for ami in get_group_audit_members_infos(session, one_audit.group):
        if gary_id == ami.member_obj.id:
            # deny
            body_dict["audit_{}".format(ami.audit_member_obj.id)] = "remove"
        else:
            # approve
            body_dict["audit_{}".format(ami.audit_member_obj.id)] = "approved"

    owner_name = next(iter(one_audit.group.my_owners()))
    fe_url = url(base_url, "/audits/{}/complete".format(one_audit.id))
    resp = yield http_client.fetch(fe_url,
                                   method="POST",
                                   body=urlencode(body_dict),
                                   headers={"X-Grouper-User": owner_name})
    assert resp.code == 200

    # check all the logs
    assert len(AuditLog.get_entries(
        session, action="start_audit")) == 1, "global start is logged"
    assert (len(AuditLog.get_entries(
        session,
        action="complete_global_audit")) == 1), "global complete is logged"

    for group_id in all_group_ids:
        assert (len(
            AuditLog.get_entries(
                session,
                on_group_id=group_id,
                action="complete_audit",
                category=AuditLogCategory.audit,
            )) == 1), "complete entry for each group"

    assert (len(
        AuditLog.get_entries(session,
                             on_user_id=gary_id,
                             category=AuditLogCategory.audit)) == 1
            ), "removal AuditLog entry on user"
Example #36
    def build_uri(self, plural_name, **kwargs):
        uri = self.get_uri(plural_name)
        if kwargs:
            uri += '?' + urlparse.urlencode(kwargs, doseq=1)
        return uri
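
A minimal sketch of why doseq=1 matters above: sequence values expand into
repeated parameters instead of being stringified (the six import name is an
assumption):

from six.moves.urllib import parse as urlparse

urlparse.urlencode({'tag': ['a', 'b']})           # -> 'tag=%5B%27a%27%2C+%27b%27%5D'
urlparse.urlencode({'tag': ['a', 'b']}, doseq=1)  # -> 'tag=a&tag=b'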
Example #37
    def get(self, request, *args, **kwargs):
        content_id = kwargs['content_id']
        if content_id is None:
            content_id = 'user/-/state/com.google/reading-list'
        base = {
            "direction": "ltr",
            "id": content_id,
            "self": [{
                "href": request.build_absolute_uri(request.path),
            }],
            "author": request.user.username,
            "updated": int(timezone.now().strftime("%s")),
            "items": [],
        }

        if content_id.startswith("feed/"):
            url = feed_url(content_id)
            feeds = request.user.feeds.filter(url=url).order_by('pk')[:1]
            if len(feeds) == 0:
                raise Http404
            feed = feeds[0]
            base.update({
                'title': feed.name,
                'description': feed.name,
            })
            try:
                unique = UniqueFeed.objects.get(url=url)
                uniques = {url: unique.link}
            except UniqueFeed.DoesNotExist:
                uniques = {url: url}
                base.update({
                    'alternate': [{
                        'href': url,
                        'type': 'text/html',
                    }],
                })
            else:
                base.update({
                    'alternate': [{
                        'href': unique.link,
                        'type': 'text/html',
                    }],
                })
                updated = unique.last_update
                if updated is None:
                    updated = timezone.now() - timedelta(days=7)
                base['updated'] = int(updated.strftime('%s'))

        elif is_stream(content_id, request.user.pk):
            uniques = get_unique_map(request.user)

            state = is_stream(content_id, request.user.pk)
            base['id'] = 'user/{0}/state/com.google/{1}'.format(
                request.user.pk, state)
            if state == 'reading-list':
                base['title'] = u"{0}'s reading list on FeedHQ".format(
                    request.user.username)

            elif state == 'kept-unread':
                base['title'] = u"{0}'s unread items on FeedHQ".format(
                    request.user.username)

            elif state == 'starred':
                base["title"] = "Starred items on FeedHQ"

            elif state in ['broadcast', 'broadcast-friends']:
                base["title"] = "Broadcast items on FeedHQ"

        elif is_label(content_id, request.user.pk):
            name = is_label(content_id, request.user.pk)
            base['title'] = u'"{0}" via {1} on FeedHQ'.format(
                name, request.user.username)
            base['id'] = u'user/{0}/label/{1}'.format(request.user.pk, name)
            uniques = get_unique_map(request.user)
        else:
            logger.info("unknown stream id",
                        stream_id=content_id,
                        request=request)
            raise exceptions.ParseError(
                "Unknown stream id: {0}".format(content_id))

        # Ordering
        # ?r=d|n last entry first (default), ?r=o oldest entry first
        ordering = ('date'
                    if request.query_params.get('r', 'd') == 'o' else '-date')

        per_page, page = bounds(n=request.query_params.get('n'),
                                c=request.query_params.get('c'))
        entries = get_es_entries(
            content_id,
            request.user,
            exclude=request.query_params.getlist('xt'),
            include=request.query_params.getlist('it'),
            limit=request.query_params.get('ot'),
            offset=request.query_params.get('nt'),
        ).aggregate('__query__').order_by(ordering.replace(
            'date', 'timestamp')).fetch(page=page,
                                        per_page=per_page,
                                        annotate=request.user)

        continuation = continuation_(
            entries['aggregations']['entries']['query']['doc_count'],
            per_page,
            page,
        )
        entries = entries['hits']
        start = max(0, (page - 1) * per_page)

        qs = {}
        if start > 0:
            qs['c'] = request.query_params['c']

        if 'output' in request.query_params:
            qs['output'] = request.query_params['output']

        if qs:
            base['self'][0]['href'] += '?{0}'.format(urlparse.urlencode(qs))

        if continuation:
            base['continuation'] = continuation

        forced = False  # Make at most 1 full refetch
        for entry in entries:
            if entry.feed.url not in uniques and not forced:
                uniques = get_unique_map(request.user, force=True)
                forced = True
            item = serialize_entry(request, entry, uniques)
            base['items'].append(item)
        return Response(base)
Example #38
    def request(self, method, url, query_params=None, headers=None, body=None,
                post_params=None, _preload_content=True,
                _request_timeout=None):
        """Execute Request

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: this is a non-applicable field for
                                 the AiohttpClient.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                          'PATCH', 'OPTIONS']

        if post_params and body:
            raise ValueError(
                "body parameter cannot be used with post_params parameter."
            )

        request = httpclient.HTTPRequest(url)
        request.allow_nonstandard_methods = True
        request.ca_certs = self.ca_certs
        request.client_key = self.client_key
        request.client_cert = self.client_cert
        request.proxy_host = self.proxy_host
        request.proxy_port = self.proxy_port
        request.method = method
        headers = headers or {}
        request.headers = headers
        if 'Content-Type' not in headers:
            request.headers['Content-Type'] = 'application/json'
        request.request_timeout = _request_timeout or 5 * 60

        post_params = post_params or {}

        if query_params:
            request.url += '?' + urlencode(query_params)

        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            if re.search('json', headers['Content-Type'], re.IGNORECASE):
                if body:
                    body = json.dumps(body)
                request.body = body
            elif headers['Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                request.body = urlencode(post_params)
            elif headers['Content-Type'] == 'multipart/form-data':
                multipart = encode_multipart_formdata(post_params)
                request.body, headers['Content-Type'] = multipart
            # Pass a `bytes` parameter directly in the body to support
            # content types other than JSON when the `body` argument is
            # provided in serialized form
            elif isinstance(body, bytes):
                request.body = body
            else:
                # Cannot generate the request from given parameters
                msg = """Cannot prepare a request message for provided
                         arguments. Please check that your arguments match
                         declared content type."""
                raise ApiException(status=0, reason=msg)

        r = yield self.pool_manager.fetch(request, raise_error=False)

        if _preload_content:

            r = RESTResponse(r)

            # log response body
            logger.debug("response body: %s", r.data)

        if not 200 <= r.status <= 299:
            raise ApiException(http_resp=r)

        raise tornado.gen.Return(r)
Example #39
    def index(self, trans, tool_id=None, data_secret=None, **kwd):
        """Manages ascynchronous connections"""

        if tool_id is None:
            return "tool_id argument is required"
        tool_id = str(tool_id)

        # redirect to main when getting no parameters
        if not kwd:
            return trans.response.send_redirect("/index")

        params = Params(kwd, sanitize=False)
        STATUS = params.STATUS
        URL = params.URL
        data_id = params.data_id

        log.debug('async dataid -> %s' % data_id)
        trans.log_event('Async dataid -> %s' % str(data_id))

        # initialize the tool
        toolbox = self.get_toolbox()
        tool = toolbox.get_tool(tool_id)
        if not tool:
            return "Tool with id %s not found" % tool_id

        #
        # we have an incoming data_id
        #
        if data_id:
            if not URL:
                return "No URL parameter was submitted for data %s" % data_id
            data = trans.sa_session.query(trans.model.HistoryDatasetAssociation).get(data_id)

            if not data:
                return "Data %s does not exist or has already been deleted" % data_id

            if STATUS == 'OK':
                key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
                if key != data_secret:
                    return "You do not have permission to alter data %s." % data_id
                # push the job into the queue
                data.state = data.blurb = data.states.RUNNING
                log.debug('executing tool %s' % tool.id)
                trans.log_event('Async executing tool %s' % tool.id, tool_id=tool.id)
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (tool_id, data.id, key)
                galaxy_url = params.get("GALAXY_URL", galaxy_url)
                params = dict(URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext)

                # Assume there is exactly one output file possible
                TOOL_OUTPUT_TYPE = None
                for key, obj in tool.outputs.items():
                    try:
                        TOOL_OUTPUT_TYPE = obj.format
                        params[key] = data.id
                        break
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        continue
                if TOOL_OUTPUT_TYPE is None:
                    raise Exception("Error: ToolOutput object not found")

                original_history = trans.sa_session.query(trans.app.model.History).get(data.history_id)
                tool.execute(trans, incoming=params, history=original_history)
            else:
                log.debug('async error -> %s' % STATUS)
                trans.log_event('Async error -> %s' % STATUS)
                data.state = data.blurb = jobs.JOB_ERROR
                data.info = "Error -> %s" % STATUS

            trans.sa_session.flush()

            return "Data %s with status %s received. OK" % (data_id, STATUS)
        else:
            #
            # no data_id, so this must be a parameter submission
            #
            GALAXY_TYPE = None
            if params.data_type:
                GALAXY_TYPE = params.data_type
            elif params.galaxyFileFormat == 'wig':  # this is an undocumented legacy special case
                GALAXY_TYPE = 'wig'
            elif params.GALAXY_TYPE:
                GALAXY_TYPE = params.GALAXY_TYPE
            else:
                # Assume there is exactly one output
                outputs_count = 0
                for obj in tool.outputs.values():
                    try:
                        GALAXY_TYPE = obj.format
                        outputs_count += 1
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        # a collection object does not have the 'format' attribute, so it will throw an exception
                        continue
                if outputs_count > 1:
                    raise Exception("Error: the tool should have just one output")

            if GALAXY_TYPE is None:
                raise Exception("Error: ToolOutput object not found")

            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'

            # data = datatypes.factory(ext=GALAXY_TYPE)()
            # data.ext   = GALAXY_TYPE
            # data.name  = GALAXY_NAME
            # data.info  = GALAXY_INFO
            # data.dbkey = GALAXY_BUILD
            # data.state = jobs.JOB_OK
            # history.datasets.add_dataset( data )

            data = trans.app.model.HistoryDatasetAssociation(create_dataset=True, sa_session=trans.sa_session, extension=GALAXY_TYPE)
            trans.app.security_agent.set_all_dataset_permissions(data.dataset, trans.app.security_agent.history_get_default_permissions(trans.history))
            data.name = GALAXY_NAME
            data.dbkey = GALAXY_BUILD
            data.info = GALAXY_INFO
            trans.sa_session.add(data)  # Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
            data.state = data.states.NEW
            open(data.file_name, 'wb').close()  # create the file
            trans.history.add_dataset(data, genome_build=GALAXY_BUILD)
            trans.sa_session.add(trans.history)
            trans.sa_session.flush()
            trans.log_event("Added dataset %d to history %d" % (data.id, trans.history.id), tool_id=tool_id)

            try:
                key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (tool_id, data.id, key)
                params.update({'GALAXY_URL': galaxy_url})
                params.update({'data_id': data.id})

                # Use provided URL or fallback to tool action
                url = URL or tool.action
                # Does url already have query params?
                if '?' in url:
                    url_join_char = '&'
                else:
                    url_join_char = '?'
                url = "%s%s%s" % (url, url_join_char, urlencode(params.flatten()))
                log.debug("connecting to -> %s" % url)
                trans.log_event("Async connecting to -> %s" % url)
                text = requests.get(url).text.strip()
                if not text.endswith('OK'):
                    raise Exception(text)
                data.state = data.blurb = data.states.RUNNING
            except Exception as e:
                data.info = unicodify(e)
                data.state = data.blurb = data.states.ERROR

            trans.sa_session.flush()

        return trans.fill_template('root/tool_runner.mako', out_data={}, num_jobs=1, job_errors=[])
Example #40
    def get_play_url(media_source, play_session_id):
        log.debug("get_play_url - media_source: {0}", media_source)

        # check if strm file Container
        if media_source.get('Container') == 'strm':
            log.debug("Detected STRM Container")
            playurl, listitem_props = PlayUtils().get_strm_details(
                media_source)
            if playurl is None:
                log.debug("Error, no strm content")
                return None, None, None
            else:
                return playurl, "0", listitem_props

        # get all the options
        addon_settings = xbmcaddon.Addon()
        server = downloadUtils.get_server()
        use_https = addon_settings.getSetting('protocol') == "1"
        verify_cert = addon_settings.getSetting('verify_cert') == 'true'
        allow_direct_file_play = addon_settings.getSetting(
            'allow_direct_file_play') == 'true'

        can_direct_play = media_source["SupportsDirectPlay"]
        can_direct_stream = media_source["SupportsDirectStream"]
        can_transcode = media_source["SupportsTranscoding"]
        container = media_source["Container"]

        playurl = None
        playback_type = None

        # check if file can be directly played
        if allow_direct_file_play and can_direct_play:
            direct_path = media_source["Path"]
            direct_path = direct_path.replace("\\", "/")
            direct_path = direct_path.strip()

            # handle DVD structure
            if container == "dvd":
                direct_path = direct_path + "/VIDEO_TS/VIDEO_TS.IFO"
            elif container == "bluray":
                direct_path = direct_path + "/BDMV/index.bdmv"

            if direct_path.startswith("//"):
                direct_path = "smb://" + direct_path[2:]

            log.debug("playback_direct_path: {0}".format(direct_path))

            if xbmcvfs.exists(direct_path):
                playurl = direct_path
                playback_type = "0"

        # check if file can be direct streamed
        if can_direct_stream and playurl is None:
            item_id = media_source.get('Id')
            playurl = ("%s/Videos/%s/stream" + "?static=true" +
                       "&PlaySessionId=%s" + "&MediaSourceId=%s")
            playurl = playurl % (server, item_id, play_session_id, item_id)
            if use_https and not verify_cert:
                playurl += "|verifypeer=false"
            playback_type = "1"

        # check if the file can be transcoded
        if can_transcode and playurl is None:
            item_id = media_source.get('Id')
            client_info = ClientInformation()
            device_id = client_info.get_device_id()
            user_token = downloadUtils.authenticate()
            playback_bitrate = addon_settings.getSetting(
                "force_max_stream_bitrate")
            bitrate = int(playback_bitrate) * 1000
            playback_max_width = addon_settings.getSetting(
                "playback_max_width")
            audio_codec = addon_settings.getSetting("audio_codec")
            audio_playback_bitrate = addon_settings.getSetting(
                "audio_playback_bitrate")
            audio_bitrate = int(audio_playback_bitrate) * 1000
            audio_max_channels = addon_settings.getSetting(
                "audio_max_channels")
            playback_video_force_8 = addon_settings.getSetting(
                "playback_video_force_8") == "true"

            transcode_params = {
                "MediaSourceId": item_id,
                "DeviceId": device_id,
                "PlaySessionId": play_session_id,
                "api_key": user_token,
                "SegmentContainer": "ts",
                "VideoCodec": "h264",
                "VideoBitrate": bitrate,
                "MaxWidth": playback_max_width,
                "AudioCodec": audio_codec,
                "TranscodingMaxAudioChannels": audio_max_channels,
                "AudioBitrate": audio_bitrate
            }
            if playback_video_force_8:
                transcode_params.update({"MaxVideoBitDepth": "8"})

            transcode_path = urlencode(transcode_params)

            playurl = "%s/Videos/%s/master.m3u8?%s" % (server, item_id,
                                                       transcode_path)

            if use_https and not verify_cert:
                playurl += "|verifypeer=false"

            playback_type = "2"

        return playurl, playback_type, []
Example #41
def single_urlencode(text):
    # urlencode needs a utf-8 encoded byte string
    text = urlencode({'blahblahblah': text.encode('utf-8')})
    text = text[13:]
    return text.decode('utf-8')  # return the result again as unicode
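
A hedged usage sketch (Python 2 semantics, as the trailing .decode implies);
the dummy key 'blahblahblah=' is 13 characters, so the slice keeps only the
percent-encoded value:

single_urlencode(u'a b&c')  # -> u'a+b%26c'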
Example #42
    def add_data(self, data):
        """Provide data to be sent with POST."""
        urllib.request.Request.add_data(self, urlencode(data))
Example #43
    def request(self,
                method,
                url,
                query_params=None,
                headers=None,
                body=None,
                post_params=None,
                _preload_content=True,
                _request_timeout=None):
        """Perform requests.

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        assert method in [
            'GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS'
        ]

        if post_params and body:
            raise ApiValueError(
                "body parameter cannot be used with post_params parameter.")

        post_params = post_params or {}
        headers = headers or {}

        timeout = None
        if _request_timeout:
            if isinstance(_request_timeout, (int, ) if six.PY3 else
                          (int, long)):  # noqa: E501,F821
                timeout = urllib3.Timeout(total=_request_timeout)
            elif (isinstance(_request_timeout, tuple)
                  and len(_request_timeout) == 2):
                timeout = urllib3.Timeout(connect=_request_timeout[0],
                                          read=_request_timeout[1])

        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'

        try:
            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
            if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                if query_params:
                    url += '?' + urlencode(query_params)
                if re.search('json', headers['Content-Type'], re.IGNORECASE):
                    request_body = None
                    if body is not None:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(
                        method,
                        url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers[
                        'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                    r = self.pool_manager.request(
                        method,
                        url,
                        fields=post_params,
                        encode_multipart=False,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must delete headers['Content-Type'], or the correct
                    # Content-Type generated by urllib3 will be overwritten
                    del headers['Content-Type']
                    r = self.pool_manager.request(
                        method,
                        url,
                        fields=post_params,
                        encode_multipart=True,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                # Pass a `string` parameter directly in the body to support
                # content types other than JSON when the `body` argument is
                # provided in serialized form
                elif isinstance(body, str) or isinstance(body, bytes):
                    request_body = body
                    r = self.pool_manager.request(
                        method,
                        url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided
                             arguments. Please check that your arguments match
                             declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method,
                                              url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except urllib3.exceptions.SSLError as e:
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)

        if _preload_content:
            r = RESTResponse(r)

            # In Python 3, response.data is bytes;
            # we need to decode it to a string.
            if six.PY3:
                r.data = r.data.decode('utf8')

            # log response body
            logger.debug("response body: %s", r.data)

        if not 200 <= r.status <= 299:
            raise ApiException(http_resp=r)

        return r
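
A hypothetical call to the client above (the `client` name and URL are
assumptions; `client` wraps a configured urllib3.PoolManager):

resp = client.request('GET', 'https://api.example.com/items',
                      query_params={'page': 2, 'per_page': 50})
print(resp.status, resp.data)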
Example #44
def prepare_query_string(params):
    """Convert dict params to query string"""
    params = sorted(params.items(), key=lambda x: x[0])
    return '?%s' % parse.urlencode(params) if params else ''
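
Because the items are sorted first, the output is deterministic regardless of
dict ordering:

prepare_query_string({'b': 2, 'a': 1})  # -> '?a=1&b=2'
prepare_query_string({})                # -> ''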
Example #45
    def request(self, method='GET', path='/', data=None, flags=None, params=None, headers=None, files=None,
                trailing=None):
        """

        :param method:
        :param path:
        :param data:
        :param flags:
        :param params:
        :param headers:
        :param files:
        :param trailing: bool
        :return:
        """
        self.log_curl_debug(method=method, path=path, headers=headers, data=data, trailing=None)
        url = self.url_joiner(self.url, path, trailing)
        if params or flags:
            url += '?'
        if params:
            url += urlencode(params or {})
        if flags:
            url += ('&' if params else '') + '&'.join(flags or [])
        if files is None:
            data = json.dumps(data)

        headers = headers or self.default_headers
        response = self._session.request(
            method=method,
            url=url,
            headers=headers,
            data=data,
            timeout=self.timeout,
            verify=self.verify_ssl,
            files=files
        )
        try:
            if response.text:
                response_content = response.json()
            else:
                response_content = response.content
        except ValueError:
            response_content = response.content
        if response.status_code == 200:
            log.debug('Received: {0}\n {1}'.format(response.status_code, response_content))
        elif response.status_code == 201:
            log.debug('Received: {0}\n "Created" response'.format(response.status_code))
        elif response.status_code == 204:
            log.debug('Received: {0}\n "No Content" response'.format(response.status_code))
        elif response.status_code == 400:
            log.error('Received: {0}\n Bad request \n {1}'.format(response.status_code, response_content))
        elif response.status_code == 401:
            log.error('Received: {0}\n "UNAUTHORIZED" response'.format(response.status_code))
        elif response.status_code == 404:
            log.error('Received: {0}\n Not Found'.format(response.status_code))
        elif response.status_code == 403:
            log.error('Received: {0}\n Forbidden. Please, check permissions'.format(response.status_code))
        elif response.status_code == 409:
            log.error('Received: {0}\n Conflict \n {1}'.format(response.status_code, response_content))
        elif response.status_code == 413:
            log.error('Received: {0}\n Request entity too large'.format(response.status_code))
        else:
            log.debug('Received: {0}\n {1}'.format(response.status_code, response))
            self.log_curl_debug(method=method, path=path, headers=headers, data=data, level=logging.DEBUG)
            log.error(response_content)
            try:
                response.raise_for_status()
            except requests.exceptions.HTTPError as err:
                log.error("HTTP Error occurred")
                log.error('Response is: {content}'.format(content=err.response.content))
        return response
Example #46
    def list(self, **kwargs):
        """Get a list of software deployments.

        :rtype: list of :class:`SoftwareDeployment`
        """
        url = '/software_deployments?%s' % parse.urlencode(kwargs)
        return self._list(url, "software_deployments")
Example #47
    def to_querystring(self):
        """Produce a querystring of the form expected by
        GET /allocation_candidates.
        """
        # TODO(gibi): We have a RequestGroup OVO so we can move this to that
        # class as a member function.
        # NOTE(efried): The sorting herein is not necessary for the API; it is
        # to make testing easier and logging/debugging predictable.
        def to_queryparams(request_group, suffix):
            res = request_group.resources
            required_traits = request_group.required_traits
            forbidden_traits = request_group.forbidden_traits
            aggregates = request_group.aggregates

            resource_query = ",".join(
                sorted("%s:%s" % (rc, amount)
                       for (rc, amount) in res.items()))
            qs_params = [('resources%s' % suffix, resource_query)]

            # Assemble required and forbidden traits, allowing for either/both
            # to be empty.
            required_val = ','.join(
                sorted(required_traits) +
                ['!%s' % ft for ft in sorted(forbidden_traits)])
            if required_val:
                qs_params.append(('required%s' % suffix, required_val))
            if aggregates:
                aggs = []
                # member_ofN is a list of lists.  We need a tuple of
                # ('member_ofN', 'in:uuid,uuid,...') for each inner list.
                for agglist in aggregates:
                    aggs.append(('member_of%s' % suffix,
                                 'in:' + ','.join(sorted(agglist))))
                qs_params.extend(sorted(aggs))
            return qs_params

        if self._limit is not None:
            qparams = [('limit', self._limit)]
        else:
            qparams = []
        if self._group_policy is not None:
            qparams.append(('group_policy', self._group_policy))
        nr_of_numbered_groups = 0
        for ident, rg in self._rg_by_id.items():
            # [('resourcesN', 'rclass:amount,rclass:amount,...'),
            #  ('requiredN', 'trait_name,!trait_name,...'),
            #  ('member_ofN', 'in:uuid,uuid,...'),
            #  ('member_ofN', 'in:uuid,uuid,...')]
            qparams.extend(to_queryparams(rg, ident or ''))
            if ident:
                nr_of_numbered_groups += 1
        if nr_of_numbered_groups >= 2 and not self._group_policy:
            # we know this will fail in placement so help the troubleshooting
            LOG.warning(
                "There is more than one numbered request group in the "
                "allocation candidate query but the flavor did not specify "
                "any group policy. This query will fail in placement due to "
                "the missing group policy. If you specified more than one "
                "numbered request group in the flavor extra_spec or booted "
                "with more than one neutron port that has resource request "
                "(i.e. the port has a QoS minimum bandwidth policy rule "
                "attached) then you have to specify the group policy in the "
                "flavor extra_spec. If it is OK to let these groups be "
                "satisfied by overlapping resource providers then use "
                "'group_policy': 'None'. If you want each group to be "
                "satisfied from a separate resource provider then use "
                "'group_policy': 'isolate'.")
        return parse.urlencode(sorted(qparams))
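
The final parse.urlencode(sorted(qparams)) also percent-encodes the separator
characters; a standalone sketch with made-up resource classes and traits:

from six.moves.urllib import parse

qparams = [('resources', 'MEMORY_MB:512,VCPU:2'),
           ('required', 'HW_CPU_X86_AVX,!CUSTOM_SLOW')]
parse.urlencode(sorted(qparams))
# -> 'required=HW_CPU_X86_AVX%2C%21CUSTOM_SLOW&resources=MEMORY_MB%3A512%2CVCPU%3A2'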
Example #48
def get_signed_url(rse_id, service, operation, url, lifetime=600):
    """
    Get a signed URL for a particular service and operation.

    The signed URL is valid for 600 seconds (10 minutes) by default, but the
    lifetime can be overridden.

    :param rse_id: The ID of the RSE that the URL points to.
    :param service: The service to authorise, either 'gcs', 's3' or 'swift'.
    :param operation: The operation to sign, either 'read', 'write', or 'delete'.
    :param url: The URL to sign.
    :param lifetime: Lifetime of the signed URL in seconds.
    :returns: Signed URL as a variable-length string.
    """

    global CREDS_GCS

    if service not in ['gcs', 's3', 'swift']:
        raise UnsupportedOperation('Service must be "gcs", "s3" or "swift"')

    if operation not in ['read', 'write', 'delete']:
        raise UnsupportedOperation('Operation must be "read", "write", or "delete"')

    if url is None or url == '':
        raise UnsupportedOperation('URL must not be empty')

    if lifetime:
        if not isinstance(lifetime, integer_types):
            try:
                lifetime = int(lifetime)
            except (TypeError, ValueError):
                raise UnsupportedOperation('Lifetime must be convertible to numeric.')

    signed_url = None
    if service == 'gcs':
        if not CREDS_GCS:
            CREDS_GCS = ServiceAccountCredentials.from_json_keyfile_name(config_get('credentials', 'gcs',
                                                                                    raise_exception=False,
                                                                                    default='/opt/rucio/etc/google-cloud-storage-test.json'))

        # select the correct operation
        operations = {'read': 'GET', 'write': 'PUT', 'delete': 'DELETE'}
        operation = operations[operation]

        # special case to test signature, force epoch time
        if lifetime is None:
            lifetime = 0
        else:
            # GCS is timezone-sensitive, don't use UTC;
            # the expiry has to be converted to Unix time
            lifetime = datetime.datetime.now() + datetime.timedelta(seconds=lifetime)
            lifetime = int(time.mktime(lifetime.timetuple()))

        # sign the path only
        path = urlparse(url).path

        # assemble message to sign
        to_sign = "%s\n\n\n%s\n%s" % (operation, lifetime, path)

        # create URL-capable signature
        # first character is always a '=', remove it
        signature = urlencode({'': base64.b64encode(CREDS_GCS.sign_blob(to_sign)[1])})[1:]

        # assemble final signed URL
        signed_url = 'https://storage.googleapis.com:443%s?GoogleAccessId=%s&Expires=%s&Signature=%s' % (path,
                                                                                                         CREDS_GCS.service_account_email,
                                                                                                         lifetime,
                                                                                                         signature)

    elif service == 's3':
        # split URL to get hostname, bucket and key
        components = urlparse(url)
        host = components.netloc
        pathcomponents = components.path.split('/')
        if len(pathcomponents) < 3:
            raise UnsupportedOperation('Not a valid S3 URL')
        bucket = pathcomponents[1]
        key = '/'.join(pathcomponents[2:])

        # remove port number from host if present
        colon = host.find(':')
        port = '443'
        if colon >= 0:
            port = host[colon + 1:]
            host = host[:colon]

        # look up in RSE account configuration by RSE ID
        cred_name = rse_id
        cred = REGION.get('s3-%s' % cred_name)
        if cred is NO_VALUE:
            rse_cred = get_rse_credentials()
            cred = rse_cred.get(cred_name)
            REGION.set('s3-%s' % cred_name, cred)
        access_key = cred['access_key']
        secret_key = cred['secret_key']
        signature_version = cred['signature_version']
        region_name = cred['region']

        if operation == 'read':
            s3op = 'get_object'
        elif operation == 'write':
            s3op = 'put_object'
        else:
            s3op = 'delete_object'

        with record_timer_block('credential.signs3'):
            s3 = boto3.client('s3', endpoint_url='https://' + host + ':' + port, aws_access_key_id=access_key, aws_secret_access_key=secret_key, config=Config(signature_version=signature_version, region_name=region_name))

            signed_url = s3.generate_presigned_url(s3op, Params={'Bucket': bucket, 'Key': key}, ExpiresIn=lifetime)

    elif service == 'swift':
        # split URL to get hostname and path
        components = urlparse(url)
        host = components.netloc

        # remove port number from host if present
        colon = host.find(':')
        if colon >= 0:
            host = host[:colon]

        # use RSE ID to look up key
        cred_name = rse_id

        # look up tempurl signing key
        cred = REGION.get('swift-%s' % cred_name)
        if cred is NO_VALUE:
            rse_cred = get_rse_credentials()
            cred = rse_cred.get(cred_name)
            REGION.set('swift-%s' % cred_name, cred)
        tempurl_key = cred['tempurl_key']

        if operation == 'read':
            swiftop = 'GET'
        elif operation == 'write':
            swiftop = 'PUT'
        else:
            swiftop = 'DELETE'

        expires = int(time.time() + lifetime)

        # create signed URL
        with record_timer_block('credential.signswift'):
            hmac_body = u'%s\n%s\n%s' % (swiftop, expires, components.path)
            # Python 3 hmac only accepts bytes or bytearray
            sig = hmac.new(bytearray(tempurl_key, 'utf-8'), bytearray(hmac_body, 'utf-8'), sha1).hexdigest()
            signed_url = 'https://' + host + components.path + '?temp_url_sig=' + sig + '&temp_url_expires=' + str(expires)

    return signed_url
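
A standalone sketch of the Swift tempurl branch above, with hypothetical host
and key values:

import hmac
import time
from hashlib import sha1

tempurl_key = 'example-key'                 # hypothetical
path = '/v1/AUTH_account/container/object'  # hypothetical
expires = int(time.time() + 600)
hmac_body = u'GET\n%s\n%s' % (expires, path)
sig = hmac.new(bytearray(tempurl_key, 'utf-8'),
               bytearray(hmac_body, 'utf-8'), sha1).hexdigest()
signed = ('https://swift.example.com' + path +
          '?temp_url_sig=' + sig + '&temp_url_expires=' + str(expires))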
Example #49
    def start_requests(self):
        params = parse.urlencode(self._request_params)
        uri = self._API_URL_TPL.format(method=self._API_METHOD, params=params)

        yield Request(uri, callback=self.retrieve_positions)
Example #50
        def execute(self):
            self.api.cached_result = False

            # Build the request URL
            url = self.api_root + self.path
            full_url = 'https://' + self.host + url

            # Query the cache if one is available
            # and this request uses a GET method.
            if self.use_cache and self.api.cache and self.method == 'GET':
                cache_result = self.api.cache.get(
                    '%s?%s' % (url, urlencode(self.session.params)))
                # if cache result found and not expired, return it
                if cache_result:
                    # must restore api reference
                    if isinstance(cache_result, list):
                        for result in cache_result:
                            if isinstance(result, Model):
                                result._api = self.api
                    else:
                        if isinstance(cache_result, Model):
                            cache_result._api = self.api
                    self.api.cached_result = True
                    return cache_result

            # Continue attempting request until successful
            # or maximum number of retries is reached.
            retries_performed = 0
            while retries_performed < self.retry_count + 1:
                # handle running out of api calls
                if self.wait_on_rate_limit:
                    if self._reset_time is not None:
                        if self._remaining_calls is not None:
                            if self._remaining_calls < 1:
                                sleep_time = self._reset_time - int(
                                    time.time())
                                if sleep_time > 0:
                                    if self.wait_on_rate_limit_notify:
                                        log.warning(
                                            "Rate limit reached. Sleeping for: %d"
                                            % sleep_time)
                                    time.sleep(sleep_time +
                                               5)  # sleep for few extra sec

                # Apply authentication
                auth = None
                if self.api.auth:
                    auth = self.api.auth.apply_auth()

                # Request compression if configured
                if self.api.compression:
                    self.session.headers['Accept-encoding'] = 'gzip'

                # Execute request
                try:
                    resp = self.session.request(self.method,
                                                full_url,
                                                data=self.post_data,
                                                json=self.json_payload,
                                                timeout=self.api.timeout,
                                                auth=auth,
                                                proxies=self.api.proxy)
                except Exception as e:
                    six.reraise(TweepError,
                                TweepError('Failed to send request: %s' % e),
                                sys.exc_info()[2])

                rem_calls = resp.headers.get('x-rate-limit-remaining')

                if rem_calls is not None:
                    self._remaining_calls = int(rem_calls)
                elif isinstance(self._remaining_calls, int):
                    self._remaining_calls -= 1
                reset_time = resp.headers.get('x-rate-limit-reset')
                if reset_time is not None:
                    self._reset_time = int(reset_time)
                if self.wait_on_rate_limit and self._remaining_calls == 0 and (
                        # if we ran out of calls before waiting, retry the
                        # last call immediately
                        resp.status_code == 429 or resp.status_code == 420):
                    continue
                retry_delay = self.retry_delay
                # Exit request loop if non-retry error code
                if resp.status_code in (200, 204):
                    break
                elif (resp.status_code == 429
                      or resp.status_code == 420) and self.wait_on_rate_limit:
                    if 'retry-after' in resp.headers:
                        retry_delay = float(resp.headers['retry-after'])
                elif self.retry_errors and resp.status_code not in self.retry_errors:
                    break

                # Sleep before retrying request again
                time.sleep(retry_delay)
                retries_performed += 1

            # If an error was returned, throw an exception
            self.api.last_response = resp
            if resp.status_code and not 200 <= resp.status_code < 300:
                try:
                    error_msg, api_error_code = \
                        self.parser.parse_error(resp.text)
                except Exception:
                    error_msg = "Twitter error response: status code = %s" % resp.status_code
                    api_error_code = None

                if is_rate_limit_error_message(error_msg):
                    raise RateLimitError(error_msg, resp)
                else:
                    raise TweepError(error_msg, resp, api_code=api_error_code)

            # Parse the response payload
            self.return_cursors = self.return_cursors or 'cursor' in self.session.params
            result = self.parser.parse(self,
                                       resp.text,
                                       return_cursors=self.return_cursors)

            # Store result into cache if one is available.
            if self.use_cache and self.api.cache and self.method == 'GET' and result:
                self.api.cache.store(
                    '%s?%s' % (url, urlencode(self.session.params)), result)

            return result
Example #51
    def build_url(self, bucket, key, qs):
        url = '/buckets/%s/keys/%s' % tuple(map(quote_plus, (bucket, key)))
        if qs:
            url += '?' + urlencode(qs)
        return url
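
quote_plus protects the path segments while urlencode handles the query
string; a hedged example with made-up values (self omitted for brevity):

build_url('my bucket', 'key/1', {'r': 'quorum'})
# -> '/buckets/my+bucket/keys/key%2F1?r=quorum'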
Example #52
    def uri(self, path, params=None):
        return '{0}{1}{2}'.format(self.base_uri, path,
                                  params and '?' + urlencode(params) or '')
Example #53
    def test_environment_filtering(self):
        user = self.create_user()
        org = self.create_organization()
        team = self.create_team(organization=org)
        self.create_member(organization=org, user=user, teams=[team])

        self.login_as(user=user)

        project = self.create_project(organization=org, teams=[team])
        environment = self.create_environment(project=project,
                                              name="production")
        environment2 = self.create_environment(project=project)
        null_env = self.create_environment(project=project, name='')
        group = self.create_group(project=project)

        event_1 = self.create_event('a' * 32,
                                    group=group,
                                    datetime=self.min_ago,
                                    tags={'environment': environment.name})
        event_2 = self.create_event('b' * 32,
                                    group=group,
                                    datetime=self.min_ago,
                                    tags={'environment': environment.name})
        event_3 = self.create_event('c' * 32,
                                    group=group,
                                    datetime=self.min_ago,
                                    tags={'environment': environment2.name})
        event_4 = self.create_event('d' * 32,
                                    group=group,
                                    datetime=self.min_ago,
                                    tags={'environment': None})

        base_url = reverse('sentry-api-0-organization-events',
                           kwargs={
                               'organization_slug': org.slug,
                           })

        # test as part of query param
        url = '%s?environment=%s' % (base_url, environment.name)
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content
        assert len(response.data) == 2
        self.assert_events_in_response(response,
                                       [event_1.event_id, event_2.event_id])

        # test multiple as part of query param
        url = '%s?%s' % (base_url,
                         urlencode((
                             ('environment', environment.name),
                             ('environment', environment2.name),
                         )))
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        self.assert_events_in_response(
            response, [event_1.event_id, event_2.event_id, event_3.event_id])

        # test multiple as part of query param with no env
        url = '%s?%s' % (base_url,
                         urlencode((
                             ('environment', environment.name),
                             ('environment', null_env.name),
                         )))
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        self.assert_events_in_response(
            response, [event_1.event_id, event_2.event_id, event_4.event_id])

        # test as part of search
        url = '%s?query=environment:%s' % (base_url, environment.name)
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content
        assert len(response.data) == 2
        self.assert_events_in_response(response,
                                       [event_1.event_id, event_2.event_id])

        # test as part of search - no environment
        url = '%s?query=environment:""' % (base_url, )
        response = self.client.get(url, format='json')

        assert response.status_code == 200, response.content
        assert len(response.data) == 1
        self.assert_events_in_response(response, [event_4.event_id])

        # test nonexistent environment
        url = '%s?environment=notanenvironment' % (base_url, )
        response = self.client.get(url, format='json')
        assert response.status_code == 404
Example #54
    def _full_query_with_params(self, params):
        params = ("&" + urlencode(params)) if params else ""
        return params
Example #55
def encode_state(params):
    return parse.urlencode(params)
Example #56
    def build_url(self, path, format="html"):
        return u"{}?{}".format(path, urlencode({"format": format, "seed": b"123"}))
Example #57
def _http_response(host,
                   port,
                   method,
                   cmd,
                   data=None,
                   timeout=None,
                   pars=[],
                   hdrs={}):

    # Prepare all headers that need to be sent
    hdrs = dict(hdrs)

    # In Python 3.6 the http.client module changed how it uses the body of a
    # request to automatically calculate the Content-Length header, if none
    # has been previously specified.
    # In particular, file objects were previously handled automatically by
    # calling fstat(f).st_size on them. From 3.6 they no longer yield a
    # Content-Length header; instead they are sent using chunked transfer
    # encoding, which we do not explicitly support on the server side.
    #
    # In several places throughout the code we relied on the pre-3.6 rules,
    # so here we exercise them manually for 3.6+
    if (data is not None and sys.version_info >= (3, 6, 0)
            and 'content-length' not in hdrs and 'Content-Length' not in hdrs):
        try:
            thelen = len(data)
        except (TypeError, AttributeError):
            try:
                thelen = os.fstat(data.fileno()).st_size
            except (AttributeError, OSError):
                thelen = None
        if thelen is not None:
            hdrs['Content-Length'] = thelen

    url = cmd
    if pars:
        # urllib's urlencode expects tuple elements (if pars is a list)
        if not hasattr(pars, 'items'):
            pars = [(p[0], p[1]) for p in pars]
        pars = urlparse.urlencode(pars)
        url += '?' + pars

    # Go, go, go!
    logger.info("About to %s to %s:%d/%s", method, host, port, url)
    conn = httplib.HTTPConnection(host, port, timeout=timeout)
    _connect(conn)

    try:
        conn.request(method, url, body=data, headers=hdrs)
        logger.debug("%s request sent, waiting for a response", method)
    except socket.error as e:

        # If the server closes the connection while we write data
        # we still try to read the response, if any
        #
        # In OSX >= 10.10 this error can come up as EPROTOTYPE instead of EPIPE
        # (although the error code is not mentioned in send(2)). The actual
        # error recognised by the kernel in this situation is slightly different,
        # but still due to the remote end closing the connection. For a full
        # explanation of this, see:
        #
        # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
        tolerate = e.errno in (errno.EPROTOTYPE, errno.EPIPE)
        if not tolerate:
            try:
                conn.close()
            except Exception:
                pass
            raise

    start = time.time()
    response = conn.getresponse()
    logger.debug("Response to %s request received within %.4f [s]", method,
                 time.time() - start)

    return response
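The Content-Length fallback above is easy to exercise on its own. A minimal sketch of the same two rules (len() for in-memory bodies, os.fstat for real file objects), independent of the server code:

import os
import tempfile

def content_length(data):
    # Mirror the pre-3.6 http.client rules applied in _http_response above.
    try:
        return len(data)  # bytes/str bodies
    except (TypeError, AttributeError):
        try:
            return os.fstat(data.fileno()).st_size  # real file objects
        except (AttributeError, OSError):
            return None  # unknown size; 3.6+ would fall back to chunked

assert content_length(b"hello") == 5
with tempfile.TemporaryFile() as f:
    f.write(b"hello")
    f.flush()
    assert content_length(f) == 5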
Example #58
def _post_body(self, params):
    return urlencode(params)
Example #59
    def authorize_payment(self):
        """
		An authorization is performed when user’s payment details are successfully authenticated by the bank.
		The money is deducted from the customer’s account, but will not be transferred to the merchant’s account
		until it is explicitly captured by merchant.
		"""
        data = json.loads(self.integration_request.data)
        settings = self.get_settings(data)

        try:
            resp = make_get_request(
                "https://api.razorpay.com/v1/payments/{0}".format(
                    self.data.razorpay_payment_id),
                auth=(settings.api_key, settings.api_secret))

            if resp.get("status") == "authorized":
                self.integration_request.update_status(data, 'Authorized')
                self.flags.status_changed_to = "Authorized"

            elif data.get('subscription_id'):
                if resp.get("status") == "refunded":
                    # if the subscription start date is in the future,
                    # Razorpay refunds the amount after authorizing the card
                    # details, so the status changes to Verified

                    self.integration_request.update_status(data, 'Completed')
                    self.flags.status_changed_to = "Verified"

                if resp.get("status") == "captured":
                    # if the subscription starts immediately, Razorpay
                    # charges the actual amount, so the status changes to
                    # Completed

                    self.integration_request.update_status(data, 'Completed')
                    self.flags.status_changed_to = "Completed"

            else:
                frappe.log_error(str(resp), 'Razorpay Payment not authorized')

        except Exception:
            frappe.log_error(frappe.get_traceback())
            # authorization failed; fall through and report the status below

        status = frappe.flags.integration_request.status_code

        redirect_to = data.get('notes', {}).get('redirect_to') or None
        redirect_message = data.get('notes',
                                    {}).get('redirect_message') or None

        if self.flags.status_changed_to in ("Authorized", "Verified",
                                            "Completed"):
            if self.data.reference_doctype and self.data.reference_docname:
                custom_redirect_to = None
                try:
                    frappe.flags.data = data
                    custom_redirect_to = frappe.get_doc(
                        self.data.reference_doctype,
                        self.data.reference_docname).run_method(
                            "on_payment_authorized",
                            self.flags.status_changed_to)

                except Exception:
                    frappe.log_error(frappe.get_traceback())

                if custom_redirect_to:
                    redirect_to = custom_redirect_to

            redirect_url = 'payment-success?doctype={0}&docname={1}'.format(
                self.data.reference_doctype, self.data.reference_docname)
        else:
            redirect_url = 'payment-failed'

        if redirect_to:
            redirect_url += ('&' if '?' in redirect_url else '?') + urlencode(
                {'redirect_to': redirect_to})
        if redirect_message:
            redirect_url += ('&' if '?' in redirect_url else '?') + urlencode(
                {'redirect_message': redirect_message})

        return {"redirect_to": redirect_url, "status": status}
Example #60
def exploitability_report(request, default_context=None):
    context = default_context or {}

    if not request.GET.get('product'):
        url = reverse('exploitability:report')
        url += '?' + urlencode({'product': settings.DEFAULT_PRODUCT})
        return redirect(url)

    form = ExploitabilityReportForm(
        request.GET,
        active_versions=context['active_versions'],
    )
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    product = form.cleaned_data['product']
    version = form.cleaned_data['version']

    api = SuperSearchUnredacted()
    params = {
        'product': product,
        'version': version,
        '_results_number': 0,
        # This aggregates on crashes that do NOT contain these
        # key words. For example, if a crash has
        # {'exploitability': 'error: unable to analyze dump'}
        # then it won't get included.
        'exploitability': ['!error', '!interesting'],
        '_aggs.signature': 'exploitability',
        '_facets_size': settings.EXPLOITABILITY_BATCH_SIZE,
    }
    results = api.get(**params)

    base_signature_report_dict = {
        'product': product,
    }
    if version:
        base_signature_report_dict['version'] = version

    crashes = []
    categories = ('high', 'none', 'low', 'medium', 'null')
    for signature_facet in results['facets']['signature']:
        # this 'signature_facet' will look something like this:
        #
        #  {
        #      'count': 1234,
        #      'term': 'My | Signature',
        #      'facets': {
        #          'exploitability': [
        #              {'count': 1, 'term': 'high'},
        #              {'count': 23, 'term': 'medium'},
        #              {'count': 11, 'term': 'other'},
        #
        # And we only want to include those where:
        #
        #   low, medium, or high is greater than 0
        #

        exploitability = signature_facet['facets']['exploitability']
        if not any(x['count'] for x in exploitability
                   if x['term'] in ('high', 'medium', 'low')):
            continue
        crash = {
            'bugs': [],
            'signature': signature_facet['term'],
            'high_count': 0,
            'medium_count': 0,
            'low_count': 0,
            'none_count': 0,
            'url': (reverse('signature:signature_report') + '?' + urlencode(
                dict(base_signature_report_dict,
                     signature=signature_facet['term']))),
        }
        for cluster in exploitability:
            if cluster['term'] in categories:
                crash['{}_count'.format(cluster['term'])] = cluster['count']
        crash['med_or_high'] = (crash.get('high_count', 0) +
                                crash.get('medium_count', 0))
        crashes.append(crash)

    # Sort by the 'med_or_high' key first (descending),
    # and by the signature second (ascending).
    crashes.sort(key=lambda x: (-x['med_or_high'], x['signature']))

    # now, let's go back and fill in the bugs
    signatures = [x['signature'] for x in crashes]
    if signatures:
        qs = (models.BugAssociation.objects.filter(
            signature__in=signatures).values('bug_id', 'signature').order_by(
                '-bug_id', 'signature'))
        bugs = defaultdict(list)
        for item in qs:
            bugs[item['signature']].append(item['bug_id'])

        for crash in crashes:
            crash['bugs'] = bugs.get(crash['signature'], [])

    context['crashes'] = crashes
    context['product'] = product
    context['version'] = version
    context['report'] = 'exploitable'

    return render(request, 'exploitability/report.html', context)