Exemple #1
0
def add_preserved_filters(context, url, popup=False):
    """Merge the admin changelist's preserved filters into ``url``.

    Reads ``opts`` (model options) and ``preserved_filters`` (an encoded
    query string) from ``context``.  When the target URL resolves to the
    model's changelist view, the wrapped ``_changelist_filters`` value is
    unpacked so the changelist gets its own filters back.  Query values
    already present on ``url`` always win over preserved ones.
    """
    opts = context.get('opts')
    preserved = context.get('preserved_filters')

    url_parts = list(urlparse(url))
    existing_qs = dict(parse_qsl(url_parts[4]))
    combined = {}

    if opts and preserved:
        filters = dict(parse_qsl(preserved))

        try:
            match = resolve(url)
        except Resolver404:
            pass
        else:
            resolved_name = '%s:%s' % (match.namespace, match.url_name)
            changelist_name = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
            # Returning to the changelist itself: unwrap its own filters.
            if resolved_name == changelist_name and '_changelist_filters' in filters:
                filters = dict(parse_qsl(filters['_changelist_filters']))

        combined.update(filters)

    if popup:
        from django.contrib.admin.options import IS_POPUP_VAR
        combined[IS_POPUP_VAR] = 1

    # Explicit parameters on the URL override anything merged above.
    combined.update(existing_qs)

    url_parts[4] = urlencode(combined)
    return urlunparse(url_parts)
Exemple #2
0
def twitter():
    """Two-step Twitter OAuth 1.0a endpoint (Flask).

    When the JSON payload carries ``oauth_token`` and ``oauth_verifier``,
    the request token is exchanged for an access token and an application
    JWT is returned.  Otherwise a fresh request token is fetched from
    Twitter and handed back so the client can start the dance.
    """
    request_token_url = 'https://api.twitter.com/oauth/request_token'
    access_token_url = 'https://api.twitter.com/oauth/access_token'

    token_value = request.json.get('oauth_token')
    verifier_value = request.json.get('oauth_verifier')

    if not (token_value and verifier_value):
        # Step 1: obtain a request token for the client.
        oauth = OAuth1(app.config['OAUTH2_CLIENT_ID'],
                       client_secret=app.config['OAUTH2_CLIENT_SECRET'],
                       callback_uri=app.config.get('TWITTER_CALLBACK_URL', request.headers.get('Referer', ''))
        )
        r = requests.post(request_token_url, auth=oauth)
        oauth_token = dict(parse_qsl(r.text))
        return jsonify(oauth_token)

    # Step 2: trade the request token for an access token.
    auth = OAuth1(app.config['OAUTH2_CLIENT_ID'],
                  client_secret=app.config['OAUTH2_CLIENT_SECRET'],
                  resource_owner_key=token_value,
                  verifier=verifier_value)
    r = requests.post(access_token_url, auth=auth)
    profile = dict(parse_qsl(r.text))

    login = profile['screen_name']
    if app.config['AUTH_REQUIRED'] and not db.is_user_valid(login=login):
        return jsonify(status="error", message="User %s is not authorized" % login), 403

    token = create_token(profile['user_id'], '@'+login, login, provider='twitter')
    return jsonify(token=token)
Exemple #3
0
    def _build_url(self):
        """Build resource url

        Parsing ``self._url``, add ``self._params`` to query string if need

        :return self._url: resource url
        """
        scheme, netloc, path, params, query, fragment = urlparse(self._url)

        # IDN domains support
        netloc = to_unicode(netloc)
        # removed idna encode as it was causing python3 urlunparse to error
        # print(repr(netloc), repr(netloc.encode('idna')))
        # NOTE(review): despite the comment above, netloc is still encoded
        # with 'idna' further down when the parts list is built — confirm
        # which behavior is intended.

        if not netloc:
            raise ValueError("Invalid url")
        elif not scheme:
            # Default to plain HTTP when the URL carries no scheme.
            scheme = "http"

        # Flatten self._params into (key, value) pairs; tuple values fan out
        # into one pair per element, non-str/non-tuple values are dropped.
        tmp = []
        if self._params is not None:
            for param, value in self._params:
                if isinstance(value, tuple):
                    for i in value:
                        tmp.append((param, i))
                elif isinstance(value, str):
                    tmp.append((param, value))

        if tmp:
            # Extra params present: prepend the existing query's pairs.
            tmp = parse_qsl(query, keep_blank_values=True) + tmp
        else:
            # No extra params: re-parse strictly; if the query is not a
            # well-formed key/value string, keep it verbatim as a str.
            try:
                tmp = parse_qsl(query, keep_blank_values=True, strict_parsing=True)
            except ValueError:
                tmp = query

        # tmp is either a raw query str (quote it) or a list of pairs
        # (urlencode it); urlnoencode is a project helper — presumably the
        # non-escaping counterpart of urlencode (TODO confirm).
        if isinstance(tmp, str):
            encode = quote_plus
            noencode = lambda result: result
        else:
            encode = urlencode
            noencode = urlnoencode

        if self._encode_query:
            query = encode(tmp)
        else:
            query = noencode(tmp)

        del tmp
        # print(repr([scheme, netloc, path, query, fragment]))
        # All six components are byte-encoded before urlunparse; the netloc
        # goes through IDNA so international domain names survive.
        url_unparse_list = [
            scheme.encode('utf8'),
            netloc.encode('idna'),
            path.encode('utf8'),
            params.encode('utf8'),
            query.encode('utf8'),
            fragment.encode('utf8')]

        self._url = urlunparse(url_unparse_list)
        return self._url
	def respond_all(environ, start_response):
	    status = '200 OK' # HTTP Status
	    headers = [('Content-type', 'text/plain; charset=utf-8')] # HTTP Headers
	    start_response(status, headers)

	    my_dict = {'method_name': environ['REQUEST_METHOD'], 'path_info': environ['PATH_INFO']}

	    if len(environ['CONTENT_LENGTH'])>0:
	    	my_dict['content_length'] = environ['CONTENT_LENGTH']
	    	request_body = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH'])).decode('utf-8')
	    	request_body = request_body.strip('[] ')
	    	body_dict = parse.parse_qsl(request_body)
	    	my_dict.update(body_dict)
	    else:
	    	pass

	    if len(environ['QUERY_STRING'])>0:
	    	qs = environ['QUERY_STRING']
	    	query_dict = parse.parse_qsl(qs)
	    	my_dict.update(query_dict)
	    else:
	    	pass

	    json_response = json.dumps(my_dict, sort_keys=True, indent=4, separators=(',',': '))
	    
	    return [json_response.encode('utf-8')]
Exemple #5
0
    def get_redirect_to(self):
        """Build the redirect target for an authorization response.

        The response parameters are carried either in the URL fragment
        (implicit / ``response_mode=fragment``) or merged into the query
        string of ``self.redirect_uri``.
        """
        assert self.is_redirect()

        # response_mode may live as an attribute or as a request parameter.
        if hasattr(self.request, 'response_mode'):
            mode = self.request.response_mode
        else:
            mode = self.request.get('response_mode')

        if mode:
            use_fragment = mode == 'fragment'
        else:
            # implicit flows ('token' response type) default to fragment
            use_fragment = 'token' in set(self.request.response_type.split())

        parts = urlparse(self.redirect_uri)
        if use_fragment:
            query = parts.query
            fragment = self.to_query_string()
        else:
            pairs = parse_qsl(parts.query) + parse_qsl(self.to_query_string())
            query = urlencode(pairs)
            fragment = parts.fragment

        return urlunparse(parts[:4] + (query, fragment))
Exemple #6
0
def append_qs(url, query_string):
    """Append query_string values to an existing URL and return it as a string.

    query_string can be:
        * an encoded string: 'test3=val1&test3=val2'
        * a dict of strings: {'test3': 'val'}
        * a dict of lists of strings: {'test3': ['val1', 'val2']}
        * a list of tuples: [('test3', 'val1'), ('test3', 'val2')]

    """
    split_result = urlparse.urlsplit(url)
    # keep blank values so existing "key=" entries survive the round trip
    pairs = urlparse.parse_qsl(split_result.query, True)

    if isstr(query_string):
        pairs += urlparse.parse_qsl(query_string)
    elif isdict(query_string):
        for key, value in query_string.items():
            if islist(value):
                # one pair per element, preserving multi-valued keys
                pairs.extend((key, element) for element in value)
            else:
                pairs.append((key, value))
    elif islist(query_string):
        pairs += query_string
    else:
        raise TypeError('Unexpected query_string type')

    return urlparse.urlunsplit((
        split_result.scheme,
        split_result.netloc,
        split_result.path,
        urlencode_unicode(pairs),
        split_result.fragment,
    ))
def add_preserved_filters(context, url, popup=False, to_field=None):
    """Re-attach the admin's preserved changelist filters to ``url``.

    ``context`` supplies ``opts`` (model options) and ``preserved_filters``
    (an encoded query string).  The URL is resolved (with the script prefix
    stripped) to detect a return to the model's changelist, in which case
    the nested ``_changelist_filters`` value is unpacked.  Popup and
    to-field markers are appended when requested; parameters already on
    ``url`` take precedence over everything merged in.
    """
    opts = context.get('opts')
    raw_filters = context.get('preserved_filters')

    pieces = list(urlparse(url))
    existing_qs = dict(parse_qsl(pieces[4]))
    combined = {}

    if opts and raw_filters:
        filters = dict(parse_qsl(raw_filters))

        # resolve() wants a path relative to the script prefix
        candidate = '/%s' % url.partition(get_script_prefix())[2]
        try:
            match = resolve(candidate)
        except Resolver404:
            pass
        else:
            resolved_name = '%s:%s' % (match.app_name, match.url_name)
            changelist_name = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
            if resolved_name == changelist_name and '_changelist_filters' in filters:
                filters = dict(parse_qsl(filters['_changelist_filters']))

        combined.update(filters)

    if popup:
        from django.contrib.admin.options import IS_POPUP_VAR
        combined[IS_POPUP_VAR] = 1
    if to_field:
        from django.contrib.admin.options import TO_FIELD_VAR
        combined[TO_FIELD_VAR] = to_field

    combined.update(existing_qs)

    pieces[4] = urlencode(combined)
    return urlunparse(pieces)
Exemple #8
0
def url_concat(url, args):
    """Concatenate url and arguments regardless of whether
    url has existing query parameters.

    ``args`` may be either a dictionary or a list of key-value pairs
    (the latter allows for multiple values with the same key.

    >>> url_concat("http://example.com/foo", dict(c="d"))
    'http://example.com/foo?c=d'
    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'
    >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")])
    'http://example.com/foo?a=b&c=d&c=d2'
    """
    parts = urlparse(url)

    # Normalize args into an iterable of pairs, rejecting anything else.
    if isinstance(args, dict):
        extra = args.items()
    elif isinstance(args, (list, tuple)):
        extra = args
    else:
        err = "'args' parameter should be dict, list or tuple. Not {0}".format(
            type(args))
        raise TypeError(err)

    pairs = parse_qsl(parts.query, keep_blank_values=True)
    pairs.extend(extra)

    return urlunparse((
        parts[0],
        parts[1],
        parts[2],
        parts[3],
        urlencode(pairs),
        parts[5]))
Exemple #9
0
def cleanup(url):
    """Follow redirects, then strip known tracking parameters from the
    query string and the fragment of the final URL.

    Any failure during cleaning is logged and the (already-followed) URL
    is returned unchanged.
    """
    url = _follow(url)
    # remove trackers params
    try:
        parts = urlparse(url)

        def _strip(pairs):
            # drop every key matching one of the tracker prefixes
            for annoying in ANNOYING_PARAMS:
                pairs = [(k, v) for k, v in pairs if not k.startswith(annoying)]
            return pairs

        # cleanup query param
        query_pairs = parse_qsl(parts.query)
        # only touch it when non-empty and parseable as key/value pairs
        if parts.query and query_pairs:
            parts = parts._replace(query=urlencode(_strip(query_pairs)))

        # cleanup fragment param
        fragment_pairs = parse_qsl(parts.fragment)
        if parts.fragment and fragment_pairs:
            parts = parts._replace(fragment=urlencode(_strip(fragment_pairs)))

        url = parts.geturl()
    except Exception:
        app.logger.exception("Problem cleaning url %s", url)

    app.logger.info("Final url %s", url)
    return url
def build_url(base, query_params=None, fragment=None):
    """Construct a URL based off of base containing all parameters in
    the query portion of base plus any additional parameters.
    Taken from https://github.com/NateFerrero/oauth2lib/blob/master/oauth2lib/utils.py and extended to allow
    parameters as fragment

    :param base: Base URL
    :type base: str
    :param query_params: Additional query parameters to include.
    :type query_params: dict
    :param fragment: Additional parameters to include in the fragment section of the url
    :type fragment: dict
    :rtype: str
    """
    # BUG FIX: the defaults were mutable dicts ({}) shared across calls and
    # mutated via .update() below, so parameters leaked from one call into
    # the next (and into caller-supplied dicts).  Default to None and copy.
    query_params = dict(query_params) if query_params else {}
    fragment = dict(fragment) if fragment else {}

    url = urlparse.urlparse(base)
    # parameters already present in the base URL override the extras
    query_params.update(urlparse.parse_qsl(url.query, True))
    # drop entries explicitly set to None
    query_params = {k: v for k, v in query_params.iteritems() if v is not None}

    fragment.update(urlparse.parse_qsl(url.fragment, True))
    fragment = {k: v for k, v in fragment.iteritems() if v is not None}

    return urlparse.urlunparse((url.scheme,
                                url.netloc,
                                url.path,
                                url.params,
                                urllib.urlencode(query_params),
                                urllib.urlencode(fragment)))
Exemple #11
0
def twitter():
    """Twitter OAuth 1.0a login endpoint (Flask).

    Without ``oauth_token``/``oauth_verifier`` query arguments, fetches a
    request token and redirects the browser to Twitter's authenticate
    page.  On the callback leg, exchanges the token for an access token,
    creates the user on first login, and returns an application JWT.
    """
    request_token_url = 'https://api.twitter.com/oauth/request_token'
    access_token_url = 'https://api.twitter.com/oauth/access_token'
    authenticate_url = 'https://api.twitter.com/oauth/authenticate'

    token_arg = request.args.get('oauth_token')
    verifier_arg = request.args.get('oauth_verifier')

    if not (token_arg and verifier_arg):
        # Step 1: request token, then bounce the user to Twitter.
        oauth = OAuth1(app.config['TWITTER_CONSUMER_KEY'],
                       client_secret=app.config['TWITTER_CONSUMER_SECRET'],
                       callback_uri=app.config['TWITTER_CALLBACK_URL'])
        r = requests.post(request_token_url, auth=oauth)
        oauth_token = dict(parse_qsl(r.text))
        qs = urlencode(dict(oauth_token=oauth_token['oauth_token']))
        return redirect(authenticate_url + '?' + qs)

    # Step 2: exchange the verified request token for an access token.
    auth = OAuth1(app.config['TWITTER_CONSUMER_KEY'],
                  client_secret=app.config['TWITTER_CONSUMER_SECRET'],
                  resource_owner_key=token_arg,
                  verifier=verifier_arg)
    r = requests.post(access_token_url, auth=auth)
    profile = dict(parse_qsl(r.text))

    existing = User.query.filter_by(twitter=profile['user_id']).first()
    if existing:
        return jsonify(token=create_jwt_token(existing))

    # first login: provision a local account
    new_user = User(twitter=profile['user_id'],
                    first_name=profile['screen_name'])
    db.session.add(new_user)
    db.session.commit()
    return jsonify(token=create_jwt_token(new_user))
Exemple #12
0
 def legacy_arguments(self):
     """Collect request arguments into a ``LegacyMultiDict``.

     Merges query-string pairs (when the request has a ``uri``) with
     form-encoded body pairs (when the content type matches
     ``_FORM_CTYPE``).
     """
     collected = LegacyMultiDict()
     if hasattr(self, 'uri'):
         collected.update(parse_qsl(self.parsed_uri.query))
     payload = getattr(self, 'body', None)
     if payload and getattr(self, 'content_type', None) == _FORM_CTYPE:
         collected.update(parse_qsl(self.body.decode('ascii')))
     return collected
Exemple #13
0
 def form_arguments(self):
     """Collect request arguments into a plain dict.

     Merges query-string pairs (when the request has a ``uri``) with
     form-encoded body pairs (when the content type matches
     ``FORM_CTYPE``); body values overwrite query values on key clashes.
     """
     collected = {}
     if hasattr(self, 'uri'):
         collected.update(parse_qsl(self.parsed_uri.query))
     payload = getattr(self, 'body', None)
     if payload and getattr(self, 'content_type', None) == FORM_CTYPE:
         collected.update(parse_qsl(self.body.decode('ascii')))
     return collected
Exemple #14
0
    def assertURLEqual(self, first, second, msg=None):
        """Check that two arguments are equivalent URLs. Ignores the order of
        query arguments.
        """
        parsed_first = urlparse(first)
        parsed_second = urlparse(second)
        # scheme, netloc and path must match exactly
        self.assertEqual(parsed_first[:3], parsed_second[:3], msg)

        # query pairs are compared order-insensitively
        self.assertEqual(sorted(parse_qsl(parsed_first.query)),
                         sorted(parse_qsl(parsed_second.query)), msg)
Exemple #15
0
 def _encode_url(self, data):
     query = self.query
     if data:
         data = native_str(data)
         if isinstance(data, str):
             data = parse_qsl(data)
         else:
             data = mapping_iterator(data)
         query = parse_qsl(query)
         query.extend(data)
         query = urlencode(query)
     self.query = query
Exemple #16
0
    def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_config, idp_conf):
        """End-to-end proxy flow: OIDC RP -> SATOSA proxy -> fake SAML IdP.

        Wires an OIDC frontend and a SAML backend into one proxy, drives a
        full authentication round trip with a FakeIdP, and asserts that the
        id_token returned in the redirect fragment carries the test user's
        attributes.
        """
        user_id = "testuser1"

        # proxy config
        satosa_config_dict["FRONTEND_MODULES"] = [oidc_frontend_config]
        satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config]
        satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name],
                                                                               "saml": [attr_name]}
                                                                   for attr_name in USERS[user_id]}
        _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict))

        # application
        test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse)

        # get frontend OP config info
        provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8"))

        # create auth req (implicit flow: id_token comes back in the fragment)
        claims_request = ClaimsRequest(id_token=Claims(**{k: None for k in USERS[user_id]}))
        req_args = {"scope": "openid", "response_type": "id_token", "client_id": CLIENT_ID,
                    "redirect_uri": REDIRECT_URI, "nonce": "nonce",
                    "claims": claims_request.to_json()}
        auth_req = urlparse(provider_config["authorization_endpoint"]).path + "?" + urlencode(req_args)

        # make auth req to proxy; the proxy redirects to the IdP
        proxied_auth_req = test_client.get(auth_req)
        assert proxied_auth_req.status == "303 See Other"

        # config test IdP
        backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0])
        idp_conf["metadata"]["inline"].append(backend_metadata_str)
        fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False))

        # create auth resp from the SAMLRequest/RelayState in the redirect
        req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query))
        url, authn_resp = fakeidp.handle_auth_req(
            req_params["SAMLRequest"],
            req_params["RelayState"],
            BINDING_HTTP_REDIRECT,
            user_id,
            response_binding=BINDING_HTTP_REDIRECT)

        # make auth resp to proxy
        authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp)
        authn_resp = test_client.get("/" + authn_resp_req)
        assert authn_resp.status == "303 See Other"

        # verify auth resp from proxy: id_token is in the URL fragment
        resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).fragment))
        signing_key = RSAKey(key=rsa_load(oidc_frontend_config["config"]["signing_key_path"]),
                             use="sig", alg="RS256")
        id_token_claims = JWS().verify_compact(resp_dict["id_token"], keys=[signing_key])
        assert all((k, v[0]) in id_token_claims.items() for k, v in USERS[user_id].items())
Exemple #17
0
    def on_get(self, req, res):

        """Create Twitter JWT token

        Falcon handler for the two-step Twitter OAuth 1.0a dance.  With
        ``oauth_token``/``oauth_verifier`` params the request token is
        exchanged for an access token, the user is looked up (or created)
        and a JWT is returned; otherwise a request token is fetched and a
        301 redirect to Twitter's authenticate page is emulated.
        """
        request_token_url = 'https://api.twitter.com/oauth/request_token'
        access_token_url = 'https://api.twitter.com/oauth/access_token'
        authenticate_url = 'https://api.twitter.com/oauth/authenticate'

        if req.get_param('oauth_token') and req.get_param('oauth_verifier'):
            auth = OAuth1(settings.TWITTER_KEY,
                          client_secret=settings.TWITTER_SECRET,
                          resource_owner_key=req.get_param('oauth_token'),
                          verifier=req.get_param('oauth_verifier'))
            logger.debug("Twitter OAuth: Got auth session.")
            r = requests.post(access_token_url, auth=auth)
            profile = dict(parse_qsl(r.text))
            logger.debug("Twitter OAuth: User profile retrieved")

            try:
                # BUG FIX: '|' binds tighter than '==', so the original
                # unparenthesized expression compared User.twitter against
                # (profile['user_id'] | (User.username == ...)) instead of
                # OR-ing the two comparisons; each side must be wrapped.
                user = User.select().where(
                    (User.twitter == profile['user_id']) |
                    (User.username == profile['screen_name'])).get()
            except Exception:
                # No matching user: register one.  (Bare 'except:' narrowed
                # to Exception so SystemExit/KeyboardInterrupt pass through.)
                user = User.create(twitter=profile['user_id'],
                                   username=profile['screen_name'])
                user.save()

            token = utils.create_jwt_token(user)
            res.body = json.dumps({"token": token})
            res.status = falcon.HTTP_200
        else:
            oauth = OAuth1(settings.TWITTER_KEY,
                           client_secret=settings.TWITTER_SECRET,
                           callback_uri=settings.TWITTER_CALLBACK_URI)
            logger.debug("Twitter OAuth: Got auth session.")
            r = requests.post(request_token_url, auth=oauth)
            oauth_token = dict(parse_qsl(r.text))
            logger.debug("Twitter OAuth: User profile retrieved")
            qs = urlencode(dict(oauth_token=oauth_token['oauth_token']))

            # Falcon doesn't support redirects, so we have to fake it
            # this implementation has been taken from werkzeug
            final_url = authenticate_url + '?' + qs
            res.body = (
                '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
                '<title>Redirecting...</title>\n'
                '<h1>Redirecting...</h1>\n'
                '<p>You should be redirected automatically to target URL: '
                '<a href="{0}">{0}</a>.  If not click the link.'.format(final_url)
            )
            res.location = final_url
            res.status = falcon.HTTP_301
Exemple #18
0
    def test_provisioning_uri(self):
        """provisioning_uri() must encode secret, counter, issuer, digits
        and algorithm into a well-formed otpauth:// URI."""

        def check(uri, expected_path, expected_query):
            # every HOTP URI shares the otpauth scheme and the hotp host
            url = urlparse(uri)
            self.assertEqual(url.scheme, 'otpauth')
            self.assertEqual(url.netloc, 'hotp')
            self.assertEqual(url.path, expected_path)
            self.assertEqual(dict(parse_qsl(url.query)), expected_query)

        hotp = pyotp.HOTP('wrn3pqx5uqxqvnqr')
        check(hotp.provisioning_uri('mark@percival'),
              '/mark%40percival',
              {'secret': 'wrn3pqx5uqxqvnqr', 'counter': '0'})
        check(hotp.provisioning_uri('mark@percival', initial_count=12),
              '/mark%40percival',
              {'secret': 'wrn3pqx5uqxqvnqr', 'counter': '12'})
        check(hotp.provisioning_uri('mark@percival', issuer_name='FooCorp!'),
              '/FooCorp%21:mark%40percival',
              {'secret': 'wrn3pqx5uqxqvnqr', 'counter': '0',
               'issuer': 'FooCorp!'})

        key = 'c7uxuqhgflpw7oruedmglbrk7u6242vb'
        hotp = pyotp.HOTP(key, digits=8, digest=hashlib.sha256)
        check(hotp.provisioning_uri('baco@peperina', issuer_name='FooCorp'),
              '/FooCorp:baco%40peperina',
              {'secret': 'c7uxuqhgflpw7oruedmglbrk7u6242vb',
               'counter': '0', 'issuer': 'FooCorp',
               'digits': '8', 'algorithm': 'SHA256'})

        hotp = pyotp.HOTP(key, digits=8)
        check(hotp.provisioning_uri('baco@peperina', issuer_name='FooCorp',
                                    initial_count=10),
              '/FooCorp:baco%40peperina',
              {'secret': 'c7uxuqhgflpw7oruedmglbrk7u6242vb',
               'counter': '10', 'issuer': 'FooCorp',
               'digits': '8'})
Exemple #19
0
    def on_user_web_access(self, user_id, get_array, post_array):
        """Web handler that links a user's account to Twitter (OAuth 1.0a).

        Three states driven by the query string:
          * ``connect=twitter`` — fetch a request token, persist it, and
            show a link to Twitter's authorize page.
          * ``twitter=<KEY>``   — callback: exchange the stored request
            token for an access token and persist it for the user.
          * otherwise           — show the "connect" link.
        """
        TWITTER_KEY = "1"
        db = a.get_db()

        if 'connect' in get_array and get_array['connect'] == 'twitter':
            consumer = oauth.Consumer(consumer_key, consumer_secret)
            client = oauth.Client(consumer)

            resp, content = client.request(request_token_url, "GET")
            if resp['status'] != '200':
                raise Exception("Invalid response %s." % resp['status'])

            request_token = dict(parse.parse_qsl(content.decode()))

            # SECURITY FIX: token values were concatenated straight into the
            # SQL text (SQL injection); bind them as parameters instead —
            # the db.sql() API already accepts a parameter tuple.
            db.sql("INSERT INTO request_tokens(user_id, token, token_secret) VALUES(%s, %s, %s)",
                   (str(user_id), request_token['oauth_token'], request_token['oauth_token_secret']))
            db.commit()


            a.p('<p>Connexion a Twitter requise.</p>')
            # NOTE(review): oauth_token is interpolated into HTML unescaped;
            # it comes from Twitter's response, but escaping would be safer.
            a.p('<a href="'+authorize_url+'?oauth_token='+(request_token['oauth_token'])+'">Continuer sur Twitter</a>')

        elif 'twitter' in get_array and get_array["twitter"] == TWITTER_KEY:
            consumer = oauth.Consumer(consumer_key, consumer_secret)
            client = oauth.Client(consumer)

            oauth_token = get_array['oauth_token']
            oauth_verifier = get_array['oauth_verifier']

            request_token = db.sql('SELECT * FROM request_tokens WHERE user_id=%s', (str(user_id),))[0]

            token = oauth.Token(request_token[1],
                request_token[2])
            token.set_verifier(oauth_verifier)
            client = oauth.Client(consumer, token)

            resp, content = client.request(access_token_url, "POST")

            if resp['status'] != '200':
                raise Exception("Invalid response %s." % resp['status'])

            access_token = dict(parse.parse_qsl(content.decode()))

            # SECURITY FIX: parameterized, as above.
            db.sql("INSERT INTO usr(usr_id, usr_token, usr_token_secret) VALUES(%s, %s, %s)",
                   (str(user_id), access_token['oauth_token'], access_token['oauth_token_secret']))
            db.sql('DELETE FROM request_tokens WHERE user_id = %s', (str(user_id),))
            db.commit()

            a.p('<p>Twitter pairing successful</p>')
        else:

            a.p('<a href="'+a.get_url()+'&connect=twitter">Connect with Twitter</a>')
Exemple #20
0
    def run_test(self, satosa_config_dict, sp_conf, oidc_backend_config, frontend_config):
        """Proxy flow: SAML SP -> SATOSA proxy -> stubbed OIDC OP.

        Wires a SAML frontend and an OIDC backend into one proxy, forges
        the OP's id_token response directly (no real OP), and asserts the
        SAMLResponse delivered to the fake SP carries the test user's
        attributes.
        """
        subject_id = "testuser1"
        # proxy config
        satosa_config_dict["FRONTEND_MODULES"] = [frontend_config]
        satosa_config_dict["BACKEND_MODULES"] = [oidc_backend_config]
        satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name],
                                                                               "saml": [attr_name]}
                                                                   for attr_name in USERS[subject_id]}
        frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict))

        # application
        test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse)

        # config test SP
        frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0])
        sp_conf["metadata"]["inline"].append(frontend_metadata_str)
        fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False))

        # create auth req
        destination, req_args = fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id)
        auth_req = urlparse(destination).path + "?" + urlencode(req_args)

        # make auth req to proxy; proxy redirects to the OP's authorization endpoint
        proxied_auth_req = test_client.get(auth_req)
        assert proxied_auth_req.status == "302 Found"
        parsed_auth_req = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query))

        # create auth resp: forge the id_token the OP would have issued,
        # echoing back the proxy's nonce and state
        id_token_claims = {k: v[0] for k, v in USERS[subject_id].items()}
        id_token_claims["sub"] = subject_id
        id_token_claims["iat"] = time.time()
        id_token_claims["exp"] = time.time() + 3600
        id_token_claims["iss"] = "https://op.example.com"
        id_token_claims["aud"] = oidc_backend_config["config"]["client"]["client_metadata"]["client_id"]
        id_token_claims["nonce"] = parsed_auth_req["nonce"]
        id_token = IdToken(**id_token_claims).to_jwt()
        authn_resp = {"state": parsed_auth_req["state"], "id_token": id_token}

        # make auth resp to proxy at the backend's registered redirect_uri
        redirect_uri_path = urlparse(
            oidc_backend_config["config"]["client"]["client_metadata"]["redirect_uris"][0]).path
        authn_resp_req = redirect_uri_path + "?" + urlencode(authn_resp)
        authn_resp = test_client.get(authn_resp_req)
        assert authn_resp.status == "303 See Other"

        # verify auth resp from proxy
        resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).query))
        auth_resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"], BINDING_HTTP_REDIRECT)
        assert auth_resp.ava == USERS[subject_id]
Exemple #21
0
    def register(self):
        """Interactively obtain Twitter OAuth 1.0a credentials (PIN flow).

        Runs the out-of-band three-step dance: request token -> browser
        authorization (user copies the PIN) -> access token, then stores
        the resulting key/secret via ``self.context.params``.

        NOTE(review): ``pct`` and ``service_name`` are module-level names
        not visible in this block — confirm they supply the consumer
        credentials and config section name.
        """
        import webbrowser
        # Python 2/3 compatible import of parse_qsl
        if sys.version_info[0] >= 3:
            from urllib.parse import parse_qsl
        else:
            from urlparse import parse_qsl
        import oauth2 as oauth

        consumer = oauth.Consumer(pct.consumer_key, pct.consumer_secret)
        client = oauth.Client(consumer)
        # step 1 - obtain a request token
        resp, content = client.request(
            'https://api.twitter.com/oauth/request_token', 'POST')
        if resp['status'] != '200':
            print('Failed to get request token. [%s]' % resp['status'])
            return
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        request_token = dict(parse_qsl(content))
        # step 2 - redirect the user
        redirect_url = 'https://api.twitter.com/oauth/authorize?oauth_token=%s' % (
            request_token['oauth_token'])
        if not webbrowser.open(redirect_url, new=2, autoraise=0):
            # no browser available: fall back to printing the URL
            print('Please use a web browser to open the following URL')
            print(redirect_url)
        # Python 2/3 compatible input()
        if sys.version_info[0] >= 3:
            input_ = input
        else:
            input_ = raw_input
        pin = input_('Please enter the PIN shown in your web browser: ')
        pin = pin.strip()
        # step 3 - convert the request token to an access token
        token = oauth.Token(
            request_token['oauth_token'], request_token['oauth_token_secret'])
        token.set_verifier(pin)
        client = oauth.Client(consumer, token)
        resp, content = client.request(
            'https://api.twitter.com/oauth/access_token', 'POST')
        if resp['status'] != '200':
            print('Failed to get access token. [%s]' % resp['status'])
            return
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        access_token = dict(parse_qsl(content))
        # persist the credentials for later use
        self.context.params.set(
            service_name, 'key', access_token['oauth_token'])
        self.context.params.set(
            service_name, 'secret', access_token['oauth_token_secret'])
Exemple #22
0
    def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, frontend_config):
        """Proxy flow: SAML SP -> SATOSA proxy -> fake SAML IdP.

        Wires a SAML frontend and SAML backend into one proxy, drives the
        full redirect-binding round trip with a FakeSP and FakeIdP, and
        asserts the final SAMLResponse carries the test user's attributes.
        """
        user_id = "testuser1"
        # proxy config
        satosa_config_dict["FRONTEND_MODULES"] = [frontend_config]
        satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config]
        satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"saml": [attr_name]} for attr_name in
                                                                   USERS[user_id]}
        frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict))

        # application
        test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse)

        # config test SP
        frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0])
        sp_conf["metadata"]["inline"].append(frontend_metadata_str)
        fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False))

        # create auth req
        destination, req_args = fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id)
        auth_req = urlparse(destination).path + "?" + urlencode(req_args)

        # make auth req to proxy; proxy redirects to the IdP
        proxied_auth_req = test_client.get(auth_req)
        assert proxied_auth_req.status == "303 See Other"

        # config test IdP
        backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0])
        idp_conf["metadata"]["inline"].append(backend_metadata_str)
        fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False))

        # create auth resp from the SAMLRequest/RelayState in the redirect
        req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query))
        url, authn_resp = fakeidp.handle_auth_req(
            req_params["SAMLRequest"],
            req_params["RelayState"],
            BINDING_HTTP_REDIRECT,
            user_id,
            response_binding=BINDING_HTTP_REDIRECT)

        # make auth resp to proxy
        authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp)
        authn_resp = test_client.get("/" + authn_resp_req)
        assert authn_resp.status == "303 See Other"

        # verify auth resp from proxy
        resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).query))
        auth_resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"], BINDING_HTTP_REDIRECT)
        assert auth_resp.ava == USERS[user_id]
Exemple #23
0
    def make_absolute_redirect_uri(self, uri):
        """Make absolute redirect URIs

        internal redirect uris, e.g. `/user/foo/oauth_handler`
        are allowed in jupyterhub, but oauthlib prohibits them.
        Add `$HOST` header to redirect_uri to make them acceptable.

        Currently unused in favor of monkeypatching
        oauthlib.is_absolute_uri to skip the check
        """
        redirect_uri = self.get_argument('redirect_uri')
        # only local (path-only) redirect URIs need rewriting
        if not redirect_uri or not redirect_uri.startswith('/'):
            return uri
        # make absolute local redirects full URLs
        # to satisfy oauthlib's absolute URI requirement
        absolute_uri = '%s://%s%s' % (
            self.request.protocol, self.request.headers['Host'], redirect_uri)

        parts = urlparse(uri)
        pairs = parse_qsl(parts.query, keep_blank_values=True)
        rewritten = []
        replaced = False
        for key, value in pairs:
            if key == 'redirect_uri' and not replaced:
                # swap only the first redirect_uri occurrence
                rewritten.append(('redirect_uri', absolute_uri))
                replaced = True
            else:
                rewritten.append((key, value))

        return urlunparse(parts._replace(query=urlencode(rewritten)))
Exemple #24
0
    def request_token(self, url="", refresh_token=""):
        """Get authentication token from GOG

        :param url: redirect URL carrying the ``code`` query parameter
            (authorization-code flow); used when refresh_token is empty.
        :param refresh_token: previously issued refresh token; when given,
            the refresh-token grant is used instead.

        On success the JSON token payload is written to ``self.token_path``;
        on failure the error is logged and None is returned.
        """
        if refresh_token:
            grant_type = "refresh_token"
            extra_params = {"refresh_token": refresh_token}
        else:
            grant_type = "authorization_code"
            parsed_url = urlparse(url)
            response_params = dict(parse_qsl(parsed_url.query))
            if "code" not in response_params:
                logger.error("code not received from GOG")
                logger.error(response_params)
                return
            extra_params = {
                "code": response_params["code"],
                "redirect_uri": self.redirect_uri,
            }

        params = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "grant_type": grant_type,
        }
        params.update(extra_params)
        url = "https://auth.gog.com/token?" + urlencode(params)
        request = Request(url)
        try:
            request.get()
        except HTTPError as ex:
            # FIX: the caught exception was discarded; include it in the log
            # so failures are diagnosable.
            logger.error("Failed to get token, check your GOG credentials: %s", ex)
            return

        token = request.json
        with open(self.token_path, "w") as token_file:
            token_file.write(json.dumps(token))
Exemple #25
0
def query(**default_kwargs) -> types.QueryParams:
    """Returns the query params sent with the request.

    Keyword arguments act as defaults: they are used only for keys the
    client did not supply.
    """
    # Merge defaults into a plain dict first; assigning into the
    # ImmutableDict after construction would defeat (or be rejected by)
    # its immutability.
    merged = dict(parse_qsl(ctx.query.strip("?")))
    for key, value in default_kwargs.items():
        merged.setdefault(key, value)
    return types.ImmutableDict(merged)
Exemple #26
0
    def GET(self):
        """A multidict with all the variables in the query string.

        Lazy property.
        """
        pairs = parse_qsl(self.query_string, keep_blank_values=True)
        return MultiDictProxy(MultiDict(pairs))
Exemple #27
0
  def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

Args:
  previous_request: The request for the previous page. (required)
  previous_response: The response from the request for the previous page. (required)

Returns:
  A request object that you can call 'execute()' on to request the next
  page. Returns None if there are no more items in the collection.
    """
    # Retrieve nextPageToken from previous_response
    # Use as pageToken in previous_request to create new request.

    if 'nextPageToken' not in previous_response:
      return None

    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
    parsed = list(urllib.parse.urlparse(request.uri))
    q = parse_qsl(parsed[4])

    # Find and remove old 'pageToken' value from URI
    newq = [(key, value) for (key, value) in q if key != 'pageToken']
    newq.append(('pageToken', pageToken))
    parsed[4] = urllib.parse.urlencode(newq)
    uri = urllib.parse.urlunparse(parsed)

    request.uri = uri

    logger.info('URL being requested: %s' % uri)

    return request
Exemple #28
0
def query_string_to_dict(qs):
    """ Returns a dictionary from a QUERY_STRING """
    # dict(parse_qsl('')) is already {}, so no empty-input branch is needed.
    return dict(parse_qsl(qs))
Exemple #29
0
 def __init__(self, query_string, mutable=False, encoding=None):
     """Build the QueryDict from *query_string*.

     Pairs are appended (not overwritten) so repeated parameters are all
     preserved; blank values are kept.  On Python 2 the raw pairs are
     coerced to text with ``force_text`` using *encoding*.
     """
     super(QueryDict, self).__init__()
     if not encoding:
         # Fall back to the project-wide default charset.
         encoding = settings.DEFAULT_CHARSET
     self.encoding = encoding
     if six.PY3:
         # Python 3's parse_qsl decodes for us when given an encoding.
         for key, value in parse_qsl(query_string or '',
                                     keep_blank_values=True,
                                     encoding=encoding):
             self.appendlist(key, value)
     else:
         for key, value in parse_qsl(query_string or '',
                                     keep_blank_values=True):
             self.appendlist(force_text(key, encoding, errors='replace'),
                             force_text(value, encoding, errors='replace'))
     # Instances start read-only unless the caller asked for mutability.
     self._mutable = mutable
Exemple #30
0
def _update_url_query_param(url, query_params):
    """Return *url* with *query_params* merged into its query string.

    Parameters already in the URL are kept; keys present in
    *query_params* override existing ones.
    """
    parts = parse.urlparse(url)
    merged = dict(parse.parse_qsl(parts.query))
    merged.update(query_params)
    return parse.urlunparse(parts._replace(query=parse.urlencode(merged)))
Exemple #31
0
 def parse_qsl(s):
     """Pretty-print a query string: one aligned ``key value`` pair per line."""
     rows = [
         "{:<20} {}".format(key, value)
         for key, value in urlparse.parse_qsl(s, keep_blank_values=True)
     ]
     return '\n'.join(rows)
Exemple #32
0
    def test_provisioning_uri(self):
        """provisioning_uri must emit otpauth://hotp/... URLs whose query
        encodes secret, counter and, when given, issuer/digits/algorithm."""
        hotp = pyotp.HOTP('wrn3pqx5uqxqvnqr')

        # Default: counter starts at 0 and the path is the quoted account.
        url = urlparse(hotp.provisioning_uri('mark@percival'))
        self.assertEqual(url.scheme, 'otpauth')
        self.assertEqual(url.netloc, 'hotp')
        self.assertEqual(url.path, '/mark%40percival')
        self.assertEqual(dict(parse_qsl(url.query)), {
            'secret': 'wrn3pqx5uqxqvnqr',
            'counter': '0'
        })

        # initial_count is surfaced as the 'counter' query parameter.
        url = urlparse(hotp.provisioning_uri('mark@percival',
                                             initial_count=12))
        self.assertEqual(url.scheme, 'otpauth')
        self.assertEqual(url.netloc, 'hotp')
        self.assertEqual(url.path, '/mark%40percival')
        self.assertEqual(dict(parse_qsl(url.query)), {
            'secret': 'wrn3pqx5uqxqvnqr',
            'counter': '12'
        })

        # issuer_name is quoted into the label and echoed as 'issuer'.
        url = urlparse(
            hotp.provisioning_uri('mark@percival', issuer_name='FooCorp!'))
        self.assertEqual(url.scheme, 'otpauth')
        self.assertEqual(url.netloc, 'hotp')
        self.assertEqual(url.path, '/FooCorp%21:mark%40percival')
        self.assertEqual(dict(parse_qsl(url.query)), {
            'secret': 'wrn3pqx5uqxqvnqr',
            'counter': '0',
            'issuer': 'FooCorp!'
        })

        # Non-default digits and digest show up as 'digits'/'algorithm'.
        key = 'c7uxuqhgflpw7oruedmglbrk7u6242vb'
        hotp = pyotp.HOTP(key, digits=8, digest=hashlib.sha256)
        url = urlparse(
            hotp.provisioning_uri('baco@peperina', issuer_name='FooCorp'))
        self.assertEqual(url.scheme, 'otpauth')
        self.assertEqual(url.netloc, 'hotp')
        self.assertEqual(url.path, '/FooCorp:baco%40peperina')
        self.assertEqual(
            dict(parse_qsl(url.query)), {
                'secret': 'c7uxuqhgflpw7oruedmglbrk7u6242vb',
                'counter': '0',
                'issuer': 'FooCorp',
                'digits': '8',
                'algorithm': 'SHA256'
            })

        # Default digest (SHA1) omits 'algorithm'; spaces are %20-quoted.
        hotp = pyotp.HOTP(key, digits=8)
        url = urlparse(
            hotp.provisioning_uri('baco@peperina',
                                  issuer_name='Foo Corp',
                                  initial_count=10))
        self.assertEqual(url.scheme, 'otpauth')
        self.assertEqual(url.netloc, 'hotp')
        self.assertEqual(url.path, '/Foo%20Corp:baco%40peperina')
        self.assertEqual(
            dict(parse_qsl(url.query)), {
                'secret': 'c7uxuqhgflpw7oruedmglbrk7u6242vb',
                'counter': '10',
                'issuer': 'Foo Corp',
                'digits': '8'
            })
    def do_POST(self):
        """Handle POST requests for the camera-control endpoints.

        Routes on ``self.path``: /start_record, /start, /start_stream,
        /capture_image and /set_settings_feeder.  Each branch reads the raw
        request body and replies 200.  An optional ``id`` query parameter is
        parsed into ``userId``.
        """
        # Split off the query string (py2-style urlparse module usage).
        url_parts = list(urlparse.urlparse(self.path))
        self.path = url_parts[2]
        query = dict(urlparse.parse_qsl(url_parts[4]))
        userId = 0
        if len(query) != 0:
            # NOTE(review): raises KeyError/ValueError if 'id' is absent or
            # non-numeric — confirm callers always send a numeric id.
            userId = int(query["id"])
            print(query["id"])

        # -----------------record--------------------
        if self.path == "/start_record":
            self.send_response(200)
            self.end_headers()
            print("_____start_recording_video____")
            # print(self.rfile.read(int(self.headers['Content-Length'])))
            # Body is a JSON document describing the recording request.
            data = self.rfile.read(int(self.headers['Content-Length']))
            data = str(data.decode("utf-8"))
            data = json.loads(data)
            # data = data.split("//")
            # self.recordVideo.startRecord(filename,True,self.camera)
            print(data)
            self.wfile.write("ok".encode('utf-8'))

        if self.path == "/start":
            self.send_response(200)
            self.end_headers()

            # print(self.rfile.read(int(self.headers['Content-Length'])))
            # Body is a plain-text resolution string.
            resolution = str(
                self.rfile.read(int(
                    self.headers['Content-Length'])).decode("utf-8"))
            print("resolution")
            print(resolution)

        if self.path == '/start_stream':
            self.send_response(200)
            self.end_headers()
            print("_______start_stream")
            print("UserId")
            print(userId)
            print(
                str(
                    self.rfile.read(int(
                        self.headers['Content-Length'])).decode("utf-8")))
            self.wfile.write("hello".encode('utf-8'))
            print("_________After Stopping recording_________")

        if self.path == "/capture_image":
            self.send_response(200)
            self.end_headers()
            # TODO change on json
            data = self.rfile.read(int(self.headers['Content-Length']))
            data = str(data.decode("utf-8"))
            data = json.loads(data)
            print(data["filename"])
            print(data["resolution"])  # data = data.split("//")
            # print(data)
        # ------------------------------------

        if self.path == "/set_settings_feeder":
            print("")
            self.send_response(200)
            self.end_headers()

            # Body logged verbatim; no response body is written here.
            data = self.rfile.read(int(self.headers['Content-Length']))
            data = str(data.decode("utf-8"))
            print(data)
Exemple #34
0
def callback(request):
    """LINE webhook endpoint (Django view).

    Verifies the X-Line-Signature header, parses the webhook events,
    lazily registers unseen users, then dispatches text commands and
    postback actions to the handlers in ``func``.
    """
    if request.method == 'POST':
        signature = request.META['HTTP_X_LINE_SIGNATURE']
        body = request.body.decode('utf-8')
        try:
            events = parser.parse(body, signature)

        except InvalidSignatureError:
            # Signature mismatch: request did not come from LINE.
            return HttpResponseForbidden()
        except LineBotApiError:
            return HttpResponseBadRequest()

        for event in events:
            user_id = event.source.user_id
            # First contact: persist the user id.
            if not (users.objects.filter(uid=user_id).exists()):
                unit = users.objects.create(uid=user_id)
                unit.save()
            if isinstance(event, MessageEvent):
                if isinstance(event.message, TextMessage):

                    mtext = event.message.text

                    # Command dispatch: each '@...' keyword maps to a handler.
                    if mtext == '@傳送文字':
                        func.sendText(event)
                    elif mtext == '@國內相關組織':
                        func.sendQuickreply(event)
                    elif mtext == '@傳送圖片':
                        func.sendImage(event)
                    elif mtext == '@傳送聲音':
                        func.sendVoice(event)

                    elif mtext == '@傳送影片':
                        func.sendVedio(event)
                    elif mtext == '@傳送貼圖':
                        func.sendStick(event)

                    elif mtext == '@多項傳送':
                        func.sendMulti(event)

                    elif mtext == '@傳送位置':
                        func.sendPosition(event)

                    elif mtext == '@快速選單':
                        func.sendQuickreply(event)

                    elif mtext == '@按鈕樣板':
                        func.sendButton(event)

                    elif mtext == '@確認樣板':
                        func.sendConfirm(event)

                    elif mtext == '@轉盤樣板':
                        func.sendCarousel(event)

                    elif mtext == '@圖片轉盤':
                        func.sendImgCarousel(event)

                    elif mtext == '@購買披薩':
                        func.sendPizza(event)

                    elif mtext == '@yes':
                        func.sendYes(event)
                    elif mtext == '@圖片地圖':
                        func.sendImgmap(event)
                    elif mtext == '@日期時間':
                        func.sendDatetime(event)

                    elif mtext == '@彈性配置':
                        func.sendFlex(event)

                    elif mtext[:3] == '###' and len(mtext) > 3:
                        func.manageForm(event, mtext, user_id)
                    elif mtext == '@使用說明':
                        func.sendUse(event)

                    elif mtext == '@房間預約':
                        func.sendBooking(event, user_id)

                    elif mtext == '@取消訂房':
                        func.sendCancel(event, user_id)

                    elif mtext == '@關於我們':
                        func.sendAbout(event)

                    elif mtext == '@位置資訊':
                        func.sendPosition(event)

                    elif mtext == '@聯絡我們':
                        func.sendContact(event)
                    elif mtext == '@辨別洗錢小知識':
                        func.sendMulti2(event)  # func.sendMulti2-->INSERT  2

                    elif mtext == '@法律資訊':
                        func.sendImage(event)

                    elif mtext == '@重要資訊':
                        func.sendButton(event)

                    # NOTE(review): duplicate of the '@國內相關組織' branch
                    # near the top of this chain — this elif is unreachable.
                    elif mtext == '@國內相關組織':
                        func.sendQuickreply(event)

                    elif mtext == '@國外相關組織':
                        func.sendButtonb(event)

                    elif mtext == '@其他資訊':
                        func.sendCarousel(event)

                    # NOTE(review): the '###' prefix is already handled above,
                    # so this LIFF-form branch is also unreachable.
                    elif mtext[:3] == '###' and len(
                            mtext) > 3:  #處理LIFF傳回的FORM資料
                        func.manageForm(event, mtext, user_id)

                    elif mtext[:6] == '123456' and len(mtext) > 6:  #推播給所有顧客
                        func.pushMessage(event, mtext)

            if isinstance(event, PostbackEvent):  #PostbackTemplateAction觸發此事件
                backdata = dict(parse_qsl(event.postback.data))  #取得Postback資料
                if backdata.get('action') == 'sell':
                    func.sendData_sell(event, backdata)
                if backdata.get('action') == 'yes':
                    func.sendYes(event, event.source.user_id)

            # NOTE(review): this else pairs with the PostbackEvent check, so
            # every non-postback event (including MessageEvents already
            # handled above) falls through to this echo reply — confirm
            # this is intended.
            else:
                line_bot_api.reply_message(
                    event.reply_token,
                    TextSendMessage(text=event.message.text))

        return HttpResponse()

    else:
        return HttpResponseBadRequest()
def parse_query_from_url(url: str) -> dict:
    """Return the query parameters of *url* as a plain ``dict``.

    Repeated keys collapse to the last occurrence; blank values are
    dropped (default ``parse_qsl`` behaviour).
    """
    return dict(parse_qsl(urlparse(url).query))
Exemple #36
0
 def parse_qsl_text(qs, encoding='utf-8'):
     """Parse a query string, yielding ``(key, value)`` pairs decoded to text.

     NOTE(review): relies on ``parse_qsl`` returning byte strings (Python 2
     semantics) — under Python 3 the pairs are already ``str`` and
     ``.decode`` would raise; confirm the targeted interpreter.
     """
     qsl = parse_qsl(qs, keep_blank_values=True, strict_parsing=False)
     for (x, y) in qsl:
         yield (x.decode(encoding), y.decode(encoding))
Exemple #37
0
def url(url):
    """Parse *url* into its named components plus a ``query_json`` dict of
    the query-string parameters."""
    components = parse.urlparse(url)
    info = {'query_json': dict(parse.parse_qsl(components.query))}
    info.update(components._asdict())
    return info
Exemple #38
0
def oauth(request):
    """OAuth2 login view: start the authorization-code flow or finish it.

    Round-trip data (next_url, merge flag) is stored in a cookie named
    after the random ``state`` value, so the callback leg can restore it
    without server-side storage.
    """
    oaserver = request.env.auth.oauth

    oauth_url = request.route_url('auth.oauth')
    oauth_path = request.route_path('auth.oauth')

    def cookie_name(state):
        # One cookie per outstanding authorization attempt.
        return 'ngw-oastate-' + state

    if 'error' in request.params:
        raise AuthorizationException()

    elif 'code' in request.params and 'state' in request.params:
        # Callback leg: provider redirected back with code + state.
        # Extract data from state named cookie
        state = request.params['state']
        try:
            data = dict(parse_qsl(request.cookies[cookie_name(state)]))
        except ValueError:
            raise AuthorizationException("State cookie parse error")

        # Exchange the one-time code for tokens.
        tresp = oaserver.grant_type_authorization_code(request.params['code'],
                                                       oauth_url)

        # merge=1 links the OAuth identity to the already-logged-in user.
        if data['merge'] == '1' and request.user.keyname != 'guest':
            user = oaserver.access_token_to_user(tresp.access_token,
                                                 merge_user=request.user)
        else:
            user = oaserver.access_token_to_user(tresp.access_token)

        if user is None:
            raise InvalidTokenException()

        DBSession.flush()
        headers = remember(request, (user.id, tresp))

        event = OnUserLogin(user, request, data['next_url'])
        zope.event.notify(event)

        # State cookie is single-use: drop it once login completed.
        response = HTTPFound(location=event.next_url, headers=headers)
        response.delete_cookie(cookie_name(state), path=oauth_path)
        return response

    else:
        # Initial leg: remember where to return, then redirect to provider.
        data = dict(next_url=request.params.get('next',
                                                request.application_url),
                    merge=request.params.get('merge', '0'))

        # Cryptographically random state guards against CSRF.
        alphabet = string.ascii_letters + string.digits
        state = ''.join(secrets.choice(alphabet) for i in range(16))
        ac_url = oaserver.authorization_code_url(oauth_url, state=state)

        response = HTTPFound(location=ac_url)

        # Store data in state named cookie
        response.set_cookie(cookie_name(state),
                            value=urlencode(data),
                            path=oauth_path,
                            max_age=600,
                            httponly=True)

        return response
Exemple #39
0
def parse_all_request_kv(request):
    """
    Extract FreewheelId from string of HTML parameters
    """
    # Walk the pairs; the last 'am_crmid' wins (same as dict semantics).
    crm_id = None
    for key, value in parse_qsl(request):
        if key == 'am_crmid':
            crm_id = value
    return crm_id
Exemple #40
0
def _append_req_id_to_url(url: str) -> str:
    """Return *url* with a fresh random ``req_id`` query parameter appended."""
    parts = parse.urlparse(url)
    query_pairs = parse.parse_qsl(parts.query)
    query_pairs.append(('req_id', str(uuid4())))
    return parse.urlunparse(parts._replace(query=parse.urlencode(query_pairs)))
Exemple #41
0
 def __init__(self, url):
     """Split *url* once and cache its components and query parameters."""
     self.parts = urlparse(url)
     self.query = dict(parse_qsl(self.parts.query))
Exemple #42
0
 def text_callback(request, context):
     """Mock-response callback: the POSTed form body must match expectations."""
     # Python 2's parse_qsl rejects None, so guard empty bodies explicitly.
     if request.text:
         query = parse_qsl(request.text)
     else:
         query = ()
     assert (set(query) == set(expected_post))
     return 'Success!'
    def parse(self, response):
        """Scrape an Indeed results page: queue 'Easily apply' job links as
        UnprocessedJob rows, then follow pagination while the bot runs."""

        # Throttle: stop queueing once 100 unprocessed jobs are pending.
        with CommonFuncs.get_db() as db:
            todoforsite = db.query(UnprocessedJob).filter(UnprocessedJob.bot_type == 'Indeed_Bot').all()
        if len(todoforsite) >= 100:
            return

        this_url = response._url
        try:
            searching_by = dict(parse_qsl(urlsplit(this_url).query))
            print('searching by: ' + str(searching_by))
        except:
            # NOTE(review): bare except silently swallows any parse failure.
            pass
        # CommonFuncs.log('starting parsing job page for IndeedWebcrawler: ' + response.url)

        # COLLECT NEW JOB LINKS FROM SITE
        jobs = response.xpath("//div[@data-tn-component='organicJob']")
        new_count = 0
        for job in jobs:
            bot = CommonFuncs.get_bot('Indeed_Bot')
            if not bot.is_running: return    # exit if the bot is not running
            extracted_job = job.extract()
            job_state = None
            if 'Easily apply' in extracted_job:
                job_link = JOB_SITE_LINKS[ 'Indeed' ][ 'job_site_base' ] + job.xpath('h2/a/@href').extract()[0]
                with CommonFuncs.get_db() as db:
                    db_results = db.query(Job).filter(Job.link_to_job == job_link).all()
                # Only queue links we have not stored before.
                if db_results is None or db_results == []:
                    new_count += 1
                    try:
                        with CommonFuncs.get_db() as db:
                            u_job = UnprocessedJob()
                            u_job.bot_type = 'Indeed_Bot'
                            u_job.job = job_link
                            db.add(u_job)
                            db.commit()
                    except:
                        # NOTE(review): bare except also hides DB errors.
                        pass

        # CommonFuncs.log('%s new jobs found on page %s' % (new_count, response.url))
        if new_count > 0: print('%s new jobs found on page' % new_count)

        ##########
        # JUMP TO NEXT PAGE WHILE THE BOT IS STILL RUNNING
        ##########
        pagination_links = response.xpath( "//div[@class='pagination']/a" ).extract()
        for link in pagination_links:
            if 'Next' in link:
                bot = CommonFuncs.get_bot('Indeed_Bot')
                if bot.is_running:  # verify that the bot is running before continuing to the next page
                    # CommonFuncs.log('finished parsing job page for IndeedWebcrawler: ' + this_url)
                    next_link = bs(link,'lxml').body.find('a').get('href')
                    full_link = JOB_SITE_LINKS[ 'Indeed' ][ 'job_site_base' ] + next_link
                    yield scrapy.Request( url=full_link, callback=self.parse )
                else:
                    return


# if __name__ == '__main__':
#     runner = CrawlerRunner()
#     runner.crawl(IndeedWebcrawler)
#     d = runner.join()
#     d.addBoth(lambda _: reactor.stop())
#     reactor.run()
Exemple #44
0
    def _stream_data(self, formats):
        """Normalize YouTube player ``formats`` entries into a nested dict
        keyed by mime type and itag.

        Each entry records codec, dimensions, quality label, bandwidth,
        frame rate, an XML-escaped base URL (with signature appended when
        present) and index/init ranges; entries without usable ranges are
        reported via ``discard_video``/``discard_audio`` and dropped.
        """
        data = {}
        for item in formats:
            stream_map = item

            # Merge the ciphered query parameters (signatureCipher/cipher)
            # into the stream map itself.
            stream_map.update(
                dict(
                    parse_qsl(
                        item.get('signatureCipher', item.get('cipher', '')))))
            stream_map['itag'] = str(stream_map.get('itag'))

            mime_type = stream_map.get('mimeType')
            mime_type = unquote(mime_type).split(';')

            key = mime_type[0]
            itag = stream_map.get('itag')

            if key not in data:
                data[key] = {}
            data[key][itag] = {}

            # Prefer the precise codecs="..." value when present.
            codec = str(mime_type[1][1:])
            data[key][itag]['codec'] = codec

            match = re.search('codecs="(?P<codec>[^"]+)"', codec)
            if match:
                data[key][itag]['codec'] = match.group('codec')

            data[key][itag]['id'] = itag

            data[key][itag]['width'] = stream_map.get('width')
            data[key][itag]['height'] = stream_map.get('height')

            data[key][itag]['quality_label'] = str(
                stream_map.get('qualityLabel'))

            data[key][itag]['bandwidth'] = stream_map.get('bitrate', 0)

            # map frame rates to a more common representation to
            # lessen the chance of double refresh changes sometimes
            # 30 fps is 30 fps, more commonly it is 29.97 fps (same for all mapped frame rates)
            frame_rate = None
            fps_scale_map = {24: 1001, 30: 1001, 60: 1001}

            if 'fps' in stream_map:
                fps = int(stream_map.get('fps'))
                data[key][itag]['fps'] = fps
                scale = fps_scale_map.get(fps, 1000)
                frame_rate = '%d/%d' % (fps * 1000, scale)

            data[key][itag]['frameRate'] = frame_rate

            url = unquote(stream_map.get('url'))

            signature_parameter = '&signature='
            if 'sp' in stream_map:
                signature_parameter = '&%s=' % stream_map['sp']

            if 'sig' in stream_map:
                url = ''.join([url, signature_parameter, stream_map['sig']])

            # Escape for embedding in an XML (MPD) document.
            url = url.replace("&", "&amp;").replace('"', "&quot;")
            url = url.replace("<", "&lt;").replace(">", "&gt;")

            data[key][itag]['baseUrl'] = url

            data[key][itag]['indexRange'] = '0-0'
            data[key][itag]['initRange'] = '0-0'

            if 'indexRange' in stream_map and 'initRange' in stream_map:
                data[key][itag]['indexRange'] = \
                    '-'.join([stream_map.get('indexRange').get('start'),
                              stream_map.get('indexRange').get('end')])

                # NOTE(review): stored under 'init' while the default above
                # uses 'initRange' — confirm which key consumers expect.
                data[key][itag]['init'] = \
                    '-'.join([stream_map.get('initRange').get('start'),
                              stream_map.get('initRange').get('end')])

            if ('indexRange' not in stream_map or 'initRange' not in stream_map
                    or data[key][itag].get('indexRange') == '0-0'
                    and data[key][itag].get('initRange') == '0-0'):

                if key.startswith('video'):
                    self.discard_video(key, itag, data[key][itag],
                                       'no init or index')

                else:
                    stream_format = self.itags.get(itag, {})
                    self.discard_audio(stream_format, key, itag,
                                       data[key][itag], 'no init or index')

                del data[key][itag]

        return data
 def _parse_auth_redirect(self, url):
     """Pull the OAuth ``code`` parameter out of an auth redirect URL."""
     params = dict(parse_qsl(urlparse(url).query))
     return params['code']
Exemple #46
0
 def parse_state(self, state):
     """Decode a base64-encoded query string *state* back into a dict."""
     raw = b64decode(str(state))
     return dict(parse_qsl(raw))
    def init_globals(self, argv, reinitialize_database=False):
        """Initialized globally used module variables.
        Needs to be called at start of each plugin instance!
        This is an ugly hack because Kodi doesn't execute statements defined on
        module level if reusing a language invoker."""
        # IS_ADDON_FIRSTRUN specifies when the addon is at its first run (reuselanguageinvoker is not yet used)
        self.IS_ADDON_FIRSTRUN = self.IS_ADDON_FIRSTRUN is None
        self.IS_ADDON_EXTERNAL_CALL = False
        self.PY_IS_VER2 = sys.version_info.major == 2
        self.COOKIES = {}
        # Addon identity/metadata read from Kodi's addon manager.
        self.ADDON = xbmcaddon.Addon()
        self.ADDON_ID = self.py2_decode(self.ADDON.getAddonInfo('id'))
        self.PLUGIN = self.py2_decode(self.ADDON.getAddonInfo('name'))
        self.VERSION_RAW = self.py2_decode(self.ADDON.getAddonInfo('version'))
        self.VERSION = self.remove_ver_suffix(self.VERSION_RAW)
        self.DEFAULT_FANART = self.py2_decode(self.ADDON.getAddonInfo('fanart'))
        self.ICON = self.py2_decode(self.ADDON.getAddonInfo('icon'))
        self.ADDON_DATA_PATH = self.py2_decode(self.ADDON.getAddonInfo('path'))  # Addon folder
        self.DATA_PATH = self.py2_decode(self.ADDON.getAddonInfo('profile'))  # Addon user data folder

        # Add absolute paths of embedded py modules to python system directory
        module_paths = [
            os.path.join(self.ADDON_DATA_PATH, 'modules', 'mysql-connector-python')
        ]
        for path in module_paths:
            path = xbmc.translatePath(path)
            if path not in sys.path:
                sys.path.insert(0, g.py2_decode(path))

        self.CACHE_PATH = os.path.join(self.DATA_PATH, 'cache')
        self.COOKIE_PATH = os.path.join(self.DATA_PATH, 'COOKIE')
        # argv[0] is the plugin:// URL this invocation was called with.
        self.URL = urlparse(argv[0])
        try:
            self.PLUGIN_HANDLE = int(argv[1])
            self.IS_SERVICE = False
            self.BASE_URL = '{scheme}://{netloc}'.format(scheme=self.URL[0],
                                                         netloc=self.URL[1])
        except IndexError:
            # No handle argument: we are running as the background service.
            self.PLUGIN_HANDLE = 0
            self.IS_SERVICE = True
            self.BASE_URL = '{scheme}://{netloc}'.format(scheme='plugin',
                                                         netloc=self.ADDON_ID)
        self.PATH = g.py2_decode(unquote(self.URL[2][1:]))
        try:
            self.PARAM_STRING = argv[2][1:]
        except IndexError:
            self.PARAM_STRING = ''
        # Decoded query-string parameters of the current invocation.
        self.REQUEST_PARAMS = dict(parse_qsl(self.PARAM_STRING))
        self.reset_time_trace()
        self.TIME_TRACE_ENABLED = self.ADDON.getSettingBool('enable_timing')
        self.IPC_OVER_HTTP = self.ADDON.getSettingBool('enable_ipc_over_http')

        self._init_database(self.IS_ADDON_FIRSTRUN or reinitialize_database)

        self.settings_monitor_suspend(False)  # Reset the value in case of addon crash

        # Initialize the cache
        self.CACHE_TTL = self.ADDON.getSettingInt('cache_ttl') * 60
        self.CACHE_MYLIST_TTL = self.ADDON.getSettingInt('cache_mylist_ttl') * 60
        self.CACHE_METADATA_TTL = self.ADDON.getSettingInt('cache_metadata_ttl') * 24 * 60 * 60
        if self.IS_ADDON_FIRSTRUN:
            if self.IS_SERVICE:
                from resources.lib.services.cache.cache_management import CacheManagement
                self.CACHE_MANAGEMENT = CacheManagement()
            from resources.lib.common.cache import Cache
            self.CACHE = Cache()
        from resources.lib.common.kodiops import GetKodiVersion
        self.KODI_VERSION = GetKodiVersion()
Exemple #48
0
        'consumer_key': consumer_key,
        'consumer_secret': consumer_secret
    })
    body = json.dumps(access_token)
    print(body)
    server.wfile.write(body.encode('utf-8'))


def run():
    """Start the local HTTP echo server used for the OAuth callback."""
    runserver(server=http.server.HTTPServer, handler=EchoHandler)


if __name__ == '__main__':
    # Twitter three-legged OAuth1: obtain a request token, open the
    # authorize page in a browser, then serve the localhost callback.
    oauth_callback = 'http://localhost'

    twitter = OAuth1Session(consumer_key, consumer_secret)

    response = twitter.post('https://api.twitter.com/oauth/request_token',
                            params={'oauth_callback': oauth_callback})

    # The request-token endpoint answers with a form-encoded body.
    request_token = dict(parse_qsl(response.content.decode('utf-8')))

    authenticate_url = 'https://api.twitter.com/oauth/authenticate?oauth_token=%s' % (
        request_token['oauth_token'])

    webbrowser.open(authenticate_url)

    run()
Exemple #49
0
 def test_authn_request(self, context, idp_conf):
     """An authn request must redirect to the IdP and round-trip RelayState."""
     resp = self.samlbackend.authn_request(context, idp_conf["entityid"])
     assert_redirect_to_idp(resp, idp_conf)
     req_params = dict(parse_qsl(urlparse(resp.message).query))
     relay_state = context.state[self.samlbackend.name]["relay_state"]
     assert relay_state == req_params["RelayState"]
Exemple #50
0
def geoserver_proxy(request,
                    proxy_path,
                    downstream_path,
                    workspace=None,
                    layername=None):
    """Proxy an incoming request through to the backing GeoServer.

    WARNING: Decorators are applied in the order they appear in the source.

    :param request: the Django request to forward upstream
    :param proxy_path: URL prefix this proxy view is mounted under
    :param downstream_path: GeoServer endpoint, e.g. 'ows' or 'rest/styles'
    :param workspace: optional GeoServer workspace name
    :param layername: optional layer name for workspace-scoped requests
    :return: the upstream response produced by ``proxy()``
    """
    # AF: No need to authenticate first. We will check if "access_token" is present
    # or not on session

    # @dismissed
    # if not request.user.is_authenticated:
    #     return HttpResponse(
    #         "You must be logged in to access GeoServer",
    #         content_type="text/plain",
    #         status=401)

    def strip_prefix(path, prefix):
        # Drop the proxy mount prefix (and, when a layer is addressed,
        # the '<layername>/<downstream_path>' segment) from ``path``.
        if prefix not in path:
            _s_prefix = prefix.split('/', 3)
            _s_path = path.split('/', 3)
            assert _s_prefix[1] == _s_path[1]
            _prefix = f'/{_s_path[1]}/{_s_path[2]}'
        else:
            _prefix = prefix
        assert _prefix in path
        prefix_idx = path.index(_prefix)
        _prefix = path[:prefix_idx] + _prefix
        full_prefix = "%s/%s/%s" % (
            _prefix, layername, downstream_path) if layername else _prefix
        return path[len(full_prefix):]

    path = strip_prefix(request.get_full_path(), proxy_path)

    raw_url = str(
        "".join([ogc_server_settings.LOCATION, downstream_path, path]))

    # BUGFIX: ``ws`` was previously bound only inside the branch below, so
    # the 'rest/layers'/'rest/styles' rewrite further down raised NameError
    # whenever neither a workspace nor DEFAULT_WORKSPACE was configured.
    ws = (workspace or settings.DEFAULT_WORKSPACE)
    if settings.DEFAULT_WORKSPACE or workspace:
        if ws and ws in path:
            # Strip out WS from PATH
            try:
                path = "/%s" % strip_prefix(path, "/%s:" % (ws))
            except Exception:
                pass

        if proxy_path == '/gs/%s' % settings.DEFAULT_WORKSPACE and layername:
            import posixpath
            raw_url = urljoin(ogc_server_settings.LOCATION,
                              posixpath.join(workspace, layername, downstream_path, path))

        # BUGFIX: ``('rest/styles')`` was a plain string (the trailing comma
        # was missing), so any substring of it -- e.g. 'styles' -- matched.
        # A one-element tuple restores the intended equality test.
        if downstream_path in ('rest/styles',) and len(request.body) > 0:
            if ws:
                # Lets try
                # http://localhost:8080/geoserver/rest/workspaces/<ws>/styles/<style>.xml
                _url = str("".join([ogc_server_settings.LOCATION,
                                    'rest/workspaces/', ws, '/styles',
                                    path]))
            else:
                _url = str("".join([ogc_server_settings.LOCATION,
                                    'rest/styles',
                                    path]))
            raw_url = _url

    # NOTE(review): ``downstream_path in 'ows'`` is a substring test ('w' or
    # 'ows' both match). Kept as-is because callers may rely on it, but it
    # looks like ``downstream_path == 'ows'`` was intended -- confirm.
    if downstream_path in 'ows' and (
        'rest' in path or
            re.match(r'/(w.*s).*$', path, re.IGNORECASE) or
            re.match(r'/(ows).*$', path, re.IGNORECASE)):
        _url = str("".join([ogc_server_settings.LOCATION, '', path[1:]]))
        raw_url = _url
    url = urlsplit(raw_url)
    affected_layers = None

    # Requests addressed to <ws>/layers or <ws>/styles are really REST calls.
    if '%s/layers' % ws in path:
        downstream_path = 'rest/layers'
    elif '%s/styles' % ws in path:
        downstream_path = 'rest/styles'

    if request.method in ("POST", "PUT", "DELETE"):
        if downstream_path in ('rest/styles', 'rest/layers',
                               'rest/workspaces'):
            if not style_change_check(request, downstream_path):
                return HttpResponse(
                    _(
                        "You don't have permissions to change style for this layer"),
                    content_type="text/plain",
                    status=401)
            elif downstream_path == 'rest/styles':
                logger.debug(
                    "[geoserver_proxy] Updating Style ---> url %s" %
                    url.geturl())
                _style_name, _style_ext = os.path.splitext(os.path.basename(urlsplit(url.geturl()).path))
                # A PUT on styles.json carries the real style name in the query.
                if _style_name == 'styles.json' and request.method == "PUT":
                    _parsed_get_args = dict(parse_qsl(urlsplit(url.geturl()).query))
                    if 'name' in _parsed_get_args:
                        _style_name, _style_ext = os.path.splitext(_parsed_get_args['name'])
                else:
                    _style_name, _style_ext = os.path.splitext(_style_name)
                # Skip the probe style and temporary styles.
                if _style_name != 'style-check' and _style_ext == '.json' and \
                not re.match(temp_style_name_regex, _style_name):
                    affected_layers = style_update(request, raw_url)
            elif downstream_path == 'rest/layers':
                logger.debug(
                    "[geoserver_proxy] Updating Layer ---> url %s" %
                    url.geturl())
                try:
                    _layer_name = os.path.splitext(os.path.basename(request.path))[0]
                    _layer = Layer.objects.get(name__icontains=_layer_name)
                    affected_layers = [_layer]
                except Exception:
                    logger.warn("Could not find any Layer %s on DB" % os.path.basename(request.path))

    kwargs = {'affected_layers': affected_layers}
    raw_url = unquote(raw_url)
    timeout = getattr(ogc_server_settings, 'TIMEOUT') or 60
    allowed_hosts = [urlsplit(ogc_server_settings.public_url).hostname, ]
    response = proxy(request, url=raw_url, response_callback=_response_callback,
                     timeout=timeout, allowed_hosts=allowed_hosts, **kwargs)
    return response
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        The body is parsed lazily on first access and cached in
        ``self._post``; subsequent calls return the cached proxy.
        Non-POST-style methods and unsupported content types yield an
        empty mapping. Raises HTTPRequestEntityTooLarge when the body
        exceeds the configured client max size.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        # Only form submissions are parsed; anything else gets an empty result.
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2 ** 16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            # Enforce the size limit while streaming so
                            # oversized uploads abort early.
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2 ** 16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        # Text parts are decoded with the part's charset;
                        # binary parts are kept as raw bytes.
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need " "to use custom reader",
                    )

                field = await multipart.next()
        else:
            # url-encoded (or empty content-type) body: parse as a query string.
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset,
                    )
                )

        self._post = MultiDictProxy(out)
        return self._post
Exemple #52
0
def apply_descrambler(stream_data: Dict, key: str) -> None:
    """Apply various in-place transforms to YouTube's media stream data.

    Splits the value stored under ``key`` on commas, parses each piece as
    a query string with unquoted values, and stores the resulting list of
    dicts back under ``key``. When the legacy
    ``url_encoded_fmt_stream_map`` is absent, the list is rebuilt from the
    ``player_response`` JSON instead, handling both plain and ciphered
    format entries.

    :param dict stream_data:
        Dictionary containing query string encoded values.
    :param str key:
        Name of the key in dictionary.

    **Example**:

    >>> d = {'foo': 'bar=1&var=test,em=5&t=url%20encoded'}
    >>> apply_descrambler(d, 'foo')
    >>> print(d)
    {'foo': [{'bar': '1', 'var': 'test'}, {'em': '5', 't': 'url encoded'}]}

    """
    otf_type = "FORMAT_STREAM_TYPE_OTF"

    if key == "url_encoded_fmt_stream_map" and not stream_data.get(
        "url_encoded_fmt_stream_map"
    ):
        # Rebuild the stream list from the player_response JSON payload.
        streaming_data = json.loads(stream_data["player_response"])["streamingData"]
        formats = streaming_data["formats"] + streaming_data["adaptiveFormats"]
        try:
            stream_data[key] = [
                {
                    "url": fmt["url"],
                    "type": fmt["mimeType"],
                    "quality": fmt["quality"],
                    "itag": fmt["itag"],
                    "bitrate": fmt.get("bitrate"),
                    "is_otf": (fmt.get("type") == otf_type),
                }
                for fmt in formats
            ]
        except KeyError:
            # Ciphered streams: url and signature live inside a
            # query-string-encoded "cipher"/"signatureCipher" field.
            ciphers = [
                parse_qs(
                    fmt["cipher" if "cipher" in fmt.keys() else "signatureCipher"]
                )
                for fmt in formats
            ]
            stream_data[key] = [
                {
                    "url": ciphers[idx]["url"][0],
                    "s": ciphers[idx]["s"][0],
                    "type": fmt["mimeType"],
                    "quality": fmt["quality"],
                    "itag": fmt["itag"],
                    "bitrate": fmt.get("bitrate"),
                    "is_otf": (fmt.get("type") == otf_type),
                }
                for idx, fmt in enumerate(formats)
            ]
    else:
        # Plain comma-separated query strings: parse each and unquote values.
        descrambled = []
        for chunk in stream_data[key].split(","):
            descrambled.append(
                {name: unquote(value) for name, value in parse_qsl(chunk)})
        stream_data[key] = descrambled

    logger.debug("applying descrambler")
Exemple #53
0
 async def post(self) -> "MultiDict[str]":
     """Parse the stored body text as form parameters, keeping blank values."""
     pairs = parse_qsl(self._text, keep_blank_values=True)
     return MultiDict(pairs)
Exemple #54
0
    def run_test(self, satosa_config_dict, sp_conf, idp_conf,
                 saml_backend_config, frontend_config):
        """Drive a full SAML-to-SAML round trip through the proxy.

        Builds a proxy from the given frontend/backend configs, sends an
        authn request from a fake SP, answers it with a fake IdP, and
        asserts the attributes the SP finally receives match USERS.
        """
        subject_id = "testuser1"
        # proxy config: a single frontend and a single backend module,
        # mapping each test user attribute 1:1 to a SAML attribute
        satosa_config_dict["FRONTEND_MODULES"] = [frontend_config]
        satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config]
        satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {
            attr_name: {
                "saml": [attr_name]
            }
            for attr_name in USERS[subject_id]
        }
        frontend_metadata, backend_metadata = create_entity_descriptors(
            SATOSAConfig(satosa_config_dict))

        # application
        test_client = Client(make_app(SATOSAConfig(satosa_config_dict)),
                             Response)

        # config test SP: it must trust the proxy frontend's metadata
        frontend_metadata_str = str(
            frontend_metadata[frontend_config["name"]][0])
        sp_conf["metadata"]["inline"].append(frontend_metadata_str)
        fakesp = FakeSP(SPConfig().load(sp_conf))

        # create auth req
        destination, req_args = fakesp.make_auth_req(
            frontend_metadata[frontend_config["name"]][0].entity_id)
        auth_req = urlparse(destination).path + "?" + urlencode(req_args)

        # make auth req to proxy; it must redirect on to the IdP
        proxied_auth_req = test_client.get(auth_req)
        assert proxied_auth_req.status == "303 See Other"

        # config test IdP: it must trust the proxy backend's metadata
        backend_metadata_str = str(
            backend_metadata[saml_backend_config["name"]][0])
        idp_conf["metadata"]["inline"].append(backend_metadata_str)
        fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf))

        # create auth resp from the redirect URL's query parameters
        req_params = dict(
            parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query))
        url, authn_resp = fakeidp.handle_auth_req(
            req_params["SAMLRequest"],
            req_params["RelayState"],
            BINDING_HTTP_REDIRECT,
            subject_id,
            response_binding=BINDING_HTTP_REDIRECT)

        # make auth resp to proxy; it must redirect back to the SP
        authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp)
        authn_resp = test_client.get(authn_resp_req)
        assert authn_resp.status == "303 See Other"

        # verify auth resp from proxy
        resp_dict = dict(
            parse_qsl(urlparse(authn_resp.data.decode("utf-8")).query))
        auth_resp = fakesp.parse_authn_request_response(
            resp_dict["SAMLResponse"], BINDING_HTTP_REDIRECT)
        assert auth_resp.ava == USERS[subject_id]
# (7) parse_qs()
# Serialization has a counterpart: given a GET query string,
# parse_qs() turns it back into a dict of parameter -> list of values.
from urllib.parse import parse_qs

query = 'name=germey&age=22'
print(parse_qs(query))

# (8) parse_qsl()
# parse_qsl() is similar, but produces a list of (name, value) tuples.
from urllib.parse import parse_qsl
query = 'name=germey&age=22'
print(parse_qsl(query))
# Summary of (7)/(8): parse_qsl() returns a list whose elements are
# 2-tuples of (parameter name, parameter value).

# (9) quote()
# quote() percent-encodes content for use in a URL. Chinese characters
# in a URL can otherwise come out garbled, so encode them first.
from urllib.parse import quote
keyword = '壁纸'
url = 'http://www.baidu.com/s?wd=' + quote(keyword)
print(url)
Exemple #56
0
 def query(self) -> "MultiDict[str]":
     """Return a dictionary with the query variables."""
     parsed = parse_qsl(self.query_string, keep_blank_values=True)
     return MultiDict(parsed)
Exemple #57
0
# Consumer is just used to identify the app, it cannot be used to make API requests
consumer = oauth2.Consumer(constants.CONSUMER_KEY, constants.CONSUMER_SECRET)

# Client is used for making API requests
client = oauth2.Client(consumer)

# Twitter works very well with Oauth library hence we are directly making API calls
# using client. Otherwise, normally, you would use 'requests' library to make these calls.
response, content = client.request(constants.REQUEST_TOKEN_URL, 'POST')

# NOTE(review): execution continues even after a failed request; the parse
# below would then raise KeyError ('oauth_token' missing) -- consider
# aborting here instead of just printing.
if response.status != 200:
    print("An error occurred getting request token from Twitter!")

# qsl = query string parameter. It is used to parse the content
request_token = dict(urlparse.parse_qsl(content.decode('utf-8')))

print("Go to the following site in your brower:")
print("{}?oauth_token={}".format(constants.AUTHORIZATION_URL,
                                 request_token['oauth_token']))

# PIN-based (out-of-band) flow: the user authorizes in the browser and
# types the verifier PIN back into this script.
oauth_verifier = input("what is the PIN?")

token = oauth2.Token(request_token['oauth_token'],
                     request_token['oauth_token_secret'])
token.set_verifier(oauth_verifier)

# Exchange the verified request token for a long-lived access token.
client = oauth2.Client(consumer, token)
response, content = client.request(constants.ACCESS_TOKEN_URL, 'POST')

access_token = dict(urlparse.parse_qsl(content.decode('utf-8')))
Exemple #58
0
def getResponseByPayload(payload):
    """Send the configured request with ``payload`` substituted into the
    target parameter and return the decoded response body.

    The target parameter is replaced wherever it occurs: URL query,
    POST data, cookie, referer or user-agent. Returns the string
    "NoResponse" when the request fails.

    :param payload: the value to inject into the target parameter
    """
    argss = argsparse().args()

    # init args
    urlafter = argss.url
    dataafter = argss.postdata
    cookieafter = argss.cookie
    refererafter = argss.referer
    useragentafter = argss.useragent

    param_replace = {test_result.target_parameter: payload}

    #  distinguish POST / GET

    argsInUrl = parse.parse_qs(parse.urlparse(argss.url).query)
    argsInPostdata = parse.parse_qs(argss.postdata) if argss.postdata else {}
    argsInCookie = parse.parse_qs(argss.cookie) if argss.cookie else {}
    argsInUsergent = parse.parse_qs(argss.useragent) if argss.useragent else {}
    argsInReferer = parse.parse_qs(argss.referer) if argss.referer else {}

    def encoder(mydict):
        # Re-serialize a dict as an (unescaped) query string.
        return ("&".join("{}={}".format(*i) for i in mydict.items()))

    if test_result.target_parameter in argsInUrl:
        url_parts = list(parse.urlparse(argss.url))
        query_url = dict(parse.parse_qsl(url_parts[4]))
        query_url.update(param_replace)
        url_parts[4] = encoder(query_url)
        urlafter = parse.urlunparse(url_parts)

    if test_result.target_parameter in argsInPostdata:
        query_postdata = dict(parse.parse_qsl(argss.postdata))
        query_postdata.update(param_replace)
        dataafter = encoder(query_postdata)

    if test_result.target_parameter in argsInCookie:
        query_cookie = dict(parse.parse_qsl(argss.cookie))
        query_cookie.update(param_replace)
        cookieafter = encoder(query_cookie)

    if test_result.target_parameter in argsInReferer:
        query_referer = dict(parse.parse_qsl(argss.referer))
        query_referer.update(param_replace)
        refererafter = encoder(query_referer)

    if test_result.target_parameter in argsInUsergent:
        query_useragent = dict(parse.parse_qsl(argss.useragent))
        query_useragent.update(param_replace)
        useragentafter = encoder(query_useragent)

    # set proxy http/https
    if argss.proxy and "https" in urlafter:
        proxy_support_https = request.ProxyHandler({'https': argss.proxy})
        opener = request.build_opener(proxy_support_https)
        request.install_opener(opener)
    elif argss.proxy and "http" in urlafter:
        proxy_support_http = request.ProxyHandler({'http': argss.proxy})
        opener = request.build_opener(proxy_support_http)
        request.install_opener(opener)

    # set header
    header = {
        'Content-Type': 'application/x-www-form-urlencoded',
    }
    if argss.cookie: header['Cookie'] = cookieafter
    if argss.referer: header['Referer'] = refererafter
    header[
        'User-Agent'] = useragentafter if argss.useragent else "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:65.0) Gecko/20100101 Firefox/65.0"
    url_request = request.Request(
        url=urlafter.replace(" ", "%20"),
        data=dataafter.encode('utf-8'),
        headers=header) if argss.postdata else request.Request(
            url=urlafter.replace(" ", "%20"), headers=header)

    try:
        # BUGFIX: dropped the deprecated ``capath=None`` kwarg -- it was a
        # no-op (None is the default) and the parameter was removed from
        # urlopen() in Python 3.12.
        url_response = request.urlopen(url_request,
                                       timeout=int(argss.timeout))
        # BUGFIX: read the body exactly once. A second .read() on an
        # HTTPResponse returns b'', so the original gb2312 fallback and the
        # final bytes-to-string fallback always saw an empty body.
        raw_body = url_response.read()
        try:
            return raw_body.decode('utf-8')
        except UnicodeDecodeError:  # narrowed from a bare ``except:``
            try:
                return raw_body.decode('gb2312')
            except UnicodeDecodeError:
                print(
                    output.colour_blue(get_time()),
                    "\033[1;33m[WARNING]\033[0m" +
                    "Unrecognized response encoding, force bytes-to-string")
                return str(raw_body)
    except Exception as e:
        if argss.verbose:
            print(
                output.colour_blue(get_time()), "\033[1;30m[ERROR] \033[0m" +
                "\033[1;30m" + str(e) + "\033[0m")

        # if argss.verbose : print("\033[1;31m[ERROR]\033[0m unable to connect to the target URL. May be due to a security policy")
        return "NoResponse"
Exemple #59
0
    def _layer_element_to_json(self, layer_element: etree.Element) -> dict:
        """Return json of layer from xml element.

        Extracts name/title/abstract/keywords, the legend URL, a cleaned
        WMS URL and the layer's CRS + bounding box from a WMS
        GetCapabilities ``Layer`` element. Falls back to a whole-world
        EPSG:4326 extent when no usable bounding box is advertised.
        """
        nsmap = _get_nsmap(layer_element.nsmap)
        nsmap['xlink'] = "http://www.w3.org/1999/xlink"
        name = get_xpath_value(layer_element, "wms:Name", nsmap)
        title = get_xpath_value(layer_element, "wms:Title", nsmap)
        abstract = get_xpath_value(layer_element, "wms:Abstract", nsmap)
        try:
            keywords = layer_element.xpath(
                "wms:KeywordList/wms:Keyword/text()", namespaces=nsmap)
            keywords = [str(keyword) for keyword in keywords]
        except IndexError:
            keywords = []

        # Legend URL is optional; missing element or attribute -> empty string.
        try:
            legend_url = layer_element.xpath(
                "wms:Style/wms:LegendURL/wms:OnlineResource",
                namespaces=nsmap)[0].attrib[f"{{{nsmap['xlink']}}}href"]
        except (IndexError, KeyError):
            legend_url = ''
        # Rebuild the WMS URL keeping only service/version plus any query
        # args already present on the remote URL.
        params = {}
        (wms_url, _service, _version,
         _request) = self._get_cleaned_url_params(self.remote_url)
        if _service:
            params['service'] = _service
        if _version:
            params['version'] = _version
        if wms_url.query:
            for _param in parse_qsl(wms_url.query):
                params[_param[0]] = _param[1]

        wms_url = self.get_ogc_wms_url(
            wms_url._replace(query=urlencode(params)), version=_version)

        crs = None
        spatial_extent = None
        # Preferred source: an explicit wms:BoundingBox in a usable CRS.
        try:
            for bbox in layer_element.xpath("wms:BoundingBox",
                                            namespaces=nsmap):
                crs = bbox.attrib.get('CRS')
                if 'EPSG:' in crs.upper() or crs.upper() == 'CRS:84':
                    crs = 'EPSG:4326' if crs.upper() == 'CRS:84' else crs
                    left_x = bbox.attrib.get('minx')
                    right_x = bbox.attrib.get('maxx')
                    lower_y = bbox.attrib.get('miny')
                    upper_y = bbox.attrib.get('maxy')

                    # Preventing if it returns comma as the decimal separator
                    spatial_extent = geos.Polygon.from_bbox((
                        float(left_x.replace(",", ".")),
                        float(lower_y.replace(",", ".")),
                        float(right_x.replace(",", ".")),
                        float(upper_y.replace(",", ".")),
                    ))
                    break
            if not spatial_extent:
                crs = None
                raise Exception("No suitable wms:BoundingBox element found!")
        except Exception as e:
            logger.exception(e)
            # Fallback: wms:CRS list combined with EX_GeographicBoundingBox.
            try:
                for crs in layer_element.xpath("wms:CRS//text()",
                                               namespaces=nsmap):
                    if 'EPSG:' in crs.upper() or crs.upper() == 'CRS:84':
                        crs = 'EPSG:4326' if crs.upper() == 'CRS:84' else crs
                        left_x = get_xpath_value(
                            layer_element,
                            "wms:EX_GeographicBoundingBox/wms:westBoundLongitude",
                            nsmap)
                        right_x = get_xpath_value(
                            layer_element,
                            "wms:EX_GeographicBoundingBox/wms:eastBoundLongitude",
                            nsmap)
                        lower_y = get_xpath_value(
                            layer_element,
                            "wms:EX_GeographicBoundingBox/wms:southBoundLatitude",
                            nsmap)
                        upper_y = get_xpath_value(
                            layer_element,
                            "wms:EX_GeographicBoundingBox/wms:northBoundLatitude",
                            nsmap)

                        # Preventing if it returns comma as the decimal separator
                        spatial_extent = geos.Polygon.from_bbox((
                            float(left_x.replace(",", ".")),
                            float(lower_y.replace(",", ".")),
                            float(right_x.replace(",", ".")),
                            float(upper_y.replace(",", ".")),
                        ))
                        break
                if not spatial_extent:
                    crs = None
                    raise Exception("No suitable wms:CRS element found!")
            except Exception as e:
                logger.exception(e)
                spatial_extent = None
            # Last resort: assume a global EPSG:4326 extent.
            if not spatial_extent:
                crs = "EPSG:4326"
                spatial_extent = geos.Polygon.from_bbox(
                    (-180.0, -90.0, 180.0, 90.0))
        return {
            'name': name,
            'title': title,
            'abstract': abstract,
            'crs': crs,
            'keywords': keywords,
            'spatial_extent': spatial_extent,
            'wms_url': wms_url,
            'legend_url': legend_url,
        }
    def do_GET(self):
        if self.path == '/':
            self.send_response(301)
            global counter
            counter = counter + 1
            print("_________counter_____________")

            self.send_header('Location', '/index.html?id=' + str(counter))
            self.end_headers()
            # self.path = "/index.html?id=" + str(counter)
            return

        elif self.path == '/sensors':
            content_type = 'text/html; charset=utf-8'
            # connectedId = 2
            preview = True
            stream = False
            recording = False
            if preview == True:
                if stream == True:
                    connectedId = 2
                elif recording == True:
                    connectedId = 2

            data = {
                "sensors": [{
                    "name": "sot",
                    "Temp": 34,
                    "Hum": 50,
                }, {
                    "name": "outside",
                    "Temp": 33,
                    "Hum": 44,
                }, {
                    "name": "arena",
                    "Temp": 33,
                    "Hum": 45,
                }],
                "waterLevel":
                60,
                "connectedId":
                2,
                "streaming":
                False
            }
            result = json.dumps(data)
            content = result.encode("utf-8")
            # content = (self.sensors.getSensorsData(connectedId)).encode('utf-8')
            # content = "ok".encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', content_type)
            self.send_header('Content-Length', len(content))
            # @TODO add last modified
            self.end_headers()
            self.wfile.write(content)

        else:
            url_parts = list(urlparse.urlparse(self.path))
            self.path = url_parts[2]
            query = dict(urlparse.parse_qsl(url_parts[4]))
            userId = 0

            if url_parts[2].startswith('/download') == True:
                urls = url_parts[2].split("/")
                print(urls[2])
                filepath = "media/" + urls[2]
                with open(filepath, 'rb') as f:
                    self.send_response(200)
                    self.send_header("Content-Type",
                                     'application/octet-stream')
                    self.send_header(
                        "Content-Disposition",
                        'attachment; filename="{}"'.format(
                            os.path.basename(filepath)))
                    fs = os.fstat(f.fileno())
                    self.send_header("Content-Length", str(fs.st_size))
                    self.end_headers()
                    shutil.copyfileobj(f, self.wfile)
                    # self.path = 'videos/file.h264'

            if url_parts[2].startswith('/delete') == True:
                self.send_response(204)
                self.end_headers()
                urls = url_parts[2].split("/")
                print(urls[2])
                filepath = "media/" + urls[2]
                if os.path.exists(filepath):
                    os.remove(filepath)
                else:
                    print("The file does not exist")

            # print()
            if len(query) != 0:
                userId = int(query["id"])
                print(query["id"])
            # ----feed
            if self.path == "/feed":
                print("feed")
                self.send_response(200)
                self.end_headers()
            # *************
            # Note delete all print(self.rfile.read(int(self.headers['Content-Length'])))
            if self.path == "/stop":
                self.send_response(200)
                self.end_headers()
                # print(self.rfile.read(int(self.headers['Content-Length'])))

            if self.path == '/wait_start_preview':
                self.send_response(200)
                self.end_headers()
                print(self.rfile.read(int(self.headers['Content-Length'])))
                self.wfile.write("hello".encode('utf-8'))

            # ---- getSettings-------
            if self.path == '/stream_settings':
                self.send_response(200)
                self.end_headers()
                # @TODO change it
                YOUTUBE = ""
                KEY = ""
                # ------
                data = {"youtube": YOUTUBE, "key": KEY}
                # data = str(data)
                # Serialize the payload assembled by the (not visible) branch
                # above and send it as the response body.
                data = json.dumps(data)
                self.wfile.write(data.encode('utf-8'))

        # --------------stream---------------------

            # /stop_stream: acknowledge, drain the request body (echoed to the
            # console for debugging), and reply with a token body.
            if self.path == "/stop_stream":
                self.send_response(200)
                self.end_headers()
                print("_________________Stop stream____")
                print(self.rfile.read(int(self.headers['Content-Length'])))
                self.wfile.write("hello".encode('utf-8'))

            # Map the public index URL onto the template file on disk; the
            # generic ".html" handler inside the try-block below serves it.
            if self.path == "/index.html":
                self.path = 'templates/index.html'

            # if self.path == "/download/file.h264":
            #     with open(FILEPATH, 'rb') as f:
            #         self.send_response(200)
            #         self.send_header("Content-Type", 'application/octet-stream')
            #         self.send_header(
            #             "Content-Disposition", 'attachment; filename="{}"'.format(os.path.basename(FILEPATH)))
            #         fs = os.fstat(f.fileno())
            #         self.send_header("Content-Length", str(fs.st_size))
            #         self.end_headers()
            #         shutil.copyfileobj(f, self.wfile)
            # self.path = 'videos/file.h264'

            # capture_image

            # NOTE(review): sends only the status line — end_headers() is
            # never called, so the response stays incomplete; confirm whether
            # a body/headers were intended here.
            if self.path == "/stop":
                self.send_response(200)

            # -----------finding video files
            # /media: list the files in ./media and return the Python repr of
            # the filename list as the body (empty body when none found).
            if self.path == "/media":
                print("In videos")
                content_type = 'text/html; charset=utf-8'
                mypath = "./media/"
                fileNames = [
                    f for f in listdir(mypath) if isfile(join(mypath, f))
                ]
                print()
                if len(fileNames) > 0:
                    # str() of the list, e.g. "['a.h264', 'b.jpg']" — the
                    # client presumably parses this; verify against the JS.
                    fileNames = str(fileNames)
                else:
                    fileNames = ""

                content = fileNames.encode("utf-8")
                self.send_response(200)
                self.send_header('Content-Type', content_type)
                self.send_header('Content-Length', len(content))
                # @TODO add last modified
                self.end_headers()
                self.wfile.write(content)

            # /stop_record: bare 200 acknowledgement, no body.
            if self.path == "/stop_record":
                self.send_response(200)
                self.end_headers()
                print("_____stop_recording_video____")
                # print(self.rfile.read(int(self.headers['Content-Length'])))
                # self.wfile.write("ok".encode('utf-8'))
        # -------------------------
        # --------------------------------

            # /stream.mjpg: currently only logs; the actual MJPEG streaming
            # code is commented out below.
            # NOTE(review): userId is presumably set earlier in this method
            # (e.g. from a cookie or query string) — not visible here.
            if self.path == '/stream.mjpg':
                print("*************/stream.mjpg")
                if userId != 0:
                    print("UserId in stream/mjpg")

                # self.wfile.write(b'--FRAME\r\n')
                # self.send_header('Content-Type', 'image/jpeg')
                # self.send_header('Content-Length', len(b'12'))
                # self.end_headers()
                # self.wfile.write(b'12')
                # self.wfile.write(b'\r\n')

                # if (self.connectedClients == 0 ):

                # self.camera.start_recording(self.output, format='mjpeg')
            # Static-file dispatch: pick a MIME type from the path suffix and
            # stream the file from disk; IOError maps to a 404.
            try:
                # Check the file extension required and
                # set the right mime type
                sendReply = False
                if self.path.endswith(".html"):
                    global connectedUsers
                    # try:
                    #     if connectedUsers.index(userId) >=0:
                    #         print("________Ok____")
                    # except:
                    #     print("Error")
                    # Returning user: redirect back to the index with a fresh
                    # counter-based id instead of re-rendering the template.
                    if userId in connectedUsers:
                        self.send_response(301)

                        # NOTE(review): assumes `counter` was initialized
                        # earlier in this method (not visible here); otherwise
                        # this line raises UnboundLocalError.
                        counter = counter + 1

                        print("_________counter22122_____________")

                        self.send_header('Location',
                                         '/index.html?id=' + str(counter))
                        self.end_headers()
                    else:

                        # First visit: remember the user, then render the
                        # HTML template and serve it.
                        connectedUsers.append(userId)

                        # print(connectedUsers.index(userId))
                        # if (userId )
                        print('it is html ')
                        mimetype = 'text/html'
                        content_type = 'text/html; charset=utf-8'
                        with io.open(self.path, 'r') as f:
                            index_template = f.read()

                        # string.Template substitution of placeholders in the
                        # HTML file; safe_substitute leaves unknown keys as-is.
                        tpl = Template(index_template)
                        print("In Template __________")
                        # sotHum = 40

                        # Hard-coded animation keyframe string (value/position
                        # pairs); the dynamic source is commented out below.
                        values = (
                            "0 200; {0:0} 180; {1} 150; {2} 135; {2} 135;".
                            format(int(55 / 3), int(55 / 2), 55))
                        # values = self.sensors.getAnimationValues()

                        print(values)
                        content = tpl.safe_substitute(
                            dict(
                                COLOR=COLOR,
                                BGCOLOR=BGCOLOR,
                                # animationValuesSot = values["valuesHumSot"],
                                # animationValuesArena = values["valuesHumArena"] ,
                                # animationValuesOutside = values["valuesHumOutside"]
                            ))

                        content = content.encode('utf-8')
                        self.send_response(200)
                        self.send_header('Content-Type', content_type)
                        self.send_header('Content-Length', len(content))
                        # -------------------------------
                        # cookies
                        # cookie = http.cookies.SimpleCookie()
                        # self.stream.counter = self.stream.counter + 1
                        # # users.append(stream.counter)
                        # cookie['user_id'] = str(self.stream.counter)
                        #
                        # self.send_header("Set-Cookie", cookie.output(header='', sep=''))
                        # --------------------------------------------------------
                        # self.send_header('Last-Modified', self.date_time_string(time()))
                        self.end_headers()
                        self.wfile.write(content)

                # Suffix -> MIME type table (last match wins; sendReply gates
                # the file read below).
                if self.path.endswith(".jpg"):
                    mimetype = 'image/jpg'
                    sendReply = True
                if self.path.endswith(".gif"):
                    mimetype = 'image/gif'
                    sendReply = True
                if self.path.endswith(".js"):
                    mimetype = 'application/javascript'
                    sendReply = True
                if self.path.endswith("min.js.map"):
                    mimetype = 'application/javascript'
                    sendReply = True
                if self.path.endswith(".css"):
                    mimetype = 'text/css'
                    sendReply = True
                if self.path.endswith("min.css.map"):
                    mimetype = 'text/css'
                    sendReply = True
                if self.path.endswith("slim.min.js"):
                    mimetype = 'application/javascript'
                    sendReply = True
                # NOTE(review): 'text/png' is not a registered MIME type —
                # .png should be 'image/png', and .woff/.woff2/.ttf should be
                # 'font/woff', 'font/woff2', 'font/ttf'. Browsers usually
                # tolerate the mismatch, but this is worth fixing.
                if self.path.endswith(".png"):
                    mimetype = 'text/png'
                    sendReply = True
                # if self.path.endswith(".h264"):
                #         mimetype='text/png'
                #         sendReply = True
                if self.path.endswith(".woff2"):
                    mimetype = 'text/png'
                    sendReply = True
                if self.path.endswith(".woff"):
                    mimetype = 'text/png'
                    sendReply = True
                if self.path.endswith(".ttf"):
                    mimetype = 'text/png'
                    sendReply = True

                # NOTE(review): security — self.path is used verbatim to open
                # a file relative to the current directory, with no traversal
                # sanitization ('..' is not rejected). Acceptable only on a
                # trusted LAN; otherwise normalize and confine the path.
                if sendReply == True:
                    f = open(curdir + sep + self.path, 'rb')
                    self.send_response(200)
                    self.send_header('Content-type', mimetype)
                    self.end_headers()
                    self.wfile.write(f.read())
                    f.close()
                return

            # Missing file (or unreadable template) -> 404.
            except IOError as ex:
                self.send_error(404, 'File Not Found: %s' % self.path)

# class StreamingHttpServer():
#     def startServer(self):
#         self.http_server = HTTPServer(('', HTTP_PORT), StreamingHttpHandler)
#         self.http_server.serve_forever()
#
#     def stopServer(self):
#         self.http_server.socket.close()