def parse_dict_querystring(environ):
    """Parses a query string like parse_querystring, but returns a MultiDict

    Caches this value in case parse_dict_querystring is called again
    for the same request.

    Example::

        >>> environ = {'QUERY_STRING': 'day=Monday&user=fred&user=jane'}
        >>> parsed = parse_dict_querystring(environ)
        >>> parsed['day']
        'Monday'
        >>> parsed['user']
        'fred'
        >>> parsed.getall('user')
        ['fred', 'jane']
    """
    source = environ.get('QUERY_STRING', '')
    # An empty query string is never cached; just hand back a fresh dict.
    if not source:
        return MultiDict()
    # Reuse the cached parse only while the raw query string is unchanged
    # (middleware may rewrite QUERY_STRING between calls).
    if 'paste.parsed_dict_querystring' in environ:
        parsed, check_source = environ['paste.parsed_dict_querystring']
        if check_source == source:
            return parsed
    parsed = parse_qsl(source, keep_blank_values=True, strict_parsing=False)
    multi = MultiDict(parsed)
    # Cache the result together with the source used to build it.
    environ['paste.parsed_dict_querystring'] = (multi, source)
    return multi
def init(self):
    """Build per-transport channel lookup tables, wire up the request
    dispatcher with its handlers and security layer, and open the
    expected outgoing connections.
    """
    plain_http_config = MultiDict()
    soap_config = MultiDict()

    # Split each inbound HTTP/SOAP channel by transport, keyed first by
    # URL path and then by SOAP action.
    dol = deepcopy(self.worker_config.http_soap).dict_of_lists()
    for url_path in dol:
        for item in dol[url_path]:
            for soap_action, channel_info in item.items():
                # Only channels are dispatched here; outgoing connections
                # are configured separately below.
                if channel_info['connection'] == 'channel':
                    if channel_info.transport == 'plain_http':
                        config = plain_http_config.setdefault(url_path, Bunch())
                        config[soap_action] = deepcopy(channel_info)
                    else:
                        config = soap_config.setdefault(url_path, Bunch())
                        config[soap_action] = deepcopy(channel_info)

    self.request_dispatcher = RequestDispatcher(simple_io_config=self.worker_config.simple_io)
    self.request_dispatcher.soap_handler = SOAPHandler(soap_config, self.server)
    self.request_dispatcher.plain_http_handler = PlainHTTPHandler(plain_http_config, self.server)

    # Statistics maintenance
    self.stats_maint = MaintenanceTool(self.kvdb.conn)

    # Security config comes from the ODB; index 0 is the URL-security part.
    self.request_dispatcher.security = ConnectionHTTPSOAPSecurity(
        self.server.odb.get_url_security(self.server.cluster_id, 'channel')[0],
        self.worker_config.basic_auth, self.worker_config.tech_acc, self.worker_config.wss)

    # Create all the expected connections
    self.init_sql()
    self.init_ftp()
    self.init_http_soap()
def test_parsevars():
    """parse_formvars must return an ordered MultiDict of all POST pairs
    and cache the parse keyed by the original wsgi.input stream."""
    e = make_post('a=1&b=2&c=3&b=4')
    cur_input = e['wsgi.input']
    d = parse_formvars(e)
    assert isinstance(d, MultiDict)
    # Duplicate keys ('b') must be preserved in order.
    assert d == MultiDict([('a', '1'), ('b', '2'), ('c', '3'), ('b', '4')])
    # Fix: compare against the stream captured *before* parsing
    # (cur_input was previously assigned but never used), so the test
    # also verifies the cache is keyed on the original wsgi.input.
    assert e['paste.parsed_formvars'] == (d, cur_input)
def test_publish_with_multiple_urls(self, mock):
    """A publish request carrying several hub.url values succeeds (204)."""
    data = MultiDict({'hub.mode': 'publish'})
    # MultiDict.add keeps both URLs under the same key.
    data.add('hub.url', 'http://www.example.com/')
    data.add('hub.url', 'http://www.site.com/')
    request = self.r('/publish', self.valid_headers, POST=data)
    info = publish(None, request)
    self.assertEqual(info.status_code, 204)
def search(self, ver=None, register=None):
    """Extract the search term from the request parameters.

    Looks for 'q' first, then 'query' (the latter wins if both are
    present); a malformed query string is silently ignored.
    """
    id = None  # NOTE(review): shadows the builtin `id`
    try:
        params = MultiDict(self._get_search_params(request.params))
        if 'q' in params.keys():
            id = params['q']
        if 'query' in params.keys():
            id = params['query']
    except ValueError, e:
        # Best-effort: a bad query string leaves id as None.
        pass
def parse_path(self):
    """Split ``self.path`` into its path component and a MultiDict of
    query-string parameters.

    Returns:
        tuple: ``(path, param_dict)`` where ``param_dict`` is a
        MultiDict preserving repeated keys in order.
    """
    # Everything before the first '?' is the path proper.
    path = self.path.split('?')[0]
    query = self.path[len(path) + 1:]
    param_dict = MultiDict()
    # Fix: with no query string at all, the old code split '' on '&'
    # and recorded a bogus ('', '') entry; skip empty fragments.
    for param in query.split('&'):
        if not param:
            continue
        # partition keeps everything after the first '=' as the value
        # (and yields '' when there is no '='), matching the old slicing.
        key, _, value = param.partition('=')
        param_dict.add(key, value)
    return path, param_dict
def _parse_query(self):
    """ Decompose the query string into fields and terms. """
    self._combined_fields = MultiDict(self._fields)
    self._combined_terms = list(self._terms)
    for token in self._query_tokens():
        field, sep, raw_value = token.partition(u':')
        if sep:
            # "field:value" token -- strip surrounding quotes and space.
            cleaned = raw_value.strip('"').strip("'").strip()
            self._combined_fields.add(field, cleaned)
        else:
            # Bare token -- treat it as a free-text search term.
            self._combined_terms.append(token)
def _external_request(self, uri, method, body, headers, wsgi_request, input, output, trusted): body = self._serialize_body(input, body, headers) # @@: Does httplib2 handle Content-Length? dict_headers = MultiDict(headers) (res, content) = self.httplib2.request( uri, method=method, body=body, headers=dict_headers, redirections=self.redirections) status = '%s %s' % (res.status, res.reason) # @@: Hrm... headers = res.items() remove_header(headers, 'status') return self._create_response( status, headers, output, [content], trusted)
def test_view_login(self):
    """An unauthenticated request renders the login form with its title."""
    from apex.lib.libapex import create_user
    create_user(username='******', password='******')
    from apex.views import login
    request = testing.DummyRequest()
    # wtforms requires this
    request.POST = MultiDict()
    request.context = testing.DummyResource()
    response = login(request)
    self.assertEqual(response['title'], 'You need to login')
def search(self, ver=None, register=None):
    """Run a search via the parent ApiController, first recording the
    query term ('q' or 'query' -- the latter wins) for analytics.

    Returns whatever ApiController.search returns.
    """
    id = None  # NOTE(review): shadows the builtin `id`
    try:
        params = MultiDict(self._get_search_params(request.params))
        # Test membership on the mapping directly instead of building
        # a throwaway list of keys on every call.
        if "q" in params:
            id = params["q"]
        if "query" in params:
            id = params["query"]
    except ValueError as e:
        # Best-effort: a bad query string leaves id as None.
        log.debug(str(e))
    self._post_analytics(c.user, register, "search", id)
    return ApiController.search(self, ver, register)
def test_publish_fetches_topic_content(self, mock):
    """Publishing several URLs must fetch each topic, leaving every
    published topic stamped with a timestamp."""
    data = MultiDict({'hub.mode': 'publish'})
    data.add('hub.url', 'http://www.example.com/')
    data.add('hub.url', 'http://www.site.com/')
    request = self.r('/publish', self.valid_headers, POST=data)
    hub = request.root
    info = publish(None, request)
    first = hub.topics.get('http://www.example.com/')
    second = hub.topics.get('http://www.site.com/')
    self.assertEqual(info.status_code, 204)
    # A non-None timestamp means the topic content was actually fetched.
    self.assertTrue(first.timestamp is not None)
    self.assertTrue(second.timestamp is not None)
def test_fail_login(self):
    """Submitting wrong credentials re-renders the form with one error."""
    from apex.lib.libapex import create_user
    create_user(username='******', password='******')
    request = testing.DummyRequest(environ=environ)
    request.method = 'POST'
    # wtforms requires this
    request.POST = MultiDict()
    request.POST['username'] = '******'
    request.POST['password'] = '******'
    from apex.views import login
    request.context = testing.DummyResource()
    response = login(request)
    self.assertEqual(len(response['form'].errors), 1)
def transform(self, fn):
    """Accepts a function, getting a key, val and returning a new pair
    of key, val and applies the function to all header, value pairs in
    the message.
    """
    # One-element list acts as a mutable cell so the nested function can
    # flag modifications (Python 2 has no `nonlocal`).
    changed = [False]

    def tracking_fn(key, val):
        new_key, new_val = fn(key, val)
        if new_val != val or new_key != key:
            changed[0] = True
        return new_key, new_val

    v = MultiDict(tracking_fn(key, val) for key, val in self.v.iteritems())
    # Only swap in the rebuilt dict (and mark dirty) if anything changed.
    if changed[0]:
        self.v = v
        self.changed = True
def _loadOwsParams(self): """ Method to load OWS parameters from the query string. If request method is POST then this is ignored after creating the instance dictionary self._owsParams. """ # All OWS parameter names are case insensitive. self._owsParams = {} if self._requestMethod() == "POST": return log.debug('REQUEST: %s' % request) if PARAMETER_MODE == 'html_4': params = request.params elif PARAMETER_MODE == 'wps_1': # request.params will use ';' as a QS separator which is not compatible # with WPS-1.0. Therefore we parse the QS ourselves. qs = request.environ['QUERY_STRING'] params = MultiDict( parse_qsl(qs, semicolon_separator=False, keep_blank_values=True, strict_parsing=False)) else: raise ValueError( "Value of cows.parameter_mode not recognised (%s)" % PARAMETER_MODE) #!TODO: unicode is converted here. # At some point we need to expect COWS apps to accept unicode try: for k in params: if k.lower() == 'x': self._owsParams['i'] = str(params[k]) elif k.lower() == 'y': self._owsParams['j'] = str(params[k]) else: self._owsParams[k.lower()] = str(params[k]) except UnicodeError: raise ValueError( "Cannot convert unicode to string. COWS does not accept unicode parameters" )
def copy(self):
    """ Creates a copy of this ConfigStore. All configuration data is copied
    over except for SQL connections.
    """
    config_store = ConfigStore()

    # Grab all ConfigDicts - even if they're actually ZATO_NONE - and make their copies
    for attr_name in dir(self):
        attr = getattr(self, attr_name)
        if isinstance(attr, ConfigDict):
            copy_func = getattr(attr, 'copy')
            setattr(config_store, attr_name, copy_func())
        elif attr is ZATO_NONE:
            setattr(config_store, attr_name, ZATO_NONE)

    # http_soap needs a manual, field-by-field deep copy into a fresh
    # MultiDict keyed by URL path, with one Bunch per SOAP action.
    http_soap = MultiDict()
    dict_of_lists = self.http_soap.dict_of_lists()
    for url_path, lists in dict_of_lists.items():
        _info = Bunch()
        for elem in lists:
            for soap_action, item in elem.items():
                _info[soap_action] = Bunch()
                _info[soap_action].id = item.id
                _info[soap_action].name = item.name
                _info[soap_action].is_active = item.is_active
                _info[soap_action].is_internal = item.is_internal
                _info[soap_action].url_path = item.url_path
                _info[soap_action].method = item.method
                _info[soap_action].soap_version = item.soap_version
                _info[soap_action].service_id = item.service_id
                _info[soap_action].service_name = item.service_name
                _info[soap_action].impl_name = item.impl_name
                _info[soap_action].transport = item.transport
                _info[soap_action].connection = item.connection
        http_soap.add(url_path, _info)

    config_store.http_soap = http_soap
    # url_sec and broker_config are shared by reference; odb_data is not.
    config_store.url_sec = self.url_sec
    config_store.broker_config = self.broker_config
    config_store.odb_data = deepcopy(self.odb_data)

    return config_store
def test_dict():
    """Exercise MultiDict ordering, duplicate keys and the dict-style API."""
    d = MultiDict({'a': 1})
    assert d.items() == [('a', 1)]
    d['b'] = 2
    d['c'] = 3
    assert d.items() == [('a', 1), ('b', 2), ('c', 3)]
    # Re-assigning an existing key moves it to the end.
    d['b'] = 4
    assert d.items() == [('a', 1), ('c', 3), ('b', 4)]
    d.add('b', 5)
    # getone must refuse a key that now has multiple values.
    raises(KeyError, 'd.getone("b")')
    assert d.getall('b') == [4, 5]
    assert d.items() == [('a', 1), ('c', 3), ('b', 4), ('b', 5)]
    # del removes every value stored under the key.
    del d['b']
    assert d.items() == [('a', 1), ('c', 3)]
    assert d.pop('xxx', 5) == 5
    assert d.getone('a') == 1
    assert d.popitem() == ('c', 3)
    assert d.items() == [('a', 1)]
    item = []
    # setdefault must return the very object it stored.
    assert d.setdefault('z', item) is item
    assert d.items() == [('a', 1), ('z', item)]
    assert d.setdefault('y', 6) == 6
    assert d.mixed() == {'a': 1, 'y': 6, 'z': item}
    assert d.dict_of_lists() == {'a': [1], 'y': [6], 'z': [item]}
    assert 'a' in d
    dcopy = d.copy()
    assert dcopy is not d
    assert dcopy == d
    d['x'] = 'x test'
    assert dcopy != d
    # Non-string keys and tuple values are allowed too.
    d[(1, None)] = (None, 1)
    assert d.items() == [('a', 1), ('z', []), ('y', 6), ('x', 'x test'),
                         ((1, None), (None, 1))]
def params(self):
    """Dictionary-like object of keys from POST, GET, URL dicts

    Return a key value from the parameters, they are checked in the
    following order: POST, GET, URL

    Additional methods supported:

    ``getlist(key)``
        Returns a list of all the values by that key, collected from
        POST, GET, URL dicts

    Returns a ``MultiDict`` container or a ``UnicodeMultiDict`` when
    ``charset`` is set.
    """
    params = MultiDict()
    # POST entries are folded in first, then GET.
    params.update(self._POST())
    params.update(self._GET())
    if self.charset:
        # Wrap so keys/values come back decoded per the configured charset.
        params = UnicodeMultiDict(params, encoding=self.charset,
                                  errors=self.errors,
                                  decode_keys=self.decode_param_names)
    return params
def frontpage_featured_orgs(self, path=None, data=None, errors=None,
                            error_summary=None):
    """Process the featured-organisations admin form.

    On POST, requires at least one selected org, stores the selections
    as a space-separated string in the `ckan.featured_orgs` config
    option, then redirects to /frontpage.
    """
    msg = ''
    if p.toolkit.request.method == 'POST' and not data:
        data = MultiDict(p.toolkit.request.POST)
        # Re-bind `data` to the list of selected org names.
        data = data.getall('featured_orgs')
        if len(data) < 1:
            msg = 'At least one featured org must be selected'
            h.flash_error(msg, allow_html=False)
        else:
            # Join the selections into one space-separated string.
            forgs = ''
            for i in range(len(data)):
                data[i] = data[i].encode('utf-8')
                forgs += data[i]
                if i < len(data) - 1:
                    forgs += ' '
            try:
                junk = p.toolkit.get_action('config_option_update')(
                    {'user': c.user}, {'ckan.featured_orgs': forgs})
            except p.toolkit.ValidationError, e:
                # Validation failed: re-render the form with the errors.
                errors = e.error_dict
                error_summary = e.error_summary
                return self.frontpage_featured_orgs(
                    '', data, errors, error_summary)
    p.toolkit.redirect_to('/frontpage')
def _test_unicode_dict(decode_param_names=False):
    """Exercise UnicodeMultiDict value (and optionally key) decoding,
    portable across Python 2/3 via six."""
    d = UnicodeMultiDict(MultiDict({b'a': 'a test'}))
    d.encoding = 'utf-8'
    d.errors = 'ignore'
    if decode_param_names:
        # Keys are decoded to text; k() is the identity.
        key_str = six.text_type
        k = lambda key: key
        d.decode_keys = True
    else:
        # Keys stay as bytes; k() encodes the literal used in asserts.
        key_str = six.binary_type
        k = lambda key: key.encode()

    def assert_unicode(obj):
        assert isinstance(obj, six.text_type)

    def assert_key_str(obj):
        assert isinstance(obj, key_str)

    def assert_unicode_item(obj):
        key, value = obj
        assert isinstance(key, key_str)
        assert isinstance(value, six.text_type)

    assert d.items() == [(k('a'), u'a test')]
    map(assert_key_str, d.keys())
    map(assert_unicode, d.values())
    d[b'b'] = b'2 test'
    d[b'c'] = b'3 test'
    assert d.items() == [(k('a'), u'a test'), (k('b'), u'2 test'),
                         (k('c'), u'3 test')]
    list(map(assert_unicode_item, d.items()))
    # Re-assigning an existing key moves it to the end.
    d[k('b')] = b'4 test'
    assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'),
                         (k('b'), u'4 test')], d.items()
    list(map(assert_unicode_item, d.items()))
    d.add(k('b'), b'5 test')
    # getone refuses a key with multiple values.
    pytest.raises(KeyError, d.getone, k("b"))
    assert d.getall(k('b')) == [u'4 test', u'5 test']
    map(assert_unicode, d.getall('b'))
    assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'),
                         (k('b'), u'4 test'), (k('b'), u'5 test')]
    list(map(assert_unicode_item, d.items()))
    del d[k('b')]
    assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test')]
    list(map(assert_unicode_item, d.items()))
    assert d.pop('xxx', u'5 test') == u'5 test'
    assert isinstance(d.pop('xxx', u'5 test'), six.text_type)
    assert d.getone(k('a')) == u'a test'
    assert isinstance(d.getone(k('a')), six.text_type)
    assert d.popitem() == (k('c'), u'3 test')
    d[k('c')] = b'3 test'
    assert_unicode_item(d.popitem())
    assert d.items() == [(k('a'), u'a test')]
    list(map(assert_unicode_item, d.items()))
    item = []
    assert d.setdefault(k('z'), item) is item
    items = d.items()
    assert items == [(k('a'), u'a test'), (k('z'), item)]
    assert isinstance(items[1][0], key_str)
    assert isinstance(items[1][1], list)
    assert isinstance(d.setdefault(k('y'), b'y test'), six.text_type)
    assert isinstance(d[k('y')], six.text_type)
    assert d.mixed() == {k('a'): u'a test', k('y'): u'y test', k('z'): item}
    assert d.dict_of_lists() == {k('a'): [u'a test'], k('y'): [u'y test'],
                                 k('z'): [item]}
    del d[k('z')]
    list(map(assert_unicode_item, six.iteritems(d.mixed())))
    list(map(assert_unicode_item, [(key, value[0]) for \
        key, value in six.iteritems(d.dict_of_lists())]))
    assert k('a') in d
    dcopy = d.copy()
    assert dcopy is not d
    assert dcopy == d
    d[k('x')] = 'x test'
    assert dcopy != d
    # Non-string keys/values pass through undecoded.
    d[(1, None)] = (None, 1)
    assert d.items() == [(k('a'), u'a test'), (k('y'), u'y test'),
                         (k('x'), u'x test'), ((1, None), (None, 1))]
    item = d.items()[-1]
    assert isinstance(item[0], tuple)
    assert isinstance(item[1], tuple)
    # FieldStorage values come back as a decoded *copy*, not the original.
    fs = cgi.FieldStorage()
    fs.name = 'thefile'
    fs.filename = 'hello.txt'
    fs.file = StringIO('hello')
    d[k('f')] = fs
    ufs = d[k('f')]
    assert isinstance(ufs, cgi.FieldStorage)
    assert ufs is not fs
    assert ufs.name == fs.name
    assert isinstance(ufs.name, str if six.PY3 else key_str)
    assert ufs.filename == fs.filename
    assert isinstance(ufs.filename, six.text_type)
    assert isinstance(ufs.value, str)
    assert ufs.value == 'hello'
class SubscribeTests(BaseTest):
    """Tests for the /subscribe endpoint: content-type, hub.* parameter
    validation and the verification (challenge) round-trip."""

    # Baseline valid subscription request; each test copies and mutates it.
    default_data = MultiDict({
        'hub.verify': 'sync',
        'hub.callback': 'http://httpbin.org/get',
        'hub.mode': "subscribe",
        'hub.topic': "http://www.google.com/"
    })
    challenge = "abcdefg"

    def test_subscribe(self):
        # A request with no POST body is rejected as method-not-allowed.
        request = Request.blank('/subscribe')
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 405)

    def test_invalid_content_type(self):
        headers = [("Content-Type", "text/plain")]
        request = self.r(
            '/subscribe', headers=headers, POST={"thing": "thing"}
        )
        info = subscribe(None, request)
        # 406 plus an Accept header advertising the supported type.
        self.assertEqual(info.status_code, 406)
        self.assertEqual(
            info.headers['Accept'], 'application/x-www-form-urlencoded'
        )

    def test_invalid_verify_type(self):
        data = {"hub.verify": "bogus"}
        request = self.r(
            '/subscribe', POST=data
        )
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 400)
        self.assertEqual(info.headers['Content-Type'], 'text/plain')
        self.assertTrue("hub.verify" in info.body)

    @patch.object(Hub, 'get_challenge_string')
    def test_multiple_verify_types_one_valid(self, mock_get_challenge_string):
        # One recognised value among several is enough to proceed.
        data = self.default_data.copy()
        mock_get_challenge_string.return_value = self.challenge
        del data["hub.verify"]
        data.add('hub.verify', 'bogus')
        data.add('hub.verify', 'sync')
        request = self.r(
            '/subscribe', POST=data
        )
        with patch('requests.get', new_callable=MockResponse,
                   content=self.challenge, status_code=200):
            info = subscribe(None, request)
        self.assertEqual(info.status_code, 204)

    def test_multiple_invalid_verify_types(self):
        data = self.default_data.copy()
        del data["hub.verify"]
        data.add('hub.verify', 'bad')
        data.add('hub.verify', 'wrong')
        request = self.r(
            '/subscribe', POST=data
        )
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 400)
        self.assertEqual(info.headers['Content-Type'], 'text/plain')
        self.assertTrue("hub.verify" in info.body)

    def test_invalid_callback(self):
        # Callback without a scheme must be rejected.
        data = self.default_data.copy()
        del data['hub.callback']
        data.add("hub.callback", "www.google.com")
        request = self.r(
            '/subscribe', POST=data
        )
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 400)
        self.assertTrue('hub.callback' in info.body)

    @patch.object(Hub, 'get_challenge_string')
    def test_valid_callback(self, mock_get_challenge_string):
        data = self.default_data.copy()
        mock_get_challenge_string.return_value = self.challenge
        request = self.r(
            '/subscribe', POST=data
        )
        with patch('requests.get', new_callable=MockResponse,
                   content=self.challenge, status_code=200):
            info = subscribe(None, request)
        self.assertEqual(info.status_code, 204)

    def test_invalid_mode(self):
        data = self.default_data.copy()
        del data['hub.mode']
        data.add('hub.mode', 'bad')
        request = self.r(
            '/subscribe', POST=data,
        )
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 400)
        self.assertTrue('hub.mode' in info.body)

    @patch.object(Hub, 'get_challenge_string')
    def test_valid_mode(self, mock_get_challenge_string):
        data = self.default_data.copy()
        mock_get_challenge_string.return_value = self.challenge
        request = self.r(
            '/subscribe', POST=data
        )
        with patch('requests.get', new_callable=MockResponse,
                   content=self.challenge, status_code=200):
            info = subscribe(None, request)
        self.assertEqual(info.status_code, 204)

    @patch.object(Hub, 'get_challenge_string')
    def test_valid_topic(self, mock_get_challenge_string):
        data = self.default_data.copy()
        mock_get_challenge_string.return_value = self.challenge
        request = self.r(
            '/subscribe', POST=data
        )
        with patch('requests.get', new_callable=MockResponse,
                   content=self.challenge, status_code=200):
            info = subscribe(None, request)
        self.assertEqual(info.status_code, 204)

    def test_invalid_topic(self):
        # Topic URLs carrying a fragment are rejected.
        data = self.default_data.copy()
        del data['hub.topic']
        data.add('hub.topic', 'http://google.com/#fragment')
        request = self.r(
            '/subscribe', POST=data
        )
        info = subscribe(None, request)
        self.assertEqual(info.status_code, 400)
        self.assertTrue('hub.topic' in info.body)

    def test_not_verified_subscription(self):
        # The callback answering 404 to the challenge yields 409 Conflict.
        data = self.default_data.copy()
        del data["hub.callback"]
        data.add('hub.callback', 'http://httpbin.org/status/404')
        request = self.r(
            '/subscribe', POST=data
        )
        with patch('requests.get', new_callable=MockResponse,
                   status_code=404):
            info = subscribe(None, request)
        self.assertEqual(info.status_code, 409)

    @patch.object(Hub, 'get_challenge_string')
    def test_subscribe_to_actual_topic(self, mock_get_challenge_string):
        """ Make sure that the topic subscribed to is same as published. """
        data = self.default_data.copy()
        mock_get_challenge_string.return_value = self.challenge
        request = self.r(
            '/subscribe', POST=data
        )
        # Publish the URL first.
        self.root.publish('http://www.google.com/')
        # Map each URL to its canned response: topic content for the
        # topic fetch, the challenge echo for callback verification.
        urls = {
            'http://www.google.com/': MockResponse(content=good_atom),
            'http://httpbin.org/get': MockResponse(
                content=self.challenge, status_code=200
            )
        }
        with patch('requests.get', new_callable=MultiResponse, mapping=urls):
            subscribe(None, request)
        hub = self.root
        topic = hub.topics.get('http://www.google.com/')
        subscriber = hub.subscribers.get('http://httpbin.org/get')
        # The subscriber must reference the very topic object the hub holds.
        self.assertEqual(
            topic, subscriber.topics.get('http://www.google.com/'))
def parse_formvars(environ, include_get_vars=True):
    """Parses the request, returning a MultiDict of form variables.

    If ``include_get_vars`` is true then GET (query string) variables
    will also be folded into the MultiDict.

    All values should be strings, except for file uploads which are
    left as ``FieldStorage`` instances.

    If the request was not a normal form request (e.g., a POST with an
    XML body) then ``environ['wsgi.input']`` won't be read.
    """
    source = environ['wsgi.input']
    if 'paste.parsed_formvars' in environ:
        parsed, check_source = environ['paste.parsed_formvars']
        # Cache hit only if it was built from this same input stream.
        if check_source == source:
            if include_get_vars:
                parsed.update(parse_querystring(environ))
            return parsed
    # @@: Shouldn't bother FieldStorage parsing during GET/HEAD and
    # fake_out_cgi requests
    type = environ.get('CONTENT_TYPE', '').lower()
    if ';' in type:
        type = type.split(';', 1)[0]
    fake_out_cgi = type not in ('', 'application/x-www-form-urlencoded',
                                'multipart/form-data')
    # FieldStorage assumes a default CONTENT_LENGTH of -1, but a
    # default of 0 is better:
    if not environ.get('CONTENT_LENGTH'):
        environ['CONTENT_LENGTH'] = '0'
    # Prevent FieldStorage from parsing QUERY_STRING during GET/HEAD
    # requests
    old_query_string = environ.get('QUERY_STRING','')
    environ['QUERY_STRING'] = ''
    if fake_out_cgi:
        # Feed FieldStorage an empty body so wsgi.input is never read
        # for non-form content types; stash the real headers meanwhile.
        input = six.BytesIO(b'')
        old_content_type = environ.get('CONTENT_TYPE')
        old_content_length = environ.get('CONTENT_LENGTH')
        environ['CONTENT_LENGTH'] = '0'
        environ['CONTENT_TYPE'] = ''
    else:
        input = environ['wsgi.input']
    fs = cgi.FieldStorage(fp=input,
                          environ=environ,
                          keep_blank_values=1)
    # Restore the environ keys temporarily blanked out above.
    environ['QUERY_STRING'] = old_query_string
    if fake_out_cgi:
        environ['CONTENT_TYPE'] = old_content_type
        environ['CONTENT_LENGTH'] = old_content_length
    formvars = MultiDict()
    if isinstance(fs.value, list):
        for name in fs.keys():
            values = fs[name]
            if not isinstance(values, list):
                values = [values]
            for value in values:
                # Plain fields are unwrapped to their string value;
                # uploads (with a filename) stay as FieldStorage.
                if not value.filename:
                    value = value.value
                formvars.add(name, value)
    environ['paste.parsed_formvars'] = (formvars, source)
    if include_get_vars:
        formvars.update(parse_querystring(environ))
    return formvars
def __init__(self, items=()):
    """Store the given (key, value) pairs with normalized keys and
    start with the changed flag cleared."""
    normalized_pairs = [(normalize(name), value) for (name, value) in items]
    self.v = MultiDict(normalized_pairs)
    self.changed = False
def _test_unicode_dict(decode_param_names=False):
    """Exercise UnicodeMultiDict value (and optionally key) decoding
    (Python 2 variant using `unicode`/`str`)."""
    d = UnicodeMultiDict(MultiDict({'a': 'a test'}))
    d.encoding = 'utf-8'
    d.errors = 'ignore'
    if decode_param_names:
        # Keys are decoded to unicode as well as values.
        key_str = unicode
        d.decode_keys = True
    else:
        key_str = str

    def assert_unicode(obj):
        assert isinstance(obj, unicode)

    def assert_key_str(obj):
        assert isinstance(obj, key_str)

    def assert_unicode_item(obj):
        key, value = obj
        assert isinstance(key, key_str)
        assert isinstance(value, unicode)

    assert d.items() == [('a', u'a test')]
    map(assert_key_str, d.keys())
    map(assert_unicode, d.values())
    d['b'] = '2 test'
    d['c'] = '3 test'
    assert d.items() == [('a', u'a test'), ('b', u'2 test'), ('c', u'3 test')]
    map(assert_unicode_item, d.items())
    # Re-assigning an existing key moves it to the end.
    d['b'] = '4 test'
    assert d.items() == [('a', u'a test'), ('c', u'3 test'), ('b', u'4 test')]
    map(assert_unicode_item, d.items())
    d.add('b', '5 test')
    # getone refuses a key with multiple values.
    raises(KeyError, 'd.getone("b")')
    assert d.getall('b') == [u'4 test', u'5 test']
    map(assert_unicode, d.getall('b'))
    assert d.items() == [('a', u'a test'), ('c', u'3 test'), ('b', u'4 test'),
                         ('b', u'5 test')]
    map(assert_unicode_item, d.items())
    del d['b']
    assert d.items() == [('a', u'a test'), ('c', u'3 test')]
    map(assert_unicode_item, d.items())
    assert d.pop('xxx', u'5 test') == u'5 test'
    assert isinstance(d.pop('xxx', u'5 test'), unicode)
    assert d.getone('a') == u'a test'
    assert isinstance(d.getone('a'), unicode)
    assert d.popitem() == ('c', u'3 test')
    d['c'] = '3 test'
    assert_unicode_item(d.popitem())
    assert d.items() == [('a', u'a test')]
    map(assert_unicode_item, d.items())
    item = []
    assert d.setdefault('z', item) is item
    items = d.items()
    assert items == [('a', u'a test'), ('z', item)]
    assert isinstance(items[1][0], key_str)
    assert isinstance(items[1][1], list)
    assert isinstance(d.setdefault('y', 'y test'), unicode)
    assert isinstance(d['y'], unicode)
    assert d.mixed() == {u'a': u'a test', u'y': u'y test', u'z': item}
    assert d.dict_of_lists() == {
        u'a': [u'a test'],
        u'y': [u'y test'],
        u'z': [item]
    }
    del d['z']
    map(assert_unicode_item, d.mixed().iteritems())
    map(assert_unicode_item, [(k, v[0]) for \
        k, v in d.dict_of_lists().iteritems()])
    assert u'a' in d
    dcopy = d.copy()
    assert dcopy is not d
    assert dcopy == d
    d['x'] = 'x test'
    assert dcopy != d
    # Non-string keys/values pass through undecoded.
    d[(1, None)] = (None, 1)
    assert d.items() == [('a', u'a test'), ('y', u'y test'), ('x', u'x test'),
                         ((1, None), (None, 1))]
    item = d.items()[-1]
    assert isinstance(item[0], tuple)
    assert isinstance(item[1], tuple)
    # FieldStorage values come back as a decoded *copy*, not the original.
    fs = cgi.FieldStorage()
    fs.name = 'thefile'
    fs.filename = 'hello.txt'
    fs.file = StringIO('hello')
    d['f'] = fs
    ufs = d['f']
    assert isinstance(ufs, cgi.FieldStorage)
    assert ufs is not fs
    assert ufs.name == fs.name
    assert isinstance(ufs.name, key_str)
    assert ufs.filename == fs.filename
    assert isinstance(ufs.filename, unicode)
    assert isinstance(ufs.value, str)
    assert ufs.value == 'hello'
since_time_str = request.params['since_time'] try: since_time = h.date_str_to_datetime(since_time_str) except ValueError, inst: return self._finish_bad_request('ValueError: %s' % inst) else: return self._finish_bad_request( _("Missing search term ('since_id=UUID' or " + " 'since_time=TIMESTAMP')")) revs = model.Session.query(model.Revision).\ filter(model.Revision.timestamp > since_time).\ limit(50) # reasonable enough for a page return self._finish_ok([rev.id for rev in revs]) elif register in ['dataset', 'package', 'resource']: try: params = MultiDict(self._get_search_params(request.params)) except ValueError, e: return self._finish_bad_request( _('Could not read parameters: %r' % e)) # if using API v2, default to returning the package ID if # no field list is specified if register in ['dataset', 'package'] and not params.get('fl'): params['fl'] = 'id' if ver == 2 else 'name' try: if register == 'resource': query = search.query_for(model.Resource) # resource search still uses ckan query parser options = search.QueryOptions()
return thisvalue i = get_indexpage(indexpage) for inturl in i: idsm = re.search('TDATA\-(\S+)', inturl, re.MULTILINE) xmldatafile = 'somefile' if idsm: xmldatafile = "./data/%s.xml" % idsm.group(1) xmlfile = open(xmldatafile,'w') html = read_remote_page(inturl) parsed_html = BeautifulSoup(html) #, "html.parser") items = parsed_html.findAll('p') #, attrs={'class':'toolDataItemName'}) mainkey = '' d = MultiDict() DEBUG = 0 for item in items: cleanitem = item thiskey = item.find('b') #, attrs={'img':'class bdtIcon'}) if thiskey: value = str(item) value = value.replace(str(thiskey), '') value = cleaner(value) if DEBUG: print "X %s" % value mainkey = thiskey if value: d.add(thiskey.text, value) else: if mainkey:
def search(self, ver=None, register=None):
    """Dispatch an API search over the supported registers.

    'revision' is searched by since_id or since_time; 'dataset',
    'package' and 'resource' go through the search backend. Results are
    returned via the controller's _finish_* helpers.
    """
    log.debug('search %s params: %r', register, request.params)
    if register == 'revision':
        since_time = None
        if 'since_id' in request.params:
            id = request.params['since_id']
            if not id:
                return self._finish_bad_request(
                    _(u'No revision specified'))
            rev = model.Session.query(model.Revision).get(id)
            if rev is None:
                return self._finish_not_found(
                    _(u'There is no revision with id: %s') % id)
            since_time = rev.timestamp
        elif 'since_time' in request.params:
            since_time_str = request.params['since_time']
            try:
                since_time = h.date_str_to_datetime(since_time_str)
            except ValueError as inst:
                return self._finish_bad_request('ValueError: %s' % inst)
        else:
            return self._finish_bad_request(
                _("Missing search term ('since_id=UUID' or " +
                  " 'since_time=TIMESTAMP')"))
        revs = model.Session.query(model.Revision) \
            .filter(model.Revision.timestamp > since_time) \
            .order_by(model.Revision.timestamp) \
            .limit(50)  # reasonable enough for a page
        return self._finish_ok([rev.id for rev in revs])
    elif register in ['dataset', 'package', 'resource']:
        try:
            params = MultiDict(self._get_search_params(request.params))
        except ValueError as e:
            return self._finish_bad_request(
                _('Could not read parameters: %r' % e))
        # if using API v2, default to returning the package ID if
        # no field list is specified
        if register in ['dataset', 'package'] and not params.get('fl'):
            params['fl'] = 'id' if ver == 2 else 'name'
        try:
            if register == 'resource':
                query = search.query_for(model.Resource)
                # resource search still uses ckan query parser
                options = search.QueryOptions()
                for k, v in params.items():
                    if (k in search.DEFAULT_OPTIONS.keys()):
                        options[k] = v
                options.update(params)
                options.username = c.user
                options.search_tags = False
                options.return_objects = False
                # Everything that is neither a known option nor an
                # ignored field becomes a query field (multi-valued).
                query_fields = MultiDict()
                for field, value in params.items():
                    field = field.strip()
                    if field in search.DEFAULT_OPTIONS.keys() or \
                       field in IGNORE_FIELDS:
                        continue
                    values = [value]
                    if isinstance(value, list):
                        values = value
                    for v in values:
                        query_fields.add(field, v)
                results = query.run(query=params.get('q'),
                                    fields=query_fields,
                                    options=options)
            else:
                # For package searches in API v3 and higher, we can pass
                # parameters straight to Solr.
                if ver in [1, 2]:
                    # Otherwise, put all unrecognised ones into the q
                    # parameter
                    params = search.\
                        convert_legacy_parameters_to_solr(params)
                query = search.query_for(model.Package)
                # Remove any existing fq param and set the capacity to
                # public
                if 'fq' in params:
                    del params['fq']
                params['fq'] = '+capacity:public'
                # if callback is specified we do not want to send that to
                # the search
                if 'callback' in params:
                    del params['callback']
                results = query.run(params)
            return self._finish_ok(results)
        except search.SearchError as e:
            log.exception(e)
            return self._finish_bad_request(_('Bad search option: %s') % e)
    else:
        return self._finish_not_found(_('Unknown register: %s') % register)
def __init__(self, query, terms, fields):
    """Hold a raw query together with its parsed term list and the
    field/value pairs extracted from it."""
    self._fields = MultiDict(fields)
    self._terms = terms
    self._query = query
def get_url_security(self, cluster_id, connection=None):
    """ Returns the security configuration of HTTP URLs.

    Returns a (result, columns) pair: `result` is a MultiDict keyed by
    URL path, each value a Bunch keyed by SOAP action; `columns` maps
    every column name of the underlying query to None.
    """
    # What DB class to fetch depending on the string value of the security type.
    sec_type_db_class = {
        'tech_acc': TechnicalAccount,
        'basic_auth': HTTPBasicAuth,
        'wss': WSSDefinition
    }

    result = MultiDict()
    query = http_soap_security_list(self._session, cluster_id, connection)

    columns = Bunch()

    # So ConfigDict has its data in the format it expects
    for c in query.statement.columns:
        columns[c.name] = None

    for item in query.all():
        _info = Bunch()
        _info[item.soap_action] = Bunch()
        _info[item.soap_action].is_active = item.is_active
        _info[item.soap_action].transport = item.transport
        _info[item.soap_action].data_format = item.data_format

        if item.security_id:
            _info[item.soap_action].sec_def = Bunch()

            # Will raise KeyError if the DB gets somehow misconfigured.
            db_class = sec_type_db_class[item.sec_type]

            sec_def = self._session.query(db_class).\
                filter(db_class.id==item.security_id).\
                one()

            # Common things first
            _info[item.soap_action].sec_def.name = sec_def.name
            _info[item.soap_action].sec_def.password = sec_def.password
            _info[item.soap_action].sec_def.sec_type = item.sec_type

            # Then the type-specific attributes.
            if item.sec_type == security_def_type.tech_account:
                _info[item.soap_action].sec_def.salt = sec_def.salt
            elif item.sec_type == security_def_type.basic_auth:
                _info[item.soap_action].sec_def.username = sec_def.username
                _info[item.soap_action].sec_def.password = sec_def.password
                _info[item.soap_action].sec_def.realm = sec_def.realm
            elif item.sec_type == security_def_type.wss:
                _info[item.soap_action].sec_def.username = sec_def.username
                _info[item.soap_action].sec_def.password = sec_def.password
                _info[item.soap_action].sec_def.password_type = sec_def.password_type
                _info[item.soap_action].sec_def.reject_empty_nonce_creat = sec_def.reject_empty_nonce_creat
                _info[item.soap_action].sec_def.reject_stale_tokens = sec_def.reject_stale_tokens
                _info[item.soap_action].sec_def.reject_expiry_limit = sec_def.reject_expiry_limit
                _info[item.soap_action].sec_def.nonce_freshness_time = sec_def.nonce_freshness_time
        else:
            # No security definition attached to this URL.
            _info[item.soap_action].sec_def = ZATO_NONE

        result.add(item.url_path, _info)

    return result, columns
elif register == 'package' or register == 'resource': try: params = self._get_search_params(request.params) except ValueError, e: return self._finish_bad_request( gettext('Could not read parameters: %r' % e)) options = QueryOptions() for k, v in params.items(): if (k in DEFAULT_OPTIONS.keys()): options[k] = v options.update(params) options.username = c.user options.search_tags = False options.return_objects = False query_fields = MultiDict() for field, value in params.items(): field = field.strip() if field in DEFAULT_OPTIONS.keys() or \ field in IGNORE_FIELDS: continue values = [value] if isinstance(value, list): values = value for v in values: query_fields.add(field, v) if register == 'package': options.ref_entity_with_attr = 'id' if ver == '2' else 'name' try: backend = None