def test_xmodule_handler_dispatch(self):
    self.module.xmodule_handler(self.request, 'dispatch')
    self.module.handle_ajax.assert_called_with(
        'dispatch', MultiDict(self.request.POST))
def test_it_returns_the_total_number_of_matching_annotations(
        self, pyramid_request):
    assert execute(pyramid_request, MultiDict(), self.PAGE_SIZE).total == 20
def parse(self):
    return mock.Mock(spec_set=[], return_value=MultiDict({'foo': 'bar'}))
from __future__ import unicode_literals

import pytest
from hypothesis import strategies as st
from hypothesis import given
from webob.multidict import MultiDict

from memex.search import parser


@pytest.mark.parametrize("query_in,query_out", [
    # user field
    ('user:luke', MultiDict([('user', 'luke')])),
    ('user:[email protected]', MultiDict([('user', '*****@*****.**')])),
    ('user:acct:[email protected]',
     MultiDict([('user', 'acct:[email protected]')])),
    ('user:luke user:alice',
     MultiDict([('user', 'luke'), ('user', 'alice')])),
    ('user:"******"', MultiDict([('user', 'luke and alice')])),
    ('user:"******"', MultiDict([('user', 'luke')])),
    ('USER:luke', MultiDict([('user', 'luke')])),
    # tag field
    ('tag:foo', MultiDict([('tag', 'foo')])),
    ('tag:foo tag:bar', MultiDict([('tag', 'foo'), ('tag', 'bar')])),
    ('tag:\'foo bar\'', MultiDict([('tag', 'foo bar')])),
    ('tag:"foo bar"', MultiDict([('tag', 'foo bar')])),
    ('tag:\'foobar\'', MultiDict([('tag', 'foobar')])),
def test_does_nothing_with_non_matching_queries(
        self, pyramid_request, unparse):
    query = MultiDict({'tag': 'foo'})
    result = check_url(pyramid_request, query, unparse=unparse)
    assert result is None
def test_full_validate_valid(self, data):
    form = pypi.MetadataForm(MultiDict(data))
    form.full_validate()
def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
                           db_request, has_signature):
    monkeypatch.setattr(tempfile, "tempdir", str(tmpdir))
    pyramid_config.testing_securitypolicy(userid=1)
    user = UserFactory.create()
    project = ProjectFactory.create()
    release = ReleaseFactory.create(project=project, version="1.0")
    RoleFactory.create(user=user, project=project)

    db_request.db.add(
        Classifier(classifier="Environment :: Other Environment"),
    )

    filename = "{}-{}.tar.gz".format(project.name, release.version)

    db_request.POST = MultiDict({
        "metadata_version": "1.2",
        "name": project.name,
        "version": release.version,
        "filetype": "sdist",
        "pyversion": "source",
        "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
        "content": pretend.stub(
            filename=filename,
            file=io.BytesIO(b"A fake file."),
        ),
    })
    db_request.POST.extend([
        ("classifiers", "Environment :: Other Environment"),
    ])
    if has_signature:
        db_request.POST["gpg_signature"] = pretend.stub(
            filename=filename + ".asc",
            file=io.BytesIO(
                b"-----BEGIN PGP SIGNATURE-----\n"
                b" This is a Fake Signature"
            ),
        )

    @pretend.call_recorder
    def storage_service_store(path, file_path):
        if file_path.endswith(".asc"):
            expected = (b"-----BEGIN PGP SIGNATURE-----\n"
                        b" This is a Fake Signature")
        else:
            expected = b"A fake file."

        with open(file_path, "rb") as fp:
            assert fp.read() == expected

    storage_service = pretend.stub(store=storage_service_store)
    db_request.find_service = pretend.call_recorder(
        lambda svc: storage_service)

    resp = pypi.file_upload(db_request)

    assert resp.status_code == 200
    assert db_request.find_service.calls == [pretend.call(IFileStorage)]
    assert len(storage_service.store.calls) == (2 if has_signature else 1)
    assert storage_service.store.calls[0] == pretend.call(
        os.path.join(
            "source",
            project.name[0],
            project.name,
            filename,
        ),
        mock.ANY,
    )
    if has_signature:
        assert storage_service.store.calls[1] == pretend.call(
            os.path.join(
                "source",
                project.name[0],
                project.name,
                filename + ".asc",
            ),
            mock.ANY,
        )

    # Ensure that a File object has been created.
    db_request.db.query(File) \
        .filter((File.release == release) &
                (File.filename == filename)) \
        .one()

    # Ensure that a Filename object has been created.
    db_request.db.query(Filename) \
        .filter(Filename.filename == filename).one()
def minimal_params(self):
    return MultiDict(
        verb=self.verb,
        metadataPrefix='dummy',
        identifier='item',
    )
def test_repeated_argument(self):
    params = MultiDict(verb='a')
    params.add('repeated', 'b')
    params.add('repeated', 'b')
    self.assertRaises(BadArgument, views._check_params, params, ['repeated'])
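# The repeated-parameter tests above depend on MultiDict keeping every value
# added under the same key. A minimal standalone sketch of that behavior:
from webob.multidict import MultiDict

params = MultiDict(verb='a')
params.add('repeated', 'b')
params.add('repeated', 'b')

assert params.getall('repeated') == ['b', 'b']  # .add() keeps duplicates
assert list(params.items()) == [
    ('verb', 'a'), ('repeated', 'b'), ('repeated', 'b')]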
def minimal_params(self):
    return MultiDict(
        verb=self.verb,
        metadataPrefix='dummy',  # metadata prefix is required
    )
def minimal_params(self):
    """Return a multidict containing minimal request parameters that are
    needed to successfully call the view function.
    """
    return MultiDict(verb=self.verb)
def test_03_search_url_multiple_words(self):
    fields = UnicodeMultiDict(MultiDict(url='e'))
    fields.add('url', 'f')
    urls = self.res_search(fields=fields)
    assert set([self.ef]) == urls, urls
def evaluation(self, data=None):
    ret = {'success': False}

    json_data = self.request.json_body if data is None else data
    if json_data is None:
        ret['msg'] = 'No data provided'
        return ret

    if validate_item_type(json_data.get('item', 'a')) == 'sh':
        self.db_item = Stakeholder
        self.db_taggroup = SH_Tag_Group
        self.db_tag = SH_Tag
        self.db_key = SH_Key
        self.db_value = SH_Value
        self.protocol = StakeholderProtocol(DBSession)
    else:
        self.db_item = Activity
        self.db_taggroup = A_Tag_Group
        self.db_tag = A_Tag
        self.db_key = A_Key
        self.db_value = A_Value
        self.protocol = ActivityProtocol(DBSession)

    # Make sure the json is valid
    if 'group_by' not in json_data:
        ret['msg'] = ("Missing parameter 'group by': At least one column "
                      "needs to be specified.")
        return ret
    if not isinstance(json_data['group_by'], list):
        ret['msg'] = "Parameter 'group by' needs to be an array."
        return ret
    if 'attributes' not in json_data:
        ret['msg'] = "Missing attributes: No attributes were specified."
        return ret
    for attr in json_data['attributes']:
        test, msg = self._check_function(
            json_data['attributes'][attr], attr)
        if test is not True:
            ret['msg'] = msg
            return ret
    if 'locales' in json_data and not isinstance(
            json_data['locales'], list):
        ret['msg'] = "Parameter 'locales' needs to be an array."
        return ret

    translate_keys = json_data.get('translate', {}).get('keys', [])
    if translate_keys and not isinstance(translate_keys, list):
        ret['msg'] = ("Parameter 'translate[\'keys\']' needs to be an "
                      "array.")
        return ret
    # for k in translate_keys:
    #     if not isinstance(k, list):
    #         ret['msg'] = ("Value of 'translate[\'keys\']' needs to be "
    #                       "an array of arrays.")
    #         return ret

    a_ids = json_data.get('a_ids', [])
    if not isinstance(a_ids, list):
        ret['msg'] = "Parameter 'a_ids' needs to be an array."
        return ret
    # for i in a_ids:
    #     if not isinstance(i, str):
    #         ret['msg'] = ("Entries of parameter 'a_ids' need to be "
    #                       "strings (the UUIDs of Activities)")
    #         return ret

    sh_ids = json_data.get('sh_ids', [])
    if not isinstance(sh_ids, list):
        ret['msg'] = "Parameter 'sh_ids' needs to be an array."
        return ret
    # for i in sh_ids:
    #     if not isinstance(i, str):
    #         ret['msg'] = ("Entries of parameter 'sh_ids' need to be "
    #                       "strings (the UUIDs of Stakeholders)")
    #         return ret

    if self.db_item == Activity:
        this_id_filter = a_ids
        other_id_filter = sh_ids
    else:
        this_id_filter = sh_ids
        other_id_filter = a_ids

    this_filter = []
    other_filter = []
    if 'filter' in json_data:
        params = []
        for filters in json_data.get('filter', '').split('&'):
            try:
                f = filters.split('=')
                if len(f) == 2:
                    params.append((f[0], f[1]))
            except:
                pass
        # Simulate a request to send the filters
        req = DummyRequest()
        req.params = MultiDict(params)
        a_tag_filter, __, sh_tag_filter, __ = self.protocol._filter(req)
        if self.db_item == Activity:
            this_filter = a_tag_filter
            other_filter = sh_tag_filter
        else:
            this_filter = sh_tag_filter
            other_filter = a_tag_filter

    isInvolvementRequired = (
        self.db_item == Stakeholder
        or len(other_filter) + len(other_id_filter) > 0)

    # Collect all keys to be translated (values are translated in the
    # query)
    locales = ['default']
    langs = []
    locales.extend(json_data.get('locales', []))
    translated_keys = {}
    exclude_from_translation = ['Activity', 'Stakeholder']

    keys = []
    for key, __ in json_data.get('attributes', {}).items():
        if key not in exclude_from_translation and key not in keys:
            keys.append(key)
    for key in json_data.get('group_by', []):
        if key not in exclude_from_translation and key not in keys:
            keys.append(key)
    for key in translate_keys:
        for k in key:
            if k not in keys:
                keys.append(k)

    for l in locales:
        locale = l
        if l == 'default':
            locale = get_current_locale(self.request)
        db_lang = DBSession.query(Language).filter(
            Language.locale == locale).first()
        langs.append((l, db_lang))
        translated_keys[l] = get_translated_db_keys(
            self.db_key, keys, db_lang)

    # Get groups
    groups_subqueries, groups_columns = self._get_group_by(
        json_data['group_by'], langs)

    # Get functions
    functions_subqueries, functions_columns = \
        self._get_attribute_functions(json_data['attributes'])

    # Prepare basic query
    q = DBSession.query(*groups_columns + functions_columns). \
        join(self.db_taggroup). \
        join(self.db_item)

    # Join with further groups
    for g_sq in groups_subqueries[1:]:
        q = q.outerjoin(g_sq, g_sq.c.item_id == self.db_item.id)

    # Join with functions
    for f_sq in functions_subqueries:
        q = q.outerjoin(f_sq, f_sq.c.item_id == self.db_item.id)

    # Apply status filter (fix: active)
    q = q.filter(self.db_item.fk_status == 2)

    if this_id_filter:
        q = q.filter(self.db_item.identifier.in_(this_id_filter))

    # Apply filters
    filter_subqueries = self.protocol.Session.query(
        self.db_item.id.label('a_filter_id'))
    for x in this_filter:
        # Collect the IDs for each filter
        taggroups_sq = x.subquery()
        single_subquery = self.protocol.Session.query(
            self.db_item.id.label('a_filter_id')
        ). \
            join(self.db_taggroup). \
            join(taggroups_sq,
                 taggroups_sq.c.a_filter_tg_id == self.db_taggroup.id). \
            subquery()
        # Join each found ID with previously found IDs
        filter_subqueries = filter_subqueries. \
            join(single_subquery,
                 single_subquery.c.a_filter_id == self.db_item.id)
    filter_subqueries = filter_subqueries.subquery()
    q = q.join(filter_subqueries,
               filter_subqueries.c.a_filter_id == self.db_item.id)

    # Apply profile boundary filter
    if self.db_item == Activity:
        p = json_data.get('profile', get_current_profile(self.request))
        profile = DBSession.query(Profile). \
            filter(Profile.code == p). \
            first()
        if profile is not None:
            q = q.filter(geofunctions.ST_Intersects(
                self.db_item.point, profile.geometry))

    # Apply grouping and ordering
    q = q.group_by(*groups_columns). \
        order_by(groups_columns[0])

    if isInvolvementRequired:
        if self.db_item == Stakeholder:
            inv_subquery = DBSession.query(
                Involvement.fk_stakeholder.label('id')
            ). \
                join(Activity). \
                filter(Activity.fk_status == 2)
            p = json_data.get('profile', get_current_profile(self.request))
            profile = DBSession.query(Profile). \
                filter(Profile.code == p). \
                first()
            if profile is not None:
                inv_subquery = inv_subquery.filter(
                    geofunctions.ST_Intersects(
                        Activity.point, profile.geometry))
            other_db_item = Activity
            other_db_taggroup = A_Tag_Group
        else:
            inv_subquery = DBSession.query(
                Involvement.fk_activity.label('id')
            ). \
                join(Stakeholder). \
                filter(Stakeholder.fk_status == 2)
            other_db_item = Stakeholder
            other_db_taggroup = SH_Tag_Group

        if other_id_filter:
            inv_subquery = inv_subquery.filter(
                other_db_item.identifier.in_(other_id_filter))

        # Apply filters
        filter_subqueries = self.protocol.Session.query(
            other_db_item.id.label('a_filter_id'))
        for x in other_filter:
            # Collect the IDs for each filter
            taggroups_sq = x.subquery()
            try:
                single_subquery = self.protocol.Session.query(
                    other_db_item.id.label('a_filter_id')
                ). \
                    join(other_db_taggroup). \
                    join(taggroups_sq,
                         taggroups_sq.c.a_filter_tg_id ==
                         other_db_taggroup.id). \
                    subquery()
            except AttributeError:
                single_subquery = self.protocol.Session.query(
                    other_db_item.id.label('a_filter_id')
                ). \
                    join(other_db_taggroup). \
                    join(taggroups_sq,
                         taggroups_sq.c.sh_filter_tg_id ==
                         other_db_taggroup.id). \
                    subquery()
            # Join each found ID with previously found IDs
            filter_subqueries = filter_subqueries. \
                join(single_subquery,
                     single_subquery.c.a_filter_id == other_db_item.id)
        filter_subqueries = filter_subqueries.subquery()
        inv_subquery = inv_subquery.join(
            filter_subqueries,
            filter_subqueries.c.a_filter_id == other_db_item.id)

        inv_subquery = inv_subquery.subquery()
        q = q.filter(self.db_item.id.in_(select([inv_subquery.c.id])))

    data = []
    for res in q.all():
        data = _handle_single_line(
            data, res, json_data.get('group_by'),
            json_data.get('attributes'), translated_keys)

    # Do a translation of groupable if available
    groupable_translated = []
    for key in translate_keys:
        translations = []
        for k in key:
            t = {'key': k, 'default': k}
            for locale, key_translations in translated_keys.items():
                translation = (
                    None if k not in exclude_from_translation else k)
                for k_t in key_translations:
                    if len(k_t) >= 2 and k_t[0] == k:
                        translation = k_t[1]
                t[locale] = translation
            translations.append(t)
        groupable_translated.append(translations)
    if len(groupable_translated):
        ret.update({'translate': {'keys': groupable_translated}})

    ret.update({'success': True, 'data': data})
    return ret
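# The 'filter' handling above rebuilds query-string style parameters so the
# protocol's _filter() helper can be reused on a dummy request. A standalone
# sketch of just the parsing step (parse_filter_params and the 'tag__like'
# key are illustrative names, not part of the project):
from webob.multidict import MultiDict

def parse_filter_params(filter_string):
    """Split 'a=1&b=2&b=3' into MultiDict params, skipping malformed pairs."""
    params = []
    for chunk in filter_string.split('&'):
        parts = chunk.split('=')
        if len(parts) == 2:
            params.append((parts[0], parts[1]))
    return MultiDict(params)

params = parse_filter_params('tag__like=organic&tag__like=fair&not-a-pair')
assert params.getall('tag__like') == ['organic', 'fair']
assert 'not-a-pair' not in params  # malformed chunks are dropped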
def test_with_metabolize(self):
    self.maxDiff = 100000
    import tempfile
    from cgi import FieldStorage
    ms_data_file = tempfile.NamedTemporaryFile(mode="r+")
    ms_data_file.write('foo')
    ms_data_file.flush()
    msfield = FieldStorage()
    msfield.file = ms_data_file
    params = MultiDict(
        ionisation_mode=1,
        ms_intensity_cutoff=200000,
        msms_intensity_cutoff=10,
        abs_peak_cutoff=1000,
        precursor_mz_precision=0.005,
        max_broken_bonds=4,
        max_water_losses=1,
        mz_precision=5.0,
        mz_precision_abs=0.001,
        metabolize='on',
        scenario=[{'type': 'phase1', 'steps': '2'},
                  {'type': 'phase2', 'steps': '1'}],
        max_ms_level=3,
        structures='C1CCCC1 comp1',
        ms_data_file=msfield,
        structure_format='smiles',
        ms_data_format='mzxml',
    )

    query = self.jobquery.allinone(params)

    expected_script = "{magma} read_ms_data --ms_data_format 'mzxml'"
    expected_script += " -i '1' -m '3' -a '1000.0'"
    expected_script += " -p '5.0' -q '0.001'"
    expected_script += " --precursor_mz_precision '0.005'"
    expected_script += " --call_back_url '/'"
    expected_script += " ms_data.dat {db}\n"

    expected_script += "{magma} add_structures -g -t 'smiles'"
    expected_script += " structures.dat {db}\n"

    expected_script += "{magma} metabolize -g --scenario scenario.csv"
    expected_script += " --call_back_url '/' {db}\n"

    expected_script += "{magma} annotate -c '200000.0'"
    expected_script += " -d '10.0' -b '4'"
    expected_script += " --max_water_losses '1' --ncpus '1' --call_back_url '/'"
    expected_script += " --fast {db}\n"

    expected_query = JobQuery(
        directory=self.jobdir,
        prestaged=['ms_data.dat', 'structures.dat', 'scenario.csv'],
        script=expected_script,
        status_callback_url='/',
    )
    self.assertEqual(query, expected_query)
    self.assertMultiLineEqual(
        params['structures'], self.fetch_file('structures.dat'))
    self.assertMultiLineEqual('foo', self.fetch_file('ms_data.dat'))
    self.assertMultiLineEqual(
        'phase1,2\nphase2,1\n', self.fetch_file('scenario.csv'))
def test_upload_succeeds_with_wheel(self, tmpdir, monkeypatch,
                                    pyramid_config, db_request, plat):
    monkeypatch.setattr(tempfile, "tempdir", str(tmpdir))
    pyramid_config.testing_securitypolicy(userid=1)
    user = UserFactory.create()
    project = ProjectFactory.create()
    release = ReleaseFactory.create(project=project, version="1.0")
    RoleFactory.create(user=user, project=project)

    filename = "{}-{}-cp34-none-{}.whl".format(
        project.name,
        release.version,
        plat,
    )

    db_request.POST = MultiDict({
        "metadata_version": "1.2",
        "name": project.name,
        "version": release.version,
        "filetype": "bdist_wheel",
        "pyversion": "cp34",
        "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
        "content": pretend.stub(
            filename=filename,
            file=io.BytesIO(b"A fake file."),
        ),
    })

    @pretend.call_recorder
    def storage_service_store(path, file_path):
        with open(file_path, "rb") as fp:
            assert fp.read() == b"A fake file."

    storage_service = pretend.stub(store=storage_service_store)
    db_request.find_service = pretend.call_recorder(
        lambda svc: storage_service)

    resp = pypi.file_upload(db_request)

    assert resp.status_code == 200
    assert db_request.find_service.calls == [pretend.call(IFileStorage)]
    assert storage_service.store.calls == [
        pretend.call(
            os.path.join(
                "cp34",
                project.name[0],
                project.name,
                filename,
            ),
            mock.ANY,
        ),
    ]

    # Ensure that a File object has been created.
    db_request.db.query(File) \
        .filter((File.release == release) &
                (File.filename == filename)) \
        .one()

    # Ensure that a Filename object has been created.
    db_request.db.query(Filename) \
        .filter(Filename.filename == filename).one()
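# Both upload tests assert the same storage layout:
# <pyversion>/<first letter of project>/<project name>/<filename>.
# A hypothetical helper (not part of the code under test) stating that
# convention in one place; posixpath keeps the assertion deterministic,
# while the tests themselves use os.path.join:
import posixpath

def expected_storage_path(pyversion, project_name, filename):
    return posixpath.join(pyversion, project_name[0], project_name, filename)

assert expected_storage_path("cp34", "foo", "foo-1.0-cp34-none-any.whl") == \
    "cp34/f/foo/foo-1.0-cp34-none-any.whl"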
def test_missing_required(self):
    self.assertRaises(BadArgument, views._check_params,
                      MultiDict(verb='a'), ['param'])
def test_upload_succeeds_creates_release(self, pyramid_config, db_request):
    pyramid_config.testing_securitypolicy(userid=1)
    user = UserFactory.create()
    project = ProjectFactory.create()
    RoleFactory.create(user=user, project=project)

    db_request.db.add(
        Classifier(classifier="Environment :: Other Environment"),
    )
    db_request.db.add(
        Classifier(classifier="Programming Language :: Python"),
    )

    filename = "{}-{}.tar.gz".format(project.name, "1.0")

    db_request.POST = MultiDict({
        "metadata_version": "1.2",
        "name": project.name,
        "version": "1.0",
        "summary": "This is my summary!",
        "filetype": "sdist",
        "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
        "content": pretend.stub(
            filename=filename,
            file=io.BytesIO(b"A fake file."),
        ),
    })
    db_request.POST.extend([
        ("classifiers", "Environment :: Other Environment"),
        ("classifiers", "Programming Language :: Python"),
        ("requires_dist", "foo"),
        ("requires_dist", "bar (>1.0)"),
        ("project_urls", "Test, https://example.com/"),
        ("requires_external", "Cheese (>1.0)"),
        ("provides", "testing"),
    ])

    storage_service = pretend.stub(store=lambda path, content: None)
    db_request.find_service = lambda svc: storage_service

    resp = pypi.file_upload(db_request)

    assert resp.status_code == 200

    # Ensure that a Release object has been created.
    release = (
        db_request.db.query(Release)
        .filter((Release.project == project) & (Release.version == "1.0"))
        .one()
    )
    assert release.summary == "This is my summary!"
    assert release.classifiers == [
        "Environment :: Other Environment",
        "Programming Language :: Python",
    ]
    assert set(release.requires_dist) == {"foo", "bar (>1.0)"}
    assert set(release.project_urls) == {"Test, https://example.com/"}
    assert set(release.requires_external) == {"Cheese (>1.0)"}
    assert set(release.provides) == {"testing"}

    # Ensure that a File object has been created.
    db_request.db.query(File) \
        .filter((File.release == release) &
                (File.filename == filename)) \
        .one()

    # Ensure that a Filename object has been created.
    db_request.db.query(Filename) \
        .filter(Filename.filename == filename).one()
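# The POST.extend(...) calls above are how repeated form fields (several
# classifiers, several requires_dist entries) are expressed on a MultiDict.
# A standalone sketch of the same pattern:
from webob.multidict import MultiDict

post = MultiDict({"name": "example", "version": "1.0"})
post.extend([
    ("classifiers", "Environment :: Other Environment"),
    ("classifiers", "Programming Language :: Python"),
])

assert post.getall("classifiers") == [
    "Environment :: Other Environment",
    "Programming Language :: Python",
]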
def test_invalid_argument(self):
    self.assertRaises(BadArgument, views._check_params,
                      MultiDict(verb='a', invalid='b'))
def test_full_validate_invalid(self, data):
    form = pypi.MetadataForm(MultiDict(data))
    with pytest.raises(ValidationError):
        form.full_validate()
def test_invalid_verb(self):
    request = testing.DummyRequest(params=MultiDict(verb='NoGoodVerb'))
    self.assertRaises(InvalidVerb, views.invalid_verb_view, request)
from __future__ import unicode_literals

import pytest
from hypothesis import strategies as st
from hypothesis import given, settings
from webob.multidict import MultiDict

from h.search import parser


@pytest.mark.parametrize("query_in,query_out", [
    # user field
    ("user:luke", MultiDict([("user", "luke")])),
    ("user:[email protected]", MultiDict([("user", "*****@*****.**")])),
    ("user:acct:[email protected]",
     MultiDict([("user", "acct:[email protected]")])),
    ("user:luke user:alice",
     MultiDict([("user", "luke"), ("user", "alice")])),
    ('user:"******"', MultiDict([("user", "luke and alice")])),
    ('user:"******"', MultiDict([("user", "luke")])),
    ("USER:luke", MultiDict([("user", "luke")])),
    # tag field
    ("tag:foo", MultiDict([("tag", "foo")])),
    ("tag:foo tag:bar", MultiDict([("tag", "foo"), ("tag", "bar")])),
    ("tag:'foo bar'", MultiDict([("tag", "foo bar")])),
    ('tag:"foo bar"', MultiDict([("tag", "foo bar")])),
    ("tag:'foobar'", MultiDict([("tag", "foobar")])),
    ("Tag:foo", MultiDict([("tag", "foo")])),
def test_repeated_verb(self):
    params = MultiDict()
    params.add('verb', self.verb)
    params.add('verb', self.verb)
    request = testing.DummyRequest(params=params)
    self.assertRaises(RepeatedVerb, self.function, request)
def test_it_raises_if_params_invalid(self):
    schema = QueryParamSchema()
    params = MultiDict({"int_field": "not-an-int"})
    with pytest.raises(ValidationError):
        validate_query_params(schema, params)
def __init__(self, params):
    self.params = MultiDict(params)
def test_it_creates_a_search_query(self, pyramid_request, Search):
    pyramid_request.stats = mock.Mock()

    execute(pyramid_request, MultiDict(), self.PAGE_SIZE)

    Search.assert_called_once_with(pyramid_request,
                                   stats=pyramid_request.stats)
def test_php_compatible_mdict_return_list():
    from webob.multidict import MultiDict
    mdict = MultiDict([("name", "foo"), ("g_id[]", "1"), ("g_id[]", "2")])
    result = _callFUT(mdict)
    assert result == {"name": "foo", "g_id": ["1", "2"]}
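# _callFUT resolves to the helper under test, which is not shown in this
# snippet. A minimal sketch of a function that would satisfy the assertion,
# with the name and behavior inferred from the test alone:
from webob.multidict import MultiDict

def php_compatible_dict(mdict):
    """Collapse PHP-style 'key[]' parameters into plain lists."""
    result = {}
    for key, value in mdict.items():
        if key.endswith('[]'):
            result.setdefault(key[:-2], []).append(value)
        else:
            result[key] = value
    return result

mdict = MultiDict([("name", "foo"), ("g_id[]", "1"), ("g_id[]", "2")])
assert php_compatible_dict(mdict) == {"name": "foo", "g_id": ["1", "2"]}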
def test_it_returns_the_aggregations(self, pyramid_request):
    result = execute(pyramid_request, MultiDict(), self.PAGE_SIZE)
    assert result.aggregations == mock.sentinel.aggregations
def put(self):
    item_id = int(self.request.matchdict['item_id'])
    payload = MultiDict(self.request.json_body)
    item = DBSession.query(Item).filter_by(id=item_id).one()
    transaction_date = None
    print(payload)

    # fetch status
    if payload.get('is_draft', False):
        status_name = 'DRAFTS'
    elif payload.get('status', None) and payload['status'] == 'archived':
        status_name = 'ARCHIVED'
        transaction_date = datetime.now()
    else:
        status_name = 'ONGOING'
    status = DBSession.query(ItemStatus).filter_by(name=status_name).one()

    # fetch new tags
    new_tags = []
    ids_to_add = [
        int(tag['id']) for tag in payload.get('tags', [])
        if tag.get('id', None)
    ]
    if ids_to_add:
        new_tags.extend(
            DBSession.query(ItemTag).filter(
                ItemTag.id.in_(ids_to_add)).all())
    item.tags = new_tags  # replace existing tags

    new_qty = int(payload.get('quantity', 1))
    price = payload['price'] if payload['price'] else None

    item.name = payload['name']
    item.type = payload['type']
    item.trade_with = payload.get('trade_with', None)
    item.status = status
    item.price = price
    item.description = payload['description']
    item.reason = payload.get('reason', None)
    item.transaction_date = transaction_date

    # adjust original quantity
    if new_qty > item.quantity:
        additional_quantity = new_qty - item.quantity
        item.original_quantity += additional_quantity
    elif new_qty < item.quantity:
        additional_quantity = item.quantity - new_qty
        item.original_quantity -= additional_quantity
    item.quantity = new_qty

    updating_fields = ('updating item: %s', 'name: %s', 'type: %s',
                       'status: %s', 'price: %s', 'quantity: %s',
                       'description: %s', 'reason: %s', 'tags: %s',
                       'transaction date: %s\n')
    logger.info('\n'.join(updating_fields) % (
        item_id, item.name, item.type, item.status, item.price,
        item.quantity, item.description, item.reason, item.tags,
        item.transaction_date))

    DBSession.commit()
    return item.to_dict()
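# A note on the MultiDict wrapping above: json_body is an already-parsed
# dict, so wrapping it in MultiDict gives one value per key plus dict-style
# .get() defaults, and list values (such as 'tags') stay whole. A sketch
# with a made-up payload:
from webob.multidict import MultiDict

payload = MultiDict({'name': 'lamp', 'tags': [{'id': '3'}], 'quantity': '2'})

assert payload.get('is_draft', False) is False  # missing key -> default
assert payload['tags'] == [{'id': '3'}]         # list values are not split
assert int(payload.get('quantity', 1)) == 2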
def test_does_not_redirect_to_group_page_if_group_does_not_exist(
        self, pyramid_request, unparse):
    query = MultiDict({'group': 'does_not_exist'})
    assert check_url(pyramid_request, query, unparse=unparse) is None
def test_xmodule_handler_passed_data(self):
    self.module.xmodule_handler(self.request)
    self.module.handle_ajax.assert_called_with(
        None, MultiDict(self.request.POST))