示例#1
0
    def test_with_classifiers(self, monkeypatch, db_request, metrics, page):
        """Search with trove-classifier filters applied.

        Only classifier groups attached to a release ("foo") should show up
        in ``available_filters``; the applied "c" params must be forwarded
        to ``get_es_query`` and the result count sent to the histogram.
        """
        # MultiDict keeps both "c" values; built from pairs, not a dict.
        params = MultiDict([("q", "foo bar"), ("c", "foo :: bar"),
                            ("c", "fiz :: buz")])
        if page is not None:
            params["page"] = page
        db_request.params = params

        # Stub ES entirely and record how the view builds its query.
        es_query = pretend.stub()
        db_request.es = pretend.stub()
        get_es_query = pretend.call_recorder(lambda *a, **kw: es_query)
        monkeypatch.setattr(views, "get_es_query", get_es_query)

        # classifier3's group ("fiz") is never attached to a release, so it
        # must not appear among the available filters below.
        classifier1 = ClassifierFactory.create(classifier="foo :: bar")
        classifier2 = ClassifierFactory.create(classifier="foo :: baz")
        classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

        project = ProjectFactory.create()
        release1 = ReleaseFactory.create(project=project)
        release1.created = datetime.date(2011, 1, 1)
        release1._classifiers.append(classifier1)
        release1._classifiers.append(classifier2)

        # Fake page object; item_count feeds the metrics assertion at the end.
        page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        search_view = search(db_request)
        assert search_view == {
            "page":
            page_obj,
            "term":
            params.get("q", ""),
            "order":
            "",
            "applied_filters":
            params.getall("c"),
            "available_filters": [{
                "foo": {
                    classifier1.classifier.split(" :: ")[1]: {},
                    classifier2.classifier.split(" :: ")[1]: {},
                }
            }],
        }
        # The unattached "fiz" group must not be offered as a filter.
        assert ("fiz", [classifier3.classifier
                        ]) not in search_view["available_filters"]
        assert page_cls.calls == [
            pretend.call(es_query, url_maker=url_maker, page=page or 1)
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        assert get_es_query.calls == [
            pretend.call(db_request.es, params.get("q"), "",
                         params.getall("c"))
        ]
        assert metrics.histogram.calls == [
            pretend.call("warehouse.views.search.results", 1000)
        ]
示例#2
0
    def test_with_a_query(self, monkeypatch, db_request, page):
        """A multi-word "q" term builds a boosted multi_match ES query and
        attaches a name-field suggestion; the suggest query is paginated.
        """
        params = MultiDict({"q": "foo bar"})
        if page is not None:
            params["page"] = page
        db_request.params = params

        # Stub chain mirroring the view: es.query(...) -> .suggest(...)
        # (-> .sort(...) is available but unused when no "o" param is set).
        sort = pretend.stub()
        suggest = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sort), )
        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest), )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query))

        page_obj = pretend.stub(page_count=(page or 1) + 10)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ''),
            "order": params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        # No ordering requested, so the suggest query (not sort) is paged.
        assert page_cls.calls == [
            pretend.call(suggest, url_maker=url_maker, page=page or 1),
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        # Field list carries relevance boosts (e.g. normalized_name^10).
        assert db_request.es.query.calls == [
            pretend.call(
                "multi_match",
                query="foo bar",
                fields=[
                    "author",
                    "author_email",
                    "description^5",
                    "download_url",
                    "home_page",
                    "keywords^5",
                    "license",
                    "maintainer",
                    "maintainer_email",
                    "normalized_name^10",
                    "platform",
                    "summary^5",
                ],
            ),
        ]
        assert es_query.suggest.calls == [
            pretend.call(
                name="name_suggestion",
                term={"field": "name"},
                text="foo bar",
            ),
        ]
示例#3
0
    def test_without_a_query(self, monkeypatch, db_request, page):
        """With no "q" term, the view pages the raw ES query object and
        reports empty term/order/filters."""
        request_params = MultiDict()
        if page is not None:
            request_params["page"] = page
        db_request.params = request_params

        fake_query = pretend.stub()
        db_request.es = pretend.stub(query=lambda *a, **kw: fake_query)

        fake_page = pretend.stub(page_count=(page or 1) + 10)
        recording_page_cls = pretend.call_recorder(lambda *a, **kw: fake_page)
        monkeypatch.setattr(views, "ElasticsearchPage", recording_page_cls)

        maker = pretend.stub()
        maker_factory = pretend.call_recorder(lambda request: maker)
        monkeypatch.setattr(views, "paginate_url_factory", maker_factory)

        expected = {
            "page": fake_page,
            "term": request_params.get("q", ''),
            "order": request_params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        assert search(db_request) == expected
        assert recording_page_cls.calls == [
            pretend.call(fake_query, url_maker=maker, page=page or 1),
        ]
        assert maker_factory.calls == [pretend.call(db_request)]
示例#4
0
    def test_without_a_query(self, monkeypatch, db_request, page):
        """An empty query string still paginates the default ES query."""
        qs = MultiDict()
        if page is not None:
            qs["page"] = page
        db_request.params = qs

        stub_es_query = pretend.stub()
        db_request.es = pretend.stub(query=lambda *a, **kw: stub_es_query)

        stub_page = pretend.stub(
            page_count=(page or 1) + 10,
            item_count=1000,
        )
        paginator = pretend.call_recorder(lambda *a, **kw: stub_page)
        monkeypatch.setattr(views, "ElasticsearchPage", paginator)

        fake_url_maker = pretend.stub()
        fake_factory = pretend.call_recorder(lambda request: fake_url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", fake_factory)

        result = search(db_request)

        assert result == {
            "page": stub_page,
            "term": qs.get("q", ''),
            "order": qs.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        assert paginator.calls == [
            pretend.call(stub_es_query, url_maker=fake_url_maker,
                         page=page or 1),
        ]
        assert fake_factory.calls == [pretend.call(db_request)]
示例#5
0
    def test_with_an_ordering(self, monkeypatch, db_request, page):
        """With ``o=-created`` the suggest query is sorted, and the sorted
        query (not the suggest query) is what gets paginated."""
        params = MultiDict({"q": "foo bar", "o": "-created"})
        if page is not None:
            params["page"] = page
        db_request.params = params

        # Stub chain: es.query(...) -> .suggest(...) -> .sort(...).
        sort = pretend.stub()
        suggest = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sort),
        )
        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query)
        )

        page_obj = pretend.stub(page_count=(page or 1) + 10)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ''),
            "order": params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        # Ordering applied: the sorted query is paginated.
        assert page_cls.calls == [
            pretend.call(sort, url_maker=url_maker, page=page or 1),
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "multi_match",
                query="foo bar",
                fields=[
                    "name^2", "version", "author", "author_email",
                    "maintainer", "maintainer_email", "home_page", "license",
                    "summary", "description", "keywords", "platform",
                    "download_url",
                ],
            ),
        ]
        assert es_query.suggest.calls == [
            pretend.call(
                name="name_suggestion",
                term={"field": "name"},
                text="foo bar",
            ),
        ]
        # The "o" param is passed straight through as the sort key.
        assert suggest.sort.calls == [
            pretend.call("-created")
        ]
示例#6
0
    def test_with_an_ordering(self, monkeypatch, db_request, page, order,
                              expected):
        """Parametrized over (order, expected sort keys): when an "o" param
        is supplied the sorted query is paginated, otherwise the plain
        suggest query is used; ``expected`` lists the sort() arguments.
        """
        params = MultiDict({"q": "foo bar"})
        if page is not None:
            params["page"] = page
        if order is not None:
            params["o"] = order
        db_request.params = params

        # Stub chain: es.query(...) -> .suggest(...) -> .sort(...).
        sort = pretend.stub()
        suggest = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sort),
        )
        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query)
        )

        page_obj = pretend.stub(page_count=(page or 1) + 10)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ''),
            "order": params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        # sort result is paged only when an ordering was requested.
        assert page_cls.calls == [
            pretend.call(
                sort if order is not None else suggest,
                url_maker=url_maker,
                page=page or 1,
            ),
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "dis_max",
                queries=self._gather_es_queries(params["q"])
            )
        ]
        assert es_query.suggest.calls == [
            pretend.call(
                "name_suggestion",
                params["q"],
                term={"field": "name"},
            ),
        ]
        # One sort() call per expected ordering key.
        assert suggest.sort.calls == [pretend.call(i) for i in expected]
示例#7
0
    def test_with_an_ordering(self, monkeypatch, db_request, page, order,
                              expected):
        """When "o" is present, pagination happens on the sorted query;
        otherwise on the suggest query. ``expected`` holds the sort keys."""
        query_params = MultiDict({"q": "foo bar"})
        if page is not None:
            query_params["page"] = page
        if order is not None:
            query_params["o"] = order
        db_request.params = query_params

        sorted_query = pretend.stub()
        suggested_query = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sorted_query))
        base_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggested_query))
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: base_query))

        result_page = pretend.stub(page_count=(page or 1) + 10)
        page_factory = pretend.call_recorder(lambda *a, **kw: result_page)
        monkeypatch.setattr(views, "ElasticsearchPage", page_factory)

        link_maker = pretend.stub()
        link_maker_factory = pretend.call_recorder(lambda request: link_maker)
        monkeypatch.setattr(views, "paginate_url_factory", link_maker_factory)

        assert search(db_request) == {
            "page": result_page,
            "term": query_params.get("q", ''),
            "order": query_params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        paged_query = suggested_query if order is None else sorted_query
        assert page_factory.calls == [
            pretend.call(paged_query, url_maker=link_maker, page=page or 1),
        ]
        assert link_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "dis_max",
                queries=self._gather_es_queries(query_params["q"]),
            )
        ]
        assert base_query.suggest.calls == [
            pretend.call(
                "name_suggestion",
                query_params["q"],
                term={"field": "name"},
            ),
        ]
        assert suggested_query.sort.calls == [
            pretend.call(key) for key in expected
        ]
示例#8
0
    def test_with_a_single_char_query(self, monkeypatch, db_request, page):
        """A single-character query is routed through gather_es_queries and
        the result count is reported to the datadog histogram."""
        params = MultiDict({"q": "a"})
        if page is not None:
            params["page"] = page
        db_request.params = params

        # Stub chain: es.query(...) -> .suggest(...) -> .sort(...).
        sort = pretend.stub()
        suggest = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sort),
        )
        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query)
        )

        # item_count feeds the histogram assertion at the end.
        page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ''),
            "order": params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        # No ordering requested: the suggest query is paginated.
        assert page_cls.calls == [
            pretend.call(suggest, url_maker=url_maker, page=page or 1),
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        # Query construction is delegated entirely to gather_es_queries.
        assert db_request.es.query.calls == [
            pretend.call(
                views.gather_es_queries(params["q"])
            )
        ]
        assert es_query.suggest.calls == [
            pretend.call(
                "name_suggestion",
                params["q"],
                term={"field": "name"},
            ),
        ]
        assert db_request.registry.datadog.histogram.calls == [
            pretend.call('warehouse.views.search.results', 1000)
        ]
示例#9
0
class MulticaseParentAction(object):
    """Base class for websocket actions: wraps incoming params in a
    MultiDict, validates the sender's socket, and offers helpers to push
    actions to individual clients or a whole session."""

    def __init__(self, websocket, params):
        self.websocket = websocket
        self.params = MultiDict(params)

    def handle(self):
        """Default handler: echo the parameters back."""
        return self.params

    def check_and_process(self):
        """Run handle() for a registered socket, tagging the response with
        the originating request id; unknown sockets get a Forbidden error."""
        if self.websocket not in State.clients_sockets:
            response = error_response('Forbidden')
            response['request_id'] = self.params.get('request_id')
            return response

        response = self.handle()
        if 'request_id' in self.params:
            response['request_id'] = self.params['request_id']

        return response

    # TODO: Move to the State?
    def call_client_action(self, character_id, action_name, params):
        """Send a single client a call_action message for its character."""
        payload = json.dumps({'call_action': action_name, 'params': params},
                             cls=CustomJSONEncoder)
        # TODO: Validate if receiver is in the same session with sender
        target_socket, _ = State.clients_by_character[character_id]
        target_socket.send(payload)

    def group_call_client_action(self, action_name, params,
                                 include_sender=False):
        """Broadcast a call_action message to every socket in the sender's
        session, optionally including the sender itself."""
        payload = json.dumps({'call_action': action_name, 'params': params},
                             cls=CustomJSONEncoder)

        sender_character = State.clients_sockets[self.websocket]
        _, session_id = State.clients_by_character[sender_character]

        # Py2 dict API (viewitems) — kept as-is for compatibility.
        for _, socket in State.clients_by_session[session_id].viewitems():
            if include_sender or (socket is not self.websocket):
                socket.send(payload)
示例#10
0
    def test_with_a_single_char_query(self, monkeypatch, db_request, page):
        """A one-character query still builds a dis_max query, suggests on
        the name field, and reports the result count to datadog."""
        query_params = MultiDict({"q": "a"})
        if page is not None:
            query_params["page"] = page
        db_request.params = query_params

        sorted_stub = pretend.stub()
        suggest_stub = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sorted_stub))
        query_stub = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest_stub))
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: query_stub))

        page_stub = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        page_recorder = pretend.call_recorder(lambda *a, **kw: page_stub)
        monkeypatch.setattr(views, "ElasticsearchPage", page_recorder)

        url_stub = pretend.stub()
        url_factory = pretend.call_recorder(lambda request: url_stub)
        monkeypatch.setattr(views, "paginate_url_factory", url_factory)

        assert search(db_request) == {
            "page": page_stub,
            "term": query_params.get("q", ''),
            "order": query_params.get("o", ''),
            "applied_filters": [],
            "available_filters": [],
        }
        assert page_recorder.calls == [
            pretend.call(suggest_stub, url_maker=url_stub, page=page or 1),
        ]
        assert url_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "dis_max",
                queries=self._gather_es_queries(query_params["q"]),
            )
        ]
        assert query_stub.suggest.calls == [
            pretend.call(
                "name_suggestion",
                query_params["q"],
                term={"field": "name"},
            ),
        ]
        assert db_request.registry.datadog.histogram.calls == [
            pretend.call('warehouse.views.search.results', 1000)
        ]
示例#11
0
    def test_does_not_remove_group_term_from_query_if_group_does_not_exist(
            self, pyramid_request, unparse):
        """A "group" filter naming an unknown group must survive check_url
        untouched, and no redirect URL should be built."""
        search_query = MultiDict({"group": "does_not_exist"})

        check_url(pyramid_request, search_query, unparse=unparse)

        assert not unparse.called
        assert search_query.get("group") == "does_not_exist"
示例#12
0
文件: query_test.py 项目: ziqizh/h
    def test_does_not_remove_group_term_from_query_if_group_does_not_exist(
            self, pyramid_request, unparse):
        """The nonexistent-group term must remain after check_url runs."""
        q = MultiDict({'group': 'does_not_exist'})

        check_url(pyramid_request, q, unparse=unparse)

        assert q.get('group') == 'does_not_exist'
        assert not unparse.called
示例#13
0
    def test_with_exact_phrase_query(self, monkeypatch, db_request, metrics,
                                     page):
        """A double-quoted phrase query is passed verbatim (quotes included)
        to gather_es_queries; the result count goes to the metrics
        histogram."""
        params = MultiDict({"q": '"foo bar"'})
        if page is not None:
            params["page"] = page
        db_request.params = params

        # Stub chain: es.query(...) -> .suggest(...) -> .sort(...).
        sort = pretend.stub()
        suggest = pretend.stub(
            sort=pretend.call_recorder(lambda *a, **kw: sort))
        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: suggest))
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query))

        # item_count is page-dependent here so the histogram assertion
        # varies with the parametrized page value.
        page_obj = pretend.stub(page_count=(page or 1) + 10,
                                item_count=(page or 1) + 10)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ""),
            "order": params.get("o", ""),
            "applied_filters": [],
            "available_filters": [],
        }
        # No ordering requested: the suggest query is paginated.
        assert page_cls.calls == [
            pretend.call(suggest, url_maker=url_maker, page=page or 1)
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(views.gather_es_queries(params["q"]))
        ]
        assert es_query.suggest.calls == [
            pretend.call("name_suggestion",
                         params["q"],
                         term={"field": "name"})
        ]
        assert metrics.histogram.calls == [
            pretend.call("warehouse.views.search.results", (page or 1) + 10)
        ]
示例#14
0
    def test_does_not_remove_group_term_from_query_if_group_does_not_exist(
        self, pyramid_request, unparse
    ):
        """check_url leaves an unknown group filter in place and never
        invokes unparse."""
        query_dict = MultiDict({"group": "does_not_exist"})

        check_url(pyramid_request, query_dict, unparse=unparse)

        assert not unparse.called
        assert query_dict.get("group") == "does_not_exist"
示例#15
0
文件: query_test.py 项目: ziqizh/h
    def test_does_not_remove_user_term_from_query_if_user_does_not_exist(
            self, pyramid_request, unparse, user_service):
        """If the user named in the "user" filter cannot be fetched, the
        term is kept and no redirect URL is built.

        NOTE(review): the setup literal had been scrubbed to '******' while
        the assertion still expected 'jose', making the test permanently
        fail; the literal is restored to match the assertion.
        """
        query = MultiDict({'user': 'jose'})
        # Simulate a user that does not exist.
        user_service.fetch.return_value = None

        check_url(pyramid_request, query, unparse=unparse)

        assert query.get('user') == 'jose'
        assert not unparse.called
示例#16
0
    def test_does_not_remove_group_term_from_query_if_group_does_not_exist(
            self, pyramid_request, unparse):
        """An unknown group name stays in the query; unparse is skipped."""
        filters = MultiDict({'group': 'does_not_exist'})

        check_url(pyramid_request, filters, unparse=unparse)

        assert filters.get('group') == 'does_not_exist'
        assert not unparse.called
示例#17
0
    def test_does_not_remove_user_term_from_query_if_user_does_not_exist(
        self, pyramid_request, unparse, user_service
    ):
        """If the "user" filter names a user the service cannot fetch, the
        term stays in the query and unparse is never called.

        NOTE(review): the setup literal had been scrubbed to "******" while
        the assertion still expected "jose", making the test permanently
        fail; the literal is restored to match the assertion.
        """
        query = MultiDict({"user": "jose"})
        # Simulate a user that does not exist.
        user_service.fetch.return_value = None

        check_url(pyramid_request, query, unparse=unparse)

        assert query.get("user") == "jose"
        assert not unparse.called
示例#18
0
    def collection_post(self):
        """Create a new Item from the JSON request body and persist it.

        Expected payload keys: ``name``, ``type``, ``description``,
        ``reason`` (required); ``is_draft``, ``quantity``, ``price``,
        ``trade_with`` and ``tags`` (optional).

        Returns the created item serialized via ``to_dict()``.
        """
        payload = MultiDict(self.request.json_body)

        # fetch user
        # TODO: assign currently logged-in user's id
        user = DBSession.query(User).filter_by(id=1).one()

        # Drafts get the DRAFTS status; everything else starts ONGOING.
        status_name = 'DRAFTS' if payload.get('is_draft', False) else 'ONGOING'
        status = DBSession.query(ItemStatus).filter_by(name=status_name).one()

        qty = int(payload.get('quantity', 1))
        # Normalize a missing, empty, or falsy price to None (the original
        # expression tested the same key's truthiness twice).
        price = payload.get('price') or None
        new_item = Item(user=user,
                        name=payload['name'],
                        type=payload['type'],
                        trade_with=payload.get('trade_with', None),
                        status=status,
                        price=price,
                        quantity=qty,
                        description=payload['description'],
                        reason=payload['reason'])

        # Attach any pre-existing tags referenced by id in the payload.
        if payload.get('tags', None):
            tag_ids = [
                tag['id'] for tag in payload['tags'] if tag.get('id', None)
            ]
            tags = DBSession.query(ItemTag).filter(
                ItemTag.id.in_(tag_ids)).all()
            new_item.tags.extend(tags)

        DBSession.add(new_item)
        DBSession.commit()

        return new_item.to_dict()
示例#19
0
    def test_with_a_query(self, monkeypatch, db_request, metrics, page):
        """A plain "q" search delegates query construction to get_es_query
        and reports the result count to the metrics histogram."""
        request_params = MultiDict({"q": "foo bar"})
        if page is not None:
            request_params["page"] = page
        db_request.params = request_params

        db_request.es = pretend.stub()
        stub_query = pretend.stub()
        query_builder = pretend.call_recorder(lambda *a, **kw: stub_query)
        monkeypatch.setattr(views, "get_es_query", query_builder)

        stub_page = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        page_recorder = pretend.call_recorder(lambda *a, **kw: stub_page)
        monkeypatch.setattr(views, "ElasticsearchPage", page_recorder)

        stub_maker = pretend.stub()
        maker_recorder = pretend.call_recorder(lambda request: stub_maker)
        monkeypatch.setattr(views, "paginate_url_factory", maker_recorder)

        assert search(db_request) == {
            "page": stub_page,
            "term": request_params.get("q", ""),
            "order": "",
            "applied_filters": [],
            "available_filters": [],
        }
        assert query_builder.calls == [
            pretend.call(db_request.es, request_params.get("q"), "", [])
        ]
        assert page_recorder.calls == [
            pretend.call(stub_query, url_maker=stub_maker, page=page or 1)
        ]
        assert maker_recorder.calls == [pretend.call(db_request)]
        assert metrics.histogram.calls == [
            pretend.call("warehouse.views.search.results", 1000)
        ]
示例#20
0
文件: storage.py 项目: baverman/dsfs
def application(env, start_response):
    """WSGI entry point for a storage node.

    Routing: if a local volume holds the requested key, PUT stores the
    file and GET serves it via X-Sendfile headers; otherwise the request
    is redirected to a node that has the key, with all known locations
    advertised in X-Location headers.
    """
    res = None
    path = env['PATH_INFO'].lstrip('/')
    method = env['REQUEST_METHOD']
    args = MultiDict(parse_qsl(env.get('QUERY_STRING', '')))

    parts = path.split('/')
    volume = args.get('volume')
    if not volume:
        # No explicit volume given: look for a volume for this key that
        # lives on the current node.
        volumes = cluster.get_volumes(args['key'])
        for v in volumes:
            if v.node is current_node and v.id in current_node.volumes:
                volume = v.id
                break

    if volume:
        v = current_node.volumes[volume]
        collection = parts[0]
        if method == 'PUT':
            return put_file(env, start_response,
                            v, collection, args['key'])
        elif method == 'GET':
            # Hand the actual file transfer to the front-end server via
            # X-Sendfile; only headers are produced here.
            fname, meta = v.get(collection, args['key'])
            res = Response()
            res.charset = None
            res.headers['X-Sendfile'] = fname
            res.headers['X-Crc'] = str(meta['crc'])
            res.content_length = os.path.getsize(fname)
            if 'ct' in meta:
                res.content_type = meta['ct']
    else:
        # Key not stored locally: redirect to the first remote volume.
        # NOTE(review): `volumes` is only bound when the "volume" query arg
        # was absent, and an empty list would raise IndexError at
        # `volumes[0]` — confirm cluster.get_volumes always returns at
        # least one entry and that an explicit-but-falsy volume arg cannot
        # reach this branch.
        collection = parts[0]
        key = args['key']
        v = volumes[0]
        res = HTTPTemporaryRedirect(
            location='http://{}/{}?key={}&volume={}'.format(v.node.id, collection, key, v.id))
        for v in volumes:
            res.headers.add(
                'X-Location',
                'http://{}/{}?key={}&volume={}'.format(v.node.id, collection, key, v.id))

    # Methods other than PUT/GET (or a GET that set no response) fall
    # through to 404.
    if not res:
        res = HTTPNotFound()

    return res(env, start_response)
示例#21
0
文件: headers.py 项目: rcharp/airform
class MimeHeaders(object):
    """Dictionary-like object that preserves the order and
    supports multiple values for the same key, knows
    whether it has been changed after the creation
    """

    def __init__(self, items=()):
        # Keys are normalized (case-insensitive lookups) and values are
        # scrubbed of newlines to prevent header injection.
        self._v = MultiDict([(normalize(key), remove_newlines(val))
                             for (key, val) in items])
        self.changed = False
        # Number of headers added via prepend()/add(); tracked separately
        # so have_changed() can optionally ignore pure prepends.
        self.num_prepends = 0

    def __getitem__(self, key):
        # Unlike a real dict, a missing key yields None (no KeyError).
        v = self._v.get(normalize(key), None)
        if v is not None:
            return encodedword.decode(v)
        return None

    def __len__(self):
        return len(self._v)

    def __iter__(self):
        return iter(self._v)

    def __contains__(self, key):
        return normalize(key) in self._v

    def __setitem__(self, key, value):
        # Replace an existing header in place; otherwise insert at the
        # front (prepend) like MTAs do when adding trace headers.
        key = normalize(key)
        if key in self._v:
            self._v[key] = remove_newlines(value)
            self.changed = True
        else:
            self.prepend(key, remove_newlines(value))

    def __delitem__(self, key):
        del self._v[normalize(key)]
        self.changed = True

    def __nonzero__(self):
        # Python 2 truthiness hook (would be __bool__ on Python 3).
        return len(self._v) > 0

    def prepend(self, key, value):
        # Reaches into MultiDict internals (_items) because the public API
        # has no "insert at front" operation.
        self._v._items.insert(0, (normalize(key), remove_newlines(value)))
        self.num_prepends += 1

    def add(self, key, value):
        """Adds header without changing the
        existing headers with same name"""
        self.prepend(key, value)

    def keys(self):
        """
        Returns the keys. (message header names)
        It remembers the order in which they were added, what
        is really important
        """
        return self._v.keys()

    def transform(self, fn, decode=False):
        """Accepts a function, getting a key, val and returning
        a new pair of key, val and applies the function to all
        header, value pairs in the message.
        """
        # Mutable cell so the wrapper can flag changes from inside.
        changed = [False]

        def wrapper(key, val):
            new_key, new_val = fn(key, val)
            if new_val != val or new_key != key:
                changed[0] = True
            return new_key, new_val

        # Only swap in the rebuilt MultiDict if fn actually changed a pair.
        v = MultiDict(wrapper(k, v) for k, v in self.iteritems(raw=not decode))
        if changed[0]:
            self._v = v
            self.changed = True

    def items(self):
        """
        Returns header,val pairs in the preserved order.
        """
        return list(self.iteritems())

    def iteritems(self, raw=False):
        """
        Returns iterator header,val pairs in the preserved order.
        """
        if raw:
            return self._v.iteritems()

        return iter([(x[0], encodedword.decode(x[1]))
                     for x in self._v.iteritems()])

    def get(self, key, default=None):
        """
        Returns header value (case-insensitive).
        """
        # NOTE: when the key is absent but `default` is not None, the
        # default is also run through encodedword.decode.
        v = self._v.get(normalize(key), default)
        if v is not None:
            return encodedword.decode(v)
        return None

    def getraw(self, key, default=None):
        """
        Returns raw header value (case-insensitive, non-decoded.
        """
        return self._v.get(normalize(key), default)

    def getall(self, key):
        """
        Returns all header values by the given header name (case-insensitive).
        """
        v = self._v.getall(normalize(key))
        return [encodedword.decode(x) for x in v]

    def have_changed(self, ignore_prepends=False):
        """
        Tells whether someone has altered the headers after creation.
        """
        return self.changed or (self.num_prepends > 0 and not ignore_prepends)

    def __str__(self):
        return str(self._v)

    @classmethod
    def from_stream(cls, stream):
        """
        Takes a stream and reads the headers, decodes headers to unicode dict
        like object.
        """
        return cls(parse_stream(stream))

    def to_stream(self, stream, prepends_only=False):
        """
        Takes a stream and serializes headers in a mime format.
        """
        # With prepends_only=True, stop after the first num_prepends
        # entries (the prepended headers sit at the front of the dict).
        i = 0
        for h, v in self.iteritems(raw=True):
            if prepends_only and i == self.num_prepends:
                break
            i += 1
            try:
                h.encode('ascii')
            except UnicodeDecodeError:
                # NOTE(review): this catch looks Python-2 specific (str
                # with non-ascii bytes triggers an implicit decode); on
                # Python 3 encode() raises UnicodeEncodeError instead —
                # confirm the target runtime.
                raise EncodingError("Non-ascii header name")
            stream.write("{0}: {1}\r\n".format(h, to_mime(h, v)))
    def GET(self, req):
        """
        Handles listing of in-progress multipart uploads,
        handles list objects request.
        """
        # any operations with multipart buckets are not allowed to user
        check_container_name_no_such_bucket_error(self.container_name)

        if 'uploads' in req.GET:
            return self.get_uploads(req)
        else:
            acl = req.GET.get('acl')
            # Rewrite the S3-style query into the backing store's listing
            # parameters: JSON format and limit = max-keys + 1, so one
            # extra row reveals whether the listing is truncated.
            params = MultiDict([('format', 'json')])
            max_keys = req.GET.get('max-keys')
            if (max_keys is not None and max_keys.isdigit()):
                max_keys = min(int(max_keys), MAX_BUCKET_LISTING)
            else:
                max_keys = MAX_BUCKET_LISTING
            params['limit'] = str(max_keys + 1)
            for param_name in ('marker', 'prefix', 'delimiter'):
                if param_name in req.GET:
                    params[param_name] = req.GET[param_name]

            # Replace the request's query string in place and forward the
            # request down the WSGI pipeline.
            req.GET.clear()
            req.GET.update(params)

            resp = req.get_response(self.app)
            status = resp.status_int
            body = resp.body

            # Map backend errors onto S3-style error responses.
            if status != 200:
                if status == 401:
                    return get_err_response('AccessDenied')
                elif status == 404:
                    return get_err_response('InvalidBucketName')
                else:
                    return get_err_response('InvalidURI')

            if acl is not None:
                return get_acl(self.account_name)

            # Translate the JSON listing into S3 ListBucketResult XML:
            # entries with a 'subdir' key become CommonPrefixes, all
            # others become Contents elements. Only the first max_keys
            # rows are emitted; the extra row only signals truncation.
            objects = json.loads(resp.body)
            body = ('<?xml version="1.0" encoding="UTF-8"?>'
                '<ListBucketResult '
                    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
                '<Prefix>%s</Prefix>'
                '<Marker>%s</Marker>'
                '<Delimiter>%s</Delimiter>'
                '<IsTruncated>%s</IsTruncated>'
                '<MaxKeys>%s</MaxKeys>'
                '<Name>%s</Name>'
                '%s'
                '%s'
                '</ListBucketResult>' %
                (
                    xml_escape(params.get('prefix', '')),
                    xml_escape(params.get('marker', '')),
                    xml_escape(params.get('delimiter', '')),
                    'true' if len(objects) == (max_keys + 1) else 'false',
                    max_keys,
                    xml_escape(self.container_name),
                    "".join(['<Contents><Key>%s</Key><LastModified>%sZ</Last'\
                             'Modified><ETag>%s</ETag><Size>%s</Size><Storage'\
                             'Class>STANDARD</StorageClass></Contents>' %
                             (xml_escape(i['name']), i['last_modified'][:-3],
                                                         i['hash'], i['bytes'])
                            for i in objects[:max_keys] if 'subdir' not in i]),
                    "".join(['<CommonPrefixes><Prefix>%s</Prefix></Common'\
                             'Prefixes>' % xml_escape(i['subdir'])
                             for i in objects[:max_keys] if 'subdir' in i])))

            return Response(body=body, content_type='application/xml')
示例#23
0
class MimeHeaders(object):
    """Dictionary-like object that preserves the order and
    supports multiple values for the same key, knows
    whether it has been changed after the creation.

    All lookups go through ``normalize()``, so header-name access is
    canonicalized (case-insensitive in practice -- see ``normalize``).
    Values written through the mutating methods pass through
    ``_remove_newlines`` (presumably guarding against header-injection
    via embedded CR/LF -- see that helper).
    """

    def __init__(self, items=()):
        # Keys are normalized once on the way in; insertion order is kept
        # by the underlying MultiDict.
        self.v = MultiDict(
            [(normalize(key), val) for (key, val) in items])
        # Tracks whether any mutation happened after construction.
        self.changed = False

    def __getitem__(self, key):
        """Return the value for *key*, or None when absent (no KeyError)."""
        return self.v.get(normalize(key), None)

    def __len__(self):
        """Number of header entries (duplicates counted separately)."""
        return len(self.v)

    def __iter__(self):
        """Iterate over (normalized) header names in insertion order."""
        return iter(self.v)

    def __contains__(self, key):
        """Case-insensitive membership test via key normalization."""
        return normalize(key) in self.v

    def __setitem__(self, key, value):
        # Replaces any existing values for this key; value is sanitized.
        self.v[normalize(key)] = _remove_newlines(value)
        self.changed = True

    def __delitem__(self, key):
        del self.v[normalize(key)]
        self.changed = True

    def __nonzero__(self):
        # Python 2 truthiness hook: headers object is truthy when non-empty.
        return len(self.v) > 0

    def prepend(self, key, val):
        # Inserts at position 0 by reaching into MultiDict private state
        # (`_items`). NOTE(review): unlike the other mutators, *key* is NOT
        # normalized here -- confirm whether that is intentional.
        self.v._items.insert(0, (key, _remove_newlines(val)))
        self.changed = True

    def add(self, key, value):
        """Adds header without changing the
        existing headers with same name"""

        self.v.add(normalize(key), _remove_newlines(value))
        self.changed = True

    def keys(self):
        """
        Returns the keys. (message header names)
        It remembers the order in which they were added, what
        is really important
        """
        return self.v.keys()

    def transform(self, fn):
        """Accepts a function, getting a key, val and returning
        a new pair of key, val and applies the function to all
        header, value pairs in the message.

        The ``changed`` flag is only raised when *fn* actually altered
        at least one pair.
        """

        # Mutable cell so the nested function can flip the flag (py2 has
        # no `nonlocal`).
        changed = [False]
        def tracking_fn(key, val):
            new_key, new_val = fn(key, val)
            if new_val != val or new_key != key:
                changed[0] = True
            return new_key, new_val

        v = MultiDict(tracking_fn(key, val) for key, val in self.v.iteritems())
        if changed[0]:
            # Only swap in the transformed mapping if something changed.
            self.v = v
            self.changed = True


    def items(self):
        """
        Returns header,val pairs in the preserved order.
        """
        return list(self.iteritems())


    def iteritems(self):
        """
        Returns iterator header,val pairs in the preserved order.
        """
        return self.v.iteritems()


    def get(self, key, default=None):
        """
        Returns header value (case-insensitive).
        """
        return self.v.get(normalize(key), default)

    def getall(self, key):
        """
        Returns all header values by the given header name
        (case-insensitive)
        """
        return self.v.getall(normalize(key))

    def have_changed(self):
        """Tells whether someone has altered the headers
        after creation"""
        return self.changed

    def __str__(self):
        return str(self.v)

    @classmethod
    def from_stream(cls, stream):
        """Takes a stream and reads the headers,
        decodes headers to unicode dict like object"""
        return cls(parse_stream(stream))

    def to_stream(self, stream):
        """Takes a stream and serializes headers
        in a mime format"""

        for h, v in self.v.iteritems():
            try:
                h = h.encode('ascii')
            except UnicodeDecodeError:
                # py2: encoding a non-ascii byte string to ascii raises
                # UnicodeDecodeError during the implicit decode step.
                raise EncodingError("Non-ascii header name")
            stream.write("{0}: {1}\r\n".format(h, to_mime(h, v)))
示例#24
0
    def test_with_classifiers(self, monkeypatch, db_request, page):
        """Searching with classifier filters groups the available filters
        by category and issues one `terms` filter per applied classifier."""
        query_params = MultiDict([
            ("q", "foo bar"),
            ("c", "foo :: bar"),
            ("c", "fiz :: buz"),
        ])
        if page is not None:
            query_params["page"] = page
        db_request.params = query_params

        # Chainable ES query stub: every recorded method returns the stub.
        stub_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: stub_query),
            filter=pretend.call_recorder(lambda *a, **kw: stub_query),
            sort=pretend.call_recorder(lambda *a, **kw: stub_query),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: stub_query))

        bar = ClassifierFactory.create(classifier="foo :: bar")
        baz = ClassifierFactory.create(classifier="foo :: baz")
        buz = ClassifierFactory.create(classifier="fiz :: buz")

        # Only the 'foo' classifiers are attached to a release, so only
        # they should show up as available filters.
        release = ReleaseFactory.create(project=ProjectFactory.create())
        release.created = datetime.date(2011, 1, 1)
        release._classifiers.append(bar)
        release._classifiers.append(baz)

        fake_page = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        fake_page_cls = pretend.call_recorder(lambda *a, **kw: fake_page)
        monkeypatch.setattr(views, "ElasticsearchPage", fake_page_cls)

        fake_url_maker = pretend.stub()
        fake_url_maker_factory = pretend.call_recorder(
            lambda request: fake_url_maker)
        monkeypatch.setattr(views, "paginate_url_factory",
                            fake_url_maker_factory)

        search_view = search(db_request)

        expected = {
            "page": fake_page,
            "term": query_params.get("q", ''),
            "order": query_params.get("o", ''),
            "applied_filters": query_params.getall("c"),
            "available_filters": [
                ('foo', [bar.classifier, baz.classifier]),
            ],
        }
        assert search_view == expected
        assert (("fiz", [buz.classifier])
                not in search_view["available_filters"])
        assert fake_page_cls.calls == [
            pretend.call(stub_query, url_maker=fake_url_maker,
                         page=page or 1),
        ]
        assert fake_url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(views.gather_es_queries(query_params["q"]))
        ]
        assert stub_query.suggest.calls == [
            pretend.call(
                "name_suggestion",
                query_params["q"],
                term={"field": "name"},
            ),
        ]
        assert stub_query.filter.calls == [
            pretend.call('terms', classifiers=['foo :: bar']),
            pretend.call('terms', classifiers=['fiz :: buz']),
        ]
示例#25
0
    def test_with_classifiers(self, monkeypatch, db_request, page):
        """Classifier filters are combined into a single `terms` filter and
        a `dis_max` query is issued; filters are grouped by category."""
        query_params = MultiDict([
            ("q", "foo bar"),
            ("c", "foo :: bar"),
            ("c", "fiz :: buz"),
        ])
        if page is not None:
            query_params["page"] = page
        db_request.params = query_params

        # Chainable ES query stub: every recorded method returns the stub.
        stub_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: stub_query),
            filter=pretend.call_recorder(lambda *a, **kw: stub_query),
            sort=pretend.call_recorder(lambda *a, **kw: stub_query),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: stub_query))

        bar = ClassifierFactory.create(classifier="foo :: bar")
        baz = ClassifierFactory.create(classifier="foo :: baz")
        buz = ClassifierFactory.create(classifier="fiz :: buz")

        # Only the 'foo' classifiers are attached to a release, so only
        # they should show up as available filters.
        release = ReleaseFactory.create(project=ProjectFactory.create())
        release.created = datetime.date(2011, 1, 1)
        release._classifiers.append(bar)
        release._classifiers.append(baz)

        fake_page = pretend.stub(page_count=(page or 1) + 10)
        fake_page_cls = pretend.call_recorder(lambda *a, **kw: fake_page)
        monkeypatch.setattr(views, "ElasticsearchPage", fake_page_cls)

        fake_url_maker = pretend.stub()
        fake_url_maker_factory = pretend.call_recorder(
            lambda request: fake_url_maker)
        monkeypatch.setattr(views, "paginate_url_factory",
                            fake_url_maker_factory)

        search_view = search(db_request)

        assert search_view == {
            "page": fake_page,
            "term": query_params.get("q", ''),
            "order": query_params.get("o", ''),
            "applied_filters": query_params.getall("c"),
            "available_filters": [
                ('foo', [bar.classifier, baz.classifier]),
            ],
        }
        assert (("fiz", [buz.classifier])
                not in search_view["available_filters"])
        assert fake_page_cls.calls == [
            pretend.call(stub_query, url_maker=fake_url_maker,
                         page=page or 1),
        ]
        assert fake_url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "dis_max",
                queries=self._gather_es_queries(query_params["q"]),
            )
        ]
        assert stub_query.suggest.calls == [
            pretend.call(
                "name_suggestion",
                query_params["q"],
                term={"field": "name"},
            ),
        ]
        assert stub_query.filter.calls == [
            pretend.call('terms', classifiers=['foo :: bar', 'fiz :: buz'])
        ]
示例#26
0
    def identify(self, environ: Any):
        u'''
        Override the parent's identifier to introduce a login counter
        (possibly along with a post-login page) and load the login counter
        into the ``environ``.

        Returns the extracted credentials dict (``login``/``password``,
        optionally ``max_age``) on the login-handler path, or ``None``
        otherwise.
        '''
        request = Request(environ, charset=self.charset)

        path_info = environ[u'PATH_INFO']
        script_name = environ.get(u'SCRIPT_NAME') or u'/'
        query = request.GET

        if path_info == self.login_handler_path:
            # We are on the URL where repoze.who processes authentication.
            # Append the login counter to the query string of the
            # 'came_from' URL. It will be used by the challenge below if
            # authorization is denied for this request.
            form = dict(request.POST)
            form.update(query)
            try:
                login = form[u'login']
                password = form[u'password']
            except KeyError:
                credentials = None
            else:
                if request.charset == u'us-ascii':
                    credentials = {
                        u'login': str(login),
                        u'password': str(password),
                    }
                else:
                    credentials = {u'login': login, u'password': password}

            # BUG FIX: the original assigned into ``credentials`` without a
            # None check, so a request carrying 'remember' but no
            # 'login'/'password' raised TypeError instead of simply failing
            # authentication.
            if credentials is not None:
                try:
                    credentials[u'max_age'] = form[u'remember']
                except KeyError:
                    pass

            referer = environ.get(u'HTTP_REFERER', script_name)
            destination = cast(str, form.get(u'came_from', referer))

            if self.post_login_url:
                # There's a post-login page, so we have to replace the
                # destination with it.
                destination = self._get_full_path(self.post_login_url,
                                                  environ)
                if u'came_from' in query:
                    # There's a referrer URL defined, so we have to pass it
                    # to the post-login page as a GET variable.
                    destination = self._insert_qs_variable(destination,
                                                           u'came_from',
                                                           query[u'came_from'])
            failed_logins = self._get_logins(environ, True)
            new_dest = self._set_logins_in_url(destination, failed_logins)
            environ[u'repoze.who.application'] = HTTPFound(location=new_dest)
            return credentials

        elif path_info == self.logout_handler_path:
            # We are on the URL where repoze.who logs the user out.
            r = Request(environ)
            params = dict(list(r.GET.items()) + list(r.POST.items()))
            form = MultiDict(params)
            form.update(query)
            referer = environ.get(u'HTTP_REFERER', script_name)
            came_from = form.get(u'came_from', referer)
            # set in environ for self.challenge() to find later
            environ[u'came_from'] = came_from
            environ[u'repoze.who.application'] = HTTPUnauthorized()
            return None

        elif path_info == self.login_form_url or self._get_logins(environ):
            # We are on the URL that displays the form OR any other page
            # where the login counter is included in the query string.
            # So let's load the counter into the environ and then hide it
            # from the query string (it would cause problems in frameworks
            # like TG2, where this unexpected variable would be passed to
            # the controller).
            environ[u'repoze.who.logins'] = self._get_logins(environ, True)
            # Hiding the GET variable in the environ:
            if self.login_counter_name in query:
                del query[self.login_counter_name]
                environ[u'QUERY_STRING'] = urlencode(query, doseq=True)
示例#27
0
    def put(self):
        """Update an existing item from the JSON request body.

        Looks the item up by the ``item_id`` route match, derives its new
        status (DRAFTS / ARCHIVED / ONGOING), replaces its tags, adjusts
        ``original_quantity`` to track the quantity delta, commits, and
        returns the item rendered as a dict.
        """
        item_id = int(self.request.matchdict['item_id'])
        payload = MultiDict(self.request.json_body)
        item = DBSession.query(Item).filter_by(id=item_id).one()
        transaction_date = None

        # Replaced a leftover debug ``print payload`` statement with a
        # proper debug-level log record.
        logger.debug('payload: %s', payload)

        # fetch status; archiving also stamps the transaction date
        if payload.get('is_draft', False):
            status_name = 'DRAFTS'
        elif payload.get('status', None) and payload['status'] == 'archived':
            status_name = 'ARCHIVED'
            transaction_date = datetime.now()
        else:
            status_name = 'ONGOING'
        status = DBSession.query(ItemStatus).filter_by(name=status_name).one()

        # fetch new tags referenced by id in the payload
        new_tags = []
        ids_to_add = [
            int(tag['id']) for tag in payload.get('tags', [])
            if tag.get('id', None)
        ]
        if ids_to_add:
            new_tags.extend(
                DBSession.query(ItemTag).filter(
                    ItemTag.id.in_(ids_to_add)).all())

        item.tags = new_tags  # replace existing tags
        new_qty = int(payload.get('quantity', 1))
        # NOTE(review): raises KeyError when 'price' is missing from the
        # payload -- presumably the client always sends it; confirm.
        price = payload['price'] if payload['price'] else None

        item.name = payload['name']
        item.type = payload['type']
        item.trade_with = payload.get('trade_with', None)
        item.status = status
        item.price = price
        item.description = payload['description']
        item.reason = payload.get('reason', None)
        item.transaction_date = transaction_date

        # Keep original_quantity in sync with the quantity delta so the
        # historical total still reflects everything ever added.
        if new_qty > item.quantity:
            item.original_quantity += new_qty - item.quantity
        elif new_qty < item.quantity:
            item.original_quantity -= item.quantity - new_qty

        item.quantity = new_qty

        updating_fields = ('updating item: %s', 'name: %s', 'type: %s',
                           'status: %s', 'price: %s', 'quantity: %s',
                           'description: %s', 'reason: %s', 'tags: %s',
                           'transaction date: %s\n')
        logger.info('\n'.join(updating_fields) %
                    (item_id, item.name, item.type, item.status, item.price,
                     item.quantity, item.description, item.reason, item.tags,
                     item.transaction_date))

        DBSession.commit()
        return item.to_dict()
    def get_uploads(self, req):
        """Handles listing of in-progress multipart uploads (S3
        ListMultipartUploads): lists the hidden multipart container and
        renders its ``<key>/<upload_id>/meta`` marker objects as XML.
        """
        acl = req.GET.get('acl')
        params = MultiDict([('format', 'json')])
        max_uploads = req.GET.get('max-uploads')
        if max_uploads is not None and max_uploads.isdigit():
            max_uploads = min(int(max_uploads), MAX_UPLOADS_LISTING)
        else:
            max_uploads = MAX_UPLOADS_LISTING
        # Ask for one extra result so truncation can be detected below.
        params['limit'] = str(max_uploads + 1)
        for param_name in ('key-marker', 'prefix', 'delimiter',
                           'upload-id-marker'):
            if param_name in req.GET:
                params[param_name] = req.GET[param_name]

        # Point the request at the hidden multipart-upload container.
        cont_name = MULTIPART_UPLOAD_PREFIX + self.container_name
        cont_path = "/v1/%s/%s/" % (self.account_name, cont_name)

        req.upath_info = cont_path
        req.GET.clear()
        req.GET.update(params)

        resp = req.get_response(self.app)
        status = resp.status_int

        if status != 200:
            if status == 401:
                return get_err_response('AccessDenied')
            elif status == 404:
                return get_err_response('InvalidBucketName')
            else:
                return get_err_response('InvalidURI')

        if acl is not None:
            return get_acl(self.account_name)

        objects = json.loads(resp.body)

        # Only '<key>/<upload_id>/meta' marker objects represent uploads.
        # BUG FIX: the original removed non-meta entries from ``objects``
        # while iterating it (which skips elements) and accumulated the XML
        # with ``uploads = uploads.join(...)`` -- a str.join misuse that
        # inserted the accumulated text between every character of each
        # subsequent fragment. Both are fixed here.
        meta_objects = [obj for obj in objects
                        if obj['name'].endswith('/meta')]

        upload_parts = []
        splited_name = ''
        for obj in meta_objects:
            splited_name = obj['name'].split('/')
            upload_parts.append(
                "<Upload>"
                "<Key>%s</Key>"
                "<UploadId>%s</UploadId>"
                "<Initiator>"
                "<ID>%s</ID>"
                "<DisplayName>%s</DisplayName>"
                "</Initiator>"
                "<Owner>"
                "<ID>%s</ID>"
                "<DisplayName>%s</DisplayName>"
                "</Owner>"
                "<StorageClass>STANDARD</StorageClass>"
                "<Initiated>%sZ</Initiated>"
                "</Upload>" % (
                    splited_name[0],
                    splited_name[1],
                    self.account_name,
                    self.account_name,
                    self.account_name,
                    self.account_name,
                    obj['last_modified'][:-3]))
        uploads = ''.join(upload_parts)

        #TODO: Currently there are less then max_uploads results
        # in a response; Amount of uploads == amount of meta files
        # received in a request for a list of objects in a bucket.

        if len(meta_objects) == (max_uploads + 1):
            is_truncated = 'true'
            # The extra (max_uploads+1)-th entry supplies the next markers.
            next_key_marker = splited_name[0]
            next_uploadId_marker = splited_name[1]
        else:
            is_truncated = 'false'
            next_key_marker = next_uploadId_marker = ''

        body = ('<?xml version="1.0" encoding="UTF-8"?>'
                '<ListMultipartUploadsResult '
                'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
                '<Bucket>%s</Bucket>'
                '<KeyMarker>%s</KeyMarker>'
                '<UploadIdMarker>%s</UploadIdMarker>'
                '<NextKeyMarker>%s</NextKeyMarker>'
                '<NextUploadIdMarker>%s</NextUploadIdMarker>'
                '<MaxUploads>%s</MaxUploads>'
                '<IsTruncated>%s</IsTruncated>'
                '%s'
                '</ListMultipartUploadsResult>' % (
                    xml_escape(self.container_name),
                    xml_escape(params.get('key-marker', '')),
                    xml_escape(params.get('upload-id-marker', '')),
                    next_key_marker,
                    next_uploadId_marker,
                    max_uploads,
                    is_truncated,
                    uploads))
        return Response(body=body, content_type='application/xml')
示例#29
0
    def test_with_classifiers(self, monkeypatch, db_request, page):
        """All created classifiers appear as available filters (grouped by
        category), a `multi_match` query is issued, and both applied
        classifiers land in a single `terms` filter call."""
        query_params = MultiDict([
            ("q", "foo bar"),
            ("c", "foo :: bar"),
            ("c", "fiz :: buz"),
        ])
        if page is not None:
            query_params["page"] = page
        db_request.params = query_params

        # Chainable ES query stub: every recorded method returns the stub.
        stub_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: stub_query),
            filter=pretend.call_recorder(lambda *a, **kw: stub_query),
            sort=pretend.call_recorder(lambda *a, **kw: stub_query),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: stub_query))

        bar = ClassifierFactory.create(classifier="foo :: bar")
        baz = ClassifierFactory.create(classifier="foo :: baz")
        buz = ClassifierFactory.create(classifier="fiz :: buz")

        fake_page = pretend.stub(page_count=(page or 1) + 10)
        fake_page_cls = pretend.call_recorder(lambda *a, **kw: fake_page)
        monkeypatch.setattr(views, "ElasticsearchPage", fake_page_cls)

        fake_url_maker = pretend.stub()
        fake_url_maker_factory = pretend.call_recorder(
            lambda request: fake_url_maker)
        monkeypatch.setattr(views, "paginate_url_factory",
                            fake_url_maker_factory)

        result = search(db_request)

        assert result == {
            "page": fake_page,
            "term": query_params.get("q", ''),
            "order": query_params.get("o", ''),
            "applied_filters": query_params.getall("c"),
            "available_filters": [
                ('fiz', [buz.classifier]),
                ('foo', [bar.classifier, baz.classifier]),
            ],
        }
        assert fake_page_cls.calls == [
            pretend.call(stub_query, url_maker=fake_url_maker,
                         page=page or 1),
        ]
        assert fake_url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "multi_match",
                query="foo bar",
                fields=[
                    "author",
                    "author_email",
                    "description^5",
                    "download_url",
                    "home_page",
                    "keywords^5",
                    "license",
                    "maintainer",
                    "maintainer_email",
                    "normalized_name^10",
                    "platform",
                    "summary^5",
                ],
            ),
        ]
        assert stub_query.suggest.calls == [
            pretend.call(
                name="name_suggestion",
                term={"field": "name"},
                text="foo bar",
            ),
        ]
        assert stub_query.filter.calls == [
            pretend.call('terms', classifiers=['foo :: bar', 'fiz :: buz'])
        ]
示例#30
0
    def view_one_click_buy(self):
        """Render and process the one-click-buy form for a single good.

        GET (no submit button in params): render the form, pre-filled
        with the user's email when logged in.
        POST (submit button present): validate the form, register the
        user by email if unknown, create an order and a money
        transaction, and return the payment system's redirect form.
        Returns a template dict or an HTTP error response.
        """
        good_id = self.request.matchdict.get('id')
        good = Good.by_id(good_id)
        # TODO test good exists !
        if good is None:
            return HTTPNotFound('Товар не найден')
        if not good.active:
            return HTTPForbidden('Товар сня')
        appstruct = dict()
        user_logined = self.user is not None
        email = None
        if user_logined:
            email = self.user.email
            appstruct['email'] = email

        # TODO: also add a 'name' field to the form

        one_click_buy_schema = OneClickBuySchema()
        submit_button_name = 'form_good_one_click_buy_submit'

        # The default payment system must be configured AND have a loaded
        # client; otherwise the view cannot proceed at all.
        payment_system_default = helpers.get_setting('payment_system_default')
        payment_systems_captions = get_payment_clients_captions()
        if payment_system_default is None:
            raise Exception('payment_system_default in config was not set!')
        else:
            payment_system_default_caption = payment_systems_captions.get(
                payment_system_default)
            if payment_system_default_caption is None:
                raise Exception('default_payment_system is not loaded!')

        appstruct['payment_system'] = payment_system_default

        one_click_buy_form = FormMod(
            one_click_buy_schema.bind(
                user_logined=user_logined,
                # TODO logout return url param
                logout_url=self.request.route_url('logout'),
                payment_systems=payment_systems_captions),
            buttons=[Button(name=submit_button_name, title='Приобрести')],
            # css_class='no-red-stars'
        )

        rendered_redirect_form = None

        if submit_button_name in self.request.params:
            # Form was submitted: rebuild the POST data as a MultiDict so
            # repeated keys survive, then force the trusted email and the
            # default payment system where needed.
            post = MultiDict()
            post._items = list(self.request.POST.items())
            if user_logined:
                # A logged-in user may not override the account email.
                helpers.multidict_rm_values(post, 'email')
                if email is not None:
                    post.add('email', email)
            if post.get('payment_system') is None:
                post.add('payment_system', payment_system_default)

            payment_system = post.get('payment_system')
            controls = post.items()
            # TODO add email to controls if email is not None
            try:
                one_click_buy_form.validate(post.items())
            except deform.ValidationFailure as e:
                # Re-render the form with inline validation errors.
                return dict(
                    good=good,
                    rendered_one_click_buy_form=e.render(),
                )
            # TODO: captcha check!

            # self.request.params()
            user = self.user
            if email is None:
                email = self.request.params.get('email')

            if email is None or email == '':
                return HTTPBadRequest('No email specified')

            if user is None:
                user = User.by_email(email)

            if user is None:
                # Unknown email: register a new account on the fly and
                # mail the generated password to the user.
                # TODO register to separate function
                user = User()
                user.email = email
                password = User.generate_password()
                user.set_password(password)
                error = db_save_model(user)
                if error is not None:
                    return self.db_error_response(error)

                helpers.send_html_mail(user.email, 'registered', {
                    'user_name': user.name,
                    'password': password
                })

            # TODO read http://docs.sqlalchemy.org/en/latest/orm/cascades.html#merge

            # First transaction: create the order with one unit of the good.
            try:
                with transaction.manager:
                    new_order = Order(user_id=user.id)
                    DBSession.add(new_order)
                    new_order.alter_wanted_good_count(good.id, delta_count=1.0)
            except DBAPIError as error:
                return self.db_error_response(error)

            amount_to_pay = new_order.get_amount_to_pay()

            # Second transaction: create the money transaction and obtain
            # the payment system's redirect form.
            try:
                with transaction.manager:
                    DBSession.add(new_order)
                    # TODO generalize and move transaction make code somewhere
                    new_money_transaction = MoneyTransaction(
                        order_id=new_order.id,
                        user_id=user.id,
                        payment_system=payment_system,
                        shop_money_delta=amount_to_pay,
                        type=EnumMoneyTransactionType.buy)
                    DBSession.add(new_money_transaction)
                    DBSession.flush()
                    # money_transaction.init()
                    payment_client = payment_systems.get_payment_client_by_name(
                        payment_system)
                    if payment_client is None:
                        return HTTPInternalServerError('Error on payment init')
                    # working type definition!
                    """:type payment_client AbstractPaymentClient"""
                    status_redirect_form = payment_client.run_transaction(
                        new_money_transaction)
                    """:type status_redirect_form MoneyTransactionStatus"""
                    if status_redirect_form is None:
                        raise Exception('payment form not generated')
                    if type(status_redirect_form
                            ) is not MoneyTransactionStatus:
                        raise Exception(
                            'status_redirect_form is not MoneyTransactionStatus class'
                        )
                    if status_redirect_form.status != EnumMoneyTransactionStatus.redirect_to_payment_form:
                        raise Exception(
                            'status_redirect_form.status is not payment form')
                    rendered_redirect_form = status_redirect_form.render_post_form(
                    )

            except DBAPIError as error:
                return self.db_error_response(error)

            # redirect to payment
            # TODO payment system choose

            # try:
            #     with transaction.manager:
            #         new_order.payment_start()
            # except DBAPIError as error:
            #     return self.db_error_response(error)

        # TODO backlink param
        # or write backlink to the order class?
        return dict(
            good=good,
            rendered_one_click_buy_form=one_click_buy_form.render(appstruct),
            rendered_redirect_form=rendered_redirect_form)
示例#31
0
    def test_with_classifiers(self, monkeypatch, db_request, metrics, page):
        """Each applied classifier becomes its own `prefix` query, the
        available filters come back as nested dicts, and a result-count
        histogram metric is emitted."""
        query_params = MultiDict(
            [("q", "foo bar"), ("c", "foo :: bar"), ("c", "fiz :: buz")]
        )
        if page is not None:
            query_params["page"] = page
        db_request.params = query_params

        # Chainable ES query stub: every recorded method returns the stub.
        stub_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: stub_query),
            filter=pretend.call_recorder(lambda *a, **kw: stub_query),
            query=pretend.call_recorder(lambda *a, **kw: stub_query),
            sort=pretend.call_recorder(lambda *a, **kw: stub_query),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: stub_query)
        )

        bar = ClassifierFactory.create(classifier="foo :: bar")
        baz = ClassifierFactory.create(classifier="foo :: baz")
        buz = ClassifierFactory.create(classifier="fiz :: buz")

        # Only the 'foo' classifiers are attached to a release, so only
        # they should show up as available filters.
        release = ReleaseFactory.create(project=ProjectFactory.create())
        release.created = datetime.date(2011, 1, 1)
        release._classifiers.append(bar)
        release._classifiers.append(baz)

        fake_page = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
        fake_page_cls = pretend.call_recorder(lambda *a, **kw: fake_page)
        monkeypatch.setattr(views, "ElasticsearchPage", fake_page_cls)

        fake_url_maker = pretend.stub()
        fake_url_maker_factory = pretend.call_recorder(
            lambda request: fake_url_maker
        )
        monkeypatch.setattr(
            views, "paginate_url_factory", fake_url_maker_factory
        )

        search_view = search(db_request)

        assert search_view == {
            "page": fake_page,
            "term": query_params.get("q", ""),
            "order": query_params.get("o", ""),
            "applied_filters": query_params.getall("c"),
            "available_filters": [
                {
                    "foo": {
                        bar.classifier.split(" :: ")[1]: {},
                        baz.classifier.split(" :: ")[1]: {},
                    }
                }
            ],
        }
        assert ("fiz", [buz.classifier]) not in search_view["available_filters"]
        assert fake_page_cls.calls == [
            pretend.call(stub_query, url_maker=fake_url_maker, page=page or 1)
        ]
        assert fake_url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(views.gather_es_queries(query_params["q"]))
        ]
        assert stub_query.suggest.calls == [
            pretend.call(
                "name_suggestion", query_params["q"], term={"field": "name"}
            )
        ]
        assert stub_query.query.calls == [
            pretend.call("prefix", classifiers="foo :: bar"),
            pretend.call("prefix", classifiers="fiz :: buz"),
        ]
        assert metrics.histogram.calls == [
            pretend.call("warehouse.views.search.results", 1000)
        ]
示例#32
0
from wsgiref.util import setup_testing_defaults
from wsgiref.simple_server import make_server, demo_app
from urllib.parse import parse_qs
from webob import Request, Response

from webob.multidict import MultiDict

# Demo of webob's MultiDict: a dict-like container that may hold
# several values under the same key.  ``add`` appends a value without
# overwriting earlier ones.
md = MultiDict()
for key, value in (('a', 1), ('b', 2), ('b', 3), (1, 100), (1, 200), (2, 200)):
    md.add(key, value)

print(md[1])            # item access for a key with multiple values
print(md.get('b'))      # ``get`` mirrors item-access semantics
print(md.getall('b'))   # ``getall`` returns every value stored for 'b'
示例#33
0
    def test_with_classifiers(self, monkeypatch, db_request, page):
        params = MultiDict([
            ("q", "foo bar"),
            ("c", "foo :: bar"),
            ("c", "fiz :: buz"),
        ])
        if page is not None:
            params["page"] = page
        db_request.params = params

        es_query = pretend.stub(
            suggest=pretend.call_recorder(lambda *a, **kw: es_query),
            filter=pretend.call_recorder(lambda *a, **kw: es_query),
            sort=pretend.call_recorder(lambda *a, **kw: es_query),
        )
        db_request.es = pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: es_query)
        )

        classifier1 = ClassifierFactory.create(classifier="foo :: bar")
        classifier2 = ClassifierFactory.create(classifier="foo :: baz")
        classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

        page_obj = pretend.stub(page_count=(page or 1) + 10)
        page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
        monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

        url_maker = pretend.stub()
        url_maker_factory = pretend.call_recorder(lambda request: url_maker)
        monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

        assert search(db_request) == {
            "page": page_obj,
            "term": params.get("q", ''),
            "order": params.get("o", ''),
            "applied_filters": params.getall("c"),
            "available_filters": [
                ('fiz', [classifier3.classifier]),
                ('foo', [
                    classifier1.classifier,
                    classifier2.classifier,
                ])
            ],
        }
        assert page_cls.calls == [
            pretend.call(es_query, url_maker=url_maker, page=page or 1),
        ]
        assert url_maker_factory.calls == [pretend.call(db_request)]
        assert db_request.es.query.calls == [
            pretend.call(
                "multi_match",
                query="foo bar",
                fields=[
                    "name^2", "version", "author", "author_email",
                    "maintainer", "maintainer_email", "home_page", "license",
                    "summary", "description", "keywords", "platform",
                    "download_url",
                ],
            ),
        ]
        assert es_query.suggest.calls == [
            pretend.call(
                name="name_suggestion",
                term={"field": "name"},
                text="foo bar",
            ),
        ]
        assert es_query.filter.calls == [
            pretend.call('terms', classifiers=['foo :: bar', 'fiz :: buz'])
        ]