def test_view_does_not_fail_when_no_modified_date_is_passed(self):
    """The load-data view must tolerate feed items whose 'modified'
    field is empty and still produce a JSON object.
    """
    site = self.layer['portal']
    req = self.layer['request']
    watched_folder = self.layer['folder']
    req.form['uid'] = IUUID(watched_folder)

    # Render the feed, then blank out every item's modification date.
    feed = getMultiAdapter((site, req), name='watcher-feed')
    payload = json.loads(feed())
    for entry in payload['items']:
        entry['modified'] = ''
    payload = json.dumps(payload)

    # Serve the doctored payload as the remote response.
    _portlet, portlet_hash = self._get_portlet_and_hash()
    response = self._create_response(status_code=200, raw=payload)
    self._expect_request().result(response)
    self.replay()

    req.form['hash'] = portlet_hash
    view = queryMultiAdapter((site, req), name='watcher-load-data')
    self.assertNotEqual(view, None)
    self.assertEqual(type(json.loads(view())), dict)
def test_view_does_not_fail_when_no_modified_date_is_passed(self):
    """The load-data view must not break when feed items carry an
    empty 'modified' value; it should still return a JSON object.
    """
    site = self.layer['portal']
    req = self.layer['request']
    container = self.layer['folder']
    req.form['uid'] = IUUID(container)

    # Fetch the real feed and strip the modification dates from it.
    feed_view = getMultiAdapter((site, req), name='watcher-feed')
    doc = json.loads(feed_view())
    for entry in doc['items']:
        entry['modified'] = ''
    doctored = json.dumps(doc)

    # Mock the remote endpoint to answer with the doctored feed.
    _portlet, portlet_hash = self._get_portlet_and_hash()
    self._expect_request().result(
        self._create_response(status_code=200, raw=doctored))
    self.replay()

    req.form['hash'] = portlet_hash
    view = queryMultiAdapter((site, req), name='watcher-load-data')
    self.assertNotEqual(view, None)
    self.assertEqual(type(json.loads(view())), dict)
def __call__(self):
    """Render the portlet's feed data as a JSON string.

    Returns the JSON-quoted marker string '"MAINTENANCE"' when the
    backend reports a MaintenanceError, otherwise the serialized,
    date-localized feed data.
    """
    portlet = self._get_portlet()
    try:
        payload = self._get_data(portlet)
    except MaintenanceError:
        # Keep the response valid JSON: a quoted string literal.
        return '"MAINTENANCE"'
    else:
        localized = self._localize_dates(payload)
        return json.dumps(localized)
def __call__(self):
    """Look up the object for the requested uid and return its data
    serialized as JSON; returns JSON null when the uid is unknown.
    """
    uid = self.request.get('uid')
    catalog = getToolByName(self.context, 'reference_catalog')
    target = catalog.lookupObject(uid)
    # lookupObject yields None for a missing uid -> serialize as null.
    payload = None if target is None else self.get_data(target)
    return json.dumps(payload)
def search_catalog(self, target, query, limit=50, batching_start=0):
    """Run a catalog query against *target* via the bridge and return
    the deserialized brains.

    :param target: remote client identifier passed through to self().
    :param query: catalog query dict; 'batching_start' is injected.
    :param limit: maximum number of results requested.
    :param batching_start: offset written into the query.
    """
    path = '@@bridge-search-catalog'
    query['batching_start'] = batching_start
    request_body = {
        'query': json.dumps(query),
        'limit': limit,
    }
    response = self(target, path, data=request_body)
    results = json.loads(response.read())
    # The bridge reports the unbatched total in a response header.
    total = int(response.headers.get('X-total_results_length', '0'))
    return getUtility(IBrainSerializer).deserialize_brains(results, total)
def __call__(self):
    """Look up the object for the requested uid and return its data
    serialized as JSON.

    Returns JSON null when the uid does not resolve to an object,
    matching the behavior of the sibling uid-lookup view.
    """
    uid = self.request.get("uid")
    reference_catalog = getToolByName(self.context, "reference_catalog")
    obj = reference_catalog.lookupObject(uid)
    # lookupObject returns None for an unknown/missing uid; guard so
    # the view answers with JSON null instead of failing in get_data.
    if obj is not None:
        data = self.get_data(obj)
    else:
        data = None
    return json.dumps(data)
def _serialize_results(self, results):
    """Serialize catalog brains into a JSON string using the
    registered IBrainSerializer utility.
    """
    brain_serializer = getUtility(IBrainSerializer)
    serialized = brain_serializer.serialize_brains(results)
    return json.dumps(serialized)
def __call__(self):
    """Look up the object for the requested uid and return its data
    serialized as JSON.

    Returns JSON null when the uid does not resolve to an object,
    matching the behavior of the sibling uid-lookup view.
    """
    uid = self.request.get('uid')
    reference_catalog = getToolByName(self.context, 'reference_catalog')
    obj = reference_catalog.lookupObject(uid)
    # lookupObject returns None for an unknown/missing uid; guard so
    # the view answers with JSON null instead of failing in get_data.
    if obj is not None:
        data = self.get_data(obj)
    else:
        data = None
    return json.dumps(data)