def reindex_batch(args, site):
    """Index the next batch of queued documents on the 'new_texts' index.

    Pulls up to BATCH_SIZE docids off the index's ``to_index`` queue, moves
    each to the ``indexed`` set, resolves the object, and indexes it.
    Docids whose path or object can no longer be resolved are skipped.
    """
    catalog = find_catalog(site)
    path_for = catalog.document_map.address_for_docid
    text_index = catalog['new_texts']
    queue = text_index.to_index
    done = text_index.indexed
    total = text_index.n_to_index
    # Progress offset: how many of ``total`` were handled in earlier batches.
    offset = total - len(queue)
    # Snapshot the batch first so removals below don't disturb iteration.
    batch = [queue[n] for n in xrange(min(BATCH_SIZE, len(queue)))]
    for n, docid in enumerate(batch):
        queue.remove(docid)
        done.add(docid)
        path = path_for(docid)
        if path is None:
            continue
        try:
            doc = find_model(site, path)
        except KeyError:
            log.warn("No object at path: %s", path)
            continue
        log.info("Reindexing (%d/%d) %s", n + offset + 1, total, path)
        text_index.index_doc(docid, doc)
        # Evict the ZODB ghost to keep memory flat over a long run.
        deactivate = getattr(doc, '_p_deactivate', None)
        if deactivate is not None:
            deactivate()
def test_registerModels(self):
    """Models registered via ``testing.registerModels`` are traversable
    and resolvable through ``find_model``; unknown paths raise KeyError."""
    ob1 = object()
    ob2 = object()
    from pyramid import testing
    testing.registerModels({'/ob1': ob1, '/ob2': ob2})
    from pyramid.interfaces import ITraverser
    adapter = self.registry.getAdapter(None, ITraverser)
    # Both registered objects traverse identically, so drive the
    # assertions from a table instead of repeating them.
    cases = [
        ('/ob1', ob1, (u'ob1',)),
        ('/ob2', ob2, (u'ob2',)),
    ]
    for path_info, expected, traversed in cases:
        result = adapter({'PATH_INFO': path_info})
        self.assertEqual(result['context'], expected)
        self.assertEqual(result['view_name'], '')
        self.assertEqual(result['subpath'], ())
        self.assertEqual(result['traversed'], traversed)
        self.assertEqual(result['virtual_root'], expected)
        self.assertEqual(result['virtual_root_path'], ())
    # An unregistered path must not resolve.
    self.assertRaises(KeyError, adapter, {'PATH_INFO': '/ob3'})
    from pyramid.traversal import find_model
    self.assertEqual(find_model(None, '/ob1'), ob1)
def add_page(context, request):
    """Add-page form view: create (or replace) a ``Page`` in the tree.

    On 'save': builds a Page from the form fields, inserts it at the
    submitted path (replacing any existing node and adopting its children,
    or creating intermediate folders for a new location), handles an
    optional image upload, indexes the page, and redirects to it.
    On 'cancel': redirects to 'backto' (or the context). Otherwise
    renders the empty form.
    """
    params = request.params
    if 'save' in params:
        # Construct page from the submitted form fields.
        page = Page(params['title'], params['body'])
        # Insert page into document tree.
        path = params['path']
        try:
            prev = find_model(context, path)
        except KeyError:
            # Nothing at ``path`` yet: insert the page at a new location,
            # creating intermediate folders as needed.
            # FIX: a list comprehension instead of filter() — under
            # Python 3, filter() returns an iterator with no .pop().
            segments = [segment for segment in path.split('/') if segment]
            page_name = segments.pop(-1)
            folder = context
            for name in segments:
                if name not in folder:
                    folder[name] = Folder()
                folder = folder[name]
            folder[page_name] = page
        else:
            # Replace the existing node, adopting its children, so that a
            # stray KeyError in this branch can no longer be mistaken for
            # "path not found" (the try now covers only find_model).
            for name, child in prev.items():
                page[name] = child
            folder, page_name = prev.__parent__, prev.__name__
            del folder[page_name]
            folder[page_name] = page
        # Handle image upload; an actual upload arrives as a FieldStorage
        # with a ``.file`` attribute, a missing/empty field does not.
        image = params.get('image')
        if hasattr(image, 'file'):
            mimetype = guess_type(image.filename)[0]
            page.upload_image(mimetype, image.file)
        index_doc(page)
        return HTTPFound(request.resource_url(page))
    elif 'cancel' in params:
        redirect_to = params['backto']
        if not redirect_to:
            redirect_to = request.resource_url(context)
        return HTTPFound(redirect_to)
    # Neither button pressed: show the blank form.
    backto = request.referer
    if backto is None:
        backto = ''
    return {
        'title': 'New Page',
        'img_url': None,
        'page': Page('Page Title', ''),
        'backto': backto,
    }
def _content_generator(root):
    """Yield one metadata row per document in the catalog's document map.

    Walks ``address_to_docid``; for each resolvable object, yields a list
    produced by applying every extractor in ``metadata_fields`` to it.
    Entries with a missing path, a failing lookup, or a falsy object are
    logged and skipped.
    """
    catalog = find_catalog(root)
    for path, docid in catalog.document_map.address_to_docid.items():
        if not path:
            log.error("No path for object with docid %s", docid)
            continue
        try:
            context = find_model(root, path)
        # FIX: narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit still propagate; any real lookup failure is logged
        # and the entry skipped, as before.
        except Exception:
            log.error("Error when fetching object at path: %s", path)
            continue
        if not context:
            log.error("No object at path: %s", path)
            continue
        # metadata_fields entries are pairs; index 1 is the extractor.
        yield [field[1](context) for field in metadata_fields]
def searchresults(context, request):
    """Full-text search view with manual batching.

    Reads the query and batch window from request params, searches the
    catalog, builds one metadata dict per hit in the current window, and
    returns a template dict with the batch, prev/next navigation info,
    total count, and any error message.
    """
    catalog = find_catalog(context)
    text = request.params.get('text')
    batch_size = int(request.params.get('batch_size', 20))
    batch_start = int(request.params.get('batch_start', 0))
    sort_index = request.params.get('sort_index', None)
    reverse = bool(request.params.get('reverse', False))
    message = ''
    if text is not None:
        if text:
            try:
                numdocs, docids = catalog.search(sort_index=sort_index,
                                                 reverse=reverse,
                                                 text=text)
                # Materialize so len()/re-iteration below are safe.
                docids = list(docids)
            except ParseError:
                numdocs, docids = 0, []
        else:
            # Form submitted with an empty query string.
            numdocs, docids = 0, []
            message = 'Bad query'
    else:
        # No 'text' param at all: initial page load, no search performed.
        numdocs, docids = 0, []
    # Walk all hits, keeping only those inside the [batch_start,
    # batch_start + batch_size] window. ``i`` is 1-based.
    i = 0
    batch = []
    if numdocs > 0:
        for docid in docids:
            i += 1
            if i > batch_start + batch_size:
                break
            # NOTE(review): with 1-based ``i``, ``i < batch_start`` lets the
            # hit at position ``batch_start`` appear in two consecutive
            # pages (suspected off-by-one; ``<=`` would make pages
            # disjoint) — confirm against the templates before changing.
            if i < batch_start:
                continue
            path = catalog.document_map.address_for_docid(docid)
            md = dict(catalog.document_map.get_metadata(docid))
            # Hits indexed from Sphinx docs use a 'sphinx:<url>' pseudo-path
            # and get a teaser snippet around the first query occurrence.
            if path.startswith('sphinx:'):
                scheme, rest = path.split(':', 1)
                if text.lower() in md['text'].lower():
                    firstpos = md['text'].lower().find(text.lower())
                else:
                    firstpos = 0
                # 150 chars of leading context, 300-char snippet total.
                start = firstpos - 150
                if start < 0:
                    start = 0
                teaser = '%s ...' % md['text'][start:start + 300]
                md['url'] = rest
                md['teaser'] = teaser
            else:
                # Regular content object: resolve it to build its URL.
                model = find_model(context, path)
                url = resource_url(model, request)
                md['url'] = url
            batch.append(md)

    def _batchURL(query, batch_start=0):
        # Build a URL for this same view with ``batch_start`` swapped in.
        # Mutates ``query`` in place (callers pass a throwaway dict).
        query['batch_start'] = batch_start
        return resource_url(context, request, request.view_name,
                            query=query)

    batch_info = {}
    # Previous-page link, or None when already on the first page.
    previous_start = batch_start - batch_size
    if previous_start < 0:
        previous_batch_info = None
    else:
        previous_end = previous_start + batch_size
        if previous_end > numdocs:
            previous_end = numdocs
        size = previous_end - previous_start
        previous_batch_info = {}
        query = {'text': text, 'reverse': reverse,
                 'batch_size': batch_size}
        previous_batch_info['url'] = _batchURL(query, previous_start)
        previous_batch_info['name'] = (
            'Previous %s entries (%s - %s)' % (
                size, previous_start + 1, previous_end))
    batch_info['previous_batch'] = previous_batch_info
    # Next-page link, or None when this window reaches the last hit.
    next_start = batch_start + batch_size
    if next_start >= numdocs:
        next_batch_info = None
    else:
        next_end = next_start + batch_size
        if next_end > numdocs:
            next_end = numdocs
        size = next_end - next_start
        next_batch_info = {}
        query = {'text': text, 'reverse': reverse,
                 'batch_size': batch_size}
        next_batch_info['url'] = _batchURL(query, next_start)
        next_batch_info['name'] = (
            'Next %s entries (%s - %s of %s)' % (
                size, next_start + 1, next_end, numdocs))
    batch_info['next_batch'] = next_batch_info
    # Truthy when either navigation link exists.
    batch_info['batching_required'] = next_batch_info or previous_batch_info
    return dict(
        api=API(context, request),
        batch=batch,
        batch_info=batch_info,
        numdocs=numdocs,
        message=message,
    )
def _callFUT(self, context, name):
    """Call the function under test: ``pyramid.traversal.find_model``."""
    from pyramid import traversal
    return traversal.find_model(context, name)