from django.http import (Http404, HttpResponse, HttpResponseNotFound,
                         HttpResponseServerError)

# fetch_and_clean, execute_search, record_matches and the SearchDocument
# model are defined elsewhere in this app.


def fetch_and_store(url):
    """Fetch and clean `url`, persist it as a SearchDocument, and return it."""
    try:
        (title, text) = fetch_and_clean(url)
    except Exception:
        # Any failure to fetch or parse the page is reported as a 404.
        raise Http404(url)
    doc = SearchDocument()
    doc.url = url
    doc.title = title
    doc.text = text
    doc.save()
    return doc
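# `fetch_and_clean` is not shown in this listing. What follows is a minimal,
# hypothetical sketch of such a helper, assuming it returns a (title, text)
# tuple of plain strings; the real implementation may differ.
from html.parser import HTMLParser
from urllib.request import urlopen


class _TextExtractor(HTMLParser):
    """Collect the <title> contents and the page's visible text."""

    def __init__(self):
        super().__init__()
        self.title_parts = []
        self.text_parts = []
        self._in_title = False

    def handle_starttag(self, tag, attrs):
        if tag == 'title':
            self._in_title = True

    def handle_endtag(self, tag):
        if tag == 'title':
            self._in_title = False

    def handle_data(self, data):
        (self.title_parts if self._in_title else self.text_parts).append(data)


def fetch_and_clean(url):
    # May raise UnicodeDecodeError, which the calling view re-raises.
    raw = urlopen(url).read().decode('utf-8')
    parser = _TextExtractor()
    parser.feed(raw)
    title = ''.join(parser.title_parts).strip()
    text = ' '.join(''.join(parser.text_parts).split())
    return (title, text)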
# The original listing resumes mid-function here; the view's header and the
# document lookup at the top of the try block are reconstructed below and
# marked as assumptions.
def search_view(request, doctype, url=None, uuid=None, title=None, text=None):  # hypothetical signature
    doc = None  # assumed initialization; `if not doc` below relies on it
    try:
        # The lookup that binds `doc` is elided in the original, e.g.
        # doc = SearchDocument.objects.get(...)
        text = doc.text
    except UnicodeDecodeError:
        # Propagate encoding failures to the caller unchanged.
        raise
    except SearchDocument.DoesNotExist:
        # No stored document yet; one may be created from `text` below.
        pass
    except Exception as e:
        return HttpResponseServerError(str(e))

    if not doc:
        if not text:
            # Nothing stored and nothing supplied: report what we looked for.
            return HttpResponseNotFound(str(url or uuid))
        else:
            doc = SearchDocument()
            doc.text = text
            if title:
                doc.title = title
            if url:
                doc.url = url
            ua_string = request.META.get('HTTP_USER_AGENT')
            if ua_string is not None:
                # The user_agent column holds at most 255 characters.
                doc.user_agent = ua_string[:255]
            doc.save()

    # The actual proxying:
    response = execute_search(doc, doctype)
    if isinstance(response, HttpResponse):
        return response
    record_matches(doc, response)
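# The SearchDocument model itself is not shown. This is a hypothetical sketch
# of the fields the code above implies (url, title, text, and a user_agent
# column capped at 255 characters); the real model may differ, and the `uuid`
# the view accepts suggests it may carry a uuid field as well.
from django.db import models


class SearchDocument(models.Model):
    url = models.URLField(blank=True)
    title = models.CharField(max_length=255, blank=True)
    text = models.TextField()
    user_agent = models.CharField(max_length=255, blank=True)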