import json

from django.core.urlresolvers import reverse
from django.http import HttpResponse

# rfc3339() and the models module (imported as m) are assumed to be imported
# from the surrounding project, as in the original code.


def status(request):
    """Return a JSON summary of the most recently created link and any crawl
    that is currently in progress."""
    link = m.Link.objects.all().order_by('-created')[0]
    update = {
        'wikipedia_url': link.wikipedia_page.url,
        'wikipedia_page_title': link.wikipedia_page.title,
        'target': link.target,
        'website_name': link.website.name,
        'website_url': link.website.url,
        'created': rfc3339(link.created),
        'linkypedia_url': 'http://' + request.get_host() +
                          reverse("website_summary", args=[link.website.id]),
    }

    # Report the website being crawled, if a crawl is still running
    # (finished=None).
    crawls = m.Crawl.objects.filter(finished=None).order_by('-started')
    if crawls.count() > 0:
        website = crawls[0].website
        update['current_crawl'] = {
            'name': website.name,
            'url': website.url,
            'link': website.get_absolute_url(),
        }

    return HttpResponse(json.dumps(update, indent=2),
                        mimetype='application/json')
def lookup(request):
    """Return JSON for all known links whose target matches the given URL."""
    url = request.REQUEST.get('url', None)
    results = []
    for link in m.Link.objects.filter(target=url):
        w = link.wikipedia_page
        results.append({
            'url': w.url,
            'title': w.title,
            'last_modified': rfc3339(w.last_modified),
        })
    return HttpResponse(json.dumps(results, indent=2),
                        mimetype='application/json')
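# A minimal usage sketch (not part of the original module): assuming the
# lookup view is routed at a path such as /api/lookup -- the actual URL
# pattern is not shown here -- a client could query it like this (Python 2,
# matching the Django era of this code):
#
#   import json, urllib
#   resp = urllib.urlopen('http://example.org/api/lookup?url=http://example.edu/page')
#   for hit in json.loads(resp.read()):
#       print hit['title'], hit['url'], hit['last_modified']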
def rfc3339_filter(d):
    """Format a datetime as an RFC 3339 string."""
    return rfc3339(d)