def active(http_request):
    """
    Get a list of currently active browsers.

    Return value
    ~~~~~~~~~~~~
    * active list (browser version identifiers)

    Each identifier consists of 4 parts joined with underscores:

    * platform string (e.g. linux / windows / mac-os)
    * browser string (e.g. firefox / msie / opera / safari)
    * major int (major version, e.g. 1)
    * minor int (minor version, e.g. 5)
    """
    # Only factories that polled recently are considered alive.
    factories = Factory.objects.filter(last_poll__gte=last_poll_timeout())
    preload_foreign_keys(factories, operating_system__platform=True)
    browsers = Browser.objects.filter(factory__in=factories, active=True)
    preload_foreign_keys(browsers, factory=factories)
    identifiers = set()
    for browser in browsers:
        parts = (browser.factory.operating_system.platform.name,
                 browser.browser_group.name,
                 str(browser.major),
                 str(browser.minor))
        identifiers.add('_'.join(parts).lower().replace(' ', '-'))
    # Duplicates are collapsed by the set; return them sorted.
    return sorted(identifiers)
def overview(http_request):
    """
    Show statistics about pending requests.
    """
    # Pending = no screenshot uploaded yet and the request group
    # has not expired.
    requests = Request.objects.filter(screenshot__isnull=True,
        request_group__expire__gt=datetime.now())
    # Map (platform_id, browser_group_id, major, minor) -> pending count.
    browser_requests = {}
    platform_ids = set()
    browser_group_ids = set()
    for request in requests:
        browser = (request.platform_id, request.browser_group_id,
                   request.major, request.minor)
        browser_requests[browser] = browser_requests.get(browser, 0) + 1
        platform_ids.add(request.platform_id)
        browser_group_ids.add(request.browser_group_id)
    # Resolve the collected ids to model instances for display.
    platforms = dict([(p.id, p)
                      for p in Platform.objects.filter(id__in=platform_ids)])
    browser_groups = dict([(b.id, b)
        for b in BrowserGroup.objects.filter(id__in=browser_group_ids)])
    # Only browsers that actually upload screenshots matter for the
    # upload-rate columns.
    browsers = Browser.objects.filter(browser_group__in=browser_group_ids,
                                      uploads_per_day__gt=0)
    preload_foreign_keys(browsers, factory__operating_system=True)
    browser_list = []
    for key in browser_requests.keys():
        platform_id, browser_group_id, major, minor = key
        uploads_per_hour, uploads_per_day = count_uploads(browsers,
            platform_id, browser_group_id, major, minor)
        browser_list.append(
            {
                "platform": platforms[platform_id],
                "browser_group": browser_groups[browser_group_id],
                "major": major,
                "minor": minor,
                # Empty string hides zero counts in the template.
                "uploads_per_hour": uploads_per_hour or "",
                "uploads_per_day": uploads_per_day or "",
                "pending_requests": browser_requests[key],
            }
        )
    # locals() becomes the template context, so every local name above
    # is visible to the template.
    return render_to_response("requests/overview.html", locals(),
        context_instance=RequestContext(http_request))
def recent_screenshots(user=None):
    """
    Iterator for the most recent screenshots, one per website.
    """
    screenshots = list(Screenshot.objects.recent(user))
    preload_foreign_keys(screenshots, website=True)
    preload_foreign_keys(screenshots, browser__browser_group=True)
    for screenshot in screenshots:
        nsfw = (hasattr(settings, 'PROFANITIES_ALLOWED') and
                screenshot.website.profanities > settings.PROFANITIES_ALLOWED)
        if nsfw:
            # Hide screenshots that are not safe for work
            continue
        group = screenshot.browser.browser_group
        if group.unusual or group.terminal:
            # Find a better representative for this website
            replacement = Screenshot.objects.filter(
                website=screenshot.website_id,
                browser__browser_group__unusual=False,
                browser__browser_group__terminal=False,
                uploaded__gte=screenshot.uploaded - timedelta(minutes=30)
                ).order_by('-uploaded')[:1]
            if len(replacement):
                # Carry over the preloaded website to the replacement
                # so it does not trigger an extra query.
                cached_website = screenshot._website_cache
                screenshot = replacement[0]
                screenshot._website_cache = cached_website
        yield screenshot
def details(http_request, request_group_id):
    """
    Show details about the selected request group.
    """
    request_group = get_object_or_404(RequestGroup, id=request_group_id)
    now = datetime.now()
    # Seconds since submission (manual total, days folded in).
    elapsed = now - request_group.submitted
    elapsed = elapsed.seconds + elapsed.days * 24 * 3600
    website = request_group.website
    matching_browsers = request_group.matching_browsers()
    browser_groups = BrowserGroup.objects.all()
    preload_foreign_keys(matching_browsers, browser_group=browser_groups)
    requests = request_group.request_set.all()
    preload_foreign_keys(requests, browser_group=browser_groups)
    # Per-platform list of (browser, status/estimate) rows.
    platform_queue_estimates = []
    for platform in Platform.objects.all():
        estimates = []
        for request in requests:
            if request.platform_id == platform.id:
                status = request.status()
                if request_group.expire < now and not status:
                    status = _("expired")
                if not status:
                    estimate = request.queue_estimate(matching_browsers)
                    if estimate is None:
                        status = _("unavailable")
                    else:
                        # Never promise less than 3 minutes.
                        seconds = max(180, estimate - elapsed)
                        # Rounded minutes (integer division under Python 2).
                        minutes = (seconds + 30) / 60
                        # % locals() picks up the `minutes` variable above.
                        status = _("%(minutes)d min") % locals()
                estimates.append({"browser": request.browser_string(),
                                  "status": status})
        if estimates:
            estimates.sort()
            platform_queue_estimates.append((platform, estimates))
    # locals() becomes the template context.
    return render_to_response("requests/details.html", locals(),
        context_instance=RequestContext(http_request))
def details(http_request, url):
    """
    Show details for a selected website.
    """
    page = 1
    if url.isdigit():
        # A numeric URL part is a website id, not an address.
        website = get_object_or_404(Website, id=int(url))
        if 'page' in http_request.GET:
            page = int(http_request.GET['page'])
    else:
        # Re-attach the query string that Django split off the path.
        if http_request.META['QUERY_STRING']:
            url += '?' + http_request.META['QUERY_STRING']
        url = normalize_url(url)  # Replace ' ' with '%20' etc.
        try:
            website = Website.objects.get(url=url)
        except Website.DoesNotExist:
            return unknown_url(http_request, url)
    # Use caching to reduce number of SQL queries
    domain = website.domain
    browsers = Browser.objects.all()
    preload_foreign_keys(browsers, browser_group=True)
    factories = Factory.objects.all()
    preload_foreign_keys(factories, operating_system=True)
    request_groups = list(website.requestgroup_set.all())
    paginator = Paginator(request_groups, 5, orphans=2)
    if page < 1 or page > paginator.num_pages:
        raise Http404('Requested page out of range.')
    request_group_list = paginator.page(page).object_list
    # Pagination links; the current page gets an extra CSS class.
    pages_list = []
    if paginator.num_pages > 1:
        for number in range(1, paginator.num_pages + 1):
            extra_classes = ''
            if page == number:
                extra_classes = ' current'
            pages_list.append(
                u'<a class="page%s" href="%s?page=%d">%d</a>' % (
                    extra_classes, website.get_numeric_url(), number, number))
    # Show a donation teaser unless the user already has priority.
    user_has_priority = False
    if 'shotserver04.priority' in settings.INSTALLED_APPS:
        if not http_request.user.is_anonymous():
            user_has_priority = http_request.user.userpriority_set.filter(
                expire__gte=datetime.now()).count()
        if not user_has_priority:
            website_details_head_extra = """
<p class="admonition new">
<a href="/priority/">Support the Browsershots project!<br />
Get a month of priority processing for 10 Euros or 15 Dollars.</a>
</p>
""".strip()
    # Attach per-request caches so request group methods can avoid
    # extra SQL queries while rendering.
    for index, request_group in enumerate(request_groups):
        request_group._http_request = http_request
        request_group._index = len(request_groups) - index
        request_group._browsers_cache = browsers
        request_group._factories_cache = factories
        request_group._website_cache = website
        request_group._website_cache._domain_cache = domain
    # Get other websites on the same domain
    domain_website_list = domain.website_set.exclude(id=website.id)
    # locals() becomes the template context.
    return render_to_response('websites/details.html', locals(),
        context_instance=RequestContext(http_request))
def previews(self):
    """
    Thumbnails of screenshots for this request group.
    """
    done_requests = self.request_set.filter(screenshot__isnull=False)
    # Preload browsers and factories from cache.
    self.preload_cache()
    preload_foreign_keys(done_requests,
                         screenshot__browser=self._browsers_cache)
    preload_foreign_keys(done_requests,
                         screenshot__factory=self._factories_cache)
    # Pair each screenshot with its id so they can be sorted by id.
    pairs = [(request.screenshot_id, request.screenshot)
             for request in done_requests]
    if pairs:
        total_bytes = sum([shot.bytes or 0 for key, shot in pairs])
        pairs.sort()
        # Scale all previews to the tallest 80px-wide thumbnail.
        max_height = max([shot.height * 80 / shot.width
                          for key, shot in pairs])
        divs = [shot.preview_div(height=max_height, caption=True)
                for key, shot in pairs]
        if len(pairs) > 1:
            divs.append(self.zip_link(len(pairs), total_bytes))
        return mark_safe('\n'.join(divs))
    if self.is_pending():
        return mark_safe(
            u'<p class="admonition hint">%s<br />\n%s</p>' % (
                _("Your screenshots will appear here when they are uploaded."),
                bracket_link(self.website.get_absolute_url(),
                    _("[Reload this page] or bookmark it and come back later."))))
    hint = _(u"Your screenshot requests have expired.")
    return mark_safe(u'<p class="admonition warning">%s</p>' % hint)
def user_report(http_request, username):
    """
    Show the 50 most recent request groups submitted by a user.

    Visible only to the user in question or to staff members.
    """
    user = get_object_or_404(User, username=username)
    if user != http_request.user and not http_request.user.is_staff:
        return error_page(http_request, _("Access denied"),
            _("Only staff members can see this page."))
    request_groups_list = user.requestgroup_set.order_by('-submitted')[:50]
    preload_foreign_keys(request_groups_list, website=True)
    # locals() becomes the template context.
    return render_to_response('status/user_report.html', locals(),
        context_instance=RequestContext(http_request))
def preload_cache(self):
    """
    Load database objects to save many SQL queries later.

    Idempotent: each cache attribute is built at most once.
    """
    if not hasattr(self, '_browsers_cache'):
        all_browsers = Browser.objects.all()
        preload_foreign_keys(all_browsers, browser_group=True)
        self._browsers_cache = all_browsers
    if not hasattr(self, '_factories_cache'):
        all_factories = Factory.objects.all()
        preload_foreign_keys(all_factories, operating_system=True)
        self._factories_cache = all_factories
def matching_browsers(self):
    """
    Get active browsers that are compatible with this request group.
    """
    compatible_factories = self.matching_factories()
    candidates = Browser.objects.filter(
        factory__in=compatible_factories, active=True)
    # Keep only browsers whose Java / JavaScript / Flash support
    # satisfies the requested feature levels.
    matching = [browser for browser in candidates
                if satisfies(browser.java_id, self.java_id)
                and satisfies(browser.javascript_id, self.javascript_id)
                and satisfies(browser.flash_id, self.flash_id)]
    preload_foreign_keys(matching, factory=compatible_factories)
    return matching
def previous_websites(requests):
    """
    Show only websites with most requests.

    Takes an iterable of Request objects and returns at most ten
    distinct websites, ordered by descending request count. Each
    returned website carries a `request_count` attribute.
    """
    requests = requests[:200]  # Limit sort effort.
    preload_foreign_keys(requests, request_group__website=True)
    websites = set()
    for request in requests:
        website = request.request_group.website
        if hasattr(website, 'request_count'):
            website.request_count += 1
        else:
            # Start at 1: the first sighting is a request too.
            # (Starting at 0 undercounted every website by one;
            # the relative order was unaffected, but the exposed
            # request_count attribute was wrong.)
            website.request_count = 1
            websites.add(website)
    websites = list(websites)
    if len(websites) <= 1:
        return websites
    websites.sort(key=lambda website: -website.request_count)
    return websites[:10]  # Show only the most useful results.
def matching_browsers(self):
    """
    Get active browsers that are compatible with this request group.
    """
    factories = self.matching_factories()
    active = Browser.objects.filter(factory__in=factories, active=True)

    def compatible(browser):
        # All three feature requirements must be satisfied.
        return (satisfies(browser.java_id, self.java_id) and
                satisfies(browser.javascript_id, self.javascript_id) and
                satisfies(browser.flash_id, self.flash_id))

    matches = [browser for browser in active if compatible(browser)]
    preload_foreign_keys(matches, factory=factories)
    return matches
def overview(http_request):
    """
    Show statistics about pending requests.
    """
    # Pending = no screenshot uploaded yet and group not expired.
    requests = Request.objects.filter(screenshot__isnull=True,
        request_group__expire__gt=datetime.now())
    # Map (platform_id, browser_group_id, major, minor) -> pending count.
    browser_requests = {}
    platform_ids = set()
    browser_group_ids = set()
    for request in requests:
        browser = (request.platform_id, request.browser_group_id,
                   request.major, request.minor)
        browser_requests[browser] = browser_requests.get(browser, 0) + 1
        platform_ids.add(request.platform_id)
        browser_group_ids.add(request.browser_group_id)
    # Resolve the collected ids to model instances for display.
    platforms = dict([(p.id, p)
                      for p in Platform.objects.filter(id__in=platform_ids)])
    browser_groups = dict([
        (b.id, b)
        for b in BrowserGroup.objects.filter(id__in=browser_group_ids)
    ])
    # Only browsers that actually upload screenshots matter for the
    # upload-rate columns.
    browsers = Browser.objects.filter(browser_group__in=browser_group_ids,
                                      uploads_per_day__gt=0)
    preload_foreign_keys(browsers, factory__operating_system=True)
    browser_list = []
    for key in browser_requests.keys():
        platform_id, browser_group_id, major, minor = key
        uploads_per_hour, uploads_per_day = count_uploads(
            browsers, platform_id, browser_group_id, major, minor)
        browser_list.append({
            'platform': platforms[platform_id],
            'browser_group': browser_groups[browser_group_id],
            'major': major,
            'minor': minor,
            # Empty string hides zero counts in the template.
            'uploads_per_hour': uploads_per_hour or '',
            'uploads_per_day': uploads_per_day or '',
            'pending_requests': browser_requests[key],
        })
    # locals() becomes the template context.
    return render_to_response('requests/overview.html', locals(),
        context_instance=RequestContext(http_request))
def overview(http_request):
    """
    List recently requested websites, with keyword search filter.
    """
    request_group_list = RequestGroup.objects
    search_query = http_request.GET.get('q', '')
    # Each whitespace-separated keyword narrows the result further.
    for search in search_query.split():
        if search.islower():
            # Case insensitive search
            request_group_list = request_group_list.filter(
                website__url__icontains=search)
        else:
            # Case sensitive search if mixed case in query
            request_group_list = request_group_list.filter(
                website__url__contains=search)
    # Anonymous visitors see anonymous requests only; logged-in users
    # see only their own.
    if http_request.user.is_anonymous():
        request_group_list = request_group_list.filter(user__isnull=True)
    else:
        request_group_list = request_group_list.filter(user=http_request.user)
    request_group_list = request_group_list.order_by('-submitted')[:60]
    preload_foreign_keys(request_group_list, website=True)
    # locals() becomes the template context.
    return render_to_response('websites/overview.html', locals(),
        context_instance=RequestContext(http_request))
def download_zip(http_request, request_group_id):
    """
    Output a ZIP file containing all screenshots in a request group.
    """
    group = get_object_or_404(RequestGroup, id=request_group_id)
    completed = group.request_set.filter(screenshot__isnull=False)
    preload_foreign_keys(completed, screenshot=True)
    # Build the archive in an unnamed temporary file.
    spool = tempfile.TemporaryFile()
    bundle = zipfile.ZipFile(spool, 'w', zipfile.ZIP_STORED)
    for request in completed:
        source_path = storage.png_filename(request.screenshot.hashkey)
        if os.path.exists(source_path):
            bundle.write(source_path, str(request.screenshot.png_filename()))
    bundle.close()
    # Send result to browser
    response = HttpResponse(FileWrapper(spool),
                            content_type='application/zip')
    response['Content-Disposition'] = 'attachment'  # ; filename=screenshots.zip
    response['Content-Length'] = spool.tell()
    spool.seek(0)
    return response
def details(http_request, request_group_id):
    """
    Show details about the selected request group.
    """
    request_group = get_object_or_404(RequestGroup, id=request_group_id)
    now = datetime.now()
    # Seconds since submission (manual total, days folded in).
    elapsed = now - request_group.submitted
    elapsed = elapsed.seconds + elapsed.days * 24 * 3600
    website = request_group.website
    matching_browsers = request_group.matching_browsers()
    browser_groups = BrowserGroup.objects.all()
    preload_foreign_keys(matching_browsers, browser_group=browser_groups)
    requests = request_group.request_set.all()
    preload_foreign_keys(requests, browser_group=browser_groups)
    # Per-platform list of (browser, status/estimate) rows.
    platform_queue_estimates = []
    for platform in Platform.objects.all():
        estimates = []
        for request in requests:
            if request.platform_id == platform.id:
                status = request.status()
                if request_group.expire < now and not status:
                    status = _("expired")
                if not status:
                    estimate = request.queue_estimate(matching_browsers)
                    if estimate is None:
                        status = _("unavailable")
                    else:
                        # Never promise less than 3 minutes.
                        seconds = max(180, estimate - elapsed)
                        # Rounded minutes (integer division under Python 2).
                        minutes = (seconds + 30) / 60
                        # % locals() picks up the `minutes` variable above.
                        status = _("%(minutes)d min") % locals()
                estimates.append({
                    'browser': request.browser_string(),
                    'status': status,
                })
        if estimates:
            estimates.sort()
            platform_queue_estimates.append((platform, estimates))
    # locals() becomes the template context.
    return render_to_response('requests/details.html', locals(),
        context_instance=RequestContext(http_request))
def previews(self):
    """
    Thumbnails of screenshots for this request group.
    """
    uploaded = self.request_set.filter(screenshot__isnull=False)
    # Preload browsers and factories from cache.
    self.preload_cache()
    preload_foreign_keys(uploaded, screenshot__browser=self._browsers_cache)
    preload_foreign_keys(uploaded, screenshot__factory=self._factories_cache)
    # Collect (id, screenshot) pairs, ordered by screenshot id.
    by_id = sorted([(request.screenshot_id, request.screenshot)
                    for request in uploaded])
    if not by_id:
        if self.is_pending():
            return mark_safe(u'<p class="admonition hint">%s<br />\n%s</p>' % (
                _("Your screenshots will appear here when they are uploaded."),
                bracket_link(
                    self.website.get_absolute_url(),
                    _("[Reload this page] or bookmark it and come back later.")
                )))
        hint = _(u"Your screenshot requests have expired.")
        return mark_safe(u'<p class="admonition warning">%s</p>' % hint)
    total_bytes = sum([shot.bytes or 0 for key, shot in by_id])
    # Scale all previews to the tallest 80px-wide thumbnail.
    max_height = max([shot.height * 80 / shot.width for key, shot in by_id])
    parts = [shot.preview_div(height=max_height, caption=True)
             for key, shot in by_id]
    if len(by_id) > 1:
        parts.append(self.zip_link(len(by_id), total_bytes))
    return mark_safe('\n'.join(parts))
def download_zip(http_request, request_group_id):
    """
    Output a ZIP file containing all screenshots in a request group.
    """
    request_group = get_object_or_404(RequestGroup, id=request_group_id)
    uploaded = request_group.request_set.filter(screenshot__isnull=False)
    preload_foreign_keys(uploaded, screenshot=True)
    # Assemble the ZIP archive in an anonymous temporary file.
    temp_file = tempfile.TemporaryFile()
    archive = zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_STORED)
    for request in uploaded:
        png_path = storage.png_filename(request.screenshot.hashkey)
        if not os.path.exists(png_path):
            # Screenshot file is missing on disk; skip it.
            continue
        archive.write(png_path, str(request.screenshot.png_filename()))
    archive.close()
    # Send result to browser
    response = HttpResponse(FileWrapper(temp_file),
                            content_type='application/zip')
    response[
        'Content-Disposition'] = 'attachment'  # ; filename=screenshots.zip
    response['Content-Length'] = temp_file.tell()
    temp_file.seek(0)
    return response
def matching_factories(self):
    """
    Get active factories that are compatible with this request group.
    """
    def screen_supported(factory):
        # Check whichever screen constraints this request specifies.
        if self.width and self.height:
            return factory.supports_screen_size(self.width, self.height)
        if self.width:
            return factory.supports_screen_width(self.width)
        if self.height:
            return factory.supports_screen_height(self.height)
        return True

    def depth_supported(factory):
        if self.bits_per_pixel:
            return factory.supports_color_depth(self.bits_per_pixel)
        return True

    alive = Factory.objects.filter(last_poll__gte=last_poll_timeout())
    result = [factory for factory in alive
              if screen_supported(factory) and depth_supported(factory)]
    preload_foreign_keys(result, operating_system=True)
    return result
def matching_factories(self):
    """
    Get active factories that are compatible with this request group.
    """
    candidates = Factory.objects.filter(
        last_poll__gte=last_poll_timeout())
    matches = []
    for factory in candidates:
        # Screen constraints: both dimensions, width only, height only,
        # or none at all.
        if self.width and self.height:
            screen_ok = factory.supports_screen_size(self.width, self.height)
        elif self.width:
            screen_ok = factory.supports_screen_width(self.width)
        elif self.height:
            screen_ok = factory.supports_screen_height(self.height)
        else:
            screen_ok = True
        if not screen_ok:
            continue
        # Color depth is only checked when the request specifies one.
        if self.bits_per_pixel:
            if not factory.supports_color_depth(self.bits_per_pixel):
                continue
        matches.append(factory)
    preload_foreign_keys(matches, operating_system=True)
    return matches
def queue_estimate(self):
    """
    One-line info for estimated remaining queue wait.

    Returns '' when no estimate is available; otherwise an HTML
    list item (marked safe) with the estimate and a link.
    """
    now = datetime.now()
    if self.priority > 0:
        # Priority processing: promise 1 to 3 minutes.
        min_seconds = 60
        max_seconds = 180
        link = u'<a href="/priority/">%s</a>' % capfirst(_("priority"))
    else:
        self.preload_cache()
        browsers = self.matching_browsers()
        preload_foreign_keys(browsers, browser_group=True)
        requests = self.request_set.filter(screenshot__isnull=True)
        preload_foreign_keys(requests, browser_group=True)
        # Seconds since this request group was submitted.
        elapsed = now - self.submitted
        elapsed = elapsed.seconds + elapsed.days * 24 * 3600
        estimates = []
        for request in requests:
            estimate = request.queue_estimate(browsers)
            if estimate:
                estimates.append(estimate - elapsed)
        if not estimates:
            return ''
        # Never promise less than 3 minutes.
        min_seconds = max(180, min(estimates) + 30)
        max_seconds = max(180, max(estimates) + 30)
        link = u'<a href="%s">%s</a>' % (self.get_absolute_url(),
                                         capfirst(_("details")))
    if min_seconds == max_seconds:
        estimate = timeuntil(now + timedelta(seconds=min_seconds))
    else:
        min_interval = timeuntil(now + timedelta(seconds=min_seconds))
        max_interval = timeuntil(now + timedelta(seconds=max_seconds))
        # % locals() supplies min_interval / max_interval to the
        # translated format string.
        estimate = _("%(min_interval)s to %(max_interval)s") % locals()
    return mark_safe(u'<li>%s: %s (%s)</li>' % (capfirst(_("queue estimate")),
                                                estimate, link))
def queue_estimate(self):
    """
    One-line info for estimated remaining queue wait.

    Returns '' when no estimate is available; otherwise an HTML
    list item (marked safe) with the estimate and a link.
    """
    now = datetime.now()
    if self.priority > 0:
        # Priority processing: promise 1 to 3 minutes.
        min_seconds = 60
        max_seconds = 180
        link = u'<a href="/priority/">%s</a>' % capfirst(_("priority"))
    else:
        self.preload_cache()
        browsers = self.matching_browsers()
        preload_foreign_keys(browsers, browser_group=True)
        requests = self.request_set.filter(screenshot__isnull=True)
        preload_foreign_keys(requests, browser_group=True)
        # Seconds since this request group was submitted.
        elapsed = now - self.submitted
        elapsed = elapsed.seconds + elapsed.days * 24 * 3600
        estimates = []
        for request in requests:
            estimate = request.queue_estimate(browsers)
            if estimate:
                estimates.append(estimate - elapsed)
        if not estimates:
            return ''
        # Never promise less than 3 minutes.
        min_seconds = max(180, min(estimates) + 30)
        max_seconds = max(180, max(estimates) + 30)
        link = u'<a href="%s">%s</a>' % (
            self.get_absolute_url(), capfirst(_("details")))
    if min_seconds == max_seconds:
        estimate = timeuntil(now + timedelta(seconds=min_seconds))
    else:
        min_interval = timeuntil(now + timedelta(seconds=min_seconds))
        max_interval = timeuntil(now + timedelta(seconds=max_seconds))
        # % locals() supplies min_interval / max_interval to the
        # translated format string.
        estimate = _("%(min_interval)s to %(max_interval)s") % locals()
    return mark_safe(u'<li>%s: %s (%s)</li>' % (
        capfirst(_("queue estimate")), estimate, link))
def details(http_request, name):
    """
    Get detailed information about a screenshot factory.
    """
    factory = get_object_or_404(Factory, name=name)
    # Bind each form only to the POST data that belongs to it.
    factory_form = EditFactoryForm(
        'submit_details' in http_request.POST and http_request.POST or None,
        instance=factory)
    screensize_form = ScreenSizeForm(
        'add_size' in http_request.POST and http_request.POST or None)
    colordepth_form = ColorDepthForm(
        'add_depth' in http_request.POST and http_request.POST or None)
    if http_request.POST:
        response = details_post(http_request, factory,
                                factory_form, screensize_form,
                                colordepth_form)
        if response:
            return response
    # Result messages after a redirect, e.g. "?result=...".
    result = http_request.GET.get('result', '')
    if '_factory_' in result:
        factory_result = results.message(result)
    admin_logged_in = http_request.user.id == factory.admin_id
    browser_list = factory.browser_set.all()
    if not admin_logged_in:
        # Visitors only see active browsers.
        browser_list = browser_list.filter(active=True)
    preload_foreign_keys(browser_list, browser_group=True,
                         engine=True, javascript=True, java=True, flash=True)
    browser_list = list(browser_list)
    browser_list.sort(key=lambda browser: (unicode(browser), browser.id))
    if '_browser_' in result:
        result_id = int(result.split('_')[-1])
        highlight = results.filter(browser_list, result_id)
        browser_result = results.message(result, highlight)
    screensize_list = factory.screensize_set.all()
    if '_screen_size_' in result:
        result_id = result.split('_')[-1]
        highlight = results.filter(screensize_list, result_id)
        # NOTE(review): highlight is computed but not passed here,
        # unlike the browser result above -- confirm this is intended.
        screen_size_result = results.message(result)
    colordepth_list = factory.colordepth_set.all()
    if '_color_depth_' in result:
        result_id = int(result.split('_')[-1])
        highlight = results.filter(colordepth_list,
            lambda c: c.bits_per_pixel == result_id)
        # NOTE(review): result_id is passed where the other calls pass
        # highlight -- verify against results.message's signature.
        color_depth_result = results.message(result, result_id)
    screenshot_list = factory.screenshot_set.all()
    if len(screenshot_list.order_by()[:1]):
        # Show anonymous screenshots plus the user's own, and filter
        # by the profanity threshold when one is configured.
        q = Q(user__isnull=True)
        if not http_request.user.is_anonymous():
            q |= Q(user=http_request.user)
        if hasattr(settings, 'PROFANITIES_ALLOWED'):
            q &= Q(website__profanities__lte=settings.PROFANITIES_ALLOWED)
        screenshot_list = screenshot_list.filter(q)
        screenshot_list = screenshot_list.order_by('-id')[:10]
    else:
        screenshot_list = []
    preload_foreign_keys(screenshot_list, browser=browser_list)
    # Show the command column only to the admin, and only when at
    # least one browser has a custom command configured.
    show_commands = admin_logged_in and True in [
        bool(browser.command) for browser in browser_list]
    problems_list = ProblemReport.objects.filter(
        screenshot__factory=factory)[:10]
    preload_foreign_keys(problems_list, screenshot=True)
    for problem in problems_list:
        problem.help_available = problem.code in CODE_HELP_AVAILABLE
    if '_problem_report_' in result:
        result_id = int(result.split('_')[-1])
        highlight = results.filter(problems_list, result_id)
        problem_report_result = u' '.join((
            _("Thanks for your feedback!"),
            _("The administrator of this screenshot factory will be notified.")))
    errors_list = factory.factoryerror_set.all()[:10]
    for error in errors_list:
        error.help_available = error.code in CODE_HELP_AVAILABLE
    # locals() becomes the template context.
    return render_to_response('factories/details.html', locals(),
        context_instance=RequestContext(http_request))
def status(http_request, request_group_id):
    """
    List the status of all screenshot requests in a group.

    Arguments
    ~~~~~~~~~
    * id int (request group id from requests.submit)

    Return value
    ~~~~~~~~~~~~
    * status list (request status for each requested browser)

    The list will contain a dictionary for each browser, with the
    following entries:

    * browser string (browser name)
    * status string (pending / starting / loading / uploaded / failed)
    * seconds int (estimated or actual time between request and upload)
    * hashkey string (after the screenshot is uploaded)

    You can use the hashkey to generate the resulting PNG file URL,
    for example if the hashkey is beef1234:

    * http://api.browsershots.org/png/original/be/beef1234.png
    * http://api.browsershots.org/png/512/be/beef1234.png
    * http://api.browsershots.org/png/160/be/beef1234.png

    The /be/ part is the first two characters of the hashkey.
    Normally, the hashkey consists of 32 random lowercase hex
    characters.
    """
    try:
        request_group = RequestGroup.objects.get(id=request_group_id)
    except RequestGroup.DoesNotExist:
        raise Fault(404, "Request group not found.")
    results = []
    matching_browsers = request_group.matching_browsers()
    requests = request_group.request_set.all()
    preload_foreign_keys(requests, browser_group=True, platform=True)
    this_lock_timeout = lock_timeout()
    for request in requests:
        # Build the browser identifier, e.g. "linux_firefox_1_5".
        platform_name = request.platform.name.lower()
        browser_name = request.browser_group.name.lower()
        major = str(request.major)
        minor = str(request.minor)
        name = '_'.join((platform_name, browser_name, major, minor))
        name = name.replace(' ', '-')
        hashkey = ''
        # Branch order matters: an expired lock means failure even if
        # the request was redirected earlier.
        if request.screenshot_id is not None:
            status = 'uploaded'
            # NOTE: timedelta.seconds ignores whole days.
            seconds = (request.screenshot.uploaded -
                       request_group.submitted).seconds
            hashkey = request.screenshot.hashkey
        elif request.locked and request.locked < this_lock_timeout:
            status = 'failed'
            seconds = 0
        elif request.redirected:
            # Factory has fetched the page; allow 45s for rendering.
            status = 'loading'
            seconds = (request.redirected -
                       request_group.submitted).seconds + 45
        elif request.locked:
            # Factory has claimed the request but not redirected yet.
            status = 'starting'
            seconds = (request.locked - request_group.submitted).seconds + 60
        else:
            status = 'pending'
            seconds = request.queue_estimate(matching_browsers) or 0
        results.append({'browser': name,
                        'status': status,
                        'seconds': seconds,
                        'hashkey': hashkey,
                        })
    return results
def status(http_request, request_group_id):
    """
    List the status of all screenshot requests in a group.

    Arguments
    ~~~~~~~~~
    * id int (request group id from requests.submit)

    Return value
    ~~~~~~~~~~~~
    * status list (request status for each requested browser)

    The list will contain a dictionary for each browser, with the
    following entries:

    * browser string (browser name)
    * status string (pending / starting / loading / uploaded / failed)
    * seconds int (estimated or actual time between request and upload)
    * hashkey string (after the screenshot is uploaded)

    You can use the hashkey to generate the resulting PNG file URL,
    for example if the hashkey is beef1234:

    * http://api.browsershots.org/png/original/be/beef1234.png
    * http://api.browsershots.org/png/512/be/beef1234.png
    * http://api.browsershots.org/png/160/be/beef1234.png

    The /be/ part is the first two characters of the hashkey.
    Normally, the hashkey consists of 32 random lowercase hex
    characters.
    """
    try:
        request_group = RequestGroup.objects.get(id=request_group_id)
    except RequestGroup.DoesNotExist:
        raise Fault(404, "Request group not found.")
    results = []
    matching_browsers = request_group.matching_browsers()
    requests = request_group.request_set.all()
    preload_foreign_keys(requests, browser_group=True, platform=True)
    this_lock_timeout = lock_timeout()
    for request in requests:
        # Build the browser identifier, e.g. "linux_firefox_1_5".
        platform_name = request.platform.name.lower()
        browser_name = request.browser_group.name.lower()
        major = str(request.major)
        minor = str(request.minor)
        name = '_'.join((platform_name, browser_name, major, minor))
        name = name.replace(' ', '-')
        hashkey = ''
        # Branch order matters: an expired lock means failure even if
        # the request was redirected earlier.
        if request.screenshot_id is not None:
            status = 'uploaded'
            # NOTE: timedelta.seconds ignores whole days.
            seconds = (request.screenshot.uploaded -
                       request_group.submitted).seconds
            hashkey = request.screenshot.hashkey
        elif request.locked and request.locked < this_lock_timeout:
            status = 'failed'
            seconds = 0
        elif request.redirected:
            # Factory has fetched the page; allow 45s for rendering.
            status = 'loading'
            seconds = (request.redirected -
                       request_group.submitted).seconds + 45
        elif request.locked:
            # Factory has claimed the request but not redirected yet.
            status = 'starting'
            seconds = (request.locked - request_group.submitted).seconds + 60
        else:
            status = 'pending'
            seconds = request.queue_estimate(matching_browsers) or 0
        results.append({
            'browser': name,
            'status': status,
            'seconds': seconds,
            'hashkey': hashkey,
        })
    return results
def start(http_request):
    """
    Front page with URL input, browser chooser, and options.
    """
    # Optionally require login before accepting screenshot requests.
    if (http_request.user.is_anonymous() and
        hasattr(settings, 'ALLOW_ANONYMOUS_REQUESTS') and
        not settings.ALLOW_ANONYMOUS_REQUESTS):
        url = '/'
        if http_request.META['QUERY_STRING']:
            url += '?' + http_request.META['QUERY_STRING']
        return error_page(http_request, _("login required"),
            _("Anonymous screenshot requests are not allowed."),
            u'<a href="/accounts/login/?next=%s">%s</a>' % (
                urllib.quote(url.encode('utf-8')),
                _("Please log in with your username and password.")))
    # Initialize forms.
    post = http_request.POST or None
    url_form = UrlForm(post)
    features_form = FeaturesForm(post)
    options_form = OptionsForm(post)
    special_form = SpecialForm(post)
    # Get available choices from database, with correct translations.
    active_factories = Factory.objects.filter(
        last_poll__gte=last_poll_timeout())
    active_browsers = Browser.objects.filter(
        factory__in=active_factories, active=True)
    if not active_browsers:
        return error_page(http_request, _("out of service"),
            _("No active screenshot factories."),
            _("Please try again later."))
    features_form.load_choices(active_browsers)
    options_form.load_choices(active_factories)
    # Validate posted data.
    valid_post = (url_form.is_valid() and options_form.is_valid() and
                  features_form.is_valid() and special_form.is_valid())
    # Preload some database entries for browser forms
    preload_foreign_keys(active_browsers,
                         factory=active_factories,
                         factory__operating_system=True,
                         browser_group=True,
                         engine=True)
    # Select browsers according to GET request
    selected_browsers = None
    if 'browsers' in http_request.GET:
        selected_browsers = http_request.GET['browsers'].split()
    # Browser forms for each platform.
    browser_forms = []
    for platform in Platform.objects.all():
        browser_form = BrowsersForm(active_browsers, platform, post,
                                    selected_browsers)
        browser_form.platform_name = \
            unicode(platform).lower().replace(' ', '-')
        if browser_form.is_bound:
            browser_form.full_clean()
        if browser_form.fields:
            browser_forms.append(browser_form)
            valid_post = valid_post and browser_form.is_valid()
    browser_forms[0].is_first = True
    browser_forms[-1].is_last = True
    priority = 0
    if valid_post:
        if (url_form.cleaned_data['shocksite_keywords'] >
            settings.SHOCKSITE_KEYWORDS_ALLOWED):
            # Ignore screenshot requests for shock sites.
            priority = -url_form.cleaned_data['shocksite_keywords']
        elif 'shotserver04.priority' in settings.INSTALLED_APPS:
            # Get priority processing for domain or user.
            from shotserver04.priority import domain_priority, user_priority
            priority = max(domain_priority(url_form.cleaned_data['domain']),
                           user_priority(http_request.user))
        usage_limited = check_usage_limits(
            http_request, priority,
            url_form.cleaned_data['website'],
            url_form.cleaned_data['domain'])
        if usage_limited:
            valid_post = False
    if not valid_post:
        # Show HTML form.
        if 'url' in http_request.GET:
            url_form.fields['url'].initial = http_request.GET['url']
        multi_column(browser_forms)
        selectors = mark_safe(',\n'.join([
            SELECTOR_TEMPLATE % (plus_minus, capfirst(text))
            for text, plus_minus in selector_pairs(browser_forms)]))
        news_list = NewsItem.objects.all()[:10]
        sponsors_list = Sponsor.objects.filter(front_page=True)
        show_special_form = http_request.user.is_authenticated()
        # locals() becomes the template context.
        return render_to_response('start/start.html', locals(),
            context_instance=RequestContext(http_request))
    # Create screenshot requests and redirect to website overview.
    expire = datetime.now() + timedelta(minutes=30)
    values = {
        'website': url_form.cleaned_data['website'],
        'ip': http_request.META['REMOTE_ADDR'],
        'user': None,
        }
    if http_request.user.is_authenticated():
        values['user'] = http_request.user
    values.update(options_form.cleaned_data)
    values.update(features_form.cleaned_data)
    values.update(special_form.cleaned_data)
    # Translate None values into __isnull lookups for the match query.
    match_values = {}
    for key in values:
        if values[key] is None:
            match_values[key + '__isnull'] = True
        else:
            match_values[key] = values[key]
    existing = RequestGroup.objects.filter(
        expire__gte=datetime.now(), **match_values).order_by('-submitted')
    if (len(existing) and
        existing[0].request_set.filter(screenshot__isnull=True).count()):
        # Previous request group is still pending, reuse it.
        request_group = existing[0]
        request_group.update_fields(expire=expire)
        if priority > request_group.priority:
            request_group.update_fields(priority=priority)
    else:
        request_group = RequestGroup.objects.create(
            expire=expire, priority=priority, **values)
    for browser_form in browser_forms:
        create_platform_requests(
            request_group, browser_form.platform, browser_form, priority)
    # Make sure that the redirect will show the new request group
    transaction.commit()
    # return render_to_response('debug.html', locals(),
    #     context_instance=RequestContext(http_request))
    return HttpResponseRedirect(values['website'].get_absolute_url())