def serve(request, project_name, path, document_root=None, override_permission=""):
    """
    Serve static file for a given project.

    This is meant as a replacement for the inefficient debug only
    'django.views.static.serve' way of serving files under /media urls.
    """
    if document_root is None:
        document_root = settings.MEDIA_ROOT
    # Normalise the requested path and rebuild it component by component,
    # dropping anything (drives, '.', '..', empty parts) that could be used
    # to escape the document root.
    path = posixpath.normpath(unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        # Sanitised path differs from the requested one: redirect to it.
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root, project_name, newpath)
    storage = DefaultStorage()
    if not storage.exists(fullpath):
        # On case sensitive filesystems you can have problems if the project
        # name in the url is not exactly the same case as the filepath.
        # Find the correct case for the project name, then retry.
        # BUGFIX: the lookup must be case-insensitive (__iexact) — an exact
        # match could never find a differently-cased name, and a QuerySet
        # never compares equal to [], so the 404 branch below was dead code.
        projectlist = ComicSite.objects.filter(short_name__iexact=project_name)
        if not projectlist:
            raise Http404(_("project '%s' does not exist" % project_name ))
        project_name = projectlist[0].short_name
        fullpath = os.path.join(document_root, project_name, newpath)
        if not storage.exists(fullpath):
            raise Http404(_('"%(path)s" does not exist') % {'path': fullpath})
    if can_access(request.user, path, project_name, override_permission):
        f = storage.open(fullpath, 'rb')
        file = File(f)  # create django file object
        # Do not offer to save images, but show them directly
        # NOTE(review): save_as=True usually forces a download — confirm
        # against the serve_file helper's semantics.
        return serve_file(request, file, save_as=True)
    else:
        return HttpResponseForbidden("This file is not available without "
                                     "credentials")
def serve(request, challenge_short_name, path, document_root=None):
    """
    Serve static file for a given project.

    This is meant as a replacement for the inefficient debug only
    'django.views.static.serve' way of serving files under /media urls.
    """
    if document_root is None:
        document_root = settings.MEDIA_ROOT

    path = posixpath.normpath(unquote(path))
    path = path.lstrip('/')

    # Rebuild the path one component at a time, discarding anything that
    # could be used to escape the document root (empty components, drive
    # prefixes, '.' and '..').
    newpath = ''
    for component in path.split('/'):
        if not component:
            continue
        _drive, component = os.path.splitdrive(component)
        _head, component = os.path.split(component)
        if component in (os.curdir, os.pardir):
            continue
        newpath = os.path.join(newpath, component).replace('\\', '/')

    if newpath and newpath != path:
        return HttpResponseRedirect(newpath)

    storage = DefaultStorage()
    fullpath = os.path.join(document_root, challenge_short_name, newpath)
    if not storage.exists(fullpath):
        # On case sensitive filesystems the challenge name in the url may
        # not match the case of the directory on disk; look the challenge up
        # case-insensitively and retry with its canonical short name.
        matches = Challenge.objects.filter(
            short_name__iexact=challenge_short_name)
        if not matches:
            raise Http404("project '%s' does not exist" % challenge_short_name)
        challenge_short_name = matches[0].short_name
        fullpath = os.path.join(document_root, challenge_short_name, newpath)
        if not storage.exists(fullpath):
            raise Http404('"%(path)s" does not exist' % {'path': fullpath})

    if not can_access(request.user, path, challenge_short_name):
        return HttpResponseForbidden("This file is not available without "
                                     "credentials")

    try:
        f = storage.open(fullpath, 'rb')
        file = File(f)  # create django file object
    except IOError:
        return HttpResponseForbidden("This is not a file")
    # Do not offer to save images, but show them directly
    return serve_file(file, save_as=True)
def get(self, request, **kwargs):
    """
    Redirect to an image property of the looked-up object, optionally
    converting it to PNG first; converted copies are cached in storage.
    """
    entity_id = kwargs.get('entity_id')
    current_object = self.get_object(entity_id)
    # v2 API may answer a slug lookup with a redirect to the entity id.
    if current_object is None and self.slugToEntityIdRedirect and getattr(
            request, 'version', 'v1') == 'v2':
        return self.get_slug_to_entity_id_redirect(
            kwargs.get('entity_id', None))
    elif current_object is None:
        return Response(status=status.HTTP_404_NOT_FOUND)
    image_prop = getattr(current_object, self.prop)
    if not bool(image_prop):
        return Response(status=status.HTTP_404_NOT_FOUND)
    image_type = request.query_params.get('type', 'original')
    if image_type not in ['original', 'png']:
        raise ValidationError(u"invalid image type: {}".format(image_type))
    image_url = image_prop.url
    filename, ext = os.path.splitext(image_prop.name)
    basename = os.path.basename(filename)
    dirname = os.path.dirname(filename)
    # Bumping the version suffix invalidates previously converted files.
    version_suffix = getattr(settings, 'CAIROSVG_VERSION_SUFFIX', '1')
    new_name = '{dirname}/converted{version}/{basename}.png'.format(
        dirname=dirname, basename=basename, version=version_suffix)
    storage = DefaultStorage()
    if image_type == 'original':
        image_url = image_prop.url
    elif image_type == 'png' and ext == '.svg':
        # SVG -> PNG via cairosvg, thumbnailed with PIL; converted only once.
        if not storage.exists(new_name):
            with storage.open(image_prop.name, 'rb') as input_svg:
                # NOTE(review): despite the name, svg_buf receives the PNG
                # bytes written by cairosvg.
                svg_buf = StringIO.StringIO()
                out_buf = StringIO.StringIO()
                cairosvg.svg2png(file_obj=input_svg, write_to=svg_buf)
                img = Image.open(svg_buf)
                img.thumbnail((400, 400))
                img.save(out_buf, format=image_type)
                storage.save(new_name, out_buf)
        image_url = storage.url(new_name)
    elif ext != '.png':
        # attempt to use PIL to do desired image conversion
        if not storage.exists(new_name):
            with storage.open(image_prop.name, 'rb') as input_svg:
                out_buf = StringIO.StringIO()
                img = Image.open(input_svg)
                img.save(out_buf, format=image_type)
                storage.save(new_name, out_buf)
        image_url = storage.url(new_name)
    return redirect(image_url)
class StorageWrapper:
    """Thin convenience facade over Django's default storage backend."""

    def __init__(self):
        self.storage = DefaultStorage()

    def save(self, filename: str, file_like):
        """Store *file_like* under *filename*; return the name used."""
        return self.storage.save(filename, file_like)

    def list(self, ext: str = None):
        """List files in the storage root, optionally filtered by suffix."""
        _, files = self.storage.listdir("/")
        if ext:
            files = [name for name in files if name.endswith(ext)]
        return files

    def exists(self, filename: str):
        """Return True when *filename* is present in storage."""
        return self.storage.exists(filename)

    def find(self, filename: str) -> Optional[str]:
        """Return *filename* if it appears in the root listing, else None."""
        return next(
            (name for name in self.list() if name == filename), None)

    def write_to(self, filename: str, callback: Any):
        """Read *filename* in full and hand its contents to *callback*."""
        handle = self.storage.open(filename)
        callback(handle.read())
        handle.close()

    def remove(self, filename: str):
        """Delete *filename* from storage."""
        self.storage.delete(filename)
def cached_cover_url(self):
    """Return the locally cached cover URL, or the cover download view."""
    if self.cover_url:
        cache_name = "imdb/image/{}".format(md5(self.cover_url).hexdigest())
        if DefaultStorage().exists(cache_name):
            return "{}{}".format(settings.MEDIA_URL, cache_name)
    return reverse('movie_cover', kwargs={'movieID': self.imdb_id})
def cached_large_cover_url(self):
    """Return the cached large-cover URL, or the full-size download view."""
    if self.large_cover_url:
        cache_name = "imdb/image/{}".format(
            md5(self.large_cover_url).hexdigest())
        if DefaultStorage().exists(cache_name):
            return "{}{}".format(settings.MEDIA_URL, cache_name)
    return "{}?size=full".format(
        reverse('movie_cover', kwargs={'movieID': self.imdb_id}))
def download_image(self, image_url):
    """
    Ensure *image_url* is cached in default storage and return its
    storage name (md5 of the url). The name is returned even when the
    download fails, matching the original best-effort contract —
    callers apparently rely on the deterministic name.
    """
    store = DefaultStorage()
    storage_name = 'imdb/image/{}'.format(md5(image_url).hexdigest())
    if not store.exists(storage_name):
        r = requests.get(image_url, stream=True)
        try:
            if r.status_code == 200:
                # Let requests transparently decompress the raw stream.
                r.raw.decode_content = True
                store.save(storage_name, r.raw)
        finally:
            # BUGFIX: streamed responses hold the connection open until
            # closed; release it even on non-200 or save failure.
            r.close()
    return storage_name
def handle(self, *args, **options):
    """
    Backfill missing badgeclass images from the 'image' url recorded in
    each badgeclass's original JSON; fall back to a placeholder image,
    and print a JSON report of the outcome.
    """
    # save the placeholder image to storage if needed
    store = DefaultStorage()
    placeholder_storage_name = "placeholder/badge-failed.svg"
    if not store.exists(placeholder_storage_name):
        with open(os.path.join(TOP_DIR, 'apps', 'mainsite', 'static',
                               'badgr-ui', 'images', 'badge-failed.svg'),
                  'r') as fh:
            store.save(placeholder_storage_name, fh)
    # Counters and per-failure detail, dumped as JSON at the end.
    report = {
        'total': 0,
        'saved': 0,
        'placeholders_saved': 0,
        'status_codes': {},
        'ioerrors': [],
        'no_image_url': [],
        'json_error': []
    }
    badgeclasses_missing_images = BadgeClass.objects.filter(image='')
    report['total'] = len(badgeclasses_missing_images)
    self.stdout.write("Processing {} badgeclasses missing images...".format(
        report['total']))
    for badgeclass in badgeclasses_missing_images:
        try:
            original_json = json.loads(badgeclass.original_json)
        except ValueError:
            # Unparseable original JSON: record and fall through to the
            # placeholder handling below.
            report['json_error'].append(badgeclass.pk)
        else:
            remote_image_url = original_json.get('image', None)
            if remote_image_url:
                try:
                    status_code, image = fetch_remote_file_to_storage(
                        remote_image_url,
                        upload_to=badgeclass.image.field.upload_to)
                except IOError as e:
                    self.stdout.write("IOError fetching '{}': {}".format(
                        remote_image_url, str(e)))
                    report['ioerrors'].append((remote_image_url, str(e)))
                else:
                    # Group fetched urls by the HTTP status they returned.
                    report['status_codes'][status_code] = report[
                        'status_codes'].get(status_code, []) + [remote_image_url]
                    if status_code == 200:
                        badgeclass.image = image
                        badgeclass.save()
                        report['saved'] += 1
                        self.stdout.write(
                            "Saved missing image for badgeclass(pk={}) from '{}'".format(
                                badgeclass.pk, remote_image_url))
                        continue  # shortcircuit failure handling at end of loop
                    else:
                        self.stdout.write("Http error fetching '{}': {}".format(
                            remote_image_url, status_code))
            else:
                report['no_image_url'].append(badgeclass.pk)
                self.stdout.write(
                    "Unable to determine an image url for badgeclass(pk={})".format(
                        badgeclass.pk))
        # all errors should fall through to here
        if not badgeclass.image:
            report['placeholders_saved'] += 1
            badgeclass.image = placeholder_storage_name
            badgeclass.save()
    self.stdout.write(json.dumps(report, indent=2))
def diagnostics(request):
    """Admin diagnostics view: websocket URLs, a test-email form, and a
    round-trip check of the configured default file storage."""
    from django.conf import settings
    from post_office import mail

    index_uri = reverse("tracker:index_all")
    ping_socket_url = (
        request.build_absolute_uri(f'{index_uri}ws/ping/')
        .replace('https:', 'wss:')
        .replace('http:', 'ws:')
    )
    celery_socket_url = (
        request.build_absolute_uri(f'{index_uri}ws/celery/')
        .replace('https:', 'wss:')
        .replace('http:', 'ws:')
    )

    if request.method != 'POST':
        test_email_form = TestEmailForm()
    else:
        test_email_form = TestEmailForm(data=request.POST)
        if test_email_form.is_valid():
            mail.send(
                [test_email_form.cleaned_data['email']],
                f'webmaster@{request.get_host().split(":")[0]}',
                subject='Test Email',
                message='If you got this, email is set up correctly.',
            )
            messages.info(
                request, 'Test email queued. Check Post Office models for status.'
            )

    # Round-trip a tiny file through storage: save, read, exists, delete.
    # On failure, hand the exception itself to the template.
    try:
        storage = DefaultStorage()
        probe = storage.save(f'testfile_{int(time.time())}', BytesIO(b'test file'))
        storage.open(probe).read()
        assert storage.exists(probe)
        storage.delete(probe)
        storage_works = True
    except Exception as e:
        storage_works = e

    return render(
        request,
        'admin/tracker/diagnostics.html',
        {
            'is_secure': request.is_secure(),
            'test_email_form': test_email_form,
            'ping_socket_url': ping_socket_url,
            'celery_socket_url': celery_socket_url,
            'storage_works': storage_works,
            'HAS_CELERY': getattr(settings, 'HAS_CELERY', False),
        },
    )
def serve_fullpath(*, fullpath):
    """Serve *fullpath* from default storage; 404 unless the path is
    already absolute/canonical and exists."""
    storage = DefaultStorage()
    is_canonical = os.path.abspath(fullpath) == fullpath
    if not is_canonical or not storage.exists(fullpath):
        raise Http404("File not found.")
    try:
        wrapped = File(storage.open(fullpath, "rb"))
        return serve_file(wrapped, save_as=True)
    except IOError:
        raise Http404("File not found.")
def image_view(request, path=None, options=None):
    """
    Proxy an image from storage through the rsz.io resizing service.

    :param path: storage path of the source image (404 when missing).
    :param options: raw rsz.io option string, parsed by process_options.
    :return: HttpResponse with the (possibly resized) image bytes.
    """
    if not path:
        raise Http404('No path provided')
    # Grab the default storage, to build the URL
    storage = DefaultStorage()
    # Optionaly check if the file exists in the storage
    # Depending on your storage class, this might not be implemented or
    # performs something outrageous like loading the entire file into memory
    if getattr(settings, 'RSZIO_CHECK_EXISTS', False) and not storage.exists(path):
        raise Http404('Image not found in storage')
    # Get the full URL for the image
    original_url = storage.url(path)
    # Use urllib to pull out the host and path
    parsed_url = urlparse(original_url)
    # Build the URL
    url = 'https://rsz.io/{host}{path}'.format(
        host=parsed_url.hostname,
        path=parsed_url.path,
    )
    # Build the rsz.io parameters
    try:
        parameters = process_options(options)
    except Exception:
        # BUGFIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt. KISS: invalid parameters still raise a 404.
        raise Http404('Invalid rsz.io options')
    # Grab the image
    rszio_response = requests.get(url, parameters)
    # Return
    buffer_image = BytesIO(rszio_response.content)
    buffer_image.seek(0)
    response = HttpResponse(
        buffer_image, content_type=rszio_response.headers['content-type'])
    # Set cache headers
    if hasattr(settings, 'RSZIO_CACHE_CONTROL'):
        try:
            response['Cache-Control'] = 'max-age={}'.format(
                int(settings.RSZIO_CACHE_CONTROL))
        except (TypeError, ValueError):
            # BUGFIX: narrowed from a bare `except:`; int() failure means the
            # setting is a literal header value — pass it through unchanged.
            response['Cache-Control'] = settings.RSZIO_CACHE_CONTROL
    return response
def serve_fullpath(*, fullpath):
    """Serve a storage file as a download, refusing non-canonical paths."""
    storage = DefaultStorage()
    if os.path.abspath(fullpath) != fullpath or not storage.exists(fullpath):
        raise Http404("File not found.")
    try:
        handle = storage.open(fullpath, "rb")
        return serve_file(File(handle), save_as=True)
    except IOError:
        raise Http404("File not found.")
def fetch_remote_file_to_storage(remote_url, upload_to=''):
    """
    Fetches a remote url, and stores it in DefaultStorage
    :return: (status_code, new_storage_name)
    """
    store = DefaultStorage()
    r = requests.get(remote_url, stream=True)
    if r.status_code == 200:
        name, ext = os.path.splitext(urllib.parse.urlparse(r.url).path)
        # BUGFIX: the template was missing the {filename} placeholder even
        # though filename= was computed and passed, so every cached file for
        # a given upload_to/ext pair collided on one storage name. Key the
        # name on the md5 of the source url.
        storage_name = '{upload_to}/cached/{filename}{ext}'.format(
            upload_to=upload_to,
            filename=hashlib.md5(remote_url.encode()).hexdigest(),
            ext=ext)
        if not store.exists(storage_name):
            buf = io.BytesIO(r.content)
            store.save(storage_name, buf)
        return r.status_code, storage_name
    return r.status_code, None
def fetch_remote_file_to_storage(remote_url, upload_to=''):
    """
    Fetches a remote url, and stores it in DefaultStorage
    :return: (status_code, new_storage_name)
    """
    store = DefaultStorage()
    r = requests.get(remote_url, stream=True)
    if r.status_code == 200:
        name, ext = os.path.splitext(urlparse.urlparse(r.url).path)
        # BUGFIX: the template was missing the {filename} placeholder even
        # though filename= was computed and passed, so every cached file for
        # a given upload_to/ext pair collided on one storage name. Key the
        # name on the md5 of the source url. (Python 2 module idioms —
        # urlparse / StringIO / md5-on-str — are kept as-is.)
        storage_name = '{upload_to}/cached/{filename}{ext}'.format(
            upload_to=upload_to,
            filename=hashlib.md5(remote_url).hexdigest(),
            ext=ext)
        if not store.exists(storage_name):
            buf = StringIO.StringIO(r.content)
            store.save(storage_name, buf)
        return r.status_code, storage_name
    return r.status_code, None
def handle(self, *args, **options):
    """Delete ImageFile records (and their linked Image rows) whose photo
    is no longer present in default storage. --dry-run only reports."""
    dry_run = options.pop('dry_run', False)
    storage = DefaultStorage()
    orphaned = (
        image_file for image_file in ImageFile.objects.all()
        if not storage.exists(image_file.photo.name)
    )
    for image_file in orphaned:
        print(f'{image_file.photo.name} not found, remove imagefile {image_file.id}')
        for link in ImageToFile.objects.filter(file=image_file):
            try:
                print(f'delete image {link.image.id}')
                if not dry_run:
                    link.image.delete()
            except Image.DoesNotExist:
                # The linked image row is already gone; nothing to delete.
                pass
        if not dry_run:
            image_file.delete()
def get_previous_result_data(self):
    """
    Load the previous result data either from a JSON file in storage
    (path in the env var named by ``settings.input_filename_env``) or from
    raw JSON in the env var named by ``settings.input_json_env``.

    The filename env var, when set, takes precedence over the raw value.
    Raises ImproperlyConfigured when neither source yields data or the
    named file cannot be found.
    """
    storage = DefaultStorage()
    result_json_filename = os.getenv(self.settings.input_filename_env, None)
    # Only parse the raw-JSON env var when no filename was provided.
    result_json_value = (
        {}
        if result_json_filename
        else json.loads(os.getenv(self.settings.input_json_env, "{}"))
    )
    if not result_json_filename and not result_json_value:
        raise ImproperlyConfigured(
            "Either {} (result file path/name) or {} (raw JSON value) need to be set".format(
                self.settings.input_filename_env, self.settings.input_json_env
            )
        )
    # If the update JSON is specified in an env var instead of a file, return that right away
    if result_json_value and not result_json_filename:
        # If raw result output was provided, just return the "results" portion
        if "results" in result_json_value and "updated" in result_json_value["results"]:
            return result_json_value["results"]
        return result_json_value
    # Try to find the result at one of several paths based on the value of
    # the filename env var: as given, under the result dir, and under the
    # result dir with a ".json" suffix appended.
    possible_xpro_result_file_paths = [
        result_json_filename,
        join_path(RESULT_JSON_DIR_PATH, result_json_filename),
        join_path(
            RESULT_JSON_DIR_PATH,
            "{}.json".format(result_json_filename),
        ),
    ]
    for path in possible_xpro_result_file_paths:
        if storage.exists(path):
            with storage.open(path) as f:
                # If the file is found, parse the JSON and return
                return json.loads(f.read())
    raise ImproperlyConfigured(
        "Could not find an xPro result JSON file at any of these paths: {}\n(env var {}={})".format(
            str(possible_xpro_result_file_paths),
            self.settings.input_filename_env,
            result_json_filename,
        )
    )
def get_or_create_image_file(stream) -> ImageFile:
    """
    Return the ImageFile whose SHA-1 matches *stream*'s contents, creating
    the record (and saving the bytes to default storage) when absent.

    Raises InvalidImageFile when PIL cannot identify the stream as an image.
    """
    logger.debug('begin save_image_file')
    stream.seek(0)
    s = sha1()
    # Django uploaded files expose chunks(); hash plain streams in one read.
    if hasattr(stream, 'chunks'):
        for chunk in stream.chunks():
            s.update(chunk)
    else:
        s.update(stream.read())
    sha1_hash = s.hexdigest()
    storage = DefaultStorage()
    try:
        image_file = ImageFile.objects.get(sha1=sha1_hash)
        # Record exists but the blob vanished from storage: re-upload it.
        if not storage.exists(image_file.photo.name):
            stream.seek(0)
            storage.save(image_file.photo.name, stream)
    except ImageFile.DoesNotExist:
        stream.seek(0)
        try:
            image = PImage.open(stream)
        except (UnidentifiedImageError, OSError):
            raise InvalidImageFile()
        image_file = ImageFile()
        image_file.sha1 = sha1_hash
        image_file.width = image.width
        image_file.height = image.height
        image_file_ext = '.' + FORMAT_EXT[image.format] if image.format else ''
        # Shard the storage path by hash prefix: ab/cd/<remainder><ext>.
        image_file.photo.name = '%s/%s/%s%s' % (sha1_hash[0:2], sha1_hash[2:4],
                                                sha1_hash[4:], image_file_ext)
        image_file.format = image.format
        # File size = offset of the end of the stream.
        stream.seek(0, 2)
        image_file.file_size = stream.tell()
        stream.seek(0)
        storage.save(image_file.photo.name, stream)
        if hasattr(stream, 'name'):
            image_file.origin_filename = stream.name
        image_file.save()
    return image_file
def handle(self, *args, **options) -> None:
    """
    Smoke-test the configured default file storage: save a small test
    file, verify it exists and reads back identically, then delete it
    (unless the no_delete option is set).
    """
    no_delete = options['no_delete']
    contents = self.make_file_contents()
    storage = DefaultStorage()
    self.stdout.write(
        f'Saving test file via {settings.DEFAULT_FILE_STORAGE}...\n')
    filename = storage.save('storetestfile_test_file.txt', BytesIO(contents))
    # BUGFIX: the messages below had lost their {filename} placeholder, so
    # the operator was never told the actual stored name (backends may
    # rename on save to avoid collisions).
    self.stdout.write(f'Test file saved as "{filename}".\n')
    try:
        self.stdout.write('Validating test file...\n')
        assert storage.exists(filename)
        self.stdout.write('Test file exists.\n')
        with storage.open(filename) as f:
            assert f.read() == contents
        self.stdout.write('Test file contents are valid.\n')
    finally:
        # Clean up (or tell the operator what to clean up) even when
        # validation fails.
        if no_delete:
            self.stdout.write(f'Please delete "{filename}" manually.')
        else:
            self.stdout.write('Deleting test file...\n')
            storage.delete(filename)
    self.stdout.write('Done!\n')
def get(self, request, **kwargs):
    """
    Redirect to an image property of the looked-up object, optionally
    converted to PNG and/or padded to a supported aspect ratio
    ('square' 1:1, 'wide' 1.91:1). Converted copies are cached in storage.
    """
    entity_id = kwargs.get('entity_id')
    current_object = self.get_object(entity_id)
    # v2 API may answer a slug lookup with a redirect to the entity id.
    if current_object is None and self.slugToEntityIdRedirect and getattr(
            request, 'version', 'v1') == 'v2':
        return self.get_slug_to_entity_id_redirect(
            kwargs.get('entity_id', None))
    elif current_object is None:
        return Response(status=status.HTTP_404_NOT_FOUND)
    image_prop = getattr(current_object, self.prop)
    if not bool(image_prop):
        return Response(status=status.HTTP_404_NOT_FOUND)
    image_type = request.query_params.get('type', 'original')
    if image_type not in ['original', 'png']:
        raise ValidationError("invalid image type: {}".format(image_type))
    # Supported aspect ratios as (width, height) proportions.
    supported_fmts = {'square': (1, 1), 'wide': (1.91, 1)}
    image_fmt = request.query_params.get('fmt', 'square').lower()
    if image_fmt not in list(supported_fmts.keys()):
        raise ValidationError("invalid image format: {}".format(image_fmt))
    image_url = image_prop.url
    filename, ext = os.path.splitext(image_prop.name)
    basename = os.path.basename(filename)
    dirname = os.path.dirname(filename)
    # Bumping the version suffix invalidates previously converted files;
    # non-square formats get a "-<fmt>" suffix in the cached name.
    version_suffix = getattr(settings, 'CAIROSVG_VERSION_SUFFIX', '1')
    new_name = '{dirname}/converted{version}/{basename}{fmt_suffix}.png'.format(
        dirname=dirname, basename=basename, version=version_suffix,
        fmt_suffix="-{}".format(image_fmt) if image_fmt != 'square' else "")
    storage = DefaultStorage()
    if image_type == 'original' and image_fmt == 'square':
        image_url = image_prop.url
    elif ext == '.svg':
        if not storage.exists(new_name):
            png_buf = None
            with storage.open(image_prop.name, 'rb') as input_svg:
                # Prefer the external HTTP conversion service when enabled.
                if getattr(settings, 'SVG_HTTP_CONVERSION_ENABLED', False):
                    max_square = getattr(settings, 'IMAGE_FIELD_MAX_PX', 400)
                    png_buf = convert_svg_to_png(input_svg.read(), max_square,
                                                 max_square)
                # If conversion using the HTTP service fails, try falling back to python solution
                if not png_buf:
                    png_buf = io.BytesIO()
                    input_svg.seek(0)
                    try:
                        cairosvg.svg2png(file_obj=input_svg, write_to=png_buf)
                    except IOError:
                        return redirect(
                            storage.url(image_prop.name)
                        )  # If conversion fails, return existing file.
            img = Image.open(png_buf)
            img = fit_image_to_height(img, supported_fmts[image_fmt])
            out_buf = io.BytesIO()
            img.save(out_buf, format='png')
            storage.save(new_name, out_buf)
        image_url = storage.url(new_name)
    else:
        if not storage.exists(new_name):
            with storage.open(image_prop.name, 'rb') as input_png:
                out_buf = io.BytesIO()
                # height and width set to the Height and Width of the original badge
                img = Image.open(input_png)
                img = fit_image_to_height(img, supported_fmts[image_fmt])
                img.save(out_buf, format='png')
                storage.save(new_name, out_buf)
        image_url = storage.url(new_name)
    return redirect(image_url)
def localfileexists(self):
    """Return True when this record's file is present in default storage."""
    return DefaultStorage().exists(self.file.path)
def handle(self, *args, **options): files = UniqueFile.objects.exclude(remote_id__isnull=True).exclude( remote_id=0).filter(type__in=["application"], ext=".pdf") # print [(f.filename, f.questions.all()) for f in files] """ for q in Question.objects.all(): if q.video and not q.video.remote_id: print "Storing remote ID for %s" % q.title text = q.text try: q.drupal.pull() except RESTError, e: print "Unable to pull file" print q.drupal.api.response q.text = text q.save() """ # for f in UniqueFile.objects.exclude(remote_id__isnull=True).exclude( # remote_id=0).filter(type__in=["video", "image"]): # f.s3d=False # f.save() for f in UniqueFile.objects.exclude(remote_id__isnull=True).exclude( remote_id=0).filter(type__in=["application"], ext=".pdf"): s3storage = DefaultStorage() lstorage = UniqueFile.storage if s3storage.exists("sources/%s" % f.filename): # print "File already exists on S3" # f.s3d=True # f.save() continue try: lpath = f.local_paths[0] except IndexError: qns = list(f.questions.all()) qs = [str(q) for q in qns] if f and qs: print[(l.title, [m.title for m in l.modules.all()]) for l in q.lessons.all()] print "No local path for %s in %s" % (f.checksum, ",".join(qs)) print else: print "Orphan file: %s" % f continue try: with lstorage.open(lpath, 'rb') as lofl: data = lofl.read() except IOError: qs = [str(q) for q in f.questions.all()] if qs: print "Error reading local file for %s in %s" % ( f.checksum, qs) print "-- Trying to download ..." f.drupal.pull() f.save() try: with lstorage.open(lpath, 'rb') as lofl: data = lofl.read() except IOError: print "-- Still no good ... abort." continue else: print "Error reading orphan file (WTF) %s" % f.checksum continue with s3storage.open("sources/%s" % f.filename, 'wb') as s3fl: qs = [str(q) for q in f.questions.all()] print "Writing file %s in %s" % (f.checksum, qs) s3fl.write(data)
class TestAPI(TmpMediaRootMixin, UK2015ExamplesMixin, WebTest): def setUp(self): super().setUp() person_extra = PersonExtraFactory.create(base__id="2009", base__name="Tessa Jowell") dulwich_not_stand = PersonExtraFactory.create(base__id="4322", base__name="Helen Hayes") edinburgh_candidate = PersonExtraFactory.create( base__id="818", base__name="Sheila Gilmore") edinburgh_winner = PersonExtraFactory.create( base__id="5795", base__name="Tommy Sheppard") edinburgh_may_stand = PersonExtraFactory.create( base__id="5163", base__name="Peter McColl") MembershipFactory.create( person=person_extra.base, post=self.dulwich_post_extra.base, on_behalf_of=self.labour_party_extra.base, post_election=self.dulwich_post_extra_pee, ) MembershipFactory.create( person=person_extra.base, organization=self.labour_party_extra.base, post_election=self.edinburgh_east_post_extra_pee, ) MembershipFactory.create( person=dulwich_not_stand.base, post=self.dulwich_post_extra.base, on_behalf_of=self.labour_party_extra.base, post_election=self.dulwich_post_extra_pee_earlier, ) dulwich_not_stand.not_standing.add(self.election) MembershipFactory.create( person=edinburgh_winner.base, post=self.edinburgh_east_post_extra.base, on_behalf_of=self.labour_party_extra.base, elected=True, post_election=self.edinburgh_east_post_extra_pee, ) MembershipFactory.create( person=edinburgh_candidate.base, post=self.edinburgh_east_post_extra.base, on_behalf_of=self.labour_party_extra.base, post_election=self.edinburgh_east_post_extra_pee, ) MembershipFactory.create( person=edinburgh_may_stand.base, post=self.edinburgh_east_post_extra.base, on_behalf_of=self.labour_party_extra.base, post_election=self.edinburgh_east_post_extra_pee_earlier, ) self.storage = DefaultStorage() def test_api_basic_response(self): response = self.app.get("/api/v0.9/") self.assertEqual(response.status_code, 200) json = response.json self.assertEqual(json["persons"], "http://localhost:80/api/v0.9/persons/") 
self.assertEqual(json["organizations"], "http://localhost:80/api/v0.9/organizations/") self.assertEqual(json["elections"], "http://localhost:80/api/v0.9/elections/") self.assertEqual(json["posts"], "http://localhost:80/api/v0.9/posts/") persons_resp = self.app.get("/api/v0.9/persons/") self.assertEqual(persons_resp.status_code, 200) organizations_resp = self.app.get("/api/v0.9/organizations/") self.assertEqual(organizations_resp.status_code, 200) elections_resp = self.app.get("/api/v0.9/elections/") self.assertEqual(elections_resp.status_code, 200) posts_resp = self.app.get("/api/v0.9/posts/") self.assertEqual(posts_resp.status_code, 200) def test_api_errors(self): response = self.app.get("/api/", expect_errors=True) self.assertEqual(response.status_code, 404) response = self.app.get("/api/v0.8", expect_errors=True) self.assertEqual(response.status_code, 404) response = self.app.get("/api/v0.9/person/", expect_errors=True) self.assertEqual(response.status_code, 404) response = self.app.get("/api/v0.9/persons/4000/", expect_errors=True) self.assertEqual(response.status_code, 404) response = self.app.post("/api/v0.9/persons/", {}, expect_errors=True) self.assertEqual(response.status_code, 403) def test_api_persons(self): persons_resp = self.app.get("/api/v0.9/persons/") persons = persons_resp.json self.assertEqual(persons["count"], len(persons["results"])) self.assertEqual(persons["count"], 5) def test_api_person(self): person_resp = self.app.get("/api/v0.9/persons/2009/") self.assertEqual(person_resp.status_code, 200) person = person_resp.json self.assertEqual(person["id"], 2009) self.assertEqual(person["name"], "Tessa Jowell") memberships = sorted(person["memberships"], key=lambda m: m["role"]) self.assertEqual(len(memberships), 2) self.assertEqual(memberships[1]["role"], "Candidate") self.assertEqual(len(person["versions"]), 0) def test_api_organizations(self): organizations_resp = self.app.get("/api/v0.9/organizations/") organizations = organizations_resp.json 
self.assertEqual(organizations["count"], len(organizations["results"])) self.assertEqual(organizations["count"], 7) def test_api_organization(self): organizations_resp = self.app.get("/api/v0.9/organizations/") organizations = organizations_resp.json organization_url = None for organization in organizations["results"]: if organization["id"] == "party:53": organization_url = organization["url"] break organization_resp = self.app.get(organization_url) self.assertEqual(organization_resp.status_code, 200) organization = organization_resp.json self.assertEqual(organization["id"], "party:53") self.assertEqual(organization["name"], "Labour Party") def test_api_elections(self): elections_resp = self.app.get("/api/v0.9/elections/") elections = elections_resp.json self.assertEqual(elections["count"], len(elections["results"])) self.assertEqual(elections["count"], 3) def test_api_election(self): elections_resp = self.app.get("/api/v0.9/elections/") elections = elections_resp.json election_url = None for election in elections["results"]: if election["id"] == "2015": election_url = election["url"] break election_resp = self.app.get(election_url) self.assertEqual(election_resp.status_code, 200) election = election_resp.json self.assertEqual(election["id"], "2015") self.assertEqual(election["name"], "2015 General Election") def test_api_posts(self): posts_resp = self.app.get("/api/v0.9/posts/") posts = posts_resp.json self.assertEqual(posts["count"], len(posts["results"])) self.assertEqual(posts["count"], 5) def test_api_post(self): posts_resp = self.app.get("/api/v0.9/posts/") posts = posts_resp.json post_url = None for post in posts["results"]: if post["id"] == "65808": post_url = post["url"] break self.assertTrue(post_url) post_resp = self.app.get(post_url) self.assertEqual(post_resp.status_code, 200) post = post_resp.json self.assertEqual(post["id"], "65808") self.assertEqual(post["label"], "Member of Parliament for Dulwich and West Norwood") def 
test_api_person_redirects(self): PersonRedirect.objects.create(old_person_id="1234", new_person_id="42") PersonRedirect.objects.create(old_person_id="5678", new_person_id="12") person_redirects_resp = self.app.get("/api/v0.9/person_redirects/") person_redirects = person_redirects_resp.json self.assertEqual(person_redirects["results"][0]["old_person_id"], 1234) self.assertEqual(person_redirects["results"][0]["new_person_id"], 42) self.assertEqual(person_redirects["results"][1]["old_person_id"], 5678) self.assertEqual(person_redirects["results"][1]["new_person_id"], 12) def test_api_person_redirect(self): PersonRedirect.objects.create(old_person_id="1234", new_person_id="42") url = "/api/v0.9/person_redirects/1234/" person_redirect_resp = self.app.get(url) person_redirect = person_redirect_resp.json self.assertEqual(person_redirect["old_person_id"], 1234) self.assertEqual(person_redirect["new_person_id"], 42) def test_api_version_info(self): version_resp = self.app.get("/version.json") self.assertEqual(version_resp.status_code, 200) info = version_resp.json self.assertEqual(info["users_who_have_edited"], 0) self.assertEqual(info["interesting_user_actions"], 0) LoggedAction.objects.create(action_type="set-candidate-not-elected") LoggedAction.objects.create(action_type="edit-candidate") version_resp = self.app.get("/version.json") info = version_resp.json self.assertEqual(info["interesting_user_actions"], 1) def test_api_cors_headers(self): resp = self.app.get("/api/v0.9/", headers={"Origin": b"http://example.com"}) self.assertTrue("Access-Control-Allow-Origin" in resp.headers) self.assertEqual(resp.headers["Access-Control-Allow-Origin"], "*") resp = self.app.get("/", headers={"Origin": b"http://example.com"}) self.assertFalse("Access-Control-Allow-Origin" in resp.headers) def test_api_jsonp_response(self): response = self.app.get("/api/v0.9/?format=jsonp&callback=test") self.assertEqual(response.status_code, 200) self.assertTrue(response.text.startswith("test(")) 
    # NOTE(review): legacy version of the cache-API tests (persons-*/posts-*
    # page names, v0.9 URLs); a newer variant exists later in this file.
    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_persons_api_to_directory(self, mock_datetime):
        """Run the cache command once and check the paginated JSON it writes."""
        # current
        # timestamped
        # timestamped
        # Freeze "now" so the timestamped output directory name is predictable.
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        target_directory = settings.MEDIA_ROOT  # NOTE(review): unused local
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf/",
        )
        expected_leafname = "2017-05-14T12:33:05"
        expected_timestamped_directory = join(settings.MEDIA_ROOT, "cached-api", expected_leafname)
        expected_path = join("cached-api", "latest")
        self.assertTrue(self.storage.exists(expected_timestamped_directory))
        self.assertTrue(self.storage.exists(expected_path))
        # Check that the files in that directory are as expected:
        entries = self.storage.listdir(expected_timestamped_directory)[1]
        persons_1_leafname = "persons-000001.json"
        persons_2_leafname = "persons-000002.json"
        posts_1_leafname = "posts-000001.json"
        posts_2_leafname = "posts-000002.json"
        self.assertEqual(
            set(entries),
            {
                persons_1_leafname,
                persons_2_leafname,
                posts_1_leafname,
                posts_2_leafname,
            },
        )
        # Get the data from those pages:
        with self.storage.open(
                join(expected_timestamped_directory, persons_1_leafname)) as f:
            persons_1_data = json.loads(f.read().decode('utf8'))
        with self.storage.open(
                join(expected_timestamped_directory, persons_2_leafname)) as f:
            persons_2_data = json.loads(f.read().decode('utf8'))
        with self.storage.open(
                join(expected_timestamped_directory, posts_1_leafname)) as f:
            posts_1_data = json.loads(f.read().decode('utf8'))
        with self.storage.open(
                join(expected_timestamped_directory, posts_2_leafname)) as f:
            posts_2_data = json.loads(f.read().decode('utf8'))
        # Check the previous and next links are as we expect:
        self.assertEqual(
            persons_1_data["next"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, persons_2_leafname),
        )
        self.assertEqual(persons_1_data["previous"], None)
        self.assertEqual(persons_2_data["next"], None)
        self.assertEqual(
            persons_2_data["previous"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, persons_1_leafname),
        )
        self.assertEqual(
            posts_1_data["next"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, posts_2_leafname),
        )
        self.assertEqual(posts_1_data["previous"], None)
        self.assertEqual(posts_2_data["next"], None)
        self.assertEqual(
            posts_2_data["previous"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, posts_1_leafname),
        )
        # Check that the URL of the first person is as expected,
        # as well as it being the right person:
        first_person = persons_1_data["results"][0]
        self.assertEqual(first_person["id"], 818)
        self.assertEqual(first_person["name"], "Sheila Gilmore")
        self.assertEqual(
            first_person["url"],
            "https://candidates.democracyclub.org.uk/api/v0.9/persons/818/?format=json",
        )
        # Similarly, check that the URL of the first post is as expected:
        first_post = posts_1_data["results"][0]
        self.assertEqual(first_post["id"], self.edinburgh_east_post_extra.slug)
        self.assertEqual(first_post["label"], "Member of Parliament for Edinburgh East")
        self.assertEqual(
            first_post["url"],
            "https://candidates.democracyclub.org.uk/api/v0.9/posts/14419/?format=json",
        )

    def _setup_cached_api_directory(self, dir_list):
        """
        Saves a tmp file in settings.MEDIA_ROOT, called `.keep` in each
        directory in dir_list.
        """
        for d in dir_list:
            self.storage.save(join("cached-api", d, ".keep"), ContentFile("."))

    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_cache_api_to_directory_prune(self, mock_datetime):
        """Old timestamped directories are pruned when --prune is passed."""
        # Need to make sure datetime.strptime still works:
        mock_datetime.strptime.side_effect = datetime.strptime
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        # Directories from two days earlier should be removed...
        expected_to_prune = [
            "2017-05-12T08:00:00",
            "2017-05-12T10:00:00",
            "2017-05-12T12:00:00",
        ]
        # ...while same-day directories survive.
        expected_to_keep = [
            "2017-05-14T08:00:00",
            "2017-05-14T10:00:00",
            "2017-05-14T12:00:00",
            "2017-05-14T12:33:05",
        ]
        self._setup_cached_api_directory(expected_to_keep + expected_to_prune)
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T08:00:00", ".keep")))
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf/",
            prune=True,
        )
        for dir_name in expected_to_keep:
            self.assertTrue(
                self.storage.exists(join("cached-api", dir_name, ".keep")))
        for dir_name in expected_to_prune:
            self.assertFalse(
                self.storage.exists(join("cached-api", dir_name, ".keep")))

    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_cache_api_to_directory_prune_four_old(self, mock_datetime):
        """Pruning always keeps the most recent directories, even if old."""
        # Need to make sure datetime.strptime still works:
        mock_datetime.strptime.side_effect = datetime.strptime
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        expected_to_prune = ["2017-05-12T06:00:00"]
        expected_to_keep = [
            "2017-05-12T08:00:00",
            "2017-05-12T10:00:00",
            "2017-05-12T12:00:00",
            "2017-05-14T12:33:05",
        ]
        self._setup_cached_api_directory(expected_to_keep + expected_to_prune)
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T06:00:00", ".keep")))
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T08:00:00", ".keep")))
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf/",
            prune=True,
        )
        # Even though all of those directories are more than 36
        # hours old, they should all be kept because they're the
        # most recent 4:
        for dir_name in expected_to_keep:
            self.assertTrue(
                self.storage.exists(join("cached-api", dir_name, ".keep")))
        for dir_name in expected_to_prune:
            self.assertFalse(
                self.storage.exists(join("cached-api", dir_name, ".keep")))
    # NOTE(review): legacy (Python 2) version of this view — uses
    # StringIO.StringIO and u"" literals; a py3 variant appears later
    # in this file.
    def get(self, request, **kwargs):
        """
        Redirect to a rendition of this object's image property.

        Query params:
          * type -- 'original' (default) or 'png'
          * fmt  -- 'square' (default, 1:1 aspect) or 'wide' (1.91:1)

        Converted PNGs are cached in DefaultStorage under a
        'converted<version>' directory and reused on later requests.
        """
        entity_id = kwargs.get('entity_id')
        current_object = self.get_object(entity_id)
        # On the v2 API a miss may be a slug that should redirect to an
        # entity id rather than a plain 404.
        if current_object is None and self.slugToEntityIdRedirect and getattr(request, 'version', 'v1') == 'v2':
            return self.get_slug_to_entity_id_redirect(kwargs.get('entity_id', None))
        elif current_object is None:
            return Response(status=status.HTTP_404_NOT_FOUND)
        image_prop = getattr(current_object, self.prop)
        if not bool(image_prop):
            return Response(status=status.HTTP_404_NOT_FOUND)
        image_type = request.query_params.get('type', 'original')
        if image_type not in ['original', 'png']:
            raise ValidationError(u"invalid image type: {}".format(image_type))
        # Aspect ratios (width, height) for the supported output formats.
        supported_fmts = {
            'square': (1, 1),
            'wide': (1.91, 1)
        }
        image_fmt = request.query_params.get('fmt', 'square').lower()
        if image_fmt not in supported_fmts.keys():
            raise ValidationError(u"invalid image format: {}".format(image_fmt))
        image_url = image_prop.url
        filename, ext = os.path.splitext(image_prop.name)
        basename = os.path.basename(filename)
        dirname = os.path.dirname(filename)
        # Bumping CAIROSVG_VERSION_SUFFIX invalidates previously cached
        # conversions by changing the cache directory name.
        version_suffix = getattr(settings, 'CAIROSVG_VERSION_SUFFIX', '1')
        new_name = '{dirname}/converted{version}/{basename}{fmt_suffix}.png'.format(
            dirname=dirname,
            basename=basename,
            version=version_suffix,
            fmt_suffix="-{}".format(image_fmt) if image_fmt != 'square' else ""
        )
        storage = DefaultStorage()

        def _fit_to_height(img, ar, height=400):
            # Shrink to fit within height x height, then centre on a
            # transparent canvas with the requested aspect ratio.
            img.thumbnail((height,height))
            new_size = (int(ar[0]*height), int(ar[1]*height))
            new_img = Image.new("RGBA", new_size)
            new_img.paste(img, ((new_size[0] - height)/2, (new_size[1] - height)/2))
            # NOTE(review): .show() launches an external image viewer —
            # looks like a debugging leftover; confirm and remove.
            new_img.show()
            return new_img

        if image_type == 'original' and image_fmt == 'square':
            # No conversion needed; serve the stored file as-is.
            image_url = image_prop.url
        elif ext == '.svg':
            # Rasterise the SVG with cairosvg, then fit/cache it.
            if not storage.exists(new_name):
                with storage.open(image_prop.name, 'rb') as input_svg:
                    svg_buf = StringIO.StringIO()
                    out_buf = StringIO.StringIO()
                    cairosvg.svg2png(file_obj=input_svg, write_to=svg_buf)
                    img = Image.open(svg_buf)
                    img = _fit_to_height(img, supported_fmts[image_fmt])
                    img.save(out_buf, format='png')
                    storage.save(new_name, out_buf)
            image_url = storage.url(new_name)
        else:
            # Bitmap input: fit/cache directly via PIL.
            if not storage.exists(new_name):
                with storage.open(image_prop.name, 'rb') as input_svg:
                    out_buf = StringIO.StringIO()
                    img = Image.open(input_svg)
                    img = _fit_to_height(img, supported_fmts[image_fmt])
                    img.save(out_buf, format='png')
                    storage.save(new_name, out_buf)
            image_url = storage.url(new_name)
        return redirect(image_url)
def fetch_remote_file_to_storage(remote_url, upload_to='', allowed_mime_types=()):
    """
    Fetches a remote url (or data: URI), and stores it in DefaultStorage.

    The payload's type is sniffed with puremagic and must match one of
    ``allowed_mime_types``; SVG content (which puremagic may miss) is
    additionally detected by inspecting the markup, and is scrubbed with
    ``scrubSvgElementTree`` before storage.  Files are stored under a
    sha256 content-hash name, so identical payloads are cached only once.

    :param remote_url: http(s) URL or base64 data: URI to fetch
    :param upload_to: storage prefix to store the file under
    :param allowed_mime_types: non-empty tuple of acceptable mime types
    :return: (status_code, new_storage_name); new_storage_name is None
             when nothing could be fetched/identified
    :raises SuspiciousFileOperation: if no mime types were passed, the
             detected type is not allowed, or no extension can be derived
    """
    SVG_MIME_TYPE = 'image/svg+xml'
    if not allowed_mime_types:
        raise SuspiciousFileOperation("allowed mime types must be passed in")

    magic_strings = None
    content = None
    status_code = None

    if _is_data_uri(remote_url):
        # data:[<MIME-type>][;charset=<encoding>][;base64],<data>
        # Find the end of the substring 'base64', plus one more to skip
        # the comma separating the header from the payload.
        base64_image_from_data_uri = remote_url[
            (re.search('base64', remote_url).end()) + 1:]
        content = base64.b64decode(base64_image_from_data_uri)
        magic_strings = puremagic.magic_string(content)
        status_code = 200

    store = DefaultStorage()

    if magic_strings is None:
        # Not a data URI: fetch over HTTP.
        r = requests.get(remote_url, stream=True)
        if r.status_code == 200:
            magic_strings = puremagic.magic_string(r.content)
            content = r.content
            status_code = r.status_code

    if magic_strings and content:
        derived_mime_type = None
        derived_ext = None
        stripped_svg_string = None

        # First magic result whose mime type is in the allow-list wins.
        for magic_string in magic_strings:
            if getattr(magic_string, 'mime_type', None) in allowed_mime_types:
                derived_mime_type = getattr(magic_string, 'mime_type', None)
                derived_ext = getattr(magic_string, 'extension', None)
                break

        # puremagic can miss SVG; fall back to sniffing the markup.
        if not derived_mime_type and re.search(
                b'<svg', content[:1024]) and content.strip()[-6:] == b'</svg>':
            derived_mime_type = SVG_MIME_TYPE
            derived_ext = '.svg'

        if derived_mime_type == SVG_MIME_TYPE:
            # Scrub untrusted SVG (scripts, event handlers, ...) before storing.
            stripped_svg_element = ET.fromstring(content)
            scrubSvgElementTree(stripped_svg_element)
            stripped_svg_string = ET.tostring(stripped_svg_element)

        if derived_mime_type not in allowed_mime_types:
            raise SuspiciousFileOperation(
                "{} is not an allowed mime type for upload".format(
                    derived_mime_type))

        if not derived_ext:
            raise SuspiciousFileOperation(
                "could not determine a file extension")

        string_to_write_to_file = stripped_svg_string or content

        # BUG FIX: the format string previously contained a literal
        # '(unknown)' instead of the '{filename}' placeholder, so the
        # sha256 hash below was computed but never used and every cached
        # file collided on the same storage name.
        storage_name = '{upload_to}/cached/{filename}{ext}'.format(
            upload_to=upload_to,
            filename=hashlib.sha256(string_to_write_to_file).hexdigest(),
            ext=derived_ext)

        if not store.exists(storage_name):
            buf = io.BytesIO(string_to_write_to_file)
            store.save(storage_name, buf)
        return status_code, storage_name
    return status_code, None
def get(self, request, **kwargs): entity_id = kwargs.get('entity_id') current_object = self.get_object(entity_id) if current_object is None and self.slugToEntityIdRedirect and getattr(request, 'version', 'v1') == 'v2': return self.get_slug_to_entity_id_redirect(kwargs.get('entity_id', None)) elif current_object is None: return Response(status=status.HTTP_404_NOT_FOUND) image_prop = getattr(current_object, self.prop) if not bool(image_prop): return Response(status=status.HTTP_404_NOT_FOUND) image_type = request.query_params.get('type', 'original') if image_type not in ['original', 'png']: raise ValidationError("invalid image type: {}".format(image_type)) supported_fmts = { 'square': (1, 1), 'wide': (1.91, 1) } image_fmt = request.query_params.get('fmt', 'square').lower() if image_fmt not in list(supported_fmts.keys()): raise ValidationError("invalid image format: {}".format(image_fmt)) image_url = image_prop.url filename, ext = os.path.splitext(image_prop.name) basename = os.path.basename(filename) dirname = os.path.dirname(filename) version_suffix = getattr(settings, 'CAIROSVG_VERSION_SUFFIX', '1') new_name = '{dirname}/converted{version}/{basename}{fmt_suffix}.png'.format( dirname=dirname, basename=basename, version=version_suffix, fmt_suffix="-{}".format(image_fmt) if image_fmt != 'square' else "" ) storage = DefaultStorage() def _fit_dimension(new_size, desired_height): return int(math.floor((new_size - desired_height)/2)) def _fit_to_height(img, ar, height=400): img.thumbnail((height,height)) new_size = (int(ar[0]*height), int(ar[1]*height)) new_img = Image.new("RGBA", new_size) new_img.paste(img, (_fit_dimension(new_size[0], height), _fit_dimension(new_size[1], height))) new_img.show() return new_img if image_type == 'original' and image_fmt == 'square': image_url = image_prop.url elif ext == '.svg': if not storage.exists(new_name): with storage.open(image_prop.name, 'rb') as input_svg: svg_buf = io.BytesIO() out_buf = io.BytesIO() try: 
cairosvg.svg2png(file_obj=input_svg, write_to=svg_buf) except IOError: return redirect(storage.url(image_prop.name)) # If conversion fails, return existing file. img = Image.open(svg_buf) img = _fit_to_height(img, supported_fmts[image_fmt]) img.save(out_buf, format='png') storage.save(new_name, out_buf) image_url = storage.url(new_name) else: if not storage.exists(new_name): with storage.open(image_prop.name, 'rb') as input_svg: out_buf = io.BytesIO() img = Image.open(input_svg) img = _fit_to_height(img, supported_fmts[image_fmt]) img.save(out_buf, format='png') storage.save(new_name, out_buf) image_url = storage.url(new_name) return redirect(image_url)
class TestAPI(TestUserMixin, TmpMediaRootMixin, UK2015ExamplesMixin, WebTest):
    """
    End-to-end tests for the read-only v0.9 API and the
    candidates_cache_api_to_directory management command.

    setUp builds a small fixture: five people, memberships across the
    Dulwich and Edinburgh East ballots, one elected winner, one person
    marked as not standing, and a primary image for person 2009.
    """

    def setUp(self):
        super().setUp()
        person = PersonFactory.create(id=2009, name="Tessa Jowell")
        PersonImage.objects.update_or_create_from_file(
            EXAMPLE_IMAGE_FILENAME,
            "images/imported.jpg",
            person,
            defaults={
                "md5sum": "md5sum",
                "copyright": "example-license",
                "uploading_user": self.user,
                "user_notes": "Here's an image...",
                "is_primary": True,
                "source": "Found on the candidate's Flickr feed",
            },
        )
        dulwich_not_stand = PersonFactory.create(id=4322, name="Helen Hayes")
        edinburgh_candidate = PersonFactory.create(id="818", name="Sheila Gilmore")
        edinburgh_winner = PersonFactory.create(id="5795", name="Tommy Sheppard")
        edinburgh_may_stand = PersonFactory.create(id="5163", name="Peter McColl")
        MembershipFactory.create(
            person=person,
            post=self.dulwich_post,
            party=self.labour_party,
            ballot=self.dulwich_post_ballot,
        )
        MembershipFactory.create(person=person, ballot=self.edinburgh_east_post_ballot)
        MembershipFactory.create(
            person=dulwich_not_stand,
            post=self.dulwich_post,
            party=self.labour_party,
            ballot=self.dulwich_post_ballot_earlier,
        )
        # Helen Hayes stood earlier but is explicitly not standing in 2015.
        dulwich_not_stand.not_standing.add(self.election)
        MembershipFactory.create(
            person=edinburgh_winner,
            post=self.edinburgh_east_post,
            party=self.labour_party,
            elected=True,
            ballot=self.edinburgh_east_post_ballot,
        )
        MembershipFactory.create(
            person=edinburgh_candidate,
            post=self.edinburgh_east_post,
            party=self.labour_party,
            ballot=self.edinburgh_east_post_ballot,
        )
        MembershipFactory.create(
            person=edinburgh_may_stand,
            post=self.edinburgh_east_post,
            party=self.labour_party,
            ballot=self.edinburgh_east_post_ballot_earlier,
        )
        self.storage = DefaultStorage()

    def test_api_basic_response(self):
        # The API index should link to each collection endpoint.
        response = self.app.get("/api/v0.9/")
        self.assertEqual(response.status_code, 200)
        json = response.json
        self.assertEqual(json["persons"], "http://testserver/api/v0.9/persons/")
        self.assertEqual(json["organizations"], "http://testserver/api/v0.9/organizations/")
        self.assertEqual(json["elections"], "http://testserver/api/v0.9/elections/")
        self.assertEqual(json["posts"], "http://testserver/api/v0.9/posts/")
        persons_resp = self.app.get("/api/v0.9/persons/")
        self.assertEqual(persons_resp.status_code, 200)
        organizations_resp = self.app.get("/api/v0.9/organizations/")
        self.assertEqual(organizations_resp.status_code, 200)
        elections_resp = self.app.get("/api/v0.9/elections/")
        self.assertEqual(elections_resp.status_code, 200)
        posts_resp = self.app.get("/api/v0.9/posts/")
        self.assertEqual(posts_resp.status_code, 200)

    def test_api_home(self):
        # /api/ redirects; docs render; unknown versions/collections 404;
        # writes are forbidden on this read-only API.
        response = self.app.get("/api/")
        self.assertEqual(response.status_code, 302)
        response = self.app.get("/api/docs/")
        self.assertEqual(response.status_code, 200)
        response = self.app.get("/api/v0.8", expect_errors=True)
        self.assertEqual(response.status_code, 404)
        response = self.app.get("/api/v0.9/person/", expect_errors=True)
        self.assertEqual(response.status_code, 404)
        response = self.app.get("/api/v0.9/persons/4000/", expect_errors=True)
        self.assertEqual(response.status_code, 404)
        response = self.app.post("/api/v0.9/persons/", params={}, expect_errors=True)
        self.assertEqual(response.status_code, 403)

    def test_api_persons(self):
        persons_resp = self.app.get("/api/v0.9/persons/")
        persons = persons_resp.json
        self.assertEqual(persons["count"], len(persons["results"]))
        self.assertEqual(persons["count"], 5)

    def test_api_person(self):
        person_resp = self.app.get("/api/v0.9/persons/2009/")
        self.assertEqual(person_resp.status_code, 200)
        person = person_resp.json
        self.assertEqual(person["id"], 2009)
        self.assertEqual(person["name"], "Tessa Jowell")
        memberships = sorted(person["memberships"], key=lambda m: m["role"])
        self.assertEqual(len(memberships), 2)
        self.assertEqual(memberships[1]["role"], "Candidate")
        self.assertEqual(len(person["versions"]), 0)

    def _make_legacy_parties(self):
        """
        It used to be that political parties were stored on the
        "Organization" model.
        for maintaining v0.9 API compatibility we've not deleted them from
        that model (yet), so let's make the test data support this legacy
        case
        """
        from candidates.tests.factories import OrganizationFactory
        from candidates.tests.uk_examples import EXAMPLE_PARTIES

        for party in EXAMPLE_PARTIES:
            p = OrganizationFactory(slug=party["legacy_slug"], name=party["name"])

    def test_api_legacy_organizations_with_parties(self):
        self._make_legacy_parties()
        organizations_resp = self.app.get("/api/v0.9/organizations/")
        organizations = organizations_resp.json
        self.assertEqual(organizations["count"], len(organizations["results"]))
        self.assertEqual(organizations["count"], 7)

    def test_api_legacy_organization_with_parties(self):
        self._make_legacy_parties()
        organizations_resp = self.app.get("/api/v0.9/organizations/")
        organizations = organizations_resp.json
        organization_url = None
        # Find the detail URL for the Labour Party in the list response.
        for organization in organizations["results"]:
            if organization["id"] == "party:53":
                organization_url = organization["url"]
                break
        organization_resp = self.app.get(organization_url)
        self.assertEqual(organization_resp.status_code, 200)
        organization = organization_resp.json
        self.assertEqual(organization["id"], "party:53")
        self.assertEqual(organization["name"], "Labour Party")

    def test_api_elections(self):
        elections_resp = self.app.get("/api/v0.9/elections/")
        elections = elections_resp.json
        self.assertEqual(elections["count"], len(elections["results"]))
        self.assertEqual(elections["count"], 3)

    def test_api_elections_without_orgs(self):
        # Regression test that we can serve elections without an organzation
        self.election.organization = None
        self.election.save()
        elections_resp = self.app.get("/api/v0.9/elections/", expect_errors=True)
        self.assertEqual(elections_resp.status_code, 200)

    def test_api_election(self):
        elections_resp = self.app.get("/api/v0.9/elections/")
        elections = elections_resp.json
        election_url = None
        for election in elections["results"]:
            if election["id"] == "parl.2015-05-07":
                election_url = election["url"]
                break
        election_resp = self.app.get(election_url)
        self.assertEqual(election_resp.status_code, 200)
        election = election_resp.json
        self.assertEqual(election["id"], "parl.2015-05-07")
        self.assertEqual(election["name"], "2015 General Election")

    def test_api_posts(self):
        posts_resp = self.app.get("/api/v0.9/posts/")
        posts = posts_resp.json
        self.assertEqual(posts["count"], len(posts["results"]))
        self.assertEqual(posts["count"], 5)

    def test_api_post(self):
        posts_resp = self.app.get("/api/v0.9/posts/")
        posts = posts_resp.json
        post_url = None
        for post in posts["results"]:
            if post["id"] == "65808":
                post_url = post["url"]
                break
        self.assertTrue(post_url)
        post_resp = self.app.get(post_url)
        self.assertEqual(post_resp.status_code, 200)
        post = post_resp.json
        self.assertEqual(post["id"], "65808")
        self.assertEqual(post["label"], "Member of Parliament for Dulwich and West Norwood")

    def test_api_person_redirects(self):
        # String ids in, integer ids out of the serializer.
        PersonRedirect.objects.create(old_person_id="1234", new_person_id="42")
        PersonRedirect.objects.create(old_person_id="5678", new_person_id="12")
        person_redirects_resp = self.app.get("/api/v0.9/person_redirects/")
        person_redirects = person_redirects_resp.json
        self.assertEqual(person_redirects["results"][0]["old_person_id"], 1234)
        self.assertEqual(person_redirects["results"][0]["new_person_id"], 42)
        self.assertEqual(person_redirects["results"][1]["old_person_id"], 5678)
        self.assertEqual(person_redirects["results"][1]["new_person_id"], 12)

    def test_api_person_redirect(self):
        PersonRedirect.objects.create(old_person_id="1234", new_person_id="42")
        url = "/api/v0.9/person_redirects/1234/"
        person_redirect_resp = self.app.get(url)
        person_redirect = person_redirect_resp.json
        self.assertEqual(person_redirect["old_person_id"], 1234)
        self.assertEqual(person_redirect["new_person_id"], 42)

    def test_api_version_info(self):
        version_resp = self.app.get("/version.json")
        self.assertEqual(version_resp.status_code, 200)
        info = version_resp.json
        self.assertEqual(info["users_who_have_edited"],
                         0)
        self.assertEqual(info["interesting_user_actions"], 0)
        # Only some action types count as "interesting".
        LoggedAction.objects.create(action_type="set-candidate-not-elected")
        LoggedAction.objects.create(action_type="edit-candidate")
        version_resp = self.app.get("/version.json")
        info = version_resp.json
        self.assertEqual(info["interesting_user_actions"], 1)

    def test_api_cors_headers(self):
        # CORS is enabled for API URLs only.
        resp = self.app.get("/api/v0.9/", headers={"Origin": b"http://example.com"})
        self.assertTrue("Access-Control-Allow-Origin" in resp.headers)
        self.assertEqual(resp.headers["Access-Control-Allow-Origin"], "*")
        resp = self.app.get("/", headers={"Origin": b"http://example.com"})
        self.assertFalse("Access-Control-Allow-Origin" in resp.headers)

    def test_api_jsonp_response(self):
        response = self.app.get("/api/v0.9/?format=jsonp&callback=test")
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response.text.startswith("test("))

    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_persons_api_to_directory(self, mock_datetime):
        """Run the cache command once and check the paginated JSON it writes."""
        # current
        # timestamped
        # timestamped
        # Freeze "now" so the timestamped output directory name is predictable.
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        target_directory = settings.MEDIA_ROOT  # NOTE(review): unused local
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf",
        )
        expected_leafname = "2017-05-14T12:33:05"
        expected_timestamped_directory = join(settings.MEDIA_ROOT, "cached-api", expected_leafname)
        expected_path = join("cached-api", "latest")
        self.assertTrue(self.storage.exists(expected_timestamped_directory))
        self.assertTrue(self.storage.exists(expected_path))
        # Check that the files in that directory are as expected:
        entries = self.storage.listdir(expected_timestamped_directory)[1]
        people_1_leafname = "people-000001.json"
        people_2_leafname = "people-000002.json"
        ballot_1_leafname = "ballots-000001.json"
        ballot_2_leafname = "ballots-000002.json"
        ballot_3_leafname = "ballots-000003.json"
        self.assertEqual(
            set(entries),
            {
                people_1_leafname,
                people_2_leafname,
                ballot_1_leafname,
                ballot_2_leafname,
                ballot_3_leafname,
            },
        )
        # Get the data from those pages:
        with self.storage.open(
                join(expected_timestamped_directory, people_1_leafname)) as f:
            persons_1_data = json.loads(f.read().decode("utf8"))
        with self.storage.open(
                join(expected_timestamped_directory, people_2_leafname)) as f:
            persons_2_data = json.loads(f.read().decode("utf8"))
        # Check the previous and next links are as we expect:
        self.assertEqual(
            persons_1_data["next"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, people_2_leafname),
        )
        self.assertEqual(persons_1_data["previous"], None)
        self.assertEqual(persons_2_data["next"], None)
        self.assertEqual(
            persons_2_data["previous"],
            "https://example.com/media/api-cache-for-wcivf/{}/{}".format(
                expected_leafname, people_1_leafname),
        )
        # Check that the URL of the first person is as expected,
        # as well as it being the right person:
        first_person = persons_1_data["results"][0]
        self.assertEqual(first_person["id"], 818)
        self.assertEqual(first_person["name"], "Sheila Gilmore")
        self.assertEqual(
            first_person["url"],
            "https://candidates.democracyclub.org.uk/api/next/people/818/?format=json",
        )

    def _setup_cached_api_directory(self, dir_list):
        """
        Saves a tmp file in settings.MEDIA_ROOT, called `.keep` in each
        directory in dir_list.
        """
        for d in dir_list:
            self.storage.save(join("cached-api", d, ".keep"), ContentFile("."))

    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_cache_api_to_directory_prune(self, mock_datetime):
        """Old timestamped directories are pruned when --prune is passed."""
        # Need to make sure datetime.strptime still works:
        mock_datetime.strptime.side_effect = datetime.strptime
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        expected_to_prune = [
            "2017-05-12T08:00:00",
            "2017-05-12T10:00:00",
            "2017-05-12T12:00:00",
        ]
        expected_to_keep = [
            "2017-05-14T08:00:00",
            "2017-05-14T10:00:00",
            "2017-05-14T12:00:00",
            "2017-05-14T12:33:05",
        ]
        self._setup_cached_api_directory(expected_to_keep + expected_to_prune)
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T08:00:00", ".keep")))
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf/",
            prune=True,
        )
        for dir_name in expected_to_keep:
            self.assertTrue(
                self.storage.exists(join("cached-api", dir_name, ".keep")))
        for dir_name in expected_to_prune:
            self.assertFalse(
                self.storage.exists(join("cached-api", dir_name, ".keep")))

    @patch(
        "candidates.management.commands.candidates_cache_api_to_directory.datetime"
    )
    def test_cache_api_to_directory_prune_four_old(self, mock_datetime):
        """Pruning always keeps the most recent directories, even if old."""
        # Need to make sure datetime.strptime still works:
        mock_datetime.strptime.side_effect = datetime.strptime
        mock_datetime.now.return_value = datetime(2017, 5, 14, 12, 33, 5, 0)
        expected_to_prune = ["2017-05-12T06:00:00"]
        expected_to_keep = [
            "2017-05-12T08:00:00",
            "2017-05-12T10:00:00",
            "2017-05-12T12:00:00",
            "2017-05-14T12:33:05",
        ]
        self._setup_cached_api_directory(expected_to_keep + expected_to_prune)
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T06:00:00", ".keep")))
        self.assertTrue(
            self.storage.exists(
                join("cached-api", "2017-05-12T08:00:00", ".keep")))
        call_command(
            "candidates_cache_api_to_directory",
            page_size="3",
            url_prefix="https://example.com/media/api-cache-for-wcivf/",
            prune=True,
        )
        # Even though all of those directories are more than 36
        # hours old, they should all be kept because they're the
        # most recent 4:
        for dir_name in expected_to_keep:
            self.assertTrue(
                self.storage.exists(join("cached-api", dir_name, ".keep")))
        for dir_name in expected_to_prune:
            self.assertFalse(
                self.storage.exists(join("cached-api", dir_name, ".keep")))

    def test_legacy_redirects(self):
        # Old numeric election ids 301-redirect to the new slug form.
        req = self.app.get("/api/v0.9/elections/2010/")
        self.assertEqual(req.status_code, 301)
        self.assertEqual(req.location, "/api/v0.9/elections/parl.2010-05-06/")
        req = self.app.get("/api/v0.9/elections/2010.json")
        self.assertEqual(req.status_code, 301)
        self.assertEqual(req.location, "/api/v0.9/elections/parl.2010-05-06.json")

    def test_legacy_contact_details(self):
        # PersonIdentifier values are exposed via the legacy
        # contact_details field in v0.9.
        person = PersonFactory()
        PersonIdentifier.objects.create(value_type="twitter_username", value="Froglet4MP", person=person)
        req = self.app.get("/api/v0.9/persons/{}/".format(person.pk))
        person_json = req.json
        self.assertTrue("contact_details" in person_json)
        self.assertEqual(person_json["contact_details"][0]["contact_type"], "twitter")
        self.assertEqual(person_json["contact_details"][0]["value"], "Froglet4MP")