def test_create_status_url():
    domain = "http://domain.tld"
    urlpath = "/rp/jobs/"
    hexes = ["".join([choice(hexdigits) for i in range(32)]) for j in range(6)]

    def _check(port=None):
        for hx in hexes:
            url = urlparse(create_status_url(hx))
            assert domain == "%s://%s" % (url.scheme, url.hostname)
            if port is None:
                assert url.port is port
            else:
                assert url.port == port
            assert "%s%s/" % (urlpath, hx) == url.path

            url = urlparse(create_status_url(hx, sub_path="%s/" % hx[:10]))
            assert domain == "%s://%s" % (url.scheme, url.hostname)
            if port is None:
                assert url.port is port
            else:
                assert url.port == port
            assert "%s%s%s/" % (urlpath, "%s/" % hx[:10], hx) == url.path

    with patch.object(settings, "CANONICAL_PORT", "2323"):
        _check(port=2323)

    for port in ("80", "443", ""):
        with patch.object(settings, "CANONICAL_PORT", port):
            _check()

    with pytest.raises(ValueError) as err:
        create_status_url("something", "something-without-a-slash")
    assert isinstance(err.value, ValueError)
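# A minimal sketch of what the create_status_url helper under test plausibly
# looks like, reconstructed only from the assertions above. The
# CANONICAL_HOSTNAME setting name and the module location are assumptions;
# only CANONICAL_PORT, the "/rp/jobs/" prefix, the trailing-slash rule for
# sub_path, and the ValueError are confirmed by the test.
from django.conf import settings


def create_status_url(job_id, sub_path=""):
    if sub_path and not sub_path.endswith("/"):
        raise ValueError("sub_path must end with a slash")
    port = settings.CANONICAL_PORT
    # Ports 80 and 443 (and an empty setting) are implied by the scheme and
    # therefore left out of the URL, matching the _check(port=None) branch.
    hostport = "" if port in ("80", "443", "") else ":%s" % port
    return "%s%s/rp/jobs/%s%s/" % (
        settings.CANONICAL_HOSTNAME, hostport, sub_path, job_id)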
def test_create_ints(self):
    data, response = self._create_ints()
    expected = dict(self.defaults)
    expected.update({k: data[k] for k in data})
    expected["url"] = create_status_url(fake_pipeline_id,
                                        sub_path="regulations/")
    self._stock_response_check(expected, response.data)
    return expected
def test_create(self):
    file_data = self._create_file()
    data = {
        "file_hexhash": file_data["hexhash"],
        "notification_email": "*****@*****.**"
    }
    response = self._postjson(data)
    expected = dict(self.defaults)
    expected.update({k: data[k] for k in data})
    expected["url"] = create_status_url(fake_pipeline_id,
                                        sub_path="notices/")
    self._stock_response_check(expected, response.data)
    return expected
def create(self, request, *args, **kwargs):
    """
    Overrides the ``create`` method of ``mixins.CreateModelMixin`` in
    order to add the new job to the Redis queue.

    Side effects
        Via ``queue_eregs_job`` and ``PipelineJobSerializer.save``,
        alters the redis queue and the DB.

    :arg HttpRequest request: the incoming request.

    :rtype: Response
    :returns: JSON or HTML of the information about the job (status 201),
        or about why the job couldn't be added (status 400).
    """
    serialized = self.get_serializer(data=request.data)
    serialized.is_valid(raise_exception=True)

    eregs_args = self.build_eregs_args(serialized.validated_data)
    job = queue_eregs_job(eregs_args, timeout=60 * 30, result_ttl=-1)

    # Paranoia--validate the values we provide:
    job_id = job.id
    for validator in serialized.get_fields()["job_id"].validators:
        validator(job_id)
    statusurl = create_status_url(job_id, sub_path=self.sub_path)
    for validator in serialized.get_fields()["url"].validators:
        validator(statusurl)

    if serialized.validated_data.get("notification_email"):
        queue_notification_email(
            job, statusurl,
            serialized.validated_data["notification_email"])

    serialized.save(job_id=job_id, url=statusurl,
                    destination=eregs_site_api_url)
    headers = self.get_success_headers(serialized.data)
    # Adding the Refresh header here so that the browser does the
    # user-friendly thing of redirecting the user to the page for the
    # newly-created object, even though use of the Refresh header is
    # frowned upon in some circles.
    #
    # Not using redirect via 302 or 303 so that non-browser users get the
    # 201 status code they expect upon a successful POST.
    #
    # I'm open to debate on this decision.
    headers["Refresh"] = "0;url={0}".format(statusurl)
    return Response(serialized.data, status=status.HTTP_201_CREATED,
                    headers=headers)
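# A sketch (not the project's actual code) of how queue_eregs_job might wrap
# python-rq, inferred from the timeout/result_ttl keywords passed in create()
# above. The queue setup, connection handling, and the run_eregs_command task
# are assumptions; result_ttl=-1 keeps the finished job's result indefinitely.
import redis
from rq import Queue


def run_eregs_command(eregs_args):
    """Hypothetical task body standing in for the real parser invocation."""
    return eregs_args


def queue_eregs_job(eregs_args, timeout=60 * 30, result_ttl=-1):
    queue = Queue(connection=redis.Redis())
    # enqueue_call lets RQ's own timeout/result_ttl be passed explicitly
    # without colliding with the task function's keyword arguments.
    return queue.enqueue_call(func=run_eregs_command,
                              args=(eregs_args,),
                              timeout=timeout,
                              result_ttl=result_ttl)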
def test_create_strings(self):
    data = {
        "cfr_title": "0",
        "cfr_part": "0",
        "notification_email": "*****@*****.**"
    }
    response = self._postjson(data)
    expected = dict(self.defaults)
    expected.update({k: data[k] for k in data})
    # Even if the input is a str, the return values should be ints:
    expected["cfr_title"] = int(expected["cfr_title"])
    expected["cfr_part"] = int(expected["cfr_part"])
    expected["url"] = create_status_url(fake_pipeline_id,
                                        sub_path="regulations/")
    self._stock_response_check(expected, response.data)
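# Rough sketches of the shared helpers the test methods above rely on,
# inferred only from how they are called. The mixin name, the request path,
# and the value of fake_pipeline_id are assumptions, not the project's actual
# definitions; the mixin assumes it is combined with a Django/DRF test case
# that provides self.client and self.defaults.
import json

fake_pipeline_id = "0" * 32  # placeholder job id, value assumed


class JobViewTestHelpers(object):  # hypothetical mixin name

    def _postjson(self, data, path="/rp/jobs/regulations/"):
        # POST the payload as JSON and return the response for inspection.
        return self.client.post(path, json.dumps(data),
                                content_type="application/json")

    def _stock_response_check(self, expected, actual):
        # Every expected field must come back unchanged in the response body.
        for key, value in expected.items():
            assert actual[key] == value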