def fake_requests_for_mapit(url):
    """Return reduced MapIt output for some known URLs"""
    if url == urljoin(settings.MAPIT_BASE_URL, '/postcode/sw1a1aa'):
        status_code = 200
        json_result = sw1a1aa_result
    elif url == urljoin(settings.MAPIT_BASE_URL, '/postcode/se240ag'):
        status_code = 200
        json_result = se240ag_result
    elif url == urljoin(settings.MAPIT_BASE_URL, '/postcode/cb28rq'):
        status_code = 404
        json_result = {
            "code": 404,
            "error": "No Postcode matches the given query."
        }
    elif url == urljoin(settings.MAPIT_BASE_URL, '/postcode/foobar'):
        status_code = 400
        json_result = {
            "code": 400,
            "error": "Postcode 'FOOBAR' is not valid."
        }
    else:
        raise Exception("URL that hasn't been mocked yet: " + url)
    return Mock(**{
        'json.return_value': json_result,
        'status_code': status_code
    })
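
A minimal, self-contained sketch of the pattern these tests rely on: requests.get is patched with a side_effect callable that returns a Mock shaped like a requests.Response. The URL, payload and patch target below are stand-ins for illustration (they assume only that the requests library is installed), not the project's real fixtures.

from unittest import mock

def fake_get(url, *args, **kwargs):
    # Return an object that quacks like requests.Response for one known URL.
    if url == 'http://mapit.example.org/postcode/sw1a1aa':
        return mock.Mock(**{'json.return_value': {'areas': {}}, 'status_code': 200})
    raise Exception("URL that hasn't been mocked yet: " + url)

with mock.patch('requests.get', side_effect=fake_get):
    import requests
    r = requests.get('http://mapit.example.org/postcode/sw1a1aa')
    assert r.status_code == 200 and r.json() == {'areas': {}}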
Example 2
    def test_delete_elections_with_matches(self, mock_requests):
        # import some data
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            local_highland,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            no_results,
        })
        call_command("uk_create_elections_from_every_election")
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 1)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 1)

        # now we've switched the fixtures round
        # so the records we just imported are deleted in EE
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            no_results,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            local_highland,
        })
        call_command("uk_create_elections_from_every_election")

        # we should delete the records we just imported
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 0)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 0)
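
create_mock_with_fixtures itself does not appear in these snippets. A plausible reconstruction, assuming it simply maps known URLs to canned JSON payloads and otherwise behaves like the fake_requests_* helpers (a sketch, not the project's actual implementation):

from unittest.mock import Mock

def create_mock_with_fixtures(fixtures):
    """Return a requests.get side_effect serving canned JSON keyed by URL."""
    def fake_get(url, *args, **kwargs):
        try:
            result = fixtures[url]
        except KeyError:
            raise Exception("URL that hasn't been mocked yet: " + url)
        # The fixtures in these tests all stand for successful responses.
        return Mock(**{'json.return_value': result, 'status_code': 200})
    return fake_get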
Example 4
    def test_delete_elections_invalid_input_insert_and_delete(
            self, mock_requests):
        # import some data
        # just so we've got a non-empty DB
        mock_requests.get.side_effect = (
            fake_requests_each_type_of_election_on_one_day)
        call_command("uk_create_elections_from_every_election")
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 15)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 10)

        # this simulates a situation where EE reports
        # the same election/s as deleted and not deleted
        # this makes no sense and shouldn't happen but
        # if it does we should not delete anything
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            local_highland,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            local_highland,
        })

        # make sure we throw an exception
        with self.assertRaises(Exception):
            call_command("uk_create_elections_from_every_election")

        # we should also roll back the whole transaction
        # so nothing is inserted or deleted
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 15)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 10)
Example 5
    def test_delete_elections_with_related_membership(self, mock_requests):
        # import some data
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            local_highland,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            no_results,
        })
        call_command("uk_create_elections_from_every_election")
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 1)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 1)

        # create a membership which references the PEE we just imported
        MembershipFactory(
            person=PersonFactory.create(id=2009, name="Tessa Jowell"),
            post_election=every_election.PostExtraElection.objects.all()[0],
        )

        # now we've switched the fixtures round
        # so the records we just imported are deleted in EE
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            no_results,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            local_highland,
        })
        # make sure we throw an exception
        with self.assertRaises(Exception):
            call_command("uk_create_elections_from_every_election")

        # we should also roll back the whole transaction so nothing is deleted
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 1)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 1)
Example 6
    def test_delete_elections_no_matches(self, mock_requests):
        # import some data
        # just so we've got a non-empty DB
        mock_requests.get.side_effect = (
            fake_requests_each_type_of_election_on_one_day)
        call_command("uk_create_elections_from_every_election")
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 15)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 10)

        # now we're going to delete some stuff
        # but none of the elections in the
        # local_highland fixture
        # match anything we just imported
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(EE_BASE_URL, "/api/elections/?current=True"):
            no_results,
            urljoin(EE_BASE_URL, "/api/elections/?current=1&deleted=1"):
            local_highland,
        })
        # this should finish cleanly without complaining
        call_command("uk_create_elections_from_every_election")

        # nothing in our DB should have changed
        self.assertEqual(
            every_election.PostExtraElection.objects.all().count(), 15)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 10)
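
The no_results fixture is also not shown. Judging from the current_elections/deleted_elections dicts built by hand in a later example, an empty page of the EE elections API presumably looks like this:

no_results = {
    "count": 0,
    "next": None,
    "previous": None,
    "results": [],
}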
Example 7
def fake_requests_for_every_election(url, *args, **kwargs):
    """Return reduced EE output for some known URLs"""

    EE_BASE_URL = getattr(
        settings, "EE_BASE_URL", "https://elections.democracyclub.org.uk/")
    if url == urljoin(EE_BASE_URL,
                      '/api/elections/?postcode=se240ag'):
        status_code = 200
        json_result = ee_se240ag_result
    elif url == urljoin(EE_BASE_URL,
                      '/api/elections/?postcode=sw1a1aa'):
        status_code = 200
        json_result = ee_sw1a1aa_result
    elif url == urljoin(EE_BASE_URL, '/api/elections/?postcode=cb28rq'):
        status_code = 404
        json_result = {
            "code": 404,
            "error": "No Postcode matches the given query."
        }
    else:
        raise Exception("URL that hasn't been mocked yet: " + url)
    return Mock(**{
        'json.return_value': json_result,
        'status_code': status_code
    })
Example 8
def fake_requests_for_every_election(url, *args, **kwargs):
    """Return reduced EE output for some known URLs"""

    EE_BASE_URL = getattr(settings, "EE_BASE_URL",
                          "https://elections.democracyclub.org.uk/")

    if url == urljoin(EE_BASE_URL, '/api/elections/?postcode=se240ag'):
        status_code = 200
        json_result = ee_se240ag_result
    elif url == urljoin(EE_BASE_URL, '/api/elections/?postcode=se240xx'):
        status_code = 400
        json_result = {"detail": "Unknown postcode"}
    elif url == urljoin(EE_BASE_URL,
                        '/api/elections/?coords=-0.143207%2C51.5'):
        status_code = 200
        json_result = ee_se240ag_result
    elif url == urljoin(EE_BASE_URL, '/api/elections/?postcode=sw1a1aa'):
        status_code = 200
        json_result = ee_sw1a1aa_result
    elif url == urljoin(EE_BASE_URL, '/api/elections/?postcode=cb28rq'):
        status_code = 404
        json_result = {
            "code": 404,
            "error": 'The url "{}" couldn’t be found'.format(url)
        }
    else:
        raise Exception("URL that hasn't been mocked yet: " + url)
    return Mock(**{
        'json.return_value': json_result,
        'status_code': status_code
    })
Example 9
    def test_import_post_duplicate_slugs(self, mock_requests):
        """
        Regression test to check that posts with duplicate slugs across different organisations are imported correctly

        :param mock_requests:
        :return:
        """
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(
                EE_BASE_URL,
                "/api/elections/?poll_open_date__gte=2018-01-03",
            ):
            duplicate_post_names,
            urljoin(
                EE_BASE_URL,
                "/api/elections/?deleted=1&poll_open_date__gte=2018-01-03",
            ):
            local_highland,
        })
        call_command("uk_create_elections_from_every_election")
        post_a, post_b = Post.objects.all().order_by("-slug",
                                                     "-organization__name")

        self.assertEqual(post_a.slug, "st-michaels")
        self.assertEqual(post_a.identifier, "SUR:st-michaels")
        self.assertEqual(post_a.start_date, "2019-05-02")
        self.assertEqual(post_a.organization.name,
                         "Surrey Heath Borough Council")

        self.assertEqual(post_b.slug, "st-michaels")
        self.assertEqual(post_b.identifier, "ALL:st-michaels")
        self.assertEqual(post_b.start_date, "2019-05-02")
        self.assertEqual(post_b.organization.name, "Allerdale Borough Council")
Example 10
def fake_requests_for_every_election(url, *args, **kwargs):
    """Return reduced EE output for some known URLs"""

    EE_BASE_URL = getattr(settings, "EE_BASE_URL",
                          "https://elections.democracyclub.org.uk/")
    result = None

    if url == urljoin(EE_BASE_URL, 'api/elections/?current=True'):
        status_code = 200
        result = current_elections

    if url == urljoin(
            EE_BASE_URL,
            '/api/elections/?current=True&limit=100&offset=100'):  # noqa
        status_code = 200
        result = current_elections_page_2

    if url == urljoin(
            EE_BASE_URL,
            '/api/elections/?current=True&poll_open_date=2019-01-17'):  # noqa
        status_code = 200
        result = each_type_of_election_on_one_day

    if not result:
        raise Exception("URL that hasn't been mocked yet: " + url)

    return Mock(**{'json.return_value': result, 'status_code': status_code})
Example 11
    def test_delete_elections_invalid_input_non_empty_election(
            self, mock_requests):
        # import some data
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(
                EE_BASE_URL,
                "/api/elections/?poll_open_date__gte=2018-01-03",
            ):
            local_highland,
            urljoin(
                EE_BASE_URL,
                "/api/elections/?deleted=1&poll_open_date__gte=2018-01-03",
            ):
            no_results,
        })
        call_command("uk_create_elections_from_every_election")
        self.assertEqual(every_election.Ballot.objects.all().count(), 1)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 1)

        # this simulates a situation where EE reports
        # a ballot as current but its parent election as
        # deleted this makes no sense and shouldn't happen
        # but if it does we should not delete anything
        current_elections = {
            "count": 1,
            "next": None,
            "previous": None,
            "results": [local_highland["results"][1]],
        }
        deleted_elections = {
            "count": 1,
            "next": None,
            "previous": None,
            "results": [local_highland["results"][0]],
        }
        mock_requests.get.side_effect = create_mock_with_fixtures({
            urljoin(
                EE_BASE_URL,
                "/api/elections/?poll_open_date__gte=2018-01-03",
            ):
            current_elections,
            urljoin(
                EE_BASE_URL,
                "/api/elections/?deleted=1&poll_open_date__gte=2018-01-03",
            ):
            deleted_elections,
        })

        # make sure we throw an exception
        with self.assertRaises(Exception):
            call_command("uk_create_elections_from_every_election")

        # we should also roll back the whole transaction
        # so nothing is inserted or deleted
        self.assertEqual(every_election.Ballot.objects.all().count(), 1)
        self.assertEqual(every_election.YNRElection.objects.all().count(), 1)
Example 12
def get_areas_from_postcode(postcode):
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL, '/postcode/' + urlquote(postcode))
    r = requests.get(url, headers={'User-Agent': 'scraper/sym', })
    if r.status_code == 200:
        mapit_result = r.json()
        areas = get_known_area_types(mapit_result['areas'])
        cache.set(cache_key, areas, settings.MAPIT_CACHE_SECONDS)
        return areas
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(
                postcode
            )
        )
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                postcode
            )
        )
Example 13
 def get_logout_url(self, redirect_url=None):
     """Generates CAS logout URL"""
     url = urllib_parse.urljoin(self.server_url, 'logout')
     if redirect_url:
         param_name = self._get_logout_redirect_parameter_name()
         url += '?' + urllib_parse.urlencode({param_name: redirect_url})
     return url
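
One detail worth flagging for the urljoin calls throughout these examples: urljoin only appends the second argument to the base path when the base ends with a slash, and a second argument with a leading slash replaces the base path entirely. For example:

from urllib.parse import urljoin

urljoin('https://cas.example.org/cas/', 'login')   # 'https://cas.example.org/cas/login'
urljoin('https://cas.example.org/cas', 'login')    # 'https://cas.example.org/login'
urljoin('https://cas.example.org/cas/', '/login')  # 'https://cas.example.org/login'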
Example 14
    def verify_ticket(self, ticket):
        """Verifies CAS 2.0+ XML-based authentication ticket."""
        try:
            from xml.etree import ElementTree
        except ImportError:
            from elementtree import ElementTree

        user = None
        pgtiou = None

        params = [('ticket', ticket), ('service', self.service_url)]
        if self.proxy_callback:
            params.append(('pgtUrl', self.proxy_callback))

        url = (urllib_parse.urljoin(self.server_url, 'serviceValidate') + '?' +
               urllib_parse.urlencode(params))
        page = urlopen(url)
        try:
            response = page.read()
            tree = ElementTree.fromstring(response)
            if tree[0].tag.endswith('authenticationSuccess'):
                for element in tree[0]:
                    if element.tag.endswith('user'):
                        user = element.text
                    elif element.tag.endswith('proxyGrantingTicket'):
                        pgtiou = element.text
                return user, None, pgtiou
            else:
                return None, None, None
        finally:
            page.close()
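
For reference, the response shape this parser expects is the standard CAS 2.0 serviceValidate success envelope. A small self-contained check (the user name is made up):

from xml.etree import ElementTree

sample = b"""<cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>
  <cas:authenticationSuccess>
    <cas:user>jsmith</cas:user>
  </cas:authenticationSuccess>
</cas:serviceResponse>"""

tree = ElementTree.fromstring(sample)
assert tree[0].tag.endswith('authenticationSuccess')
assert [e.text for e in tree[0] if e.tag.endswith('user')] == ['jsmith']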
Example 15
def _verify_cas3(ticket, service):
    """Verifies CAS 3.0+ XML-based authentication ticket and returns extended attributes.

    Returns username on success and None on failure.
    """

    try:
        from xml.etree import ElementTree
    except ImportError:
        from elementtree import ElementTree

    params = [('ticket', ticket), ('service', service)]
    url = (urllib_parse.urljoin(settings.CAS_SERVER_URL, 'proxyValidate') + '?' +
           urllib_parse.urlencode(params))
    page = urlopen(url)
    try:
        user = None
        attributes = {}
        response = page.read()
        tree = ElementTree.fromstring(response)
        if tree[0].tag.endswith('authenticationSuccess'):
            for element in tree[0]:
                if element.tag.endswith('user'):
                    user = element.text
                elif element.tag.endswith('attributes'):
                    for attribute in element:
                        attributes[attribute.tag.split("}").pop()] = attribute.text
        return user, attributes
    finally:
        page.close()
Example 16
 def load_data(self, code):
     data = requests.get(
         urljoin(
             self.mapit_base_url,
             "areas/{code}".format(mapit_base_url=self.mapit_base_url,
                                   code=code))).json()
     self.update(data)
Example 17
 def format_elections_html_url(self):
     endpoint = "mgElectionElectionAreaResults.aspx"
     url = urljoin(self.base_domain, endpoint)
     url = "https://{}?Page=all&EID=".format(url)
     if self.http_only:
         url = url.replace("https://", "http://")
     return url
Example 19
 def upload_images(self, emblems, party_extra):
     content_type = ContentType.objects.get_for_model(party_extra)
     sort_emblems(emblems, party_extra.slug)
     primary = True
     for emblem in emblems:
         emblem_id = str(emblem['Id'])
         ntf = NamedTemporaryFile(delete=False)
         image_url = urljoin(base_emblem_url, emblem_id)
         r = requests.get(image_url)
         with open(ntf.name, 'wb') as f:
             f.write(r.content)
         mime_type = self.mime_type_magic.from_file(ntf.name)
         extension = mimetypes.guess_extension(mime_type)
         leafname = 'Emblem_{0}{1}'.format(emblem_id, extension)
         desired_storage_path = join('images', leafname)
         fname = join(emblem_directory, leafname)
         move(ntf.name, fname)
         md5sum = get_file_md5sum(fname)
         ImageExtra.objects.update_or_create_from_file(
             fname,
             desired_storage_path,
             md5sum=md5sum,
             defaults={
                 'uploading_user':None,
                 'md5sum': md5sum,
                 'notes': emblem['MonochromeDescription'],
                 'base__source': 'The Electoral Commission',
                 'base__is_primary': primary,
                 'base__object_id': party_extra.id,
                 'base__content_type_id': content_type.id,
             }
         )
         primary = False
Example 20
def get_areas_from_postcode(postcode):
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL, '/postcode/' + urlquote(postcode))
    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()
        areas = get_known_area_types(mapit_result['areas'])
        cache.set(cache_key, areas, settings.MAPIT_CACHE_SECONDS)
        return areas
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(
                postcode
            )
        )
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                postcode
            )
        )
Example 21
def get_areas_from_coords(coords):
    url = urljoin(
        settings.MAPIT_BASE_URL,
        '/point/4326/' + urlquote(coords)
        )

    cache_key = 'mapit-postcode:' + coords
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    r = requests.get(url, headers={'User-Agent': 'scraper/sym', })
    if r.status_code == 200:
        mapit_result = r.json()
        areas = get_known_area_types(mapit_result)
        cache.set(cache_key, areas, settings.MAPIT_CACHE_SECONDS)
        return areas
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadCoordinatesException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadCoordinatesException(
            'The coordinates "{0}" could not be found'.format(
                coords
            )
        )
    else:
        raise UnknownMapitException(
            'Unknown MapIt error for coordinates "{0}"'.format(
                coords
            )
        )
Example 22
def get_areas_from_postcode(original_postcode):
    postcode = re.sub(r'(?ms)\s*', '', original_postcode.lower())
    if re.search(r'[^a-z0-9]', postcode):
        raise BadPostcodeException(
            _('There were disallowed characters in "{0}"').format(
                original_postcode))
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL,
                  '/postcode/{0}'.format(urlquote(postcode)))
    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()
        result = get_known_area_types(mapit_result['areas'])
        cache.set(cache_key, result, settings.MAPIT_CACHE_SECONDS)
        return result
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(
                original_postcode))
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                original_postcode))
Example 23
def get_areas_from_postcode(original_postcode):
    postcode = re.sub(r'(?ms)\s*', '', original_postcode.lower())
    if re.search(r'[^a-z0-9]', postcode):
        raise BadPostcodeException(
            _('There were disallowed characters in "{0}"').format(original_postcode)
        )
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL,
                  '/postcode/{0}'.format(urlquote(postcode)))
    r = requests.get(url, headers={'User-Agent': 'scraper/sym', })
    if r.status_code == 200:
        mapit_result = r.json()
        result = get_known_area_types(mapit_result['areas'])
        cache.set(cache_key, result, settings.MAPIT_CACHE_SECONDS)
        return result
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(original_postcode)
        )
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                original_postcode
            )
        )
Example 24
 def upload_images(self, emblems, party_extra):
     content_type = ContentType.objects.get_for_model(party_extra)
     sort_emblems(emblems, party_extra.slug)
     primary = True
     for emblem in emblems:
         emblem_id = str(emblem['Id'])
         ntf = NamedTemporaryFile(delete=False)
         image_url = urljoin(base_emblem_url, emblem_id)
         r = requests.get(image_url)
         with open(ntf.name, 'wb') as f:
             f.write(r.content)
         mime_type = self.mime_type_magic.from_file(ntf.name)
         extension = mimetypes.guess_extension(mime_type)
         leafname = 'Emblem_{0}{1}'.format(emblem_id, extension)
         desired_storage_path = join('images', leafname)
         fname = join(emblem_directory, leafname)
         move(ntf.name, fname)
         md5sum = get_file_md5sum(fname)
         ImageExtra.objects.update_or_create_from_file(
             fname,
             desired_storage_path,
             md5sum=md5sum,
             base__object_id=party_extra.id,
             base__content_type_id=content_type.id,
             defaults={
                 'uploading_user': None,
                 'notes': emblem['MonochromeDescription'],
                 'base__source': 'The Electoral Commission',
                 'base__is_primary': primary,
             })
         primary = False
Example 25
def get_areas_from_coords(coords):
    url = urljoin(
        settings.MAPIT_BASE_URL,
        '/point/4326/' + urlquote(coords)
        )

    cache_key = 'mapit-postcode:' + coords
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()
        areas = get_known_area_types(mapit_result)
        cache.set(cache_key, areas, settings.MAPIT_CACHE_SECONDS)
        return areas
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadCoordinatesException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadCoordinatesException(
            'The coordinates "{0}" could not be found'.format(
                coords
            )
        )
    else:
        raise UnknownMapitException(
            'Unknown MapIt error for coordinates "{0}"'.format(
                coords
            )
        )
Example 26
    def verify_ticket(self, ticket):
        """Verifies CAS 2.0+ XML-based authentication ticket.

        Returns username on success and None on failure.
        """
        try:
            from xml.etree import ElementTree
        except ImportError:
            from elementtree import ElementTree

        user = None
        pgtiou = None

        params = [('ticket', ticket), ('service', self.service_url)]
        if self.proxy_callback:
            params.append(('pgtUrl', self.proxy_callback))

        url = (urllib_parse.urljoin(self.server_url, 'serviceValidate') + '?' +
               urllib_parse.urlencode(params))
        page = urlopen(url)
        try:
            response = page.read()
            tree = ElementTree.fromstring(response)
            if tree[0].tag.endswith('authenticationSuccess'):
                for element in tree[0]:
                    if element.tag.endswith('user'):
                        user = element.text
                    elif element.tag.endswith('proxyGrantingTicket'):
                        pgtiou = element.text
                return user, None, pgtiou
            else:
                return None, None, None
        finally:
            page.close()
Example 27
def get_wmc_from_postcode(original_postcode):
    postcode = re.sub(r'(?ms)\s*', '', original_postcode.lower())
    if re.search(r'[^a-z0-9]', postcode):
        raise BadPostcodeException(
            _('There were disallowed characters in "{0}"').format(
                original_postcode))
    cached_result = cache.get(postcode)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL,
                  '/postcode/{0}'.format(urlquote(postcode)))
    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()
        wmc = mapit_result.get('shortcuts', {}).get('WMC')
        if not wmc:
            raise NoConstituencyForPostcodeException(
                _('No constituency found for the postcode "{0}"').format(
                    original_postcode))
        cache.set(postcode, wmc, settings.MAPIT_CACHE_SECONDS)
        return wmc
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(
                original_postcode))
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                original_postcode))
Example 28
 def format_elections_index_url(self):
     endpoint = "mgManageElectionResults.aspx"
     url = urljoin(self.base_domain, endpoint)
     url = "https://{}".format(url)
     if self.http_only:
         url = url.replace("https://", "http://")
     return url
Example 29
 def get_verification_response(self, ticket):
     params = [('ticket', ticket), ('service', self.service_url)]
     if self.proxy_callback:
         params.append(('pgtUrl', self.proxy_callback))
     base_url = urllib_parse.urljoin(self.server_url, 'proxyValidate')
     url = base_url + '?' + urllib_parse.urlencode(params)
     page = urlopen(url)
     return page.read()
Example 31
    def __make_absolute__(self, path):
        """
        When `relative_paths` is True, fully qualify path with API Path.
        """
        if path:
            if SWAGGER_SETTINGS.get('relative_paths', False):
                path = urljoin(SWAGGER_SETTINGS.get('api_path'), path)

            return path
Example 32
def get_areas_from_postcode(original_postcode):
    postcode = re.sub(r'(?ms)\s*', '', original_postcode.lower())
    if re.search(r'[^a-z0-9]', postcode):
        raise BadPostcodeException(
            _('There were disallowed characters in "{0}"').format(original_postcode)
        )
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL,
                  '/postcode/{0}'.format(urlquote(postcode)))
    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()
        result = []
        for mapit_area in mapit_result['areas'].values():
            areas = Area.objects.filter(
                    extra__type__name=mapit_area['type'],
                    identifier=format_code_from_area(mapit_area)
            )

            if areas.exists():
                is_no_data_area = False
                for area in areas:
                    for child_area in area.children.all():
                        if child_area.identifier.startswith('NODATA:'):
                            is_no_data_area = True
                            break

                if is_no_data_area:
                    area_type = "NODATA"
                else:
                    area_type = mapit_area['type']

                result.append((
                    area_type,
                    format_code_from_area(mapit_area))
                )

        result = sorted(result, key=area_sort_key)

        cache.set(cache_key, result, settings.MAPIT_CACHE_SECONDS)
        return result
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(original_postcode)
        )
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                original_postcode
            )
        )
Example 33
def _logout_url(request, next_page=None):
    """Generates CAS logout URL"""

    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, 'logout')
    if next_page:
        protocol = ('http://', 'https://')[request.is_secure()]
        host = request.get_host()
        url += '?' + urllib_parse.urlencode({'url': protocol + host + next_page})
    return url
Example 34
def _login_url(service):
    """Generates CAS login URL"""

    params = {'service': service}
    if settings.CAS_RENEW:
        params.update({'renew': 'true'})
    if settings.CAS_EXTRA_LOGIN_PARAMS:
        params.update(settings.CAS_EXTRA_LOGIN_PARAMS)
    return urllib_parse.urljoin(settings.CAS_SERVER_URL, 'login') + '?' + urllib_parse.urlencode(params)
Example 36
def check_address(address_string, country=None):
    tidied_address_before_country = address_string.strip()
    if country is None:
        tidied_address = tidied_address_before_country
    else:
        tidied_address = tidied_address_before_country + ', ' + country
    try:
        location_results = Geocoder.geocode(tidied_address)
    except GeocoderError:
        message = _("Failed to find a location for '{0}'")
        raise ValidationError(message.format(tidied_address_before_country))
    lat, lon = location_results[0].coordinates
    all_mapit_json = []
    queries_to_try = defaultdict(set)
    for election in Election.objects.current().prefetch_related('area_types'):
        area_types = election.area_types.values_list('name', flat=True)
        queries_to_try[election.area_generation].update(area_types)
    for area_generation, area_types in queries_to_try.items():
        mapit_lookup_url = urljoin(settings.MAPIT_BASE_URL,
                                   'point/4326/{lon},{lat}'.format(
                                       lon=lon,
                                       lat=lat,
                                       ))
        mapit_lookup_url += '?type=' + ','.join(area_types)
        mapit_lookup_url += '&generation={0}'.format(election.area_generation)
        mapit_result = requests.get(mapit_lookup_url)
        mapit_json = mapit_result.json()
        if 'error' in mapit_json:
            message = _("The area lookup returned an error: '{error}'")
            raise ValidationError(message.format(error=mapit_json['error']))
        all_mapit_json += mapit_json.items()
    sorted_mapit_results = sorted(
        all_mapit_json,
        key=lambda t: (t[1]['type'], int(t[0]))
    )
    if not sorted_mapit_results:
        message = _("The address '{0}' appears to be outside the area this site knows about")
        raise ValidationError(message.format(tidied_address_before_country))
    types_and_areas = [
        {
            'area_type_code': a[1]['type'],
            'area_id': a[0],
        }
        for a in sorted_mapit_results
    ]
    if settings.AREAS_TO_ALWAYS_RETURN:
        types_and_areas += settings.AREAS_TO_ALWAYS_RETURN
    types_and_areas_joined = ','.join(
        '{area_type_code}-{area_id}'.format(**ta) for ta in types_and_areas
    )
    area_slugs = [slugify(a[1]['name']) for a in sorted_mapit_results]
    ignored_slug = '-'.join(area_slugs)
    return {
        'type_and_area_ids': types_and_areas_joined,
        'ignored_slug': ignored_slug,
    }
Example 37
def _login_url(service):
    """Generates CAS login URL"""

    params = {'service': service}
    if settings.CAS_RENEW:
        params.update({'renew': 'true'})
    if settings.CAS_EXTRA_LOGIN_PARAMS:
        params.update(settings.CAS_EXTRA_LOGIN_PARAMS)
    return urllib_parse.urljoin(settings.CAS_SERVER_URL,
                                'login') + '?' + urllib_parse.urlencode(params)
Example 38
def _logout_url(request, next_page=None):
    """Generates CAS logout URL"""

    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, 'logout')
    if next_page:
        protocol = ('http://', 'https://')[request.is_secure()]
        host = request.get_host()
        url += '?' + urllib_parse.urlencode(
            {'url': protocol + host + next_page})
    return url
Example 39
    def get_login_url(self):
        """Generates CAS login URL"""
        params = {'service': self.service_url}
        if self.renew:
            params.update({'renew': 'true'})

        params.update(self.extra_login_params)
        url = urllib_parse.urljoin(self.server_url, 'login')
        query = urllib_parse.urlencode(params)
        return url + '?' + query
Example 40
def get_post_elections_from_coords(coords):
    url = urljoin(EE_BASE_URL, "/api/elections/?coords={}".format(
        urlquote(coords)))

    cache_key = 'geolookup-coords:' + coords
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    return get_post_elections(url, cache_key, BadCoordinatesException)
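
The urlquote call matters here because the coordinate string contains a comma, which percent-encodes to %2C; that is why an earlier fake mocks the URL '/api/elections/?coords=-0.143207%2C51.5'. A quick illustration using urllib.parse.quote, which Django's urlquote wraps:

from urllib.parse import quote

quote('-0.143207,51.5')  # '-0.143207%2C51.5'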
Example 41
    def __make_relative__(self, path, strip=False):
        """
        When `relative_paths` is True, make path relative to API Path.
        """
        if path:
            if SWAGGER_SETTINGS.get('relative_paths', False):
                res = UrlParser.__relative_path_matcher__.match(path)
                path = urljoin('/', res.groups()[0]) if res else path

            return path.strip('/') if strip else path
Example 42
def get_areas_from_coords(coords):
    url = urljoin(EE_BASE_URL,
                  "/api/elections/?coords={0}".format(urlquote(coords)))

    cache_key = 'mapit-postcode:' + coords
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result

    return get_areas(url, cache_key, BadCoordinatesException)
Example 44
def _logout_url(request, next_page=None):
    """Generates CAS logout URL"""

    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, "logout")
    if next_page:
        protocol = get_protocol(request)
        host = request.get_host()
        next_page_url = urllib_parse.urlunparse((protocol, host, next_page, "", "", ""))
        url += "?" + urllib_parse.urlencode({"url": next_page_url})
    return url
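
urlunparse reassembles the six URL components (scheme, netloc, path, params, query, fragment) into a string, so the redirect target passed to the CAS server is a fully qualified URL. For example:

from urllib.parse import urlunparse

urlunparse(('https', 'example.org', '/dashboard/', '', '', ''))  # 'https://example.org/dashboard/'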
Example 46
def check_address(address_string, country=None):
    tidied_address_before_country = address_string.strip()
    if country is None:
        tidied_address = tidied_address_before_country
    else:
        tidied_address = tidied_address_before_country + ', ' + country
    try:
        location_results = Geocoder.geocode(tidied_address)
    except GeocoderError:
        message = _("Failed to find a location for '{0}'")
        raise ValidationError(message.format(tidied_address_before_country))
    lat, lon = location_results[0].coordinates
    all_mapit_json = []
    queries_to_try = defaultdict(set)
    for election in Election.objects.current().prefetch_related('area_types'):
        area_types = election.area_types.values_list('name', flat=True)
        queries_to_try[election.area_generation].update(area_types)
    for area_generation, area_types in queries_to_try.items():
        mapit_lookup_url = urljoin(
            settings.MAPIT_BASE_URL, 'point/4326/{lon},{lat}'.format(
                lon=lon,
                lat=lat,
            ))
        mapit_lookup_url += '?type=' + ','.join(area_types)
        mapit_lookup_url += '&generation={}'.format(election.area_generation)
        mapit_result = requests.get(mapit_lookup_url,
                                    headers={
                                        'User-Agent': 'scraper/sym',
                                    })
        mapit_json = mapit_result.json()
        if 'error' in mapit_json:
            message = _("The area lookup returned an error: '{error}'")
            raise ValidationError(message.format(error=mapit_json['error']))
        all_mapit_json += mapit_json.items()
    sorted_mapit_results = sorted(all_mapit_json,
                                  key=lambda t: (t[1]['type'], int(t[0])))
    if not sorted_mapit_results:
        message = _(
            "The address '{0}' appears to be outside the area this site knows about"
        )
        raise ValidationError(message.format(tidied_address_before_country))
    types_and_areas = [{
        'area_type_code': a[1]['type'],
        'area_id': a[0],
    } for a in sorted_mapit_results]
    if settings.AREAS_TO_ALWAYS_RETURN:
        types_and_areas += settings.AREAS_TO_ALWAYS_RETURN
    types_and_areas_joined = ','.join(
        '{area_type_code}--{area_id}'.format(**ta) for ta in types_and_areas)
    area_slugs = [slugify(a[1]['name']) for a in sorted_mapit_results]
    ignored_slug = '-'.join(area_slugs)
    return {
        'type_and_area_ids': types_and_areas_joined,
        'ignored_slug': ignored_slug,
    }
Example 47
 def get_verification_response(self, ticket):
     params = [('ticket', ticket), ('service', self.service_url)]
     if self.proxy_callback:
         params.append(('pgtUrl', self.proxy_callback))
     base_url = urllib_parse.urljoin(self.server_url, self.URL_SUFFIX)
     url = base_url + '?' + urllib_parse.urlencode(params)
     page = urlopen(url)
     try:
         return page.read()
     finally:
         page.close()
Example 48
def _logout_url(request, next_page=None):
    """Generates CAS logout URL"""

    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, 'logout')
    if next_page:
        protocol = get_protocol(request)
        host = request.get_host()
        next_page_url = urllib_parse.urlunparse(
            (protocol, host, next_page, '', '', ''), )
        url += '?' + urllib_parse.urlencode({'url': next_page_url})
    return url
Example 49
def _login_url(service):
    """Generates CAS login URL"""

    params = {"service": service}
    if settings.CAS_RENEW:
        params.update({"renew": "true"})
    if settings.CAS_EXTRA_LOGIN_PARAMS:
        params.update(settings.CAS_EXTRA_LOGIN_PARAMS)
    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, "login")
    query = urllib_parse.urlencode(params)
    return url + "?" + query
Example 50
    def get(self, request, **kwargs):
        latitude = kwargs['latitude']
        longitude = kwargs['longitude']

        generation_with_types = defaultdict(list)
        for t in AreaType.objects.filter(election__current=True) \
                .values_list('election__area_generation', 'name'):
            generation_with_types[t[0]].append(t[1])

        mapit_base_url = urljoin(settings.MAPIT_BASE_URL,
                                 'point/4326/{lon},{lat}'.format(
                                     lon=longitude,
                                     lat=latitude,
                                 ))

        mapit_json = []
        for generation, types in generation_with_types.items():
            lookup_url = mapit_base_url + '?type=' \
                + ','.join(sorted(types))
            lookup_url += '&generation={0}'.format(generation)
            mapit_result = requests.get(lookup_url)
            mapit_result = mapit_result.json()
            if 'error' in mapit_result:
                message = _("The area lookup returned an error: '{0}'") \
                    .format(mapit_result['error'])
                return HttpResponse(
                    json.dumps({'error': message}),
                    content_type='application/json',
                )
            mapit_json += mapit_result.items()

        if len(mapit_json) == 0:
            message = _("Your location does not seem to be covered by this site")
            return HttpResponse(
                json.dumps({'error': message}),
                content_type='application/json',
            )

        ids_and_areas = [
            "{0}-{1}".format(
                area[1]['type'],
                mapit.format_code_from_area(area[1]))
            for area in mapit_json
        ]

        url = reverse('areas-view', kwargs={
            'type_and_area_ids': ','.join(sorted(ids_and_areas))
        })

        return HttpResponse(
            json.dumps({'url': url}),
            content_type='application/json',
        )
Example 51
def _logout_url(request, next_page=None):
    """Generates CAS logout URL"""

    url = urllib_parse.urljoin(settings.CAS_SERVER_URL, 'logout')
    if next_page:
        protocol = get_protocol(request)
        host = request.get_host()
        next_page_url = urllib_parse.urlunparse(
            (protocol, host, next_page, '', '', ''),
        )
        url += '?' + urllib_parse.urlencode({'url': next_page_url})
    return url
Example 52
def _verify_cas2_saml(ticket, service):
    """Verifies CAS 3.0+ XML-based authentication ticket and returns extended attributes.

    @date: 2011-11-30
    @author: Carlos Gonzalez Vila <*****@*****.**>

    Returns username and attributes on success and None,None on failure.
    """

    try:
        from xml.etree import ElementTree
    except ImportError:
        from elementtree import ElementTree

    # We do the SAML validation
    headers = {'soapaction': 'http://www.oasis-open.org/committees/security',
        'cache-control': 'no-cache',
        'pragma': 'no-cache',
        'accept': 'text/xml',
        'connection': 'keep-alive',
        'content-type': 'text/xml; charset=utf-8'}
    params = [('TARGET', service)]

    url = Request(urllib_parse.urljoin(settings.CAS_SERVER_URL, 'samlValidate') + '?' + urllib_parse.urlencode(params), '', headers)
    page = urlopen(url, data=get_saml_assertion(ticket))

    try:
        user = None
        attributes = {}
        response = page.read()
        print(response)
        tree = ElementTree.fromstring(response)
        # Find the authentication status
        success = tree.find('.//' + SAML_1_0_PROTOCOL_NS + 'StatusCode')
        if success is not None and success.attrib['Value'] == 'samlp:Success':
            # User is validated
            attrs = tree.findall('.//' + SAML_1_0_ASSERTION_NS + 'Attribute')
            for at in attrs:
                if 'uid' in list(at.attrib.values()):
                    user = at.find(SAML_1_0_ASSERTION_NS + 'AttributeValue').text
                    attributes['uid'] = user
                values = at.findall(SAML_1_0_ASSERTION_NS + 'AttributeValue')
                if len(values) > 1:
                    values_array = []
                    for v in values:
                        values_array.append(v.text)
                    attributes[at.attrib['AttributeName']] = values_array
                else:
                    attributes[at.attrib['AttributeName']] = values[0].text
        return user, attributes
    finally:
        page.close()
Example 53
 def verify_ticket(self, ticket):
     """Verifies CAS 1.0 authentication ticket."""
     params = [('ticket', ticket), ('service', self.service)]
     url = (urllib_parse.urljoin(self.server_url, 'validate') + '?' +
            urllib_parse.urlencode(params))
     page = urlopen(url)
     try:
         verified = page.readline().strip()
         if verified == 'yes':
             return page.readline().strip(), None, None
         else:
             return None, None, None
     finally:
         page.close()
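
A caveat if this CAS 1.0 validator runs under Python 3: urlopen(...).readline() returns bytes, so the comparison with the string 'yes' never matches as written. The lines would need decoding first, e.g.:

verified = page.readline().strip().decode('utf-8')
if verified == 'yes':
    return page.readline().strip().decode('utf-8'), None, None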
Example 54
    def handle(self, **options):

        mapit_url = settings.MAPIT_BASE_URL

        self.gb_parties, _ = PartySet.objects.get_or_create(
            slug='gb', defaults={'name': 'Great Britain'}
        )

        self.organizations = {}

        self.base_election_info = {
            'name': 'Police and Crime Commissioner Elections 2016',
            'for_post_role': 'Police and Crime Commissioner',
            'label_format': 'Police and Crime Commissioner for {area_name}',
            'area_generation': 1,
            'election_date': date(2016, 5, 5),
            'party_lists_in_use': False,
            'mapit_code': 'PDG',
            'electon_id_prefix': 'pcc',
            # 'area_type_description': 'Police Force',
        }

        url_path = '/areas/' + self.base_election_info['mapit_code']
        url = urljoin(mapit_url, url_path)
        r = requests.get(url)
        mapit_results = r.json().items()

        AREAS_WITHOUT_PCCS = [
            "metropolitan",
            "city-of-london",
            "northern-ireland",
        ]

        # First make all the organisations
        for mapit_area_id, mapit_area_data in mapit_results:
            if mapit_area_data['codes']['police_id'] in AREAS_WITHOUT_PCCS:
                continue
            self.add_police_force_orgs(mapit_area_id, mapit_area_data)

        # Create a single election
        self.create_pcc_election()
        # Add all the areas for that election
        for mapit_area_id, mapit_area_data in mapit_results:
            if mapit_area_data['codes']['police_id'] in AREAS_WITHOUT_PCCS:
                # The Met doesn't have a PCC
                continue
            self.add_pcc_areas(mapit_area_id, mapit_area_data)
Example 55
def _verify_cas1(ticket, service):
    """Verifies CAS 1.0 authentication ticket.

    Returns username on success and None on failure.
    """

    params = [('ticket', ticket), ('service', service)]
    url = (urllib_parse.urljoin(settings.CAS_SERVER_URL, 'validate') + '?' +
           urllib_parse.urlencode(params))
    page = urlopen(url)
    try:
        verified = page.readline().strip()
        if verified == 'yes':
            return page.readline().strip(), None
        else:
            return None, None
    finally:
        page.close()
Example 56
def get_areas_from_postcode(original_postcode):
    postcode = re.sub(r'(?ms)\s*', '', original_postcode.lower())
    if re.search(r'[^a-z0-9]', postcode):
        raise BadPostcodeException(
            _('There were disallowed characters in "{0}"').format(original_postcode)
        )
    cache_key = 'mapit-postcode:' + postcode
    cached_result = cache.get(cache_key)
    if cached_result:
        return cached_result
    url = urljoin(settings.MAPIT_BASE_URL,
                  '/postcode/{0}'.format(urlquote(postcode)))
    r = requests.get(url)
    if r.status_code == 200:
        mapit_result = r.json()

        result = sorted(
            [
                (a['type'], format_code_from_area(a))
                for a in mapit_result['areas'].values()
                if Area.objects.filter(
                        extra__type__name=a['type'],
                        identifier=format_code_from_area(a)
                ).exists()
            ],
            key=area_sort_key
        )

        cache.set(cache_key, result, settings.MAPIT_CACHE_SECONDS)
        return result
    elif r.status_code == 400:
        mapit_result = r.json()
        raise BadPostcodeException(mapit_result['error'])
    elif r.status_code == 404:
        raise BadPostcodeException(
            _('The postcode “{0}” couldn’t be found').format(original_postcode)
        )
    else:
        raise UnknownMapitException(
            _('Unknown MapIt error for postcode "{0}"').format(
                original_postcode
            )
        )
Example 57
 def __getitem__(self, key):
     key = str(key)
     try:
         return super(MapitLookup, self).__getitem__(key)
     except KeyError:
         try:
             logging.info("Making extra request to Mapit for ID {0}".format(
                 key
             ))
             url = urljoin(self.mapit_base_url,
                           "area/{key}".format(key=key))
             req = requests.get(url)
             if req.status_code == 404 or \
                     req.json()[key].get('code', 200) == 404:
                 raise MapItAreaNotFoundException
             self.update({key: req.json()})
             return self[key]
         except MapItAreaNotFoundException:
             raise KeyError
         except Exception:
             raise
Example 58
    def fetch_saml_validation(self, ticket):
        # We do the SAML validation
        headers = {
            'soapaction': 'http://www.oasis-open.org/committees/security',
            'cache-control': 'no-cache',
            'pragma': 'no-cache',
            'accept': 'text/xml',
            'connection': 'keep-alive',
            'content-type': 'text/xml; charset=utf-8',
        }
        params = [('TARGET', self.service_url)]
        saml_validate_url = urllib_parse.urljoin(
            self.server_url, 'samlValidate',
        )
        url = Request(
            saml_validate_url + '?' + urllib_parse.urlencode(params),
            '',
            headers,
        )
        page = urlopen(url, data=self.get_saml_assertion(ticket))

        return page
Example 59
def check_address(address_string, country=None):
    tidied_address_before_country = address_string.strip()
    if country is None:
        tidied_address = tidied_address_before_country
    else:
        tidied_address = tidied_address_before_country + ", " + country
    try:
        location_results = Geocoder.geocode(tidied_address)
    except GeocoderError:
        message = _("Failed to find a location for '{0}'")
        raise ValidationError(message.format(tidied_address_before_country))
    lat, lon = location_results[0].coordinates
    all_mapit_json = []
    queries_to_try = defaultdict(set)
    for election in Election.objects.current().prefetch_related("area_types"):
        area_types = election.area_types.values_list("name", flat=True)
        queries_to_try[election.area_generation].update(area_types)
    for area_generation, area_types in queries_to_try.items():
        mapit_lookup_url = urljoin(settings.MAPIT_BASE_URL, "point/4326/{lon},{lat}".format(lon=lon, lat=lat))
        mapit_lookup_url += "?type=" + ",".join(area_types)
        mapit_lookup_url += "&generation={0}".format(election.area_generation)
        mapit_result = requests.get(mapit_lookup_url, headers={"User-Agent": "scraper/sym"})
        mapit_json = mapit_result.json()
        if "error" in mapit_json:
            message = _("The area lookup returned an error: '{error}'")
            raise ValidationError(message.format(error=mapit_json["error"]))
        all_mapit_json += mapit_json.items()
    sorted_mapit_results = sorted(all_mapit_json, key=lambda t: (t[1]["type"], int(t[0])))
    if not sorted_mapit_results:
        message = _("The address '{0}' appears to be outside the area this site knows about")
        raise ValidationError(message.format(tidied_address_before_country))
    types_and_areas = [{"area_type_code": a[1]["type"], "area_id": a[0]} for a in sorted_mapit_results]
    if settings.AREAS_TO_ALWAYS_RETURN:
        types_and_areas += settings.AREAS_TO_ALWAYS_RETURN
    types_and_areas_joined = ",".join("{area_type_code}-{area_id}".format(**ta) for ta in types_and_areas)
    area_slugs = [slugify(a[1]["name"]) for a in sorted_mapit_results]
    ignored_slug = "-".join(area_slugs)
    return {"type_and_area_ids": types_and_areas_joined, "ignored_slug": ignored_slug}
Example 60
def _verify_cas2(ticket, service):
    """Verifies CAS 2.0+ XML-based authentication ticket.

    Returns username on success and None on failure.
    """
    try:
        from xml.etree import ElementTree
    except ImportError:
        from elementtree import ElementTree

    params = [('ticket', ticket), ('service', service)]
    url = (urllib_parse.urljoin(settings.CAS_SERVER_URL, 'serviceValidate') + '?' +
           urllib_parse.urlencode(params))
    page = urlopen(url)
    try:
        response = page.read()
        tree = ElementTree.fromstring(response)
        if tree[0].tag.endswith('authenticationSuccess'):
            return tree[0][0].text, None
        else:
            return None, None
    finally:
        page.close()