import json
from collections import OrderedDict

# Assumption: rewind() (used by the WFS get_data variant below) comes from
# the geojson_rewind package, which normalises polygon winding order.
from geojson_rewind import rewind


def scrape_arcgis(council_id):
    """
    Bristol's polling station and district data is not published with a
    consistent layer id: it keeps changing. Read the service index to
    identify the correct layers to request.
    """
    # Both layers take the same query; only the layer id (the %s
    # placeholder) differs, so share one template.
    query_url = "https://maps.bristol.gov.uk/arcgis/rest/services/ext/localinfo/MapServer/%s/query?where=OBJECTID+LIKE+%%27%%25%%27&text=&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&relationParam=&outFields=*&returnGeometry=true&maxAllowableOffset=&geometryPrecision=&outSR=4326&returnIdsOnly=false&returnCountOnly=false&orderByFields=OBJECTID&groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&gdbVersion=&returnDistinctValues=false&f=pjson"
    stations_url = districts_url = query_url
    index_url = "https://maps.bristol.gov.uk/arcgis/rest/services/ext/localinfo/MapServer/?f=pjson"

    """
    Bristol's polling station and district data is not published
    with a consistent layer id: it keeps changing
    Read the index to identify the correct layers to request.
    """
    index_str = get_data_from_url(index_url)
    index_data = json.loads(index_str.decode('utf-8'))
    # The MapServer index exposes a 'layers' array whose entries carry an
    # integer 'id' and a human-readable 'name'; match on the name.
    for layer in index_data['layers']:
        if 'polling station' in layer['name'].lower():
            stations_url = stations_url % layer['id']
        if 'polling district' in layer['name'].lower():
            districts_url = districts_url % layer['id']

    # If we couldn't find appropriate layers, give up
    if '%s' in stations_url:
        raise ValueError('Failed to find Polling Stations layer')
    if '%s' in districts_url:
        raise ValueError('Failed to find Polling Districts layer')

    print(stations_url)
    stations_scraper = ArcGisScraper(stations_url, council_id, 'utf-8', 'stations_arcgis')
    stations_scraper.scrape()

    print(districts_url)
    districts_scraper = ArcGisScraper(districts_url, council_id, 'utf-8', 'districts_arcgis')
    districts_scraper.scrape()
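

# The scraper above assumes a get_data_from_url() helper that fetches a URL
# and returns the raw response body as bytes. A minimal sketch, assuming a
# plain stdlib client (the real helper may add retries, headers, etc.):
import urllib.request


def get_data_from_url(url, timeout=30):
    """Fetch url and return the raw response body as bytes."""
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return response.read()


# --- get_data() variants (from different scraper classes) ---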
    def get_data(self):
        """
        Older versions of ArcGIS do not support orderByFields.
        Sort the data ourselves so that we can detect when the actual
        data has changed and not just the order.
        """
        data_str = get_data_from_url(self.url)

        # OrderedDict preserves key order so the re-serialised payload is
        # deterministic.
        data = json.loads(
            data_str.decode(self.encoding),
            object_pairs_hook=OrderedDict)

        # Sort the features on the key field to get a canonical ordering.
        data['features'] = sorted(
            data['features'], key=lambda k: k['attributes'][self.key])
        data_str = json.dumps(data).encode(self.encoding)

        return (data_str, data)
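
# Hedged illustration (names ours, not the project's) of why the sort above
# matters: the same features in a different order serialise to different
# bytes, so change detection on the raw payload would see spurious diffs.
def _sorting_makes_payloads_canonical():
    a = {'features': [{'attributes': {'OBJECTID': 2}},
                      {'attributes': {'OBJECTID': 1}}]}
    b = {'features': [{'attributes': {'OBJECTID': 1}},
                      {'attributes': {'OBJECTID': 2}}]}
    assert json.dumps(a) != json.dumps(b)  # order differs -> bytes differ
    for payload in (a, b):
        payload['features'].sort(key=lambda k: k['attributes']['OBJECTID'])
    assert json.dumps(a) == json.dumps(b)  # canonical after sorting
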
    def get_data(self):
        """
        Some WFS servers produce output with id fields that seem to be
        randomly generated. Strip the ids out so that we can detect when
        the actual data has changed and not just the ids.
        """
        data_str = get_data_from_url(self.url)
        # Normalise polygon winding order so equivalent geometries always
        # serialise identically.
        data_str = rewind(data_str.decode(self.encoding))

        data = json.loads(data_str, object_pairs_hook=OrderedDict)

        # Replace the unstable ids with a stable, deterministic sequence.
        for i, feature in enumerate(data['features']):
            feature['id'] = i
        data_str = json.dumps(data).encode(self.encoding)

        return (data_str, data)
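
# Hedged sketch of what rewind() does here, assuming the geojson_rewind
# package (string in, string out; dict in, dict out): exterior rings are
# rewound to satisfy the RFC 7946 right-hand rule.
def _rewind_example():
    square = {
        "type": "Polygon",
        # Clockwise exterior ring: violates the right-hand rule.
        "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]],
    }
    # rewind() returns a copy with the ring reversed to counter-clockwise.
    return rewind(square)
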
    def get_data(self):
        """
        Sometimes we find an ArcGIS server where the ID field is for some
        reason unstable. Strip the ID out so that we can detect when the
        actual data has changed and not just the ids.
        """
        data_str = get_data_from_url(self.url)

        data = json.loads(
            data_str.decode(self.encoding),
            object_pairs_hook=OrderedDict)

        # Overwrite the unstable key field with a stable, deterministic
        # sequence.
        for i, feature in enumerate(data['features']):
            feature['attributes'][self.key] = i

        data_str = json.dumps(data).encode(self.encoding)

        return (data_str, data)
    def get_data(self):  # pragma: no cover
        # Simple variant: parse the payload and return it alongside the
        # raw bytes.
        data_str = get_data_from_url(self.url)
        data = json.loads(data_str.decode(self.encoding))
        return (data_str, data)

    def get_data(self):  # pragma: no cover
        # Simplest variant: return the raw bytes only.
        return get_data_from_url(self.url)
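

# Most get_data() variants above return (data_str, data), where data_str is
# a canonical byte serialisation. A hedged sketch of how a caller could use
# it to detect real changes (hashlib is stdlib; the function name is ours):
import hashlib


def payload_fingerprint(data_str):
    """Return a hex digest of the canonical payload bytes."""
    return hashlib.sha256(data_str).hexdigest()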