class BaseGenericApiImporter(BaseStationsDistrictsImporter):
    srid = 4326
    districts_srid = 4326

    districts_name = None
    districts_url = None

    stations_name = None
    stations_url = None

    local_files = False

    def import_data(self):
        self.districts = DistrictSet()
        self.stations = StationSet()

        # deal with 'stations only' or 'districts only' data
        if self.districts_url is not None:
            self.import_polling_districts()
        if self.stations_url is not None:
            self.import_polling_stations()

        self.districts.save()
        self.stations.save()

    def get_districts(self):
        with tempfile.NamedTemporaryFile() as tmp:
            req = urllib.request.urlretrieve(self.districts_url, tmp.name)
            return self.get_data(self.districts_filetype, tmp.name)

    def get_stations(self):
        with tempfile.NamedTemporaryFile() as tmp:
            req = urllib.request.urlretrieve(self.stations_url, tmp.name)
            return self.get_data(self.stations_filetype, tmp.name)
class BaseStationsDistrictsImporter(BaseStationsImporter, BaseDistrictsImporter):
    def import_data(self):
        self.stations = StationSet()
        self.districts = DistrictSet()
        self.import_polling_districts()
        self.import_polling_stations()
        self.districts.save()
        self.stations.save()
def import_data(self):
    # Optional step for pre import tasks
    try:
        self.pre_import()
    except NotImplementedError:
        pass

    self.stations = StationSet()
    self.districts = DistrictSet()
    self.import_polling_districts()
    self.import_polling_stations()
    self.districts.save()
    self.stations.save()
def import_data(self):
    self.stations = StationSet()
    self.districts = DistrictSet()
    self.import_polling_districts()
    self.import_polling_stations()
    self.districts.save()
    self.stations.save()
def import_data(self):
    # Optional step for pre import tasks
    try:
        self.pre_import()
    except NotImplementedError:
        pass

    self.districts = DistrictSet()
    self.stations = StationSet()

    # deal with 'stations only' or 'districts only' data
    if self.districts_url is not None:
        self.import_polling_districts()
    if self.stations_url is not None:
        self.import_polling_stations()

    self.districts.save()
    self.stations.save()
class BaseStationsDistrictsImporter(BaseStationsImporter, BaseDistrictsImporter):
    def pre_import(self):
        raise NotImplementedError

    def import_data(self):
        # Optional step for pre import tasks
        try:
            self.pre_import()
        except NotImplementedError:
            pass

        self.stations = StationSet()
        self.districts = DistrictSet()
        self.import_polling_districts()
        self.import_polling_stations()
        self.districts.save()
        self.stations.save()
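# Illustrative sketch (not part of the importer classes above): how the optional
# pre_import hook behaves in isolation. The class names here are hypothetical;
# only the try/except NotImplementedError flow mirrors the
# BaseStationsDistrictsImporter.import_data() shown above.
class HookedImporter:
    def pre_import(self):
        raise NotImplementedError

    def run(self):
        # the hook is optional: subclasses that don't override it are skipped
        try:
            self.pre_import()
        except NotImplementedError:
            pass
        print("importing...")


class CustomImporter(HookedImporter):
    def pre_import(self):
        print("running pre-import tasks")


if __name__ == "__main__":
    HookedImporter().run()   # prints only "importing..."
    CustomImporter().run()   # prints the pre-import message first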
def import_data(self):
    # override import_data so we can populate
    # self.split_districts as a pre-process
    self.find_split_districts()

    self.stations = StationSet()
    self.districts = DistrictSet()
    self.import_polling_districts()
    self.import_polling_stations()
    self.districts.save()
    self.stations.save()
class BaseGenericApiImporter(BaseStationsDistrictsImporter):
    srid = 4326
    districts_srid = 4326

    districts_name = None
    districts_url = None

    stations_name = None
    stations_url = None

    local_files = False

    def import_data(self):
        # Optional step for pre import tasks
        try:
            self.pre_import()
        except NotImplementedError:
            pass

        self.districts = DistrictSet()
        self.stations = StationSet()

        # deal with 'stations only' or 'districts only' data
        if self.districts_url is not None:
            self.import_polling_districts()
        if self.stations_url is not None:
            self.import_polling_stations()

        self.districts.save()
        self.stations.save()

    def get_districts(self):
        with tempfile.NamedTemporaryFile() as tmp:
            urllib.request.urlretrieve(self.districts_url, tmp.name)
            return self.get_data(self.districts_filetype, tmp.name)

    def get_stations(self):
        with tempfile.NamedTemporaryFile() as tmp:
            urllib.request.urlretrieve(self.stations_url, tmp.name)
            return self.get_data(self.stations_filetype, tmp.name)
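# Illustrative sketch of the download pattern used by get_districts()/get_stations()
# above: fetch a remote file into a NamedTemporaryFile and read it while the temp
# file still exists. Standard library only; the URL and the read step are
# placeholders, not part of the importer code.
import tempfile
import urllib.request


def fetch_and_read(url):
    with tempfile.NamedTemporaryFile() as tmp:
        # download into the temp file; it is removed when the 'with' block
        # exits, so any parsing must happen inside the block
        urllib.request.urlretrieve(url, tmp.name)
        with open(tmp.name, "rb") as f:
            return f.read()


# usage (hypothetical URL):
# raw = fetch_and_read("https://example.com/districts.geojson")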
def import_data(self):
    self.districts = DistrictSet()
    self.stations = StationSet()

    # deal with 'stations only' or 'districts only' data
    if self.districts_url is not None:
        self.import_polling_districts()
    if self.stations_url is not None:
        self.import_polling_stations()

    self.districts.save()
    self.stations.save()
class Command(BaseMorphApiImporter):
    srid = 4326
    districts_srid = 4326
    council_id = 'E07000228'
    elections = ['local.west-sussex.2017-05-04']
    scraper_name = 'wdiv-scrapers/DC-PollingStations-Mid-Sussex'
    geom_type = 'geojson'
    split_districts = set()

    def get_station_hash(self, record):
        # handle exact dupes on code/address
        return "-".join([
            record['msercode'],
            record['uprn'],
        ])

    def find_split_districts(self):
        # identify any district codes which appear more than once
        # with 2 different polling station addresses.
        # We do not want to import these.
        stations = self.get_stations()
        for station1 in stations:
            for station2 in stations:
                if (station2['msercode'] == station1['msercode'] and
                        station2['uprn'] != station1['uprn']):
                    self.split_districts.add(station1['msercode'])

    def district_record_to_dict(self, record):
        poly = self.extract_geometry(record, self.geom_type, self.get_srid('districts'))
        return {
            'internal_council_id': record['msercode'],
            'name': record['boundname'],
            'area': poly,
            'polling_station_id': record['msercode'],
        }

    def station_record_to_dict(self, record):
        # handle split districts
        if record['msercode'] in self.split_districts:
            return None

        location = self.extract_geometry(record, self.geom_type, self.get_srid('stations'))
        return {
            'internal_council_id': record['msercode'],
            'postcode': '',
            'address': record['address'],
            'location': location,
        }

    def import_data(self):
        # override import_data so we can populate
        # self.split_districts as a pre-process
        self.find_split_districts()

        self.stations = StationSet()
        self.districts = DistrictSet()
        self.import_polling_districts()
        self.import_polling_stations()
        self.districts.save()
        self.stations.save()
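# Illustrative sketch (stdlib only, not the project's code): the same
# split-district check as Command.find_split_districts() above, but grouping
# station UPRNs by district code instead of comparing every pair of records.
# Field names mirror the records used above; the sample data is made up.
from collections import defaultdict


def find_split_districts(stations):
    uprns_by_code = defaultdict(set)
    for station in stations:
        uprns_by_code[station['msercode']].add(station['uprn'])
    # a district is 'split' if one code maps to more than one station address
    return {code for code, uprns in uprns_by_code.items() if len(uprns) > 1}


stations = [
    {'msercode': 'A1', 'uprn': '100'},
    {'msercode': 'A1', 'uprn': '101'},  # A1 has two different addresses
    {'msercode': 'B2', 'uprn': '200'},
]
assert find_split_districts(stations) == {'A1'}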