def __init__(self, label=None, validators=None, **kwargs):
    default = kwargs.pop('default', lambda: SpatialCoverage())
    super(SpatialCoverageField, self).__init__(
        SpatialCoverageForm, label, validators, default=default, **kwargs)
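A minimal usage sketch of the field above, assuming a udata-style `ModelForm` API; the `Fake`/`FakeForm` pair mirrors what `self.factory()` presumably builds in the tests below, and the names are illustrative only. Note that `default` is a callable, so each form gets its own fresh `SpatialCoverage` instead of a shared mutable default.

# Illustrative only (assumed ModelForm API, names hypothetical):
class Fake(db.Document):
    spatial = db.EmbeddedDocumentField(SpatialCoverage)

class FakeForm(ModelForm):
    model_class = Fake
    # The callable default ensures a fresh SpatialCoverage per form instance.
    spatial = SpatialCoverageField('Spatial coverage')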
def test_initial_values(self):
    Fake, FakeForm = self.factory()
    zones = [GeoZoneFactory() for _ in range(3)]
    fake = Fake(spatial=SpatialCoverage(zones=zones))
    form = FakeForm(None, fake)
    self.assertEqual(form.spatial.zones._value(),
                     ','.join([z.id for z in zones]))
def test_initial_values(self):
    Fake, FakeForm = self.factory()
    territories = [TerritoryFactory() for _ in range(3)]
    fake = Fake(spatial=SpatialCoverage(
        territories=[t.reference() for t in territories],
        granularity=list(SPATIAL_GRANULARITIES.keys())[1]))
    form = FakeForm(None, fake)
    self.assertEqual(form.spatial.territories._value(),
                     ','.join([str(t.id) for t in territories]))
def test_geolabel_priority(self):
    register_level('country', 'fake', 'Fake level')
    coverage = SpatialCoverage(territories=[
        TerritoryReference(name='France', level='country', code='fr'),
        TerritoryReference(name='Fake', level='fake', code='fake'),
        TerritoryReference(
            name='Union Européenne', level='country-group', code='ue'),
    ])
    self.assertEqual(coverage.top_label, 'Union Européenne')
def test_geolabel_priority(self):
    GeoLevelFactory(id='top')
    GeoLevelFactory(id='middle', parents=['top'])
    GeoLevelFactory(id='bottom', parents=['middle'])
    big = GeoZoneFactory(level='top')
    medium = GeoZoneFactory(level='middle', parents=[big.id])
    small = GeoZoneFactory(level='bottom', parents=[big.id, medium.id])
    coverage = SpatialCoverage(zones=[small, medium, big])
    self.assertEqual(coverage.top_label, big.name)
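The test above pins down the expected behaviour: `top_label` must resolve to the broadest zone's name. A plausible sketch of that resolution, consistent with this test but not necessarily the real implementation, picks the zone that has no ancestor among the coverage's own zones:

# Plausible sketch only (assumed semantics, hypothetical helper):
def top_zone(zones):
    ids = {zone.id for zone in zones}
    for zone in zones:
        # The broadest zone has no ancestor among the candidates.
        if not ids.intersection(zone.parents or []):
            return zone

# With the fixtures above: small.parents = [big.id, medium.id],
# medium.parents = [big.id], big.parents = [] -> returns big.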
def test_with_initial(self):
    Fake, FakeForm = self.factory()
    zones = [GeoZoneFactory() for _ in range(3)]
    fake = Fake(spatial=SpatialCoverage(
        zones=zones, granularity=random_spatial_granularity()))
    zone = GeoZoneFactory()
    data = MultiDict({
        'spatial-zones': zone.id,
        'spatial-granularity': random_spatial_granularity()
    })
    form = FakeForm(data, fake)
    form.validate()
    self.assertEqual(form.errors, {})
    form.populate_obj(fake)
    self.assertEqual(len(fake.spatial.zones), 1)
    self.assertEqual(fake.spatial.zones[0], zone)
def test_with_initial(self):
    Fake, FakeForm = self.factory()
    territories = [TerritoryFactory() for _ in range(3)]
    fake = Fake(spatial=SpatialCoverage(
        territories=[t.reference() for t in territories],
        granularity=list(SPATIAL_GRANULARITIES.keys())[1]))
    territory = TerritoryFactory()
    data = MultiDict({
        'spatial-territories': str(territory.id),
        'spatial-granularity': VALID_GRANULARITY
    })
    form = FakeForm(data, fake)
    form.validate()
    self.assertEqual(form.errors, {})
    form.populate_obj(fake)
    self.assertEqual(len(fake.spatial.territories), 1)
    self.assertEqual(fake.spatial.territories[0], territory.reference())
    self.assertTrue(shape(fake.spatial.geom).equals(shape(territory.geom)))
def populate_obj(self, obj, name):
    self._obj = self._obj or SpatialCoverage()
    super(SpatialCoverageField, self).populate_obj(obj, name)
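A sketch of the case this guard presumably covers, reusing the test fixtures above: WTForms' `FormField.populate_obj` needs a target object to write into, and the field's `_obj` is `None` when the model had no spatial coverage yet. The `'poi'` granularity value is an assumption for illustration.

# Illustrative only, assuming WTForms FormField semantics and a valid
# 'poi' granularity: fake.spatial is unset, so _obj would be None and
# the guard substitutes a fresh SpatialCoverage before populating.
fake = Fake()
form = FakeForm(MultiDict({'spatial-granularity': 'poi'}), fake)
assert form.validate()
form.populate_obj(fake)
assert fake.spatial.granularity == 'poi'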
def migrate_zones_ids():
    """Migrate zones from old to new ids in datasets.

    Should only be run once with the new version of geozones w/ geohisto.
    """
    counter_datasets = 0
    counter_zones = 0
    counter_towns = 0
    counter_counties = 0
    counter_regions = 0
    counter_drom = 0
    counter_dromcom = 0
    drom_zone = GeoZone.objects(id='country-subset:fr:drom').first()
    dromcom_zone = GeoZone.objects(id='country-subset:fr:dromcom').first()
    for dataset in Dataset.objects.all():
        if dataset.spatial and dataset.spatial.zones:
            counter_datasets += 1
            new_zones = []
            for zone in dataset.spatial.zones:
                if zone.id.startswith('fr/'):
                    counter_zones += 1
                    country, kind, zone_id = zone.id.split('/')
                    zone_id = zone_id.upper()  # Corsica 2a/b case.
                    if kind == 'town':
                        counter_towns += 1
                        new_zones.append(
                            GeoZone.objects(
                                code=zone_id, level='fr:commune').valid_at(
                                date.today()).first())
                    elif kind == 'county':
                        counter_counties += 1
                        new_zones.append(
                            GeoZone.objects(
                                code=zone_id, level='fr:departement').valid_at(
                                date.today()).first())
                    elif kind == 'region':
                        counter_regions += 1
                        # Only link to pre-2016 regions which kept the same id.
                        new_zones.append(
                            GeoZone.objects(
                                code=zone_id, level='fr:region').first())
                    else:
                        new_zones.append(zone)
                elif zone.id.startswith('country-subset/fr'):
                    counter_zones += 1
                    subset, country, kind = zone.id.split('/')
                    if kind == 'dom':
                        counter_drom += 1
                        new_zones.append(drom_zone)
                    elif kind == 'domtom':
                        counter_dromcom += 1
                        new_zones.append(dromcom_zone)
                else:
                    new_zones.append(zone)
            dataset.update(spatial=SpatialCoverage(
                zones=[z.id for z in new_zones if z]))
    print('{} datasets and {} zones affected.'.format(
        counter_datasets, counter_zones))
    print('{} town zones, {} county zones and {} region zones updated.'.format(
        counter_towns, counter_counties, counter_regions))
    print('{} DROM zones, {} DROM-COM zones updated.'.format(
        counter_drom, counter_dromcom))
    log.info('Done')
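In summary, the id scheme change this migration handles maps old slash-separated ids onto new colon-separated geozones lookups; the codes below are hypothetical examples:

# Old id                       -> new lookup
#   'fr/town/44109'            -> GeoZone(level='fr:commune', code='44109')
#   'fr/county/2a'             -> GeoZone(level='fr:departement', code='2A')
#   'fr/region/52'             -> GeoZone(level='fr:region', code='52')
#   'country-subset/fr/dom'    -> GeoZone(id='country-subset:fr:drom')
#   'country-subset/fr/domtom' -> GeoZone(id='country-subset:fr:dromcom')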
def process(self, item):
    response = self.get(item.remote_id)
    encoding = chardet.detect(response.content)['encoding']
    xml = self.parse_xml(response.content.decode(encoding))
    metadata = xml['metadata']

    # Resolve the remote id from metadata
    item.remote_id = metadata['id']
    dataset = self.get_dataset(metadata['id'])

    dataset.title = metadata['title']
    dataset.frequency = FREQUENCIES.get(metadata['frequency'], 'unknown')
    dataset.description = metadata['notes']
    dataset.private = metadata['private']
    dataset.tags = sorted(set(metadata['tags']))

    if metadata.get('license_id'):
        dataset.license = License.objects.get(id=metadata['license_id'])

    if (metadata.get('temporal_coverage_from') and
            metadata.get('temporal_coverage_to')):
        dataset.temporal_coverage = db.DateRange(
            start=metadata['temporal_coverage_from'],
            end=metadata['temporal_coverage_to'])

    if (metadata.get('territorial_coverage_code') or
            metadata.get('territorial_coverage_granularity')):
        dataset.spatial = SpatialCoverage()

        if metadata.get('territorial_coverage_granularity'):
            dataset.spatial.granularity = GRANULARITIES.get(
                metadata['territorial_coverage_granularity'])

        if metadata.get('territorial_coverage_code'):
            dataset.spatial.zones = [
                ZONES[metadata['territorial_coverage_code']]
            ]

    dataset.resources = []
    cle = get_by(metadata['resources'], 'format', 'cle')

    for row in metadata['resources']:
        # The 'cle' pseudo-resource only carries the sha256 of the CSV
        # resources (see below); skip it as an actual resource.
        if row['format'] == 'cle':
            continue
        resource = Resource(
            title=row['name'],
            description=(row['description'] + '\n\n' + SSL_COMMENT).strip(),
            filetype='remote',
            url=row['url'],
            format=row['format'])
        if resource.format == 'csv' and cle:
            resource.checksum = Checksum(type='sha256',
                                         value=self.get(cle['url']).text)
        if row.get('last_modified'):
            resource.modified = row['last_modified']
        dataset.resources.append(resource)

    if metadata.get('author'):
        dataset.extras['author'] = metadata['author']
    if metadata.get('author_email'):
        dataset.extras['author_email'] = metadata['author_email']
    if metadata.get('maintainer'):
        dataset.extras['maintainer'] = metadata['maintainer']
    if metadata.get('maintainer_email'):
        dataset.extras['maintainer_email'] = metadata['maintainer_email']
    for extra in metadata['extras']:
        dataset.extras[extra['key']] = extra['value']

    return dataset
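The backend above relies on two module-level lookup tables that are not part of this snippet. Their assumed shape, with purely illustrative entries (the actual keys and values depend on the source's vocabulary):

# Assumed shape only, not the actual tables:
GRANULARITIES = {
    # source granularity label -> udata spatial granularity
    'commune': 'fr:commune',
    'pays': 'country',
}
ZONES = {
    # territorial coverage code -> zone identifier
    'FR': 'country:fr',
}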
def process(self, item):
    response = self.get_action('package_show', id=item.remote_id)
    data = self.validate(response['result'], self.schema)

    if isinstance(data, list):
        data = data[0]

    # Fix the remote_id: use the real ID instead of the unstable name
    item.remote_id = data['id']

    # Skip if no resource
    if not len(data.get('resources', [])):
        msg = 'Dataset {0} has no record'.format(item.remote_id)
        raise HarvestSkipException(msg)

    dataset = self.get_dataset(item.remote_id)

    # Core attributes
    if not dataset.slug:
        dataset.slug = data['name']
    dataset.title = data['title']
    dataset.description = parse_html(data['notes'])

    # Detect license
    default_license = dataset.license or License.default()
    dataset.license = License.guess(data['license_id'],
                                    data['license_title'],
                                    default=default_license)

    dataset.tags = [t['name'] for t in data['tags'] if t['name']]
    dataset.created_at = data['metadata_created']
    dataset.last_modified = data['metadata_modified']
    dataset.extras['ckan:name'] = data['name']

    temporal_start, temporal_end = None, None
    spatial_geom, spatial_zone = None, None

    for extra in data['extras']:
        key = extra['key']
        value = extra['value']
        if value is None or (isinstance(value, str) and not value.strip()):
            # Skip empty extras
            continue
        elif key == 'spatial':
            # GeoJSON representation (Polygon or Point)
            spatial_geom = json.loads(value)
        elif key == 'spatial-text':
            # Textual representation of the extent / location
            qs = GeoZone.objects(db.Q(name=value) | db.Q(slug=value))
            qs = qs.valid_at(datetime.now())
            if qs.count() == 1:
                spatial_zone = qs.first()
            else:
                dataset.extras['ckan:spatial-text'] = value
                log.debug('spatial-text value not handled: %s', value)
        elif key == 'spatial-uri':
            # Linked Data URI representing the place name
            dataset.extras['ckan:spatial-uri'] = value
            log.debug('spatial-uri value not handled: %s', value)
        elif key == 'frequency':
            # Update frequency
            freq = frequency_from_rdf(value)
            if freq:
                dataset.frequency = freq
            elif value in UPDATE_FREQUENCIES:
                dataset.frequency = value
            else:
                dataset.extras['ckan:frequency'] = value
                log.debug('frequency value not handled: %s', value)
        elif key == 'temporal_start':
            # Temporal coverage start
            temporal_start = daterange_start(value)
        elif key == 'temporal_end':
            # Temporal coverage end
            temporal_end = daterange_end(value)
        else:
            dataset.extras[extra['key']] = value

    if spatial_geom or spatial_zone:
        dataset.spatial = SpatialCoverage()

    if spatial_zone:
        dataset.spatial.zones = [spatial_zone]

    if spatial_geom:
        if spatial_geom['type'] == 'Polygon':
            coordinates = [spatial_geom['coordinates']]
        elif spatial_geom['type'] == 'MultiPolygon':
            coordinates = spatial_geom['coordinates']
        else:
            raise HarvestException('Unsupported spatial geometry')
        dataset.spatial.geom = {
            'type': 'MultiPolygon',
            'coordinates': coordinates
        }

    if temporal_start and temporal_end:
        dataset.temporal_coverage = db.DateRange(
            start=temporal_start,
            end=temporal_end,
        )

    # Remote URL
    dataset.extras['remote_url'] = self.dataset_url(data['name'])
    if data.get('url'):
        try:
            url = uris.validate(data['url'])
        except uris.ValidationError:
            dataset.extras['ckan:source'] = data['url']
        else:
            # use declared `url` as `remote_url` if any
            dataset.extras['remote_url'] = url

    # Resources
    for res in data['resources']:
        if res['resource_type'] not in ALLOWED_RESOURCE_TYPES:
            continue
        try:
            resource = get_by(dataset.resources, 'id', UUID(res['id']))
        except Exception:
            log.error('Unable to parse resource ID %s', res['id'])
            continue
        if not resource:
            resource = Resource(id=res['id'])
            dataset.resources.append(resource)
        resource.title = res.get('name', '') or ''
        resource.description = parse_html(res.get('description'))
        resource.url = res['url']
        resource.filetype = 'remote'
        resource.format = res.get('format')
        resource.mime = res.get('mimetype')
        resource.hash = res.get('hash')
        resource.created = res['created']
        resource.modified = res['last_modified']
        resource.published = resource.published or resource.created
    return dataset
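The geometry branch above normalises Polygons into single-member MultiPolygons, which works because a MultiPolygon's `coordinates` is simply a list of Polygon coordinate arrays. A standalone illustration with made-up coordinates:

# GeoJSON fact used above: wrapping a Polygon's coordinates in a list
# yields an equivalent one-member MultiPolygon.
polygon = {'type': 'Polygon',
           'coordinates': [[[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]]]}
multi = {'type': 'MultiPolygon', 'coordinates': [polygon['coordinates']]}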
def test_top_label_empty(self):
    coverage = SpatialCoverage()
    self.assertIsNone(coverage.top_label)
def process(self, item):
    response = self.get_action('package_show', id=item.remote_id)
    data = self.validate(response['result'], self.schema)

    if isinstance(data, list):
        data = data[0]

    # Fix the remote_id: use the real ID instead of the unstable name
    item.remote_id = data['id']

    # Skip if no resource
    if not len(data.get('resources', [])):
        msg = 'Dataset {0} has no record'.format(item.remote_id)
        raise HarvestSkipException(msg)

    dataset = self.get_dataset(item.remote_id)

    # Core attributes
    if not dataset.slug:
        dataset.slug = data['name']
    dataset.title = data['title']
    dataset.description = parse_html(data['notes'])

    # Detect organization: reuse an existing one or create it on the fly
    organization_acronym = data['organization']['name']
    org = Organization.objects(acronym=organization_acronym).first()
    if not org:
        org = Organization()
        org.acronym = organization_acronym
        org.name = data['organization']['title']
        org.description = data['organization']['description']
        org.save()
    dataset.organization = org

    # Detect license
    default_license = self.harvest_config.get('license', License.default())
    dataset.license = License.guess(data['license_id'],
                                    data['license_title'],
                                    default=default_license)

    dataset.tags = [t['name'] for t in data['tags'] if t['name']]
    dataset.tags.append(urlparse(self.source.url).hostname)

    dataset.created_at = data['metadata_created']
    dataset.last_modified = data['metadata_modified']
    dataset.frequency = 'unknown'
    dataset.extras['ckan:name'] = data['name']

    temporal_start, temporal_end = None, None
    spatial_geom = None

    for extra in data['extras']:
        # GeoJSON representation (Polygon or Point)
        if extra['key'] == 'spatial':
            spatial_geom = json.loads(extra['value'])
        # Textual representation of the extent / location
        elif extra['key'] == 'spatial-text':
            log.debug('spatial-text value not handled')
        # Linked Data URI representing the place name
        elif extra['key'] == 'spatial-uri':
            log.debug('spatial-uri value not handled')
        # Update frequency
        elif extra['key'] == 'frequency':
            log.debug('frequency value not handled: %s', extra['value'])
        # Temporal coverage start
        elif extra['key'] == 'temporal_start':
            temporal_start = daterange_start(extra['value'])
            continue
        # Temporal coverage end
        elif extra['key'] == 'temporal_end':
            temporal_end = daterange_end(extra['value'])
            continue
        dataset.extras[extra['key']] = extra['value']

    # Spatial coverage comes from the harvester configuration, not from
    # the source metadata: the `spatial` GeoJSON extra parsed above is
    # intentionally left unused here.
    if self.harvest_config.get('geozones', False):
        dataset.spatial = SpatialCoverage()
        dataset.spatial.zones = []
        for zone in self.harvest_config.get('geozones'):
            geo_zone = GeoZone.objects.get(id=zone)
            dataset.spatial.zones.append(geo_zone)

    if temporal_start and temporal_end:
        dataset.temporal_coverage = db.DateRange(
            start=temporal_start,
            end=temporal_end,
        )

    # Remote URL
    if data.get('url'):
        try:
            url = uris.validate(data['url'])
        except uris.ValidationError:
            dataset.extras['remote_url'] = self.dataset_url(data['name'])
            dataset.extras['ckan:source'] = data['url']
        else:
            dataset.extras['remote_url'] = url

    dataset.extras['harvest:name'] = self.source.name

    current_resources = [
        str(resource.id) for resource in dataset.resources
    ]
    fetched_resources = []

    # Resources
    for res in data['resources']:
        if res['resource_type'] not in ALLOWED_RESOURCE_TYPES:
            continue

        # Ignore invalid resources
        try:
            url = uris.validate(res['url'])
        except uris.ValidationError:
            continue

        try:
            resource = get_by(dataset.resources, 'id', UUID(res['id']))
        except Exception:
            log.error('Unable to parse resource ID %s', res['id'])
            continue

        fetched_resources.append(str(res['id']))
        if not resource:
            resource = Resource(id=res['id'])
            dataset.resources.append(resource)
        resource.title = res.get('name', '') or ''
        resource.description = parse_html(res.get('description'))
        resource.url = res['url']
        resource.filetype = 'remote'
        resource.format = res.get('format')
        resource.mime = res.get('mimetype')
        resource.hash = res.get('hash')
        resource.created = res['created']
        resource.modified = res['last_modified']
        resource.published = resource.published or resource.created

    # Clean up old resources removed from the source
    for resource_id in current_resources:
        if resource_id not in fetched_resources:
            try:
                resource = get_by(dataset.resources, 'id',
                                  UUID(resource_id))
            except Exception:
                log.error('Unable to parse resource ID %s', resource_id)
                continue
            if resource and not self.dryrun:
                dataset.resources.remove(resource)

    return dataset
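The two bookkeeping lists (`current_resources` and `fetched_resources`) reduce the final cleanup pass to a set difference; equivalently:

# Resources attached before the run but absent from this harvest are
# stale and get removed (unless dry-running).
stale_ids = set(current_resources) - set(fetched_resources)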
def migrate_zones_ids():
    """Migrate zones from old to new ids in datasets.

    Should only be run once with the new version of geozones w/ geohisto.
    """
    counter = Counter()
    drom_zone = GeoZone.objects(id='country-subset:fr:drom').first()
    dromcom_zone = GeoZone.objects(id='country-subset:fr:dromcom').first()
    # Iter over datasets with zones
    for dataset in Dataset.objects(spatial__zones__gt=[]):
        counter['datasets'] += 1
        new_zones = []
        for zone in dataset.spatial.zones:
            if zone.id.startswith('fr/'):
                counter['zones'] += 1
                country, kind, zone_id = zone.id.split('/')
                zone_id = zone_id.upper()  # Corsica 2a/b case.
                if kind == 'town':
                    counter['towns'] += 1
                    new_zones.append(
                        GeoZone.objects(
                            code=zone_id, level='fr:commune').valid_at(
                            date.today()).first())
                elif kind == 'county':
                    counter['counties'] += 1
                    new_zones.append(
                        GeoZone.objects(
                            code=zone_id, level='fr:departement').valid_at(
                            date.today()).first())
                elif kind == 'region':
                    counter['regions'] += 1
                    # Only link to pre-2016 regions which kept the same id.
                    new_zones.append(
                        GeoZone.objects(
                            code=zone_id, level='fr:region').first())
                elif kind == 'epci':
                    counter['epcis'] += 1
                    new_zones.append(
                        GeoZone.objects(
                            code=zone_id, level='fr:epci').valid_at(
                            dataset.created_at.date()).first())
                else:
                    new_zones.append(zone)
            elif zone.id.startswith('country-subset/fr'):
                counter['zones'] += 1
                subset, country, kind = zone.id.split('/')
                if kind == 'dom':
                    counter['drom'] += 1
                    new_zones.append(drom_zone)
                elif kind == 'domtom':
                    counter['dromcom'] += 1
                    new_zones.append(dromcom_zone)
            elif zone.id.startswith('country/'):
                counter['zones'] += 1
                counter['countries'] += 1
                new_zones.append(zone.id.replace('/', ':'))
            elif zone.id.startswith('country-group/'):
                counter['zones'] += 1
                counter['countrygroups'] += 1
                new_zones.append(zone.id.replace('/', ':'))
            else:
                new_zones.append(zone)
        dataset.update(spatial=SpatialCoverage(
            granularity=dataset.spatial.granularity,
            zones=[getattr(z, 'id', z) for z in new_zones if z]))
    log.info(Formatter().vformat('''Summary
    Processed {zones} zones in {datasets} datasets:
    - {countrygroups} country groups (World/UE)
    - {countries} countries
    - France:
        - {regions} regions
        - {counties} counties
        - {epcis} EPCIs
        - {towns} towns
        - {drom} DROM
        - {dromcom} DROM-COM
    ''', (), counter))
    log.info('Done')
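A side note on the summary formatting: `Formatter().vformat` is used instead of `str.format(**counter)` because it passes the `Counter` itself as the mapping, and a `Counter` returns 0 for missing keys, so every placeholder resolves even when a branch never incremented it.

from collections import Counter
from string import Formatter

# Counter.__missing__ returns 0, so unfilled placeholders render as 0
# instead of raising KeyError:
Formatter().vformat('{towns} towns, {drom} DROM', (), Counter(towns=3))
# -> '3 towns, 0 DROM'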
def test_top_label_single(self):
    zone = GeoZoneFactory(name='name', level='level', code='code')
    coverage = SpatialCoverage(zones=[zone])
    self.assertEqual(coverage.top_label, 'name')
def process(self, item):
    response = self.get_action('package_show', id=item.remote_id)
    data = self.validate(response['result'], schema)

    # Fix the remote_id: use the real ID instead of the unstable name
    item.remote_id = data['id']

    # Skip if no resource
    if not len(data.get('resources', [])):
        msg = 'Dataset {0} has no record'.format(item.remote_id)
        raise HarvestSkipException(msg)

    dataset = self.get_dataset(item.remote_id)

    # Core attributes
    if not dataset.slug:
        dataset.slug = data['name']
    dataset.title = data['title']
    dataset.description = data['notes']
    dataset.license = License.objects(id=data['license_id']).first()
    # dataset.license = license or License.objects.get(id='notspecified')
    dataset.tags = [t['name'] for t in data['tags'] if t['name']]

    dataset.created_at = data['metadata_created']
    dataset.last_modified = data['metadata_modified']

    dataset.extras['ckan:name'] = data['name']

    temporal_start, temporal_end = None, None
    spatial_geom = None

    for extra in data['extras']:
        # GeoJSON representation (Polygon or Point)
        if extra['key'] == 'spatial':
            spatial_geom = json.loads(extra['value'])
        # Textual representation of the extent / location
        elif extra['key'] == 'spatial-text':
            log.debug('spatial-text value not handled: %s', extra['value'])
        # Linked Data URI representing the place name
        elif extra['key'] == 'spatial-uri':
            log.debug('spatial-uri value not handled: %s', extra['value'])
        # Update frequency
        elif extra['key'] == 'frequency':
            log.debug('frequency value not handled: %s', extra['value'])
        # Temporal coverage start
        elif extra['key'] == 'temporal_start':
            temporal_start = daterange_start(extra['value'])
            continue
        # Temporal coverage end
        elif extra['key'] == 'temporal_end':
            temporal_end = daterange_end(extra['value'])
            continue
        dataset.extras[extra['key']] = extra['value']

    if spatial_geom:
        dataset.spatial = SpatialCoverage()
        if spatial_geom['type'] == 'Polygon':
            coordinates = [spatial_geom['coordinates']]
        elif spatial_geom['type'] == 'MultiPolygon':
            coordinates = spatial_geom['coordinates']
        else:
            raise HarvestException('Unsupported spatial geometry')
        dataset.spatial.geom = {
            'type': 'MultiPolygon',
            'coordinates': coordinates
        }

    if temporal_start and temporal_end:
        dataset.temporal_coverage = db.DateRange(
            start=temporal_start,
            end=temporal_end,
        )

    # Remote URL
    if data.get('url'):
        dataset.extras['remote_url'] = data['url']

    # Resources
    for res in data['resources']:
        if res['resource_type'] not in ALLOWED_RESOURCE_TYPES:
            continue
        try:
            resource = get_by(dataset.resources, 'id', UUID(res['id']))
        except Exception:
            log.error('Unable to parse resource ID %s', res['id'])
            continue
        if not resource:
            resource = Resource(id=res['id'])
            dataset.resources.append(resource)
        resource.title = res.get('name', '') or ''
        resource.description = res.get('description')
        resource.url = res['url']
        resource.filetype = ('api' if res['resource_type'] == 'api'
                             else 'remote')
        resource.format = res.get('format')
        resource.mime = res.get('mimetype')
        resource.hash = res.get('hash')
        resource.created = res['created']
        resource.modified = res['last_modified']
        resource.published = resource.published or resource.created
    return dataset
def test_top_label_single(self):
    territory = TerritoryReference(name='name', level='level', code='code')
    coverage = SpatialCoverage(territories=[territory])
    self.assertEqual(coverage.top_label, 'name')