def setup(self):
    """Build the fixtures for the coupled-resource tests.

    Creates service/dataset packages, a user, a publisher, a harvest
    source+job, harvest objects for four datasets, and a partially
    populated coupling table (5 rows).
    """
    def service(name, refs):
        # A 'service' package whose coupled-resource extra points at
        # the given dataset refs.
        hrefs = [{'href': [ref_prefix + ref]} for ref in refs]
        return {'name': name,
                'extras': {'coupled-resource': json.dumps(hrefs),
                           'resource-type': 'service'}}

    def dataset(name):
        # A plain 'dataset' package with no couplings of its own.
        return {'name': name, 'extras': {'resource-type': 'dataset'}}

    CreateTestData.create_arbitrary([
        service('serviceA', ['Bref', 'Href', 'Eref']),
        service('serviceF', ['Dref']),
        service('serviceG', ['Gref']),
        dataset('datasetB'),
        dataset('datasetC'),
        dataset('datasetD'),
        dataset('datasetE'),
        dataset('datasetG'),
        dataset('datasetH'),
        service('serviceD', ['Dref']),
    ])
    self._create_user()
    self._create_publisher()
    self.source, self.job = self._create_source_and_job()
    for name, ref in (('datasetB', 'Bref'),
                      ('datasetC', 'Cref'),
                      ('datasetD', 'Dref'),
                      ('datasetE', 'Eref')):
        self._create_harvest_object(name, ref=ref)
    # Partially-filled coupling table: some rows are missing either
    # the service side or the dataset side of the couple.
    self._create_coupled_resource('serviceA', 'Bref', 'datasetB')
    self._create_coupled_resource('serviceA', 'Cref', 'datasetC')
    self._create_coupled_resource(None, 'Dref', 'datasetD')
    self._create_coupled_resource('serviceA', 'Eref', None)
    self._create_coupled_resource('serviceF', 'Dref', 'datasetD')
    model.Session.commit()
    model.Session.remove()
    self.couples_before = self._get_coupled_resources()
    pprint(self.couples_before)
    assert_equal(len(self.couples_before), 5)
def output_feed(self, results, feed_title, feed_description,
                feed_link, feed_url, navigation_urls, feed_guid):
    """Render *results* (a list of package dicts) as an Atom feed.

    Sets the response content-type and returns the utf-8 encoded
    feed document as a string.
    """
    # Fall back to the site-wide identity when no feed-specific
    # author is configured.
    author_name = config.get('ckan.feeds.author_name', '').strip() or \
        config.get('ckan.site_id', '').strip()
    author_link = config.get('ckan.feeds.author_link', '').strip() or \
        config.get('ckan.site_url', '').strip()

    # TODO language
    feed = _FixedAtom1Feed(
        title=feed_title,
        link=feed_link,
        description=feed_description,
        language=u'en',
        author_name=author_name,
        author_link=author_link,
        feed_guid=feed_guid,
        feed_url=feed_url,
        previous_page=navigation_urls['previous'],
        next_page=navigation_urls['next'],
        first_page=navigation_urls['first'],
        last_page=navigation_urls['last'],
    )

    for pkg in results:
        dataset_link = self.base_url + url_for(controller='package',
                                               action='read',
                                               id=pkg['id'])
        api_link = self.base_url + url_for(controller='api',
                                           register='package',
                                           action='show',
                                           id=pkg['name'],
                                           ver='2')
        enclosure = webhelpers.feedgenerator.Enclosure(
            api_link,
            unicode(len(json.dumps(pkg))),  # TODO fix this
            u'application/json')
        feed.add_item(
            title=pkg.get('title', ''),
            link=dataset_link,
            description=pkg.get('notes', ''),
            updated=date_str_to_datetime(pkg.get('metadata_modified')),
            published=date_str_to_datetime(pkg.get('metadata_created')),
            unique_id=_create_atom_id(u'/dataset/%s' % pkg['id']),
            author_name=pkg.get('author', ''),
            author_email=pkg.get('author_email', ''),
            categories=[t['name'] for t in pkg.get('tags', [])],
            enclosure=enclosure,
        )
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def output_feed(self, results, feed_title, feed_description,
                feed_link, feed_url, navigation_urls, feed_guid):
    """Render *results* (a list of package dicts) as an Atom feed.

    Sets the response content-type and returns the utf-8 encoded
    feed document as a string.
    """
    # Prefer a feed-specific author; fall back to the site identity.
    author_name = config.get('ckan.feeds.author_name', '').strip() or \
        config.get('ckan.site_id', '').strip()
    author_link = config.get('ckan.feeds.author_link', '').strip() or \
        config.get('ckan.site_url', '').strip()

    # TODO language
    feed = _FixedAtom1Feed(
        title=feed_title,
        link=feed_link,
        description=feed_description,
        language=u'en',
        author_name=author_name,
        author_link=author_link,
        feed_guid=feed_guid,
        feed_url=feed_url,
        previous_page=navigation_urls['previous'],
        next_page=navigation_urls['next'],
        first_page=navigation_urls['first'],
        last_page=navigation_urls['last'],
    )

    for pkg in results:
        # PEP 8: no spaces around '=' in keyword arguments (matches the
        # other copy of this method in the file).
        feed.add_item(
            title=pkg.get('title', ''),
            link=self.base_url + url_for(controller='package',
                                         action='read',
                                         id=pkg['id']),
            description=pkg.get('notes', ''),
            updated=date_str_to_datetime(pkg.get('metadata_modified')),
            published=date_str_to_datetime(pkg.get('metadata_created')),
            unique_id=_create_atom_id(u'/dataset/%s' % pkg['id']),
            author_name=pkg.get('author', ''),
            author_email=pkg.get('author_email', ''),
            categories=[t['name'] for t in pkg.get('tags', [])],
            enclosure=webhelpers.feedgenerator.Enclosure(
                self.base_url + url_for(controller='api',
                                        register='package',
                                        action='show',
                                        id=pkg['name'],
                                        ver='2'),
                unicode(len(json.dumps(pkg))),  # TODO fix this
                u'application/json'),
        )
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def _features_as_json(self, features):
    '''Return the given features serialized as a GeoJSON
    FeatureCollection string, e.g.:

    {"type": "FeatureCollection",
     "features": [
        {"type": "Feature",
         "geometry": {"type": "Point", "coordinates": [102.0, 0.5]},
         "properties": {"ID": 11, "SchoolName": "Camden", ...}},
        ...]}
    '''
    collected = []
    for feature in features:
        # feature['datasetid'] is deliberately left out of the output.
        try:
            props = json.loads(feature['properties'])
        except ValueError:
            # Best-effort: keep the feature but flag the bad properties.
            log.error('Properties did not parse as JSON. Dataset: %s Properties: %r',
                      feature['datasetid'], feature['properties'])
            props = 'Error loading properties'
        collected.append({
            'type': 'Feature',
            'geometry': {
                'type': 'Point',
                'coordinates': parse_point_wkt(feature['geom']),
            },
            'properties': props,
        })
    return json.dumps({'type': 'FeatureCollection',
                       'features': collected})