def test_DatasetSerializer(self):
    """DatasetSerializer should expose the expected Dataset fields and values."""
    dataset = DatasetFactory.build()
    serializer = DatasetSerializer(
        dataset, context={'request': self.request_dummy})

    # Field-by-field value checks against the model instance.
    self.assertEqual(serializer.data.get('id'), dataset.id)
    self.assertEqual(serializer.data.get('name'), dataset.name)
    self.assertEqual(serializer.data.get('title'), dataset.title)
    self.assertEqual(serializer.data.get('filetype'), 'Activity')
    self.assertEqual(serializer.data.get('source_url'), dataset.source_url)
    self.assertEqual(
        serializer.data.get('iati_version'), dataset.iati_version)

    # NOTE(review): value checks for date_created / date_updated /
    # last_found_in_registry were previously commented out (the serialized
    # representation differs from the raw model value); their presence is
    # still asserted below.

    required_fields = (
        'id', 'url', 'name', 'title', 'filetype', 'publisher', 'source_url',
        'activities', 'activity_count', 'date_created', 'date_updated',
        'last_found_in_registry', 'iati_version', 'sha1', 'note_count',
        'notes')
    for field in required_fields:
        # assertIn is not stripped under `python -O` (a bare assert is) and
        # produces a clearer failure message.
        self.assertIn(
            field, serializer.data,
            "the field '{0}' should be in the serialized dataset".format(
                field))
def put(self, request, publisher_id, dataset_id):
    """Update a dataset's source_url and refresh publish flags.

    Marks the publisher's ready-to-publish organisations as published and
    un-publishes the rest, then returns the serialized dataset.
    Raises APIException when no source_url is supplied.
    """
    publisher = Publisher.objects.get(pk=publisher_id)

    source_url = request.data.get('source_url', None)
    # TODO: call package_update to update source_url for registry as well
    # - 2017-02-20
    if not source_url:
        raise exceptions.APIException(detail="no source_url provided")

    dataset = Dataset.objects.get(id=dataset_id)

    # Take a single timestamp so the dataset and the organisations below
    # are stamped with exactly the same update time (previously
    # datetime.now() was called twice, yielding slightly different values).
    now = datetime.now()

    dataset.date_updated = now
    dataset.source_url = source_url
    dataset.save()

    # get all ready to publish organisations
    organisations = Organisation.objects.filter(
        ready_to_publish=True, publisher=publisher)
    non_r2p_organisations = Organisation.objects.filter(
        ready_to_publish=False, publisher=publisher)

    # update the affected organisations flags
    organisations.update(
        published=True,
        modified=False,
        ready_to_publish=True,
        last_updated_datetime=now.isoformat(' '))
    non_r2p_organisations.update(published=False)

    # return Dataset object
    serializer = DatasetSerializer(dataset, context={'request': request})
    return Response(serializer.data)
class PublisherSerializer(DynamicFieldsModelSerializer):
    """Serializes a Publisher with its datasets and activity statistics.

    Legacy variant: datasets come from the ``iatixmlsource_set`` relation
    and the publisher is identified by ``org_id``.
    """

    url = HyperlinkedIdentityField(view_name='publishers:publisher-detail')
    # Only a subset of dataset fields is exposed in the nested listing.
    datasets = DatasetSerializer(many=True, source="iatixmlsource_set",
                                 fields=('url', 'ref', 'title', 'type', 'source_url'))
    activity_count = SerializerMethodField()
    note_count = SerializerMethodField()
    activities = SerializerMethodField()

    class Meta:
        model = Publisher
        fields = ('url', 'id', 'org_id', 'org_abbreviate', 'org_name',
                  'activities', 'activity_count', 'note_count', 'datasets')

    def get_activities(self, obj):
        """Return an absolute activity-list URL filtered to this publisher."""
        request = self.context.get('request')
        url = request.build_absolute_uri(reverse('activities:activity-list'))
        return url + '?reporting_organisation=' + obj.org_id

    def get_activity_count(self, obj):
        """Count activities whose reporting organisation ref matches org_id."""
        return Activity.objects.filter(
            reporting_organisations__normalized_ref=obj.org_id).count()

    def get_note_count(self, obj):
        """Sum note_count over the publisher's XML sources.

        NOTE(review): the Sum aggregate yields None when the publisher has
        no sources, so callers receive null rather than 0 — confirm that is
        intended.
        """
        sum_queryset = IatiXmlSource.objects.filter(
            publisher=obj.id).aggregate(Sum('note_count'))
        return sum_queryset.get('note_count__sum')
class PublisherSerializer(DynamicFieldsModelSerializer):
    """Serializes a Publisher with its datasets and activity statistics.

    Current variant: datasets come from the ``dataset_set`` relation and the
    publisher is identified by ``publisher_iati_id``.
    """

    url = HyperlinkedIdentityField(view_name='publishers:publisher-detail')
    # Only a subset of dataset fields is exposed in the nested listing.
    datasets = DatasetSerializer(
        many=True,
        source="dataset_set",
        fields=(
            'id',
            'iati_id',
            'url',
            'name',
            'title',
            'filetype',
            'source_url',
            'added_manually',
            'is_parsed',
            'export_in_progress',
            'parse_in_progress'))
    activity_count = SerializerMethodField()
    note_count = SerializerMethodField()
    activities = SerializerMethodField()

    class Meta:
        model = Publisher
        fields = (
            'id',
            'url',
            'iati_id',
            'publisher_iati_id',
            'display_name',
            'name',
            'organisation',
            'activities',
            'activity_count',
            'note_count',
            'datasets',)

    def get_activities(self, obj):
        """Return an absolute activity-list URL filtered to this publisher."""
        request = self.context.get('request')
        url = request.build_absolute_uri(reverse('activities:activity-list'))
        return (url
                + '?reporting_organisation_identifier='
                + obj.publisher_iati_id)

    def get_activity_count(self, obj):
        """Count activities whose reporting org ref matches the IATI id."""
        return Activity.objects.filter(
            reporting_organisations__normalized_ref=obj.publisher_iati_id).count()

    def get_note_count(self, obj):
        """Sum note_count over the publisher's datasets.

        NOTE(review): the Sum aggregate yields None when the publisher has
        no datasets, so callers receive null rather than 0 — confirm that is
        intended.
        """
        sum_queryset = Dataset.objects.filter(
            publisher=obj.id).aggregate(Sum('note_count'))
        return sum_queryset.get('note_count__sum')
def test_DatasetSerializer(self):
    """DatasetSerializer should expose the expected source fields and values."""
    dataset = DatasetFactory.build()
    serializer = DatasetSerializer(
        dataset, context={'request': self.request_dummy})

    # unittest assertions replace the previous bare `assert` statements,
    # which are stripped when Python runs with -O and gave verbose
    # hand-written failure messages.
    self.assertEqual(serializer.data.get('ref'), dataset.ref)
    self.assertEqual(serializer.data.get('title'), dataset.title)
    self.assertEqual(serializer.data.get('type'), 'Activity')
    self.assertEqual(serializer.data.get('source_url'), dataset.source_url)
    self.assertEqual(
        serializer.data.get('iati_standard_version'),
        dataset.iati_standard_version)

    # NOTE(review): value checks for date_created / date_updated /
    # last_found_in_registry were previously commented out (the serialized
    # representation differs from the raw model value); their presence is
    # still asserted below.

    required_fields = ('ref', 'title', 'type', 'publisher', 'url',
                       'source_url', 'date_created', 'date_updated',
                       'last_found_in_registry', 'iati_standard_version')
    for field in required_fields:
        self.assertIn(
            field, serializer.data,
            "the field '{0}' should be in the serialized dataset".format(
                field))
def put(self, request, publisher_id, dataset_id):
    """Update a dataset's source_url and refresh activity publish flags.

    Marks the publisher's ready-to-publish activities as published and
    un-publishes the rest, then returns the serialized dataset.
    Raises APIException when no source_url is supplied.
    """
    publisher = Publisher.objects.get(pk=publisher_id)
    # Acts as a guard: raises DoesNotExist when the publisher has no admin
    # group; the value itself is not used.
    admin_group = OrganisationAdminGroup.objects.get(
        publisher_id=publisher_id)

    source_url = request.data.get('source_url', None)
    # TODO: call package_update to update source_url for registry as well - 2017-02-20
    if not source_url:
        raise exceptions.APIException(detail="no source_url provided")

    # NOTE(review): a RemoteCKAN client (plus user/api_key locals) was
    # previously constructed here but never used — the registry update is
    # still the TODO above — so those dead locals were removed.

    dataset = Dataset.objects.get(id=dataset_id)

    # Take a single timestamp so the dataset and the activities below are
    # stamped with exactly the same update time (previously datetime.now()
    # was called twice, yielding slightly different values).
    now = datetime.now()

    dataset.date_updated = now
    dataset.source_url = source_url
    dataset.save()

    # get all ready to publish activities
    activities = Activity.objects.filter(
        ready_to_publish=True, publisher=publisher)
    non_r2p_activities = Activity.objects.filter(
        ready_to_publish=False, publisher=publisher)

    # update the affected activities flags
    activities.update(
        published=True,
        modified=False,
        ready_to_publish=True,
        last_updated_datetime=now.isoformat(' ')
    )
    non_r2p_activities.update(published=False)

    # return Dataset object
    serializer = DatasetSerializer(dataset, context={'request': request})
    return Response(serializer.data)
def post(self, request, publisher_id):
    """Publish the publisher's ready-to-publish activities to the registry.

    Creates (or, if it already exists, updates) the CKAN package for the
    publisher's activity file, records a local Dataset for it and flips the
    affected activities' publish flags.
    Raises APIException when no source_url is supplied or publishing fails.
    """
    user = request.user
    organisationuser = user.organisationuser
    iati_user_id = organisationuser.iati_user_id
    publisher = Publisher.objects.get(pk=publisher_id)
    # Acts as a guard: raises DoesNotExist when the publisher has no admin
    # group; the value itself is not used.
    admin_group = OrganisationAdminGroup.objects.get(
        publisher_id=publisher_id)

    source_url = request.data.get('source_url', None)
    if not source_url:
        raise exceptions.APIException(detail="no source_url provided")

    api_key = organisationuser.iati_api_key
    client = RemoteCKAN(settings.CKAN_URL, apikey=api_key)

    # TODO: should this be the name? - 2017-02-20
    source_name = '{}-activities'.format(publisher.name)

    # get all published activities, except for the ones that are just modified
    activities = Activity.objects.filter(
        ready_to_publish=True, publisher=publisher)

    try:
        org_list = client.call_action('organization_list_for_user', {})
    except Exception:
        # Was a bare `except:` whose detail referenced the undefined name
        # `user_id` (NameError) and lacked a `{}` placeholder, so .format()
        # was a no-op.
        raise exceptions.APIException(
            detail="Can't get organisation list for user {}".format(
                iati_user_id))

    primary_org_id = org_list[0]['id']

    # Single package payload shared by create and update (it was previously
    # duplicated inline in both branches).
    package = {
        "resources": [{"url": source_url}],
        "name": source_name,
        "filetype": "activity",
        "date_updated": datetime.now().strftime('%Y-%m-%d %H:%M'),
        "activity_count": activities.count(),
        "title": source_name,
        "owner_org": primary_org_id,
        "url": source_url,
    }

    try:
        # sync main datasets to IATI registry
        registry_dataset = client.call_action('package_create', package)
    except Exception as e:
        # try to recover from case when the dataset already exists (just
        # update it instead)
        old_package = client.call_action('package_show', {
            "name_or_id": source_name,
        })
        if not old_package:
            # error_dict only exists on CKAN validation errors, so access it
            # defensively (it was previously read unguarded).
            print('exception raised in client_call_action', e,
                  getattr(e, 'error_dict', None))
            raise exceptions.APIException(detail="Failed publishing dataset")
        registry_dataset = client.call_action(
            'package_update', dict(package, id=old_package.get('id')))

    # 0. create_or_update Dataset object
    dataset = Dataset.objects.create(
        id=registry_dataset['id'],
        name=source_name,
        title=source_name,
        filetype=1,
        publisher=publisher,
        source_url=source_url,
        # TODO: store in OIPA somewhere, or let user define this? - 2017-01-13
        is_parsed=False,
        iati_version="2.02",
    )

    # update the affected activities flags
    activities.update(published=True, modified=False, ready_to_publish=True)

    # remove the old datasets from the registry
    # TODO: query the registry to remove a dataset - 2017-01-16
    # TODO: remove old datasets locally as well - 2017-01-16
    # TODO: Or just ask the user to remove the old datasets by hand? - 2017-02-20

    # return Dataset object
    serializer = DatasetSerializer(dataset, context={'request': request})
    return Response(serializer.data)
def post(self, request, publisher_id):
    """Publish the publisher's ready-to-publish organisations to the registry.

    Creates (or, if it already exists, updates) the CKAN package for the
    publisher's organisation file, updates the matching manually-added local
    Dataset and flips the affected organisations' publish flags.
    Raises APIException when no source_url is supplied or publishing fails.
    """
    user = request.user
    organisationuser = user.organisationuser
    iati_user_id = organisationuser.iati_user_id
    publisher = Publisher.objects.get(pk=publisher_id)

    source_url = request.data.get('source_url', None)
    if not source_url:
        raise exceptions.APIException(detail="no source_url provided")

    api_key = organisationuser.iati_api_key
    client = RemoteCKAN(settings.CKAN_URL, apikey=api_key)

    # TODO: should this be the name? - 2017-02-20
    source_name = '{}-organisations'.format(publisher.name)

    # get all published organisations, except for the ones that are just
    # modified
    organisations = Organisation.objects.filter(
        ready_to_publish=True, publisher=publisher)

    try:
        org_list = client.call_action('organization_list_for_user', {})
    except Exception:
        # Was `except BaseException` (catches KeyboardInterrupt/SystemExit)
        # and the message lacked a `{}` placeholder, so the user id never
        # appeared in the error detail.
        raise exceptions.APIException(
            detail="Can't get organisation list for user {}".format(
                iati_user_id))

    primary_org_id = org_list[0]['id']

    # Single package payload shared by create and update (it was previously
    # duplicated inline in both branches).
    package = {
        "resources": [{"url": source_url}],
        "name": source_name,
        "filetype": "organisation",
        "date_updated": datetime.now().strftime('%Y-%m-%d %H:%M'),
        "organisation_count": organisations.count(),
        "title": source_name,
        "owner_org": primary_org_id,
        "url": source_url,
    }

    try:
        # sync main datasets to IATI registry
        registry_dataset = client.call_action('package_create', package)
    except Exception:
        # try to recover from case when the dataset already exists (just
        # update it instead)
        old_package = client.call_action('package_show', {
            "name_or_id": source_name,
        })
        if not old_package:
            raise exceptions.APIException(
                detail="Failed publishing dataset")
        registry_dataset = client.call_action(
            'package_update', dict(package, id=old_package.get('id')))

    # 0. create_or_update Dataset object
    dataset = Dataset.objects.get(
        filetype=2,
        publisher=publisher,
        added_manually=True,
    )
    dataset.iati_id = registry_dataset['id']
    dataset.name = source_name
    dataset.title = source_name
    dataset.source_url = source_url
    dataset.is_parsed = False
    dataset.save()

    # update the affected organisations flags
    organisations.update(
        published=True, modified=False, ready_to_publish=True)

    # remove the old datasets from the registry
    # TODO: query the registry to remove a dataset - 2017-01-16
    # TODO: remove old datasets locally as well - 2017-01-16
    # TODO: Or just ask the user to remove the old datasets by hand?
    # - 2017-02-20

    # return Dataset object
    serializer = DatasetSerializer(dataset, context={'request': request})
    return Response(serializer.data)