def test_dates(self):
    """Date fields are parsed from both ISO and "Excel" CSV styles.

    Unparseable dates must surface as row errors whose messages mention
    the db-date parser.
    """
    # ISO style (YYYY-MM-DD HH:MM, YYYY-MM-DD, YYYY-MM, YYYY)
    added, updated, warnings, errors = self.assert_csv_import('dates_iso.csv', 1, 0, 0, 0)
    pkg = package_show_rest(self.context, {'id': 'test-publisher-vu'})
    extras = pkg['extras']
    assert extras['activity_period-from'] == '1904-06-16 10:01'
    assert extras['activity_period-to'] == '1904-06-16'
    assert extras['data_updated'] == '1904-06'

    # "Excel" style (DD/MM/YYYY HH:MM, DD/MM/YYYY, MM/YYYY, YYYY)
    added, updated, warnings, errors = self.assert_csv_import('dates_excel.csv', 1, 0, 0, 0)
    pkg = package_show_rest(self.context, {'id': 'test-publisher-vu'})
    extras = pkg['extras']
    assert extras['activity_period-from'] == '1904-06-16 10:01'
    assert extras['activity_period-to'] == '1904-06-16'
    assert extras['data_updated'] == '1904-06'

    # Wrong dates
    added, updated, warnings, errors = self.assert_csv_import('dates_errors.csv', 0, 0, 0, 1)
    for field, messages in errors[0][1].iteritems():
        assert 'cannot parse db date' in messages[0].lower()
def package_show_rest(context, data_dict):
    """Show a package in REST form, unrolling the 'extras_rollup' extra.

    If the package carries a JSON-encoded 'extras_rollup' extra, it is
    removed and its contents merged back into the extras dict.
    """
    data_dict = core_get.package_show_rest(context, data_dict)
    extras = data_dict.get('extras', {})
    rolled_up = extras.pop('extras_rollup', None)
    if rolled_up:
        extras.update(json.loads(rolled_up))
    return data_dict
def package_show_rest(context, data_dict):
    """Show a package in REST form, unrolling the 'extras_rollup' extra.

    If the package carries a JSON-encoded 'extras_rollup' extra, it is
    removed and its contents merged back into the extras dict.
    """
    data_dict = core_get.package_show_rest(context, data_dict)
    extras = data_dict.get('extras', {})
    # pop with a default: not every package has an 'extras_rollup' extra,
    # and a bare pop() would raise KeyError for those packages.
    rollup = extras.pop('extras_rollup', None)
    if rollup:
        rollup = json.loads(rollup)
        for key, value in rollup.items():
            extras[key] = value
    return data_dict
def package_show_rest_minimal(context, data_dict):
    """REST package view, trimmed for anonymous callers.

    Logged-in users receive the full package, augmented with the stored
    fulltext (when one exists) under extras['full_text_search'].
    Anonymous users receive a version with the extra and main fields
    stripped out.
    """
    package = get.package_show_rest(context, data_dict)
    if not check_logged_in(context):
        stripped = _del_extra_field_from_list(package)
        return _del_main_field_from_dict(stripped)
    fulltext = _get_fulltext(package['id'])
    if fulltext:
        package['extras']['full_text_search'] = fulltext.text
    return package
def test_dates(self):
    """'data_updated' is normalised from both ISO and "Excel" CSV styles.

    Unparseable dates must surface as row errors whose messages mention
    the db-date parser.
    """
    # ISO style (YYYY-MM-DD HH:MM, YYYY-MM-DD, YYYY-MM, YYYY)
    added, updated, warnings, errors = self.assert_csv_import('dates_iso.csv', 1, 0, 0, 0)
    pkg = package_show_rest(self.context, {'id': 'test-publisher-vu'})
    assert pkg['extras']['data_updated'] == '1904-06'

    # "Excel" style (DD/MM/YYYY HH:MM, DD/MM/YYYY, MM/YYYY, YYYY)
    added, updated, warnings, errors = self.assert_csv_import('dates_excel.csv', 1, 0, 0, 0)
    pkg = package_show_rest(self.context, {'id': 'test-publisher-vu'})
    assert pkg['extras']['data_updated'] == '1904-06'

    # Wrong dates
    added, updated, warnings, errors = self.assert_csv_import('dates_errors.csv', 0, 0, 0, 1)
    for field, messages in errors[0][1].iteritems():
        assert 'cannot parse db date' in messages[0].lower()
def package_show_rest(context, data_dict):
    """REST package view enriched with extras taken from its publisher group.

    The ideal place to do this would be the after_show hook on the
    iati_datasets plugin, but package_show_rest does not call it in core.
    """
    package_dict = get_core.package_show_rest(context, data_dict)
    group = context['package'].groups[0] if len(context['package'].groups) else None
    if group:
        # Merge the publisher fields as a mapping. The original built a
        # list of {'key': ..., 'value': ...} dicts and passed it to
        # dict.update(), which iterates each two-key dict as the pair
        # ('key', 'value') and so stored the literal entry
        # extras['key'] = 'value' instead of the publisher extras.
        new_extras = {}
        for key in ('publisher_source_type', 'publisher_organization_type',
                    'publisher_country', 'publisher_iati_id',):
            new_extras[key] = group.get(key, '')
        package_dict['extras'].update(new_extras)
    return package_dict