def test_temporal_extent_with_mixed_timezones():
    '''Test temporal_extent() with dates that carry different timezones.

    The extent should preserve each endpoint's own UTC offset
    (CET -> +01:00, GMT -> +00:00) rather than normalising them.
    '''
    csv_text = ('number,date\n'
                '1,Fri Sep 26 11:22:13 CET 2007\n'
                '2,Sun Sep 28 09:11:45 GMT 2007\n'
                '0,Thu Sep 25 10:36:28 CET 2007\n')
    csv_file = StringIO.StringIO(csv_text)
    extent = csv_utils.temporal_extent(csv_file, column_num=1)
    # Plain assert for consistency with the other tests in this file.
    assert extent == '2007-09-25T10:36:28+01:00/2007-09-28T09:11:45+00:00'
def test_temporal_extent_MM_DD_YY():
    '''Test temporal_extent() with MM-DD-YY-formatted date strings.
    '''
    rows = ['number,date', '0,06-23-17', '1,07-23-17', '2,012-13-23']
    input_file = StringIO.StringIO('\n'.join(rows))
    result = csv_utils.temporal_extent(input_file, column_num=1)
    expected = '2017-06-23T00:00:00/2023-12-13T00:00:00'
    assert result == expected
def test_temporal_extent_YYYY():
    '''Test temporal_extent() with YYYY-formatted (year only) date strings.
    '''
    rows = ['number,date', '0,1933', '1,1997', '2,2016']
    input_file = StringIO.StringIO('\n'.join(rows))
    result = csv_utils.temporal_extent(input_file, column_num=1)
    expected = '1933-01-01T00:00:00/2016-01-01T00:00:00'
    assert result == expected
def test_temporal_extent_with_timezone():
    '''If given dates with timezones in the input data, temporal_extent()
    should output dates with UTC offsets.
    '''
    csv_text = ('number,date\n'
                '1,Fri Sep 26 11:22:13 CET 2007\n'
                '2,Sun Sep 28 09:11:45 CET 2007\n'
                '0,Thu Sep 25 10:36:28 CET 2007\n')
    csv_file = StringIO.StringIO(csv_text)
    extent = csv_utils.temporal_extent(csv_file, column_num=1)
    # Plain assert for consistency with the other tests in this file.
    assert extent == '2007-09-25T10:36:28+01:00/2007-09-28T09:11:45+01:00'
# NOTE(review): this is the interior of a larger function (its `def` line is
# outside this chunk) — presumably a CKAN action that appends a field to a
# resource's schema and saves it. Confirm against the enclosing definition.
resource_id = data_dict.pop('resource_id')
resource_dict = toolkit.get_action('resource_show')(context, {'id': resource_id})
# For temporal field types, try to pre-compute the field's temporal extent
# from the resource's CSV file.
if data_dict.get('type') in ('date', 'time', 'datetime'):
    try:
        path = util.get_path_to_resource_file(resource_dict)
    except exceptions.ResourceFileDoesNotExistException:
        # No local file for this resource — skip extent computation.
        path = None
    if path:
        try:
            data_dict['temporal_extent'] = csv_utils.temporal_extent(path, column_num=data_dict['index'])
        except ValueError:
            # Best-effort: unparseable dates leave temporal_extent unset.
            pass
        except TypeError:
            pass
# Append the new field to the resource's existing schema and persist the
# whole schema back onto the resource as a JSON string.
schema_ = toolkit.get_action('resource_schema_show')(context, {'resource_id': resource_id})
schema_['fields'].append(data_dict)
schema_ = json.dumps(schema_)
# url and name are re-sent because resource_update replaces the resource
# wholesale — presumably omitting them would clear those values; verify.
toolkit.get_action('resource_update')(context, {'id': resource_id, 'url': resource_dict['url'], 'name': resource_dict['name'], 'schema': schema_})
# This is probably unnecessary as we already have the schema above.