def test_data_1d(self):
    """
    Test that one-dimensional variable data is successfully written to
    file and can be retrieved on a later read. Ensures correct ordering
    of data in the file.
    """
    # Simple test with only one datapoint.
    reader1 = NetCDFReader(DATASET1)
    data1 = reader1.collect_untimed_data(READ_VAR_NAME)
    reader1.close()

    self.assertEqual(data1.shape, (1, 1))
    self.assertEqual(data1[0, 0], 1)

    # Test with multiple values in one dimension.
    reader5 = NetCDFReader(DATASET5)
    data5 = reader5.collect_untimed_data(READ_VAR_NAME)
    reader5.close()

    self.assertEqual(data5.shape, (5, 1))

    # Check a few values to demonstrate that the return value lines up
    # with the expected values.
    self.assertEqual(data5[0, 0], 1)
    self.assertEqual(data5[1, 0], 2)
    self.assertEqual(data5[2, 0], 3)
    self.assertEqual(data5[3, 0], 4)
    self.assertEqual(data5[4, 0], 5)
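# Illustrative sketch (not part of the test suite): one way a fixture like
# DATASET5 could have been written with the netCDF4 library. The dimension
# names, data type, and value layout are assumptions chosen to match the
# assertions in test_data_1d and test_latitude (values 1..5 increasing along
# latitude, with a single longitude point); the project's real
# fixture-generation code may differ.
def _write_5x1_fixture(file_path: str, var_name: str) -> None:
    from netCDF4 import Dataset

    ds = Dataset(file_path, "w")
    ds.createDimension("latitude", 5)
    ds.createDimension("longitude", 1)

    lat = ds.createVariable("latitude", "f4", ("latitude",))
    lon = ds.createVariable("longitude", "f4", ("longitude",))
    var = ds.createVariable(var_name, "f4", ("latitude", "longitude"))

    # Coordinates 0..4 in latitude, a single 0 in longitude, and data values
    # 1..5 stored along the latitude axis.
    lat[:] = [0, 1, 2, 3, 4]
    lon[:] = [0]
    var[:, 0] = [1, 2, 3, 4, 5]
    ds.close()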
def test_read_single(self):
    """
    Test that a very simple dataset can be read successfully, opening
    the dataset and returning the only datapoint inside.
    """
    reader = NetCDFReader(DATASET1)
    data = reader.collect_untimed_data(READ_VAR_NAME)
    reader.close()

    self.assertEqual(data.shape, (1, 1))
    self.assertEqual(data[0, 0], 1)
def test_latitude(self):
    """
    Test that latitude data is stored correctly, and its data is
    retrieved by the latitude method without interference by any other
    dimensions.
    """
    # Test with a single datapoint.
    reader1 = NetCDFReader(DATASET1)
    lat1 = reader1.latitude()
    reader1.close()

    self.assertEqual(1, len(lat1))
    self.assertEqual(0, lat1[0])

    # Test with multiple points in latitude, but only one in longitude.
    reader5 = NetCDFReader(DATASET5)
    lat5 = reader5.latitude()
    reader5.close()

    self.assertEqual(5, len(lat5))
    self.assertEqual(0, lat5[0])
    self.assertEqual(1, lat5[1])
    self.assertEqual(2, lat5[2])
    self.assertEqual(3, lat5[3])
    self.assertEqual(4, lat5[4])

    # Test with multiple points in both latitude and longitude.
    reader6 = NetCDFReader(DATASET6)
    lat6 = reader6.latitude()
    reader6.close()

    self.assertEqual(3, len(lat6))
    self.assertEqual(0, lat6[0])
    self.assertEqual(1, lat6[1])
    self.assertEqual(2, lat6[2])

    # Test with three dimensions, including multiple points in latitude
    # and longitude.
    reader12 = NetCDFReader(DATASET12)
    lat12 = reader12.latitude()
    reader12.close()

    self.assertEqual(2, len(lat12))
    self.assertEqual(0, lat12[0])
    self.assertEqual(1, lat12[1])
def test_longitude(self):
    """
    Test that the longitude variable can be successfully returned,
    irrespective of any other dimensions on top of it. Checks that the
    values of the longitude variable can be successfully read.
    """
    # Test on a dataset with only a single datapoint.
    reader1 = NetCDFReader(TIME_DATASET1)
    lon1 = reader1.longitude()
    reader1.close()

    self.assertEqual(1, len(lon1))
    self.assertEqual(0, lon1[0])

    # Test on a dataset with only a time dimension, and only one unit of
    # longitude in length.
    reader5 = NetCDFReader(TIME_DATASET5)
    lon5 = reader5.longitude()
    reader5.close()

    self.assertEqual(1, len(lon5))
    self.assertEqual(0, lon5[0])

    # Test on a dataset with two dimensions plus a time dimension, with
    # multiple units of longitude length.
    reader12 = NetCDFReader(TIME_DATASET12)
    lon12 = reader12.longitude()
    reader12.close()

    self.assertEqual(2, len(lon12))
    self.assertEqual(0, lon12[0])
    self.assertEqual(1, lon12[1])

    # Test on a dataset with more dimensions than just latitude,
    # longitude, and time.
    reader48 = NetCDFReader(TIME_DATASET48)
    lon48 = reader48.longitude()
    reader48.close()

    self.assertEqual(2, len(lon48))
    self.assertEqual(0, lon48[0])
    self.assertEqual(1, lon48[1])
def test_longitude(self):
    """
    Test that longitude data is stored correctly, and its data is
    retrieved by the longitude method without interference by any other
    dimensions.
    """
    # Test with a single point in longitude and latitude.
    reader1 = NetCDFReader(DATASET1)
    lon1 = reader1.longitude()
    reader1.close()

    self.assertEqual(1, len(lon1))
    self.assertEqual(0, lon1[0])

    # Test with a single point in longitude, but more in latitude.
    reader5 = NetCDFReader(DATASET5)
    lon5 = reader5.longitude()
    reader5.close()

    self.assertEqual(1, len(lon5))
    self.assertEqual(0, lon5[0])

    # Test with multiple points in both longitude and latitude.
    reader6 = NetCDFReader(DATASET6)
    lon6 = reader6.longitude()
    reader6.close()

    self.assertEqual(2, len(lon6))
    self.assertEqual(0, lon6[0])
    self.assertEqual(1, lon6[1])

    # Test with three dimensions, including multiple points in longitude
    # and latitude.
    reader12 = NetCDFReader(DATASET12)
    lon12 = reader12.longitude()
    reader12.close()

    self.assertEqual(2, len(lon12))
    self.assertEqual(0, lon12[0])
    self.assertEqual(1, lon12[1])
def test_data_3d(self):
    """
    Test that three-dimensional (level/latitude/longitude) data is
    handled properly, and can be successfully retrieved in the right
    order on a later read.
    """
    reader12 = NetCDFReader(DATASET12)
    data12 = reader12.collect_untimed_data(READ_VAR_NAME)
    reader12.close()

    self.assertEqual(data12.shape, (3, 2, 2))

    # Check a few values to demonstrate that the return value lines up
    # with the expected values.
    self.assertEqual(data12[0, 0, 0], 1)
    self.assertEqual(data12[0, 0, 1], 2)
    self.assertEqual(data12[0, 1, 0], 3)
    self.assertEqual(data12[1, 0, 0], 5)
    self.assertEqual(data12[1, 1, 0], 7)
def test_untimed_data_2d(self):
    """
    Test that time-independent data is returned correctly with
    dimensions of length greater than 1, and respects the dimensions on
    which it was built by being returned in the correct order.
    """
    reader12 = NetCDFReader(TIME_DATASET12)
    data12 = reader12.collect_untimed_data(READ_VAR_NAME)
    reader12.close()

    self.assertEqual(data12.shape, (3, 2, 2))

    # Check that various points in the data match the values expected
    # at those points, based on the arrays that defined the data.
    self.assertEqual(data12[0, 0, 0], 1)
    self.assertEqual(data12[0, 0, 1], 2)
    self.assertEqual(data12[1, 1, 0], 7)
    self.assertEqual(data12[1, 1, 1], 8)
    self.assertEqual(data12[2, 0, 1], 10)
    self.assertEqual(data12[2, 1, 0], 11)
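# Illustrative sketch (not part of the test suite): the assertions in
# test_data_3d and test_untimed_data_2d follow row-major (C-order) layout,
# with the last dimension varying fastest. Reshaping the flat sequence 1..12
# into a (3, 2, 2) array reproduces the same index-to-value mapping, which is
# assumed to be how the fixtures' data were defined.
import numpy as np

_expected12 = np.arange(1, 13).reshape((3, 2, 2))
assert _expected12[0, 1, 0] == 3
assert _expected12[1, 1, 0] == 7
assert _expected12[2, 0, 1] == 10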
def save_from_dataset(dataset_parent: str,
                      var_name: str,
                      time_seg: Optional[int],
                      config: 'ArrheniusConfig') -> bool:
    """
    Produce a set of image outputs based on a dataset written by a
    previous run of the Arrhenius model that used config as its
    configuration set. The dataset is stored in the directory given by
    the path dataset_parent.

    The images are generated from the variable var_name in the dataset,
    and only for the time unit given by time_seg. If time_seg is 0, then
    a single image is produced containing averages over the datapoints
    in all time units. If time_seg is None, then one image is produced
    for every valid time segment.

    Returns True iff a new image was produced by this call, i.e. if it
    did not exist prior to the call.

    :param dataset_parent: A path to the directory containing the dataset
    :param var_name: The variable from the dataset that will be used to
                     generate the images
    :param time_seg: An integer specifying which time unit to use data from
    :param config: Configuration options for the previously-run model run
    :return: True iff a new image file was created
    """
    run_id = config.run_id()

    if time_seg is None:
        # Assume at least one image needs to be produced, and immediately
        # read in data in preparation for that.
        dataset_path = path.join(dataset_parent, run_id + ".nc")
        reader = NetCDFReader(dataset_path)
        data = reader.collect_untimed_data(var_name)
        reader.close()

        # Write all images for variable var_name to the proper destination.
        return write_image_type(data, dataset_parent, var_name, config)
    else:
        # Locate or create a directory to contain the image files, and get
        # its name.
        parent_path = \
            get_image_directory(dataset_parent, run_id, var_name,
                                config.colorbar(), create=True)

        # Detect if the desired image file already exists.
        base_name = var_name + "_" + str(time_seg)
        file_name = image_file_name(base_name, config) + ".png"
        img_path = path.join(parent_path, file_name)
        created = not Path(img_path).is_file()

        if created:
            # Locate the dataset and read the desired variable from it.
            dataset_path = path.join(dataset_parent, run_id + ".nc")
            reader = NetCDFReader(dataset_path)
            data = reader.collect_untimed_data(var_name)

            # Extract only the requested parts of the data.
            if time_seg == 0:
                selected_time_data = data.mean(axis=0)
            else:
                selected_time_data = data[time_seg - 1]

            # Write the new image file.
            img_writer = ModelImageRenderer(selected_time_data)
            img_writer.save_image(img_path, config.colorbar())
            reader.close()

        return created
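# Example usage (a sketch, not part of this module): the three time_seg modes
# of save_from_dataset. The variable name "temperature_change" and the helper
# name below are assumptions for illustration; only the call signature comes
# from the function above.
def render_example_outputs(dataset_parent: str,
                           config: 'ArrheniusConfig') -> None:
    # One image per valid time segment.
    save_from_dataset(dataset_parent, "temperature_change", None, config)

    # A single image averaged over all time segments.
    save_from_dataset(dataset_parent, "temperature_change", 0, config)

    # Only the image for the third time segment (time_seg is 1-based, since
    # the data is indexed with time_seg - 1).
    save_from_dataset(dataset_parent, "temperature_change", 3, config)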