Example #1
File: iris.py Project: ioam/geoviews
    def concat_dim(cls, datasets, dim, vdims):
        """
        Concatenates datasets along one dimension
        """
        import iris
        from iris.experimental.equalise_cubes import equalise_attributes

        cubes = []
        for c, cube in datasets.items():
            cube = cube.copy()
            cube.add_aux_coord(iris.coords.DimCoord([c], var_name=dim.name))
            cubes.append(cube)
        cubes = iris.cube.CubeList(cubes)
        equalise_attributes(cubes)
        return cubes.merge_cube()
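Note: the iris.experimental.equalise_cubes import used here (and in several of the examples below) is the pre-3.0 location; in Iris 3.0 equalise_attributes moved to iris.util. A small compatibility shim that works on either version:

try:
    from iris.util import equalise_attributes  # Iris >= 3.0
except ImportError:
    from iris.experimental.equalise_cubes import equalise_attributes  # Iris < 3.0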
Example #2
File: cmip5.py Project: ajferraro/jazz
def fetch(location, constraint=None, cheat_with_coordinates=False):
    """Fetch data from a netCDF or a directory containing netCDFs using iris.
    A common cause of concatenate errors is when the time data aren't
    contiguous. This is checked for and corrected.

    Args:
        location (str): path to netCDF file or directory
        constraint (iris.Constraint, optional): constraint for the cubes
        cheat_with_coordinates (bool, optional): if True, run fix_coords on
            the loaded cubes before merging

    Returns:
        iris.Cube

    """
    # Extract only the data, not the ancillary cubes
    var_name = location.split("/")[-2]
    name_constraint = iris.Constraint(cube_func=lambda cube: cube.var_name == var_name)

    # iris.load accepts a single file, a glob pattern, or a list of files
    cubes = iris.load(location, constraint & name_constraint, callback=clean)

    if cheat_with_coordinates:
        fix_coords(cubes)

    if len(cubes) == 1:
        cube = cubes[0]
    else:
        # clean_cubelist_atts(cubes)
        iris.util.unify_time_units(cubes)
        from iris.experimental.equalise_cubes import equalise_attributes

        equalise_attributes(cubes)
        coord_names = [coord.standard_name for coord in cubes[0].dim_coords]
        if "time" in coord_names:
            cubes = check_realizations_timepoint_duplicates(cubes)
        if "air_pressure" in coord_names:
            cubes = homogenise_air_pressure(cubes)
        check_coords(cubes)

        cubes = cubes.concatenate()
        realizations = [cube.coord("realization").points[0] for cube in cubes]
        if len(set(realizations)) != 1:
            cubes = iris.cube.CubeList(utils.make_common_in_time(*cubes))

        cube = cubes.merge_cube()

    return cube
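The core pattern in fetch is equalise attributes, unify time units, then concatenate. A minimal self-contained sketch of that pipeline on synthetic cubes (the names and values are illustrative, not from the project):

import numpy as np
import iris
from iris.util import equalise_attributes, unify_time_units

def make_chunk(start):
    """Build a 1-D time-series cube covering two time steps from start."""
    time = iris.coords.DimCoord(np.arange(start, start + 2, dtype=float),
                                standard_name="time",
                                units="days since 2000-01-01")
    cube = iris.cube.Cube(np.zeros(2), long_name="tas",
                          dim_coords_and_dims=[(time, 0)])
    cube.attributes["history"] = "created for chunk %d" % start  # differs per cube
    return cube

chunks = iris.cube.CubeList([make_chunk(0), make_chunk(2)])
# chunks.concatenate_cube() at this point would fail: the 'history'
# attribute differs between the two cubes.
equalise_attributes(chunks)       # drop mismatched attributes in place
unify_time_units(chunks)          # harmonise time units across the cubes
cube = chunks.concatenate_cube()  # one cube spanning both chunks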
Example #3
def _test(self, cubes, expect_attributes):
    """Test."""
    working_cubes = [cube.copy() for cube in cubes]
    original_working_list = [cube for cube in working_cubes]
    # Exercise basic operation
    equalise_attributes(working_cubes)
    # Check they are the same cubes
    self.assertEqual(working_cubes, original_working_list)
    # Check resulting attributes all match the expected set
    for cube in working_cubes:
        self.assertEqual(cube.attributes, expect_attributes)
    # Check everything else remains the same
    for new_cube, old_cube in zip(working_cubes, cubes):
        cube_before_noatts = old_cube.copy()
        cube_before_noatts.attributes.clear()
        cube_after_noatts = new_cube.copy()
        cube_after_noatts.attributes.clear()
        self.assertEqual(cube_after_noatts, cube_before_noatts)
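What the test above is checking, as a standalone snippet (synthetic cubes, assumed attribute values): equalise_attributes modifies the cubes in place and keeps only the attributes whose values agree across the whole list.

import numpy as np
import iris
from iris.util import equalise_attributes

c1 = iris.cube.Cube(np.zeros(2), long_name="t")
c2 = iris.cube.Cube(np.ones(2), long_name="t")
c1.attributes = {"model": "A", "history": "run1"}
c2.attributes = {"model": "A", "history": "run2"}

equalise_attributes([c1, c2])
print(c1.attributes)  # {'model': 'A'} -- 'history' differed, so it was removed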
Example #4
from iris.util import describe_diff, equalise_attributes, unify_time_units

def make_cubes_compatible(list_of_cubes):
    equalise_attributes(list_of_cubes)
    unify_time_units(list_of_cubes)

    for cube_i in list_of_cubes:
        cube_i.cell_methods = ()
    
    c = 0
    for i in range(len(list_of_cubes)):
        for j in range(i + 1, len(list_of_cubes)):
            if not list_of_cubes[i].is_compatible(list_of_cubes[j]):
                print('cubes {} and {}:\n'.format(i, j))
                describe_diff(list_of_cubes[i], list_of_cubes[j])
                c += 1
    if c == 0:
        print("All cubes are now compatible.")
    else:
        print("{} incompatible cubes".format(c))
Example #5
File: dataset.py Project: zuphilip/pyCAT
    def get_cube(self, extra_constraints=None):
        """
        return the cube of the dataset constrainted by period and extent
        (if they have been set) and extra constraints given in the call

        also all adjustments, i.e. units, standard_name, ... are applied

        Kwargs:
        
        * extra_constraints (iris.Constraint):
            will be applied to the cube

        Returns:

            the concatenated constrained cube of the dataset
        """
        constraints = extra_constraints
        try:
            start, end = self.period
            import datetime
            constraints &= iris.Constraint(
                time=lambda cell: start <= cell.point < end+datetime.timedelta(1)
            )
        except AttributeError:
            pass

        try:
            constraints &= self._extent_constraint()
        except AttributeError:
            pass
            
        with iris.FUTURE.context(cell_datetime_objects=True):
            cl = self.cube_list.extract(constraints)
        equalise_attributes(cl)
        
        merged_cube = self._merge_by_time(cl)
        try:
            for k, v in self.adjustments.items():
                setattr(merged_cube, k, v)
        except AttributeError:
            pass
        
        return merged_cube
Example #6
def main():
    if len(sys.argv) != 3:
        sys.exit("program needs two arguments")
    if sys.argv[1] not in ('ssp119', 'ssp585', 'ssp534OS'):
        sys.exit("argument must be ssp119, ssp585 or ssp534OS")
    if sys.argv[2] not in ('ssp119', 'ssp585', 'ssp534OS'):
        sys.exit("argument must be ssp119, ssp585 or ssp534OS")

    # Delete all the image files in the current directory to ensure that only those
    # created in the loop end up in the movie.
    print("\nDeleting all .png files in this directory...")
    SpawnCommand("rm -f *.png")

    print("Loading the data...")

    # Read all the temperature values and create a single cube containing this data

    for i in range(1, 3):
        cubeList = iris.cube.CubeList([])
        if sys.argv[i] == 'ssp585':
            cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
            cubeList.extend(
                myload(2015, 2100, 'tas_2015-2100-ssp585/be653a.p5'))
        elif sys.argv[i] == 'ssp119':
            cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
            cubeList.extend(
                myload(2015, 2100, 'tas_2015-2100-ssp119/bh409a.p5'))
        elif sys.argv[i] == 'ssp534OS':
            cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
            cubeList.extend(myload(2015, 2039, 'tas_2015-2100/be653a.p5'))
            cubeList.extend(
                myload(2040, 2100, 'tas_2015-2100-ssp534OS/bh409a.p5'))

        equalise_attributes(cubeList)
        temperatures = cubeList.merge_cube()
        if i == 1:
            leftCube = temperatures.intersection(longitude=(-181, 0),
                                                 ignore_bounds=True)
        elif i == 2:
            rightCube = temperatures.intersection(longitude=(0, 180))

    cubeList = iris.cube.CubeList([leftCube, rightCube])
    temperatures = cubeList.concatenate_cube()

    baseYears = temperatures[:29, :, :]
    baseYearsMean = baseYears.collapsed('time', iris.analysis.MEAN)
    # Calculate the difference in annual mean temperature from the baseline mean (returns a cube)
    anomaly = temperatures - baseYearsMean
    print("Data downloaded! Now Processing...")

    # Get the range of values.

    # Add a new coordinate containing the year.
    icat.add_year(anomaly, 'time')
    years = anomaly.coord('year')

    # Set the limits for the loop over years.
    minTime = 0
    maxTime = temperatures.shape[0]

    print("Making images from year", years[minTime].points[0], "to",
          years[maxTime - 1].points[0], "...")

    for time in range(minTime, maxTime):

        # Set up for larger image.
        figSize = [12, 6]
        fig = plt.figure(figsize=figSize, dpi=200)
        rect = 0, 0, 200 * figSize[0], 200 * figSize[1]
        fig.add_axes(rect)
        geo_axes = plt.axes(projection=ccrs.PlateCarree())

        # We need to fix the boundary of the figure (otherwise we get a black border at left & top).
        # Cartopy removes matplotlib's axes.patch (which normally defines the boundary) and
        # replaces it with outline_patch and background_patch.  It's the former which is causing
        # the black border.  Get the axis object and make its outline patch invisible.
        geo_axes.outline_patch.set_visible(False)
        plt.margins(0, 0)
        fig.subplots_adjust(left=0, right=1, bottom=0, top=1)

        # Contour plot the temperatures and add the coastline.

        iplt.contourf(anomaly[time],
                      levels=(-6, -3, 0, 4, 8, 12, 17, 22, 28),
                      colors=('darkblue', 'blue', 'cyan', 'lightyellow',
                              'yellow', 'orange', 'darkorange', 'red'))
        #-6.4358826, 27.94899
        plt.gca().coastlines()
        #plt.colorbar(boundaries = (-6, -3, 0, 4, 8, 12, 16, 20, 25), values = (-6, -3, 0, 4, 8, 12, 16, 20))

        # Extract the year value and display it (coordinates used in locating the text are
        # those of the data).
        year = years[time].points[0]

        # Display year on both sides of the display.
        plt.text(-110,
                 0,
                 year,
                 horizontalalignment='center',
                 verticalalignment='top',
                 size='large',
                 fontdict={'family': 'monospace'})
        plt.text(70,
                 0,
                 year,
                 horizontalalignment='center',
                 verticalalignment='top',
                 size='large',
                 fontdict={'family': 'monospace'})

        # Add labels to halves of display.
        plt.text(-110,
                 -60,
                 str(sys.argv[1]),
                 horizontalalignment='center',
                 size='small',
                 fontdict={'family': 'monospace'})
        plt.text(70,
                 -60,
                 str(sys.argv[2]),
                 horizontalalignment='center',
                 size='small',
                 fontdict={'family': 'monospace'})

        # Draw a line along the division between the two halves.
        plt.plot([0, 0], [-90, 90], color='gray', linewidth=3)
        plt.plot([-179.8, -179.8], [-90, 90], color='gray', linewidth=3)

        # Now save the plot in an image file.  The files are numbered sequentially, starting
        # from 000.png; this is so that the ffmpeg command can grok them.
        filename = "image-%04d.png" % time
        plt.savefig(filename, dpi=200)

        # Discard the figure (otherwise the text will be overwritten
        # by the next iteration).
        plt.close()
        print('boundaries for colour = -6, -3, 0, 4, 8, 12, 16, 20, 25')
    print("images made! Now converting to .mp4...")
    create_video()
    print("Opening video...")
    myTime.sleep(5)
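Example #6 uses both CubeList.merge_cube (after equalise_attributes) and CubeList.concatenate_cube. The difference, shown on synthetic cubes: merge promotes differing scalar coordinates to a new dimension, while concatenate joins cubes along a dimension they already have.

import numpy as np
import iris

def scalar_cube(day):
    # one field at a single (scalar) time point -- merge fodder
    t = iris.coords.DimCoord([float(day)], standard_name="time",
                             units="days since 2000-01-01")
    c = iris.cube.Cube(np.zeros(3), long_name="tas")
    c.add_aux_coord(t)  # scalar coordinate: attached to no data dimension
    return c

members = iris.cube.CubeList([scalar_cube(d) for d in range(4)])
merged = members.merge_cube()  # time becomes a new leading dimension
print(merged.shape)            # (4, 3)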
Example #7
def main(inargs):
    """Run the program."""

    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)

    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    for experiment in [
            'historical', 'historicalGHG', 'historicalAA', 'historicalnoAA',
            'piControl'
    ]:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                time_constraint = iris.Constraint()

            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))

                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)

                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'

                if 'historical' in experiment:
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length

                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length,
                                               branch_time_index_offset)

                elif experiment == 'piControl':
                    branch_time, time_length, branch_time_index_offset = branch_dict[
                        'historical']
                    start_index, error = uconv.find_nearest(
                        cube.coord('time').points,
                        float(branch_time) + 15.5,
                        index=True)
                    if abs(error) > 15:
                        print(
                            "WARNING: Large error of %f in locating branch time"
                            % (error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index + time_length, ::]

                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)

                # Mask marginal seas
                if basin_dict[experiment]:
                    basin_cube = iris.load_cube(basin_dict[experiment])
                    cube = uconv.mask_marginal_seas(cube, basin_cube)

                # Regrid and select basin
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube)
                if not inargs.basin == 'globe':
                    if basin_dict[experiment] and not regrid_status:
                        ndim = cube.ndim
                        basin_array = uconv.broadcast_array(
                            basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                    else:
                        basin_array = uconv.create_basin_array(cube)
                    cube.data.mask = numpy.where(
                        (cube.data.mask == False) &
                        (basin_array == basins[inargs.basin]), False, True)

                # Scale
                cube, units = scale_data(cube,
                                         inargs.var,
                                         reverse_sign=inargs.reverse_sign)

                # Zonal statistic
                if inargs.area_adjust:
                    if regrid_status:
                        area_dict[experiment] = None
                    cube, units, metadata_dict = area_ajustment(
                        cube, area_dict[experiment], metadata_dict)
                    zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                    aggregation = 'Zonally integrated'
                else:
                    zonal_cube = cube.collapsed('longitude',
                                                iris.analysis.MEAN)
                    aggregation = 'Zonal mean'
                zonal_cube.remove_coord('longitude')

                # Climatology and trends
                climatology_dict[experiment] = calculate_climatology(
                    zonal_cube, inargs.climatology_time, experiment)
                time_trend_dict[experiment] = get_trend_cube(zonal_cube)
                if tas_dict[experiment]:
                    tas_cube = iris.load_cube(
                        tas_dict[experiment],
                        'air_temperature' & time_constraint)
                    scale_factor = get_scale_factor(tas_cube)
                    print(experiment, 'warming:', scale_factor)
                    tas_scaled_trend_dict[experiment] = time_trend_dict[
                        experiment] * (1. / abs(scale_factor))
                    metadata_dict[tas_dict[experiment]
                                  [0]] = tas_cube.attributes['history']
                else:
                    tas_scaled_trend_dict[experiment] = None

    # Create the plots

    tas_scaled_trend_flag = tas_scaled_trend_dict[
        'historicalGHG'] and tas_scaled_trend_dict['historicalAA']

    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)

    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc,
                     aggregation)
    plt.title('%s (%s), %s' % (inargs.model, inargs.run, inargs.basin))

    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)

    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)

    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)

    plt.xlabel('latitude')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
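The tas load near the end of this example combines a variable-name string with an iris.Constraint using &. That works because iris promotes the string to a constraint; a quick standalone check (no data files needed):

import iris

time_constraint = iris.Constraint(time=lambda cell: cell.point.year >= 1950)
combined = 'air_temperature' & time_constraint
print(combined)  # a combined constraint matching name AND time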
Example #8
def main(inargs):
    """Run the program."""

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        basin_cube = iris.load_cube(basin_file, 'region' & depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area' & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube = iris.load_cube(filename, gio.check_iris_var(inargs.var) & depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)
    
        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)        

        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube) 

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            assert cube.ndim == 3
            broadcasted_weights = uconv.broadcast_array(weights_cube.data, [1, 2], cube.shape)
        else:
            broadcasted_weights = None
            
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # curvilinear grid
            assert ref_cube
            horiz_aggregate = curvilinear_agg(cube, ref_cube, keep_coord, aggregation_functions[inargs.aggregation])     
            #TODO: Add weights=broadcasted_weights
        else:
            # rectilinear grid
            horiz_aggregate = cube.collapsed(collapse_coord, aggregation_functions[inargs.aggregation],
                                             weights=broadcasted_weights)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)
            
        horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)

        if inargs.outfile[-3:] == '.nc':
            output_cubelist.append(horiz_aggregate)
        elif inargs.outfile[-1] == '/': 
            if inargs.cumsum:
                horiz_aggregate = cumsum(horiz_aggregate)       
            infile = filename.split('/')[-1]
            infile = re.sub(cube.var_name + '_', cube.var_name + '-' + inargs.direction + '-' + inargs.aggregation + '_', infile)
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
                infile = re.sub('Amon', 'Ayr', infile)
       
            outfile = inargs.outfile + infile
            metadata_dict[filename] = cube.attributes['history'] 
            horiz_aggregate.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)

            iris.save(horiz_aggregate, outfile)
            print('output:', outfile)
            del horiz_aggregate

    if inargs.outfile[-3:] == '.nc':
        equalise_attributes(output_cubelist)
        iris.util.unify_time_units(output_cubelist)
        output_cubelist = output_cubelist.concatenate_cube()

        if inargs.cumsum:
            output_cubelist = cumsum(output_cubelist)

        metadata_dict[filename] = cube.attributes['history']
        output_cubelist.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
        iris.save(output_cubelist, inargs.outfile) 
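The weights handling above leans on a project helper, uconv.broadcast_array, to tile a 2-D weights field along the time axis. A minimal numpy equivalent, assuming a (time, lat, lon) cube:

import numpy as np

weights_2d = np.random.rand(4, 5)  # (lat, lon) weights
cube_shape = (10, 4, 5)            # (time, lat, lon)
weights_3d = np.broadcast_to(weights_2d, cube_shape)
print(weights_3d.shape)            # (10, 4, 5)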
Example #9
def main(inargs):
    """Run the program."""

    agg_functions = {'mean': iris.analysis.MEAN, 'sum': iris.analysis.SUM}
    metadata_dict = {}

    basin_cube = iris.load_cube(inargs.basin_file, 'region')
    assert basin_cube.data.min() == 11
    assert basin_cube.data.max() == 17
    basin_numbers = numpy.array([11, 12, 13, 14, 15, 16, 17, 18])
    metadata_dict[inargs.basin_file] = basin_cube.attributes['history']

    flag_values = basin_cube.attributes['flag_values'] + ' 18'
    flag_meanings = basin_cube.attributes['flag_meanings'] + ' globe'
    basin_coord = iris.coords.DimCoord(basin_numbers,
                                       standard_name=basin_cube.standard_name,
                                       long_name=basin_cube.long_name,
                                       var_name=basin_cube.var_name,
                                       units=basin_cube.units,
                                       attributes={
                                           'flag_values': flag_values,
                                           'flag_meanings': flag_meanings
                                       })

    if inargs.weights:
        weights_cube = gio.get_ocean_weights(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        print(infile)
        if inargs.var == 'ocean_volume':
            cube = gio.get_ocean_weights(infile)
            history = [cube.attributes['history']]
        else:
            cube, history = gio.combine_files(infile, inargs.var, checks=True)
        assert cube.ndim in [3, 4]
        coord_names = [coord.name() for coord in cube.dim_coords]
        if inargs.annual:
            cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)

        assert basin_cube.shape == cube.shape[-2:]
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [cube.ndim - 2, cube.ndim - 1],
                                            cube.shape)
        if inargs.weights:
            assert weights_cube.data.shape == cube.shape[-3:]
            if cube.ndim == 4:
                weights_array = uconv.broadcast_array(weights_cube.data,
                                                      [1, 3], cube.shape)
            else:
                weights_array = weights_cube.data
        else:
            weights_array = None

        if cube.ndim == 3:
            outdata = numpy.ma.zeros([cube.shape[0], len(basin_numbers)])
        else:
            outdata = numpy.ma.zeros(
                [cube.shape[0], cube.shape[1],
                 len(basin_numbers)])

        for basin_index, basin_number in enumerate(basin_numbers):
            temp_cube = cube.copy()
            if basin_number == 18:
                temp_cube.data = numpy.ma.masked_where(basin_array == 17,
                                                       temp_cube.data)
            else:
                temp_cube.data = numpy.ma.masked_where(
                    basin_array != basin_number, temp_cube.data)
            if len(coord_names) == cube.ndim:
                horiz_agg = temp_cube.collapsed(coord_names[-2:],
                                                agg_functions[inargs.agg],
                                                weights=weights_array).data
            elif inargs.agg == 'mean':
                horiz_agg = numpy.ma.average(temp_cube.data,
                                             axis=(-2, -1),
                                             weights=weights_array)
            elif inargs.agg == 'sum':
                horiz_agg = numpy.ma.sum(temp_cube.data, axis=(-2, -1))
            if outdata.ndim == 2:
                outdata[:, basin_index] = horiz_agg
            else:
                outdata[:, :, basin_index] = horiz_agg

        coord_list = [(cube.dim_coords[0], 0)]
        if cube.ndim == 4:
            coord_list.append((cube.dim_coords[1], 1))
            coord_list.append((basin_coord, 2))
        else:
            coord_list.append((basin_coord, 1))
        outcube = iris.cube.Cube(outdata,
                                 standard_name=cube.standard_name,
                                 long_name=cube.long_name,
                                 var_name=cube.var_name,
                                 units=cube.units,
                                 attributes=cube.attributes,
                                 dim_coords_and_dims=coord_list)
        output_cubelist.append(outcube)

    equalise_attributes(output_cubelist)
    iris.util.unify_time_units(output_cubelist)
    outcube = output_cubelist.concatenate_cube()
    if history:
        metadata_dict[inargs.infiles[-1]] = history[0]
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
Example #10
        ensemble_mean.standard_name = temp_list_trim[0].standard_name
        ensemble_mean.attributes = temp_list_trim[0].attributes
        dates = get_dates(ensemble_mean, verbose=False)
        outpath = (fpath + '/test_ensemble_mean_historical_' + model + '_' +
                   var + '_' + str(dates[0].year) +
                   str(dates[0].month).zfill(2) + '_' + str(dates[-1].year) +
                   str(dates[-1].month).zfill(2) + '.nc')
        print(outpath)
        iris.save(ensemble_mean, outpath)
        continue

    else:
        # if multiple runs calculate mean of runs
        n = len(temp_list_trim)
        print(n)
        equalise_attributes(temp_list_trim)
        unify_time_units(temp_list_trim)
        # Sum the member cubes and divide by the ensemble size;
        # works for any number of runs.
        ensemble_mean = sum(temp_list_trim[1:], temp_list_trim[0]) / n
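An alternative to summing member cubes by hand, which also works for any ensemble size: tag each member with a scalar realization coordinate, merge, and collapse with iris.analysis.MEAN (a sketch with synthetic members; the names are illustrative):

import numpy as np
import iris
from iris.util import equalise_attributes

members = iris.cube.CubeList()
for r in range(4):
    c = iris.cube.Cube(np.full(3, float(r)), long_name="tas")
    c.add_aux_coord(iris.coords.AuxCoord(r, standard_name="realization"))
    members.append(c)

equalise_attributes(members)
ensemble = members.merge_cube()  # 'realization' becomes a dimension
ensemble_mean = ensemble.collapsed("realization", iris.analysis.MEAN)
print(ensemble_mean.data)  # [1.5 1.5 1.5]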
Example #11
def main():
    if len(sys.argv) != 2:
        sys.exit("must have an argument")
    elif sys.argv[1] not in ('ssp119', 'ssp585', 'ssp534OS'):
        sys.exit("argument must be ssp119, ssp585 or ssp534OS")

    # Delete all the image files in the current directory to ensure that only those
    # created in the loop end up in the movie.
    print("\nDeleting all .png files in this directory...")
    SpawnCommand("rm -f *.png")

    print("Loading the data...")

    # Read all the temperature values and create a single cube containing this data
    cubeList = iris.cube.CubeList([])

    if sys.argv[1] == 'ssp585':
        cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
        cubeList.extend(myload(2015, 2100, 'tas_2015-2100-ssp585/be653a.p5'))
    elif sys.argv[1] == 'ssp119':
        cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
        cubeList.extend(myload(2015, 2100, 'tas_2015-2100-ssp119/bh409a.p5'))
    elif sys.argv[1] == 'ssp534OS':
        cubeList.extend(myload(1960, 2014, 'tas_1850-2014/bc179a.p5'))
        cubeList.extend(myload(2015, 2039, 'tas_2015-2100/be653a.p5'))
        cubeList.extend(myload(2040, 2100, 'tas_2015-2100-ssp534OS/bh409a.p5'))

    equalise_attributes(cubeList)
    temperatures = cubeList.merge_cube()
    baseYears = temperatures[:29, :, :]
    baseYearsMean = baseYears.collapsed('time', iris.analysis.MEAN)
    # Calculate the difference in annual mean temperature from the baseline mean (returns a cube)
    anomaly = temperatures - baseYearsMean
    print("Data downloaded! Now Processing...")

    # Get the range of values.

    # Add a new coordinate containing the year.
    icat.add_year(anomaly, 'time')
    years = anomaly.coord('year')

    # Set the limits for the loop over years.
    minTime = 0
    maxTime = temperatures.shape[0]

    print("Making images from year", years[minTime].points[0], "to",
          years[maxTime - 1].points[0], "...")

    for time in range(minTime, maxTime):

        # Contour plot the temperatures and add the coastline.

        iplt.contourf(anomaly[time],
                      levels=(-6, -3, 0, 4, 8, 12, 17, 22, 28),
                      colors=('darkblue', 'blue', 'cyan', 'lightyellow',
                              'yellow', 'orange', 'darkorange', 'red'))
        #-6.4358826, 27.94899
        plt.gca().coastlines()
        #plt.colorbar(boundaries = (-6, -3, 0, 4, 8, 12, 16, 20, 25), values = (-6, -3, 0, 4, 8, 12, 16, 20))
        # We need to fix the boundary of the figure (otherwise we get a black border at left & top).
        # Cartopy removes matplotlib's axes.patch (which normally defines the boundary) and
        # replaces it with outline_patch and background_patch.  It's the former which is causing
        # the black border.  Get the axis object and make its outline patch invisible.
        ax = plt.gca()
        ax.outline_patch.set_visible(False)

        # Extract the year value and display it (coordinates used in locating the text are
        # those of the data).
        year = years[time].points[0]
        plt.text(0, -60, year, horizontalalignment='center')

        # Now save the plot in an image file.  The files are numbered sequentially, starting
        # from 000.png; this is so that the ffmpeg command can grok them.
        filename = "image-%04d.png" % time
        plt.savefig(filename, bbox_inches='tight', pad_inches=0)

        # Discard the figure (otherwise the text will be overwritten
        # by the next iteration).
        plt.close()
        print('boundaries for colour = -6, -3, 0, 4, 8, 12, 16, 20, 25')
    print("images made! Now converting to .mp4...")
    create_video()
    print("Opening video...")
    myTime.sleep(5)