Example #1
def cmd(args):
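    # Copy the input NetCDF file into a new NETCDF4 file: global attributes,
    # dimensions and variables are reproduced, per-variable compression
    # settings (zlib, complevel, shuffle, fletcher32) are taken from the
    # command-line arguments, and data is written slice by slice to limit
    # memory usage.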
    in_ds = Dataset(args.input_file, 'r')
    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w', format='NETCDF4')

    copy_nc_attributes(in_ds, out_ds)

    for in_dim in in_ds.dimensions.values():
        out_ds.createDimension(in_dim.name,
                               None if in_dim.isunlimited() else in_dim.size)

    all_args = vars(args)
    compression_args = {}
    for key in ['zlib', 'complevel', 'shuffle', 'fletcher32']:
        compression_args[key] = all_args[key]

    for in_var in in_ds.variables.values():
        print in_var.name
        out_var = create_nc_var_like_other(out_ds, in_var, **compression_args)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)
        iter_mask[-MAX_COPY_DIM_COUNT:] = False
        dim_iterator = DimIterator(in_var.shape, None, iter_mask)
        write_op_count = len(dim_iterator)
        for write_op_num, slc in enumerate(dim_iterator.slice_tuples()):
            _progress(write_op_num, write_op_count)
            out_var[slc] = in_var[slc]

    add_or_append_history(out_ds)
    in_ds.close()
    out_ds.close()
Example #2
def cmd(args):
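    # Merge several input files into a copy of the first one: wherever the
    # output variable is masked, fill it with unmasked values taken from the
    # corresponding variable of each remaining input file.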
    if len(args.data_var_names) == 1:
        data_var_names = ([args.data_var_names[0]] *
                          len(args.input_files))
    elif len(args.data_var_names) != len(args.input_files):
        raise Exception('The number of data variable names must be either 1 '
                        'or equal to the number of input files.')
    else:
        data_var_names = args.data_var_names

    create_dir_for_file(args.output_file)
    shutil.copyfile(args.input_files[0], args.output_file)

    output_ds = Dataset(args.output_file, 'r+')
    output_var = output_ds.variables[args.data_var_names[0]]
    output_var_data = output_var[:]

    for input_var_name, input_file in zip(data_var_names[1:],
                                          args.input_files[1:]):
        input_ds = Dataset(input_file, 'r')
        input_var_data = input_ds.variables[input_var_name][:]

        select_mask = np.logical_and(np.ma.getmaskarray(output_var_data),
                                     ~np.ma.getmaskarray(input_var_data))
        output_var_data[select_mask] = input_var_data[select_mask]

        input_ds.close()

    output_var[:] = output_var_data[:]

    add_or_append_history(output_ds)

    output_ds.close()
Example #3
def cmd(args):
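    # Copy the input file to the output path and append the requested
    # variables (together with their attributes) from the appended file.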
    create_dir_for_file(args.output_file)
    shutil.copyfile(args.input_file, args.output_file)

    output_ds = Dataset(args.output_file, 'r+')
    appended_ds = Dataset(args.appended_file, 'r')

    for appended_var_name in args.appended_var_names:
        appended_var = appended_ds.variables[appended_var_name]
        output_var = output_ds.createVariable(
            appended_var_name,
            appended_var.dtype,
            dimensions=appended_var.dimensions)
        output_var[:] = appended_var[:]
        copy_nc_attributes(appended_var, output_var)

    add_or_append_history(output_ds)

    output_ds.close()
Example #4
def cmd(args):
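    # Apply a mask read from a mask file to the listed variables of a copy of
    # the input file, replacing masked cells with a backup value (the default
    # one or a per-variable value); 3D variables are assumed to have time as
    # the first dimension and are processed one time step at a time.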
    if (args.backup_values is not None and
            len(args.backup_values) != len(args.data_var_names)):
        raise Exception('The number of backup values must be equal to the '
                        'number of data variable names.')

    mask_ds = Dataset(args.mask_file, 'r')
    mask = np.ma.getmaskarray(mask_ds.variables[names.VAR_MASK][:])
    mask_ds.close()

    create_dir_for_file(args.output_file)
    shutil.copyfile(args.input_file, args.output_file)

    ds = Dataset(args.output_file, 'r+')

    for i, data_var_name in enumerate(args.data_var_names):
        print data_var_name
        data_var = ds.variables[data_var_name]

        if args.backup_values is None:
            backup_value = _DEFAULT_BACKUP_VALUE
        else:
            backup_value = args.backup_values[i]

        backup_value = data_var.dtype.type(backup_value)

        if len(data_var.dimensions) == 2:
            _progress(0, 1)
            data_var[:] = _apply_mask_fast(data_var[:], mask, backup_value)
            _progress(1, 1)
        # Assume that time is the first dimension
        elif len(data_var.dimensions) == 3:
            for time_idx in xrange(0, data_var.shape[0]):
                _progress(time_idx, data_var.shape[0])
                data_var[time_idx, :] = _apply_mask_fast(
                    data_var[time_idx, :], mask, backup_value)
            _progress(data_var.shape[0], data_var.shape[0])
        else:
            raise Exception('Only 2D and 3D (with time as the first '
                            'dimension) variables are supported.')

    add_or_append_history(ds)
    ds.close()
Example #5
def cmd(args):
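    # Build a mask variable from a depth field: cells where the scaled depth
    # is non-positive are masked out, and the result is stored as an unsigned
    # byte variable with fill value 0.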
    input_ds = Dataset(args.input_file, 'r')

    depth_var = input_ds.variables[args.depth_data_var]

    create_dir_for_file(args.output_file)
    output_ds = Dataset(args.output_file, 'w')

    add_missing_dim_vars(input_ds, output_ds, depth_var.dimensions)

    depth_data = depth_var[:]
    depth_data *= args.depth_factor

    mask = np.ma.masked_where(depth_data <= 0.0, np.ones(depth_data.shape),
                              copy=False)

    output_var = output_ds.createVariable(names.VAR_MASK, 'u1',
                                          dimensions=depth_var.dimensions,
                                          fill_value=0)
    output_var[:] = mask

    input_ds.close()
    output_ds.close()
Example #6
def cmd(args):
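    # Precompute regridding weights: for every point of the output grid, the
    # input grid's cell locator is used to find vertex indices and the
    # corresponding interpolation weights, which are stored in a weight file
    # together with the input grid's shape and dimension names.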
    in_grid_ds = Dataset(args.in_grid_file, 'r')
    in_grid, in_grid_dim_names = \
        init_grid_from_vars(in_grid_ds.variables[args.in_x_name],
                            in_grid_ds.variables[args.in_y_name])
    in_grid_ds.close()

    out_grid_ds = Dataset(args.out_grid_file, 'r')
    out_grid, out_grid_dim_names = \
        init_grid_from_vars(out_grid_ds.variables[args.out_x_name],
                            out_grid_ds.variables[args.out_y_name])
    out_grid_ds.close()

    if len(in_grid.shape) != len(out_grid.shape):
        raise Exception('Input and output grids must have the same number '
                        'of dimensions.')

    indices = np.ma.masked_all(out_grid.shape +
                               (len(in_grid.shape), in_grid.cell_vert_count),
                               dtype=np.intp)

    weights = np.ma.masked_all(out_grid.shape + (in_grid.cell_vert_count, ),
                               dtype=np.float64)

    no_gap_axis = None
    if args.no_gap_dim is not None:
        no_gap_axis = find_dim_indices([args.no_gap_dim], in_grid_dim_names)[0]
        if no_gap_axis is None:
            raise Exception('Dimension %s is not found.' % args.no_gap_dim)

    in_grid.init_cell_locator(no_gap_axis)
    op_iter = DimIterator(out_grid.shape)
    for slc in op_iter.slice_tuples():
        cell_indices, cell_weights = in_grid.calc_weights(*out_grid[slc])
        if cell_indices is not None:
            indices[slc] = cell_indices
            weights[slc] = cell_weights

    create_dir_for_file(args.weight_file)
    out_ds = Dataset(args.weight_file, 'w')

    for dim_idx, dim_name in enumerate(out_grid_dim_names):
        out_ds.createDimension(dim_name, out_grid.shape[dim_idx])

    out_ds.createDimension(names.DIM_DIM_IDX, len(in_grid.shape))
    out_ds.createDimension(names.DIM_VERTEX_IDX, in_grid.cell_vert_count)

    out_grid_shape_var = out_ds.createVariable(
        names.VAR_INPUT_SHAPE, np.intp, dimensions=(names.DIM_DIM_IDX, ))
    out_grid_shape_var[:] = in_grid.shape

    out_grid_dim_names_var = out_ds.createVariable(
        names.VAR_INPUT_DIMS, str, dimensions=(names.DIM_DIM_IDX, ))
    for dim_idx, dim_name in enumerate(in_grid_dim_names):
        out_grid_dim_names_var[dim_idx] = dim_name

    out_indices_var = out_ds.createVariable(
        names.VAR_INDICES,
        indices.dtype,
        dimensions=out_grid_dim_names + (names.DIM_DIM_IDX,
                                         names.DIM_VERTEX_IDX))

    out_indices_var[:] = indices

    out_weights_var = out_ds.createVariable(
        names.VAR_WEIGHTS,
        weights.dtype,
        dimensions=out_grid_dim_names + (names.DIM_VERTEX_IDX, ))

    out_weights_var[:] = weights

    add_or_append_history(out_ds)
    out_ds.close()
Example #7
def cmd(args):
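    # Reproject fields given on a lat/lon grid onto the projection described
    # in a grid file: the projected x/y coordinates of the grid points are
    # written out, scalar fields are copied unchanged, and vector field
    # components are converted into the projection coordinate system.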
    in_ds = Dataset(args.input_file, 'r')

    in_grid, in_grid_dim_names = \
        init_grid_from_vars(in_ds.variables[args.lon_name],
                            in_ds.variables[args.lat_name])

    scalar_vars, vector_vars = split_scalar_and_vector_vars(args.var_names)

    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w')

    add_missing_dim_vars(in_ds, out_ds, in_grid_dim_names)

    grid_ds = Dataset(args.grid_file, 'r')
    grid_proj_var = grid_ds.variables[names.VAR_PROJECTION]
    converter = init_converter_from_proj_var(grid_proj_var)

    print 'Calculating coordinates of grid points:'
    xx, yy = [], []
    for i in xrange(in_grid.shape[0]):
        _progress(i, in_grid.shape[0])
        row_xx, row_yy = converter.convert_points(in_grid[i, :, 1],
                                                  in_grid[i, :, 0])
        xx.append(row_xx)
        yy.append(row_yy)
    xx = np.concatenate(xx)
    yy = np.concatenate(yy)
    _progress(in_grid.shape[0], in_grid.shape[0])

    out_proj_var = out_ds.createVariable(names.VAR_PROJECTION,
                                         grid_proj_var.dtype)
    copy_nc_attributes(grid_proj_var, out_proj_var)

    out_x_var = out_ds.createVariable(args.x_name,
                                      xx.dtype,
                                      dimensions=in_grid_dim_names)
    copy_nc_attributes(grid_ds.variables[names.DIMVAR_X], out_x_var)
    out_x_var[:, :] = xx

    out_y_var = out_ds.createVariable(args.y_name,
                                      yy.dtype,
                                      dimensions=in_grid_dim_names)
    copy_nc_attributes(grid_ds.variables[names.DIMVAR_Y], out_y_var)
    out_y_var[:, :] = yy

    grid_ds.close()

    if len(scalar_vars) > 0:
        print 'Processing scalar fields:'

    for var_name in scalar_vars:
        print var_name
        in_var = in_ds.variables[var_name]
        add_missing_dim_vars(in_ds, out_ds, in_var.dimensions)

        out_var = out_ds.createVariable(var_name,
                                        in_var.dtype,
                                        dimensions=in_var.dimensions)

        copy_nc_attributes(in_var, out_var)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)
        iter_mask[-MAX_COPY_DIM_COUNT:] = False

        dim_iterator = DimIterator(in_var.shape, None, iter_mask)
        write_op_count = len(dim_iterator)
        for write_op_num, slc in enumerate(dim_iterator.slice_tuples()):
            _progress(write_op_num, write_op_count)
            out_var[slc] = in_var[slc]
        _progress(write_op_count, write_op_count)

    if len(vector_vars) > 0:
        print 'Processing vector fields:'

    for var_name_pair in vector_vars:
        print var_name_pair
        in_u_var = in_ds.variables[var_name_pair[0]]
        in_v_var = in_ds.variables[var_name_pair[1]]

        if in_u_var.dimensions != in_v_var.dimensions:
            raise Exception('Vector components must be defined on the same '
                            'dimensions.')

        grid_dim_indices = find_dim_indices(in_grid_dim_names,
                                            in_u_var.dimensions)

        if any(idx is None for idx in grid_dim_indices):
            raise Exception('Vector components must be defined along the '
                            'grid dimensions.')

        add_missing_dim_vars(in_ds, out_ds, in_u_var.dimensions)

        out_x_var = out_ds.createVariable('_'.join(var_name_pair) + '_' +
                                          args.x_name,
                                          in_u_var.dtype,
                                          dimensions=in_u_var.dimensions)
        out_x_var.projection = out_proj_var.grid_mapping_name

        out_y_var = out_ds.createVariable('_'.join(var_name_pair) + '_' +
                                          args.y_name,
                                          in_v_var.dtype,
                                          dimensions=in_u_var.dimensions)
        out_y_var.projection = out_proj_var.grid_mapping_name

        swap_axes = grid_dim_indices[0] > grid_dim_indices[1]

        iter_mask = np.ones(len(in_u_var.shape), dtype=bool)
        iter_mask[grid_dim_indices] = False

        dim_iterator = DimIterator(in_u_var.shape, None, iter_mask)
        write_op_count = len(dim_iterator)
        for write_op_num, slc in enumerate(dim_iterator.slice_tuples()):
            _progress(write_op_num, write_op_count)

            in_u_field = in_u_var[slc]
            in_v_field = in_v_var[slc]

            if swap_axes:
                in_u_field = np.swapaxes(in_u_field, grid_dim_indices[0],
                                         grid_dim_indices[1])
                in_v_field = np.swapaxes(in_v_field, grid_dim_indices[0],
                                         grid_dim_indices[1])

            out_x_field, out_y_field = \
                converter.convert_vectors(in_u_field, in_v_field,
                                          in_grid[..., 1], in_grid[..., 0])

            if swap_axes:
                out_x_field = np.swapaxes(out_x_field, grid_dim_indices[0],
                                          grid_dim_indices[1])
                out_y_field = np.swapaxes(out_y_field, grid_dim_indices[0],
                                          grid_dim_indices[1])

            out_x_var[slc] = out_x_field
            out_y_var[slc] = out_y_field
        _progress(write_op_count, write_op_count)

    add_history(out_ds, get_history(in_ds))

    in_ds.close()
    out_ds.close()
Example #8
def cmd(args):
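    # Generate a grid file for a regular grid defined in Cartesian
    # coordinates on a projection plane: write the x/y axes, a projection
    # description variable, the geographic coordinates of every grid point,
    # and the angles needed to restore vectors to geographic axes.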
    converter = init_converter_from_args(args)

    grid = RectilinearGrid(
        RegularAxis(args.x_start, args.x_count, args.x_step),
        RegularAxis(args.y_start, args.y_count, args.y_step), False)

    create_dir_for_file(args.output_file)
    grid_ds = Dataset(args.output_file, mode='w', format='NETCDF4')
    grid_ds.title = 'Geographic coordinates of points of a regular grid ' \
                    'defined in Cartesian coordinates on a ' + \
                    converter.projection.long_name + ' projection plane.'

    grid_ds.createDimension(names.DIMVAR_X, args.x_count)
    x_var = grid_ds.createVariable(names.DIMVAR_X,
                                   grid.dtype,
                                   dimensions=(names.DIMVAR_X, ))
    x_var.long_name = 'x coordinate of projection'
    x_var.standard_name = 'projection_x_coordinate'
    x_var.axis = 'X'
    x_var.units = 'm'
    x_var.step = args.x_step
    x_var[:] = grid.x_axis

    grid_ds.createDimension(names.DIMVAR_Y, args.y_count)
    y_var = grid_ds.createVariable(names.DIMVAR_Y,
                                   grid.dtype,
                                   dimensions=(names.DIMVAR_Y, ))
    y_var.long_name = 'y coordinate of projection'
    y_var.standard_name = 'projection_y_coordinate'
    y_var.axis = 'Y'
    y_var.units = 'm'
    y_var.step = args.y_step
    y_var[:] = grid.y_axis

    proj_var = grid_ds.createVariable(names.VAR_PROJECTION, 'c')
    proj_var.description = \
        re.sub(r'\s{2,}', ' ',
               converter.projection.__doc__.replace('\n', ' ')).strip() + \
        ' ' \
        'Before applying the projection, a series of rotations of the ' \
        'geographical coordinate system is performed to shift the point ' \
        '(origin_lat;origin_lon) to its center and to adjust the ' \
        'orientation of the axes of the projection plane with respect to ' \
        'the surface.'
    proj_var.grid_mapping_name = (converter.projection.standard_name +
                                  '+rotated_latitude_longitude')
    proj_var.earth_radius = args.earth_radius
    proj_var.latitude_of_projection_origin = args.orig_lat
    proj_var.longitude_of_projection_origin = args.orig_lon
    proj_var.standard_parallel = converter.projection.true_scale_lats
    proj_var.rot_axes = converter.rotor.rot_axes_ids
    proj_var.rot_angles_deg = converter.rotor.rot_angles_deg
    proj_var.short_name = converter.projection.short_name
    proj_var.false_easting = converter.translator.easting
    proj_var.false_northing = converter.translator.northing

    rot_uu, rot_vv, lats, lons = converter.restore_vectors(
        np.ones(grid.shape), np.zeros(grid.shape), grid[:, :, 0],
        grid[:, :, 1], True)

    lats_var = grid_ds.createVariable(names.DIMVAR_LAT,
                                      lats.dtype,
                                      dimensions=(names.DIMVAR_Y,
                                                  names.DIMVAR_X))
    lats_var.units = 'degrees_north'
    lats_var.long_name = 'latitude coordinate'
    lats_var.standard_name = 'latitude'
    lats_var[:] = lats

    lons_var = grid_ds.createVariable(names.DIMVAR_LON,
                                      lons.dtype,
                                      dimensions=(names.DIMVAR_Y,
                                                  names.DIMVAR_X))
    lons_var.units = 'degrees_east'
    lons_var.long_name = 'longitude coordinate'
    lons_var.standard_name = 'longitude'
    lons_var[:] = lons

    restore_angles = np.degrees(np.arctan2(rot_vv, rot_uu))
    restore_angles_var = grid_ds.createVariable('restore_angles',
                                                restore_angles.dtype,
                                                dimensions=(names.DIMVAR_Y,
                                                            names.DIMVAR_X))
    restore_angles_var.units = 'degrees'
    restore_angles_var.long_name = 'restore rotation angles'
    restore_angles_var[:] = restore_angles

    add_or_append_history(grid_ds)

    grid_ds.close()
Example #9
def cmd(args):
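    # Extend a regular lat/lon grid with grid points at the North and/or
    # South Pole: a latitude row is prepended or appended (depending on the
    # sort order of the latitude axis) and the listed scalar and vector
    # fields are extended accordingly.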
    in_ds = Dataset(args.input_file, 'r')

    # Latitude variable is mandatory.
    in_lat_var = in_ds.variables[args.lat_name]
    if len(in_lat_var.dimensions) != 1:
        raise Exception('\'%s\' is not a 1D variable.' % args.lat_name)

    in_lat_dim_name = in_lat_var.dimensions[0]

    # Longitude variable is optional (but only for scalar fields).
    if args.lon_name is not None:
        in_lon_var = in_ds.variables[args.lon_name]
        if len(in_lon_var.dimensions) != 1:
            raise Exception('\'%s\' is not a 1D variable.' % args.lon_name)

        in_lon_dim_name = in_lon_var.dimensions[0]

        if in_lat_dim_name == in_lon_dim_name:
            raise Exception('Latitude and longitude dimension variables '
                            'cannot be specified along the same dimension.')

    scalar_vars, vector_vars = split_scalar_and_vector_vars(args.var_names)

    if len(vector_vars) > 0 and args.lon_name is None:
        raise Exception('Vector fields cannot be processed without a '
                        'longitude variable.')

    add_north_pole = (args.add == 'north' or args.add == 'both')
    add_south_pole = (args.add == 'south' or args.add == 'both')

    lat_list = in_lat_var[:]
    pole_tol = lat_list.dtype.type(POLE_TOLERANCE)
    np_lat = lat_list.dtype.type(90)

    min_lat_idx, max_lat_idx = 0, -1
    lat_list_ascending = True
    if lat_list.shape[0] > 1:
        if np.all(lat_list[1:] > lat_list[:-1]):
            pass
        elif np.all(lat_list[1:] < lat_list[:-1]):
            min_lat_idx, max_lat_idx = max_lat_idx, min_lat_idx
            lat_list_ascending = False
        else:
            raise Exception('Latitudes must be sorted and must not contain '
                            'duplicates.')
    elif lat_list.shape[0] != 1:
        raise Exception('Array of latitudes must not be empty.')

    append_row = prepend_row = False
    if add_north_pole:
        if np.abs(lat_list[max_lat_idx] - np_lat) <= pole_tol:
            raise Exception('Input grid already contains grid points for the '
                            'North Pole.')
        if lat_list_ascending:
            append_row = True
        else:
            prepend_row = True

        lat_list = _extend_axis(lat_list, np_lat, not lat_list_ascending)

    if add_south_pole:
        if np.abs(lat_list[min_lat_idx] + np_lat) <= pole_tol:
            raise Exception('Input grid already contains grid points for the '
                            'South Pole.')
        if lat_list_ascending:
            prepend_row = True
        else:
            append_row = True

        lat_list = _extend_axis(lat_list, -np_lat, lat_list_ascending)

    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w')

    out_ds.createDimension(
        in_lat_dim_name,
        None if in_ds.dimensions[in_lat_dim_name].isunlimited() else
        lat_list.shape[0])
    out_lat_var = out_ds.createVariable(args.lat_name,
                                        in_lat_var.dtype,
                                        dimensions=(in_lat_dim_name, ))
    copy_nc_attributes(in_lat_var, out_lat_var)
    out_lat_var[:] = lat_list

    if args.lon_name is not None:
        lon_list = in_lon_var[:]

        out_ds.createDimension(
            in_lon_dim_name,
            None if in_ds.dimensions[in_lon_dim_name].isunlimited() else
            lon_list.shape[0])
        out_lon_var = out_ds.createVariable(args.lon_name,
                                            in_lon_var.dtype,
                                            dimensions=(in_lon_dim_name, ))
        copy_nc_attributes(in_lon_var, out_lon_var)
        out_lon_var[:] = lon_list

    if len(scalar_vars) > 0:
        print 'Processing scalar fields:'

    for var_name in scalar_vars:
        print var_name
        in_var = in_ds.variables[var_name]
        add_missing_dim_vars(in_ds, out_ds, in_var.dimensions)

        lat_idx, lon_idx = find_dim_indices([args.lat_name, args.lon_name],
                                            in_var.dimensions)

        out_var = out_ds.createVariable(var_name,
                                        in_var.dtype,
                                        dimensions=in_var.dimensions)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)

        if lat_idx is not None:
            iter_mask[lat_idx] = False

            swap_axes = False
            if lon_idx is not None:
                iter_mask[lon_idx] = False
                swap_axes = lon_idx < lat_idx

            read_iter = DimIterator(in_var.shape, None, iter_mask)
            write_iter = DimIterator(out_var.shape, None, iter_mask)
            write_op_count = len(read_iter)

            for write_op_num, (read_slc, write_slc) in enumerate(
                    izip(read_iter.slice_tuples(), write_iter.slice_tuples())):
                _progress(write_op_num, write_op_count)

                in_field = in_var[read_slc]

                if swap_axes:
                    in_field = np.swapaxes(in_field, lat_idx, lon_idx)

                out_field = in_field

                if prepend_row:
                    out_field = _extend_scalar_field(out_field, True)
                if append_row:
                    out_field = _extend_scalar_field(out_field, False)

                if swap_axes:
                    out_field = np.swapaxes(out_field, lat_idx, lon_idx)

                out_var[write_slc] = out_field

            _progress(write_op_count, write_op_count)

    if len(vector_vars) > 0:
        print 'Processing vector fields:'
        to_zero, from_zero = gen_rot_matrices_deg(lon_list, True)

    for var_name_pair in vector_vars:
        print var_name_pair

        in_u_var = in_ds.variables[var_name_pair[0]]
        in_v_var = in_ds.variables[var_name_pair[1]]

        if in_u_var.dimensions != in_v_var.dimensions:
            raise Exception('Vector components must be defined on the same '
                            'dimensions.')

        lat_idx, lon_idx = find_dim_indices([args.lat_name, args.lon_name],
                                            in_u_var.dimensions)

        if lat_idx is None or lon_idx is None:
            raise Exception('Vector components must be defined along both '
                            'the latitude and longitude dimensions.')

        add_missing_dim_vars(in_ds, out_ds, in_u_var.dimensions)

        out_u_var = out_ds.createVariable(var_name_pair[0],
                                          in_u_var.dtype,
                                          dimensions=in_u_var.dimensions)

        out_v_var = out_ds.createVariable(var_name_pair[1],
                                          in_v_var.dtype,
                                          dimensions=in_v_var.dimensions)

        swap_axes = lon_idx < lat_idx

        iter_mask = np.ones(len(in_u_var.shape), dtype=bool)
        iter_mask[lat_idx] = iter_mask[lon_idx] = False

        read_iter = DimIterator(in_u_var.shape, None, iter_mask)
        write_iter = DimIterator(out_u_var.shape, None, iter_mask)
        write_op_count = len(read_iter)
        for write_op_num, (read_slc, write_slc) in enumerate(
                izip(read_iter.slice_tuples(), write_iter.slice_tuples())):
            _progress(write_op_num, write_op_count)

            in_u_field = in_u_var[read_slc]
            in_v_field = in_v_var[read_slc]

            if swap_axes:
                in_u_field = np.swapaxes(in_u_field, lat_idx, lon_idx)
                in_v_field = np.swapaxes(in_v_field, lat_idx, lon_idx)

            out_u_field, out_v_field = in_u_field, in_v_field

            if prepend_row:
                out_u_field, out_v_field = \
                    _extend_vector_field(out_u_field, out_v_field,
                                         to_zero, from_zero,
                                         True)

            if append_row:
                out_u_field, out_v_field = \
                    _extend_vector_field(out_u_field, out_v_field,
                                         to_zero, from_zero,
                                         False)

            if swap_axes:
                out_u_field = np.swapaxes(out_u_field, lat_idx, lon_idx)
                out_v_field = np.swapaxes(out_v_field, lat_idx, lon_idx)

            out_u_var[write_slc] = out_u_field
            out_v_var[write_slc] = out_v_field

        _progress(write_op_count, write_op_count)

    add_history(out_ds, get_history(in_ds))

    in_ds.close()
    out_ds.close()
Example #10
def cmd(args):
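    # Extract a subset of the listed variables into a new file: per-dimension
    # slice, exclude and min/max requests from the command line are turned
    # into index requests that are applied both to the dimension variables
    # and to the data variables themselves.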
    if len(args.var_names) == 0:
        raise Exception('Variable name list \'--var-names\' is empty.')

    in_ds = Dataset(args.input_file, 'r')
    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w')

    processed_requests = {}
    for var_name in args.var_names:
        in_var = in_ds.variables[var_name]
        var_request = []
        for dim_name in in_var.dimensions:
            if dim_name in processed_requests:
                dim_request = processed_requests[dim_name]
            else:
                dim = in_ds.dimensions[dim_name]

                in_dim_var = None
                if dim_name in in_ds.variables:
                    in_dim_var = in_ds.variables[dim_name]
                    if len(in_dim_var.dimensions) != 1 \
                            or in_dim_var.dimensions[0] != dim_name:
                        in_dim_var = None

                dim_request = _process_slice_request(dim_name, args.slice_dict,
                                                     slice(None))
                dim_request = _process_exclude_request(dim_name,
                                                       args.exclude_dict,
                                                       dim.size, dim_request)
                dim_request = _process_min_max_request(dim_name, args.min_dict,
                                                       args.max_dict,
                                                       in_dim_var, dim_request)

                processed_requests[dim_name] = dim_request
                out_ds.createDimension(
                    dim_name,
                    None if dim.isunlimited() else _calc_request_size(
                        dim_request, dim.size))

                if in_dim_var is not None:
                    out_dim_var = create_nc_var_like_other(out_ds, in_dim_var)
                    if dim_request is not None:
                        out_dim_var[:] = in_dim_var[dim_request]

            var_request.append(dim_request)

        out_var = create_nc_var_like_other(out_ds, in_var)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)
        iter_mask[-MAX_COPY_DIM_COUNT:] = False

        read_iter = DimIterator(in_var.shape, var_request, iter_mask)
        write_iter = DimIterator(out_var.shape, None, iter_mask)

        for read_slc, write_slc in izip(read_iter.slice_tuples(),
                                        write_iter.slice_tuples()):
            out_var[write_slc] = in_var[read_slc]

    add_history(out_ds, get_history(in_ds))
    in_ds.close()
    out_ds.close()
Example #11
def cmd(args):
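    # Rotate vector fields by angles read from an angle file (in radians or
    # degrees): rotation matrices are generated once and applied to every
    # pair of vector components, while scalar fields are copied unchanged.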
    scalar_vars, vector_vars = split_scalar_and_vector_vars(args.var_names)

    if len(vector_vars) < 1:
        raise Exception('No input vector fields specified.')

    if args.angle_file is None:
        args.angle_file = args.input_file

    angle_ds = Dataset(args.angle_file, 'r')

    if args.angle_name not in angle_ds.variables:
        raise Exception('Angle file \'%s\' does not contain the angle '
                        'variable \'%s\'.'
                        % (args.angle_file, args.angle_name))

    angle_var = angle_ds.variables[args.angle_name]
    angle_shape = angle_var.shape

    in_ds = Dataset(args.input_file, 'r')

    if args.dim_names is None:
        args.dim_names = angle_var.dimensions
    else:
        if len(args.dim_names) != len(angle_var.dimensions):
            raise Exception('Number of dimensions specified by the parameter '
                            '--dim-names is not equal to the number of '
                            'dimensions of the --angle-name variable.')
        for dim_idx, dim_name in enumerate(args.dim_names):
            if dim_name not in in_ds.dimensions:
                raise Exception('Input file does not contain dimension '
                                '\'%s\'.' % dim_name)
            if in_ds.dimensions[dim_name].size != angle_shape[dim_idx]:
                raise Exception('Size of the dimension \'%s\' of the input '
                                'file is not equal to the size of the '
                                'dimension \'%s\' of the angle file.'
                                % (dim_name, angle_var.dimensions[dim_idx]))

    rot_matrices = gen_rot_matrices_rad(angle_var[:]) \
        if args.angle_units == 'rad' \
        else gen_rot_matrices_deg(angle_var[:])
    angle_ds.close()

    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w')

    if len(scalar_vars) > 0:
        print 'Processing scalar fields:'

    for var_name in scalar_vars:
        print var_name
        in_var = in_ds.variables[var_name]
        add_missing_dim_vars(in_ds, out_ds, in_var.dimensions)

        out_var = out_ds.createVariable(var_name,
                                        in_var.dtype,
                                        dimensions=in_var.dimensions)
        copy_nc_attributes(in_var, out_var)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)
        iter_mask[-MAX_COPY_DIM_COUNT:] = False

        dim_iterator = DimIterator(in_var.shape, None, iter_mask)
        write_op_count = len(dim_iterator)
        for write_op_num, slc in enumerate(dim_iterator.slice_tuples()):
            _progress(write_op_num, write_op_count)
            out_var[slc] = in_var[slc]
        _progress(write_op_count, write_op_count)

    if len(vector_vars) > 0:
        print 'Processing vector fields:'

    angle_iter_mask = np.ones((len(args.dim_names),), dtype=bool)
    dims_per_operation = min(len(args.dim_names), MAX_COPY_DIM_COUNT)
    angle_iter_mask[-dims_per_operation:] = False
    angle_dim_iterator = DimIterator(angle_shape, None, angle_iter_mask)

    angle_iter_count = len(angle_dim_iterator)

    for var_name_pair in vector_vars:
        print var_name_pair
        in_u_var = in_ds.variables[var_name_pair[0]]
        in_v_var = in_ds.variables[var_name_pair[1]]

        if in_u_var.dimensions != in_v_var.dimensions:
            raise Exception('Vector components must be defined on the same '
                            'dimensions.')

        angle_dim_indices = find_dim_indices(args.dim_names,
                                             in_u_var.dimensions)

        for idx, dim_name in enumerate(args.dim_names):
            if angle_dim_indices[idx] is None:
                raise Exception('Variable \'%s\' is not specified along '
                                'dimension \'%s\'.'
                                % (var_name_pair[0], dim_name))

        add_missing_dim_vars(in_ds, out_ds, in_u_var.dimensions)

        out_u_var = out_ds.createVariable(
            var_name_pair[0],
            in_u_var.dtype,
            dimensions=in_u_var.dimensions)

        out_v_var = out_ds.createVariable(
            var_name_pair[1],
            in_v_var.dtype,
            dimensions=in_u_var.dimensions)

        fixed_indices = angle_dim_indices[-dims_per_operation:]

        dim_var_order = np.arange(len(fixed_indices))
        dim_angle_order = np.argsort(fixed_indices)

        var_iter_mask = np.ones(len(in_u_var.shape), dtype=bool)
        var_iter_mask[fixed_indices] = False

        for angle_iter_num, angle_slc in \
                enumerate(angle_dim_iterator.slice_tuples()):
            var_fixed_slices = np.repeat(None, len(in_u_var.shape))
            var_fixed_slices[angle_dim_indices] = angle_slc
            var_iter = DimIterator(in_u_var.shape, var_fixed_slices,
                                   var_iter_mask)
            var_iter_count = len(var_iter)
            total_iter_count = angle_iter_count * var_iter_count
            for var_iter_num, var_slc in enumerate(var_iter.slice_tuples()):
                _progress(var_iter_num + angle_iter_num * var_iter_count,
                          total_iter_count)
                in_u_field = in_u_var[var_slc]
                in_v_field = in_v_var[var_slc]

                in_u_field = np.moveaxis(in_u_field, dim_var_order,
                                         dim_angle_order)
                in_v_field = np.moveaxis(in_v_field, dim_var_order,
                                         dim_angle_order)

                out_u_field, out_v_field = \
                    apply_rot_matrices(in_u_field, in_v_field,
                                       rot_matrices[(slice(None), slice(None))
                                                    + angle_slc])

                out_u_field = np.moveaxis(out_u_field, dim_angle_order,
                                          dim_var_order)

                out_v_field = np.moveaxis(out_v_field, dim_angle_order,
                                          dim_var_order)

                out_u_var[var_slc] = out_u_field
                out_v_var[var_slc] = out_v_field

        _progress(total_iter_count, total_iter_count)

    add_history(out_ds, get_history(in_ds))

    in_ds.close()
    out_ds.close()
Example #12
def cmd(args):
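    # Interpolate variables onto a new grid using a previously generated
    # weight file: for every output point, the input values at the stored
    # vertex indices are combined with the stored weights, and points with
    # any masked contribution are masked in the result.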
    weight_ds = Dataset(args.weight_file, 'r')
    expected_in_dims = tuple(weight_ds.variables[names.VAR_INPUT_DIMS][:])
    expected_in_shape = tuple(weight_ds.variables[names.VAR_INPUT_SHAPE][:])

    in_ds = Dataset(args.input_file, 'r')
    for dim_idx, dim_name in enumerate(expected_in_dims):
        if dim_name not in in_ds.dimensions or \
                in_ds.dimensions[dim_name].size != expected_in_shape[dim_idx]:
            raise Exception('Weight file does not match the input file.')

    weight_var = weight_ds.variables[names.VAR_WEIGHTS]
    out_dims = weight_var.dimensions[:-1]
    weights = weight_var[:]
    indices = _split_and_squeeze(
        np.ma.filled(weight_ds.variables[names.VAR_INDICES][:], 0), -2)

    weight_ds.close()

    create_dir_for_file(args.output_file)
    out_ds = Dataset(args.output_file, 'w')
    dim_rename_dict = {}
    for dim_idx, dim_name in enumerate(out_dims):
        out_ds.createDimension(dim_name, weights.shape[dim_idx])
        dim_rename_dict[expected_in_dims[dim_idx]] = dim_name

    for var_name in args.var_names:
        print var_name
        in_var = in_ds.variables[var_name]
        in_var_dim_indices = find_dim_indices(expected_in_dims,
                                              in_var.dimensions)
        in_field_dim_indices = np.argsort(in_var_dim_indices)

        iter_mask = np.ones(len(in_var.shape), dtype=bool)
        for dim_idx in in_var_dim_indices:
            if dim_idx is None:
                raise Exception('Variable \'%s\' is not defined along all '
                                'of the expected input dimensions.'
                                % var_name)
            iter_mask[dim_idx] = False

        out_dim_tuple = rename_dimensions(in_var.dimensions, dim_rename_dict)
        add_missing_dim_vars(in_ds, out_ds, out_dim_tuple)

        out_var = out_ds.createVariable(var_name, in_var.dtype,
                                        dimensions=out_dim_tuple)

        read_iter = DimIterator(in_var.shape, None, iter_mask)
        write_iter = DimIterator(out_var.shape, None, iter_mask)
        write_op_count = len(read_iter)
        for write_op_num, (read_slc, write_slc) in enumerate(
                izip(read_iter.slice_tuples(), write_iter.slice_tuples())):
            _progress(write_op_num, write_op_count)
            in_field = in_var[read_slc]

            in_field, undo_order = reorder_axes(in_field, in_field_dim_indices)

            in_field = in_field[indices]

            out_field = np.ma.masked_where(
                np.ma.count_masked(in_field, axis=2) > 0,
                np.ma.sum(in_field * weights, axis=-1), copy=False)

            out_field, _ = reorder_axes(out_field, undo_order)

            out_var[write_slc] = out_field
        _progress(write_op_count, write_op_count)

    add_history(out_ds, get_history(in_ds))

    in_ds.close()
    out_ds.close()