Example 1
def _generate_SVBRDF_directions(
        num_rows: int, num_cols: int) -> Tuple[Tensor, Tensor, Tensor]:
    '''
    Generates a set of normals and incident-outbound direction pairs which match the θ[h] and θ[d] parameterization of the
    MERL 100 BRDF slices.

    Args:
        num_rows: Number of θ[d] samples in the BRDF slice.
        num_cols: Number of θ[h] samples in the BRDF slice.
    
    Returns:
        Tuple of three Tensors, each [R, C, 1, 3]: the normals, incident directions, and outbound directions.
    '''
    normals = torch.tensor([0, 0, 1],
                           dtype=torch.float64).expand(num_rows, num_cols, 3)
    # An incident direction corresponding to a (θ[h], θ[d]) pair is initialized by tilting the normal toward the
    # positive X-axis until its zenith angle is θ[d].  The incident direction is then rotated toward the positive
    # Y-axis by θ[h] radians.
    angles = utils.create_grid(num_rows,
                               num_cols)[:, :, :2].double() * (math.pi / 2)
    half_angles = angles[:, :, 0]
    diff_angles = angles[:, :, 1].flip(0)
    incident_directions = torch.stack([
        torch.sin(diff_angles),
        torch.sin(half_angles) * torch.cos(diff_angles),
        torch.cos(half_angles) * torch.cos(diff_angles)
    ],
                                      dim=2)
    # The outbound direction corresponding to an incident direction is found by flipping its X-coordinate.
    outbound_directions = incident_directions * torch.tensor([-1, 1, 1])
    return normals.unsqueeze(2), incident_directions.unsqueeze(
        2), outbound_directions.unsqueeze(2)
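
As a quick sanity check on the construction above (a standalone sketch, not part of the project code): for a single (θ[h], θ[d]) pair, the half vector of the incident/outbound pair should make an angle θ[h] with the normal, and the incident direction should make an angle θ[d] with that half vector.

import numpy as np

# Standalone verification sketch; theta_h and theta_d are arbitrary test values.
theta_h, theta_d = 0.7, 0.3
wi = np.array([np.sin(theta_d),
               np.sin(theta_h) * np.cos(theta_d),
               np.cos(theta_h) * np.cos(theta_d)])
wo = wi * np.array([-1, 1, 1])                      # flip the X-coordinate
half = (wi + wo) / np.linalg.norm(wi + wo)
normal = np.array([0.0, 0.0, 1.0])
assert np.isclose(np.arccos(half @ normal), theta_h)
assert np.isclose(np.arccos(wi @ half), theta_d)
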
def _shade_render_save(normals: Tensor, svbrdf: SVBRDF, lights: List[Light],
                       viewer: Viewer, camera: Camera, path: str) -> None:
    '''
    Shades, renders, and saves a picture to the given path from the provided normals, Lights, Viewer, Camera, and SVBRDF.

    Args:
        normals: Tensor [1, R, C, 3] of normals to use for shading.
        svbrdf: SVBRDF to use for shading.
        lights: Lights to use for shading.
        viewer: Viewer to use for shading.
        camera: Camera to use for rendering.
        path: Path to use for saving the image.
    '''
    num_rows, num_cols = normals.size(1), normals.size(2)
    num_size = max(num_rows, num_cols)
    surface = (utils.create_grid(num_rows=num_rows, num_cols=num_cols) -
               torch.tensor([0.5, 0.5, 0])) * torch.tensor(
                   [num_cols / num_size, num_rows / num_size, 1])
    radiance = shader.shade(surface=surface,
                            normals=normals,
                            lights=lights,
                            viewer=viewer,
                            svbrdf=svbrdf)
    picture = camera.render(surface=surface, radiance=radiance[0])
    image.save(path=path, image=picture, encoding='sRGB')
Example 3
def individual_to_image(individual, image_size=settings.image_size):
    if settings.show_generation_time:
        print("--------------------------------------")
        print("Generating image...")
        start = timer()
    inputs = utils.create_grid(image_size, image_size, individual.scale)
    inputs = inputs[:settings.nb_input_params]  # + (individual.latent_vector,)
    inputs = np.concatenate(inputs, axis=1)

    with torch.no_grad():
        inputs = torch.Tensor(inputs).to(settings.device)
        output = individual.net(inputs)

        image = []

        for channel in range(settings.channels):
            chan_data = output[:, channel].cpu().numpy()
            chan_data = utils.normalize(chan_data, 255)
            image.append(chan_data.reshape(image_size, image_size))

        image = np.dstack(image)

        if settings.channels == 1:
            image = np.repeat(image, 3, axis=2)

    if settings.show_generation_time:
        end = timer()
        print("Generation took : ", end - start, " seconds.")
        print("--------------------------------------")

    return image
Example 4
def main(year, dx=100000, snowType='SRLD', extraStr='v11'):

    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    print(xptsG)
    print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    print(dxStr)

    #region_mask, xptsI, yptsI = cF.get_region_mask_pyproj(anc_data_path, proj, xypts_return=1)
    #region_maskG = griddata((xptsI.flatten(), yptsI.flatten()), region_mask.flatten(), (xptsG, yptsG), method='nearest')

    #xptsDays, yptsDays, oibdates, snowDays= cF.read_icebridge_snowdepths(proj, oib_data_path, year)
    xptsDays, yptsDays, _, _, snowDays, oibdates = cF.getSTOSIWIGyear_proj(
        proj, oib_data_path + '/stosiwig/', snowType, year)

    for x in range(len(oibdates)):
        # Loop through the dates of each flight, kept separate to compare against the daily NESOSIM data.
        oib_dayG = bin_oib(dx, xptsDays[x], yptsDays[x], xptsG, yptsG,
                           snowDays[x])
        cF.plot_gridded_cartopy(
            lonG,
            latG,
            oib_dayG,
            proj=ccrs.NorthPolarStereo(central_longitude=-45),
            out=figure_path + '/OIB/' + oibdates[x] + dxStr + snowType +
            extraStr,
            date_string=oibdates[x],
            month_string='',
            varStr='OIB snow depth ',
            units_lab=r'm',
            minval=0,
            maxval=0.6,
            cmap_1=plt.cm.viridis)

        oib_dayG.dump(forcing_save_path + dxStr + '/OIB/' + str(year) + '/' +
                      oibdates[x] + dxStr + snowType + extraStr)

    arr = np.hstack(xptsDays)

    oib_daysG = cF.bin_oib(dx, np.hstack(xptsDays), np.hstack(yptsDays), xptsG,
                           yptsG, np.hstack(snowDays))

    cF.plot_gridded_cartopy(lonG,
                            latG,
                            oib_daysG,
                            proj=ccrs.NorthPolarStereo(central_longitude=-45),
                            out=figure_path + '/OIB/' + str(year) + dxStr +
                            snowType + extraStr,
                            date_string=str(year),
                            month_string='',
                            varStr='OIB snow depth ',
                            units_lab=r'm',
                            minval=0,
                            maxval=0.6,
                            cmap_1=plt.cm.viridis)
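
For context, a minimal sketch of what a binning step like bin_oib above might do, assuming it simply averages the flight measurements that fall inside each model grid cell (the real implementation is not shown here; scipy's binned_statistic_2d is used as a stand-in):

import numpy as np
from scipy.stats import binned_statistic_2d

def bin_to_grid_sketch(xpts, ypts, values, x_edges, y_edges):
    # Mean of all point values falling in each (x, y) cell; empty cells become NaN.
    mean_grid, _, _, _ = binned_statistic_2d(xpts, ypts, values,
                                             statistic='mean',
                                             bins=[x_edges, y_edges])
    return mean_grid

rng = np.random.default_rng(1)
xpts, ypts = rng.uniform(0, 1e6, 1000), rng.uniform(0, 1e6, 1000)
depths = rng.uniform(0, 0.6, 1000)
edges = np.linspace(0, 1e6, 11)                     # ten 100 km cells, mirroring dx=100000
print(bin_to_grid_sketch(xpts, ypts, depths, edges, edges).shape)  # (10, 10)
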
def main(year, dx=100000, extraStr='v11'):

    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    #print(xptsG)
    #print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    #print(dxStr)

    files = glob(forcing_save_path + dxStr + '/OIB/' + str(year) + '/' + '*' +
                 dxStr + extraStr + '')
    print(files)
    oibdates = [file[-16:-8] for file in files]

    print(oibdates)
    for x in range(len(oibdates)):
        # Loop through the dates of each flight, kept separate to compare against the daily NESOSIM data.

        try:
            oib_dayG1 = np.load(forcing_save_path + dxStr + '/OIB/' +
                                str(year) + '/' + oibdates[x] + dxStr +
                                'GSFC' + extraStr,
                                allow_pickle=True)
            oib_dayG2 = np.load(forcing_save_path + dxStr + '/OIB/' +
                                str(year) + '/' + oibdates[x] + dxStr +
                                'SRLD' + extraStr,
                                allow_pickle=True)
            oib_dayG3 = np.load(forcing_save_path + dxStr + '/OIB/' +
                                str(year) + '/' + oibdates[x] + dxStr + 'JPL' +
                                extraStr,
                                allow_pickle=True)

            oib_dayGC = np.median((oib_dayG1, oib_dayG2, oib_dayG3), axis=0)
            cF.plot_gridded_cartopy(
                lonG,
                latG,
                oib_dayGC,
                proj=ccrs.NorthPolarStereo(central_longitude=-45),
                out=figure_path + '/OIB/' + oibdates[x] + dxStr + 'MEDIAN' +
                extraStr,
                date_string=str(year),
                month_string='',
                varStr='OIB snow depth MEDIAN',
                units_lab=r'm',
                minval=0,
                maxval=0.6,
                cmap_1=plt.cm.viridis)

            oib_dayGC.dump(forcing_save_path + dxStr + '/OIB/' + str(year) +
                           '/' + oibdates[x] + dxStr + 'MEDIAN' + extraStr)
        except:
            print('Not all three algorithms exist for this date:', oibdates[x])
Example 6
    def render(self, surface: Tensor, radiance: Tensor) -> Tensor:
        '''See Camera.render().'''
        # Determine the height and width of a surface texel.
        surface_row_step = (surface[-1, 0] - surface[0, 0]) / (surface.size(0) - 1)
        surface_col_step = (surface[0, -1] - surface[0, 0]) / (surface.size(1) - 1)
        # Compute the origin of the surface taking into account that each point represents the center of a texel.
        surface_origin = surface[0, 0] - surface_row_step / 2 - surface_col_step / 2
        # Derive the axes of the surface which are aligned with the row and column structure of the points.
        surface_row_axis = surface_row_step * surface.size(0)
        surface_col_axis = surface_col_step * surface.size(1)
        tiled_surface_row_axis = surface_row_axis.expand(int(self.resolution[1]), int(self.resolution[0]), 3)
        tiled_surface_col_axis = surface_col_axis.expand(int(self.resolution[1]), int(self.resolution[0]), 3)

        # Construct an image plane coincident with the Cartesian plane from the focal position (0, 0, 1).
        image_plane = create_grid(int(self.resolution[1]), int(self.resolution[0])) * 2 - 1
        image_plane[:, :, 0] *= torch.tan(math.pi * self.field_of_view[0] / 360)
        image_plane[:, :, 1] *= torch.tan(math.pi * self.field_of_view[1] / 360)
        # Derive an appropriate basis for the image plane based on the direction of the camera.
        image_plane_basis = create_orthonormal_basis(self.direction)
        # Transform the image plane into the reference frame of the camera.
        camera_ray_directions = image_plane_basis[2] * image_plane[:, :, [0]] - image_plane[:, :, [1]] * image_plane_basis[1] + image_plane_basis[0]

        # The intersection between a camera ray and the surface is given by the Möller–Trumbore algorithm.
        collision_mats = torch.stack([tiled_surface_row_axis, tiled_surface_col_axis, -camera_ray_directions], dim=2)
        collision_mats = torch.unsqueeze(collision_mats, dim=2).repeat(1, 1, 4, 1, 1).transpose(-1, -2)
        surface_to_camera_ray = self.position - surface_origin
        # The algorithm boils down to solving the following system of equations with Cramer's rule:
        #     [ surface_row_axis.x surface_col_axis.x -camera_ray_directions.x ]   [ A ]   [ surface_to_camera_ray.x ]
        #     | surface_row_axis.y surface_col_axis.y -camera_ray_directions.y | x | B | = | surface_to_camera_ray.y |
        #     [ surface_row_axis.z surface_col_axis.z -camera_ray_directions.z ]   [ C ]   [ surface_to_camera_ray.z ]
        collision_mats[:, :, 0, :, 0] = surface_to_camera_ray
        collision_mats[:, :, 1, :, 1] = surface_to_camera_ray
        collision_mats[:, :, 2, :, 2] = surface_to_camera_ray
        collision_dets = torch.det(collision_mats)
        # Solve the system of equations to find the collision scalars.
        collision_vars = collision_dets[:, :, :3] / collision_dets[:, :, [-1]].expand(-1, -1, 3)
        # Discard any collisions that occurred behind the camera.
        collision_vars[:, :, :2] *= collision_vars[:, :, [2]].sign().clamp(0, 1).expand(-1, -1, 2)

        # Sample the radiance from the collision coordinates using bilinear filtering.
        image = self.exposure * torch.nn.functional.grid_sample(input=torch.unsqueeze(radiance.permute(2, 0, 1), dim=0),
                                                                grid=torch.unsqueeze(collision_vars[:, :, [1, 0]], dim=0) * 2 - 1,
                                                                mode='bilinear',
                                                                padding_mode='zeros',
                                                                align_corners=False).squeeze().permute(1, 2, 0)
        logging.info('Rendered %dx%d image of a %dx%d surface', image.size(1), image.size(0), surface.size(1), surface.size(0))
        return image
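
The Cramer's rule step in render() is easier to see for a single ray. A minimal standalone sketch with made-up values (not project code): each unknown is the determinant of the matrix with the corresponding column replaced by the right-hand side, divided by the determinant of the original matrix.

import numpy as np

row_axis = np.array([1.0, 0.0, 0.0])                # stand-in for the surface row axis
col_axis = np.array([0.0, 1.0, 0.0])                # stand-in for the surface column axis
ray_dir = np.array([0.2, -0.1, -1.0])               # stand-in for one camera ray direction
b = np.array([0.4, 0.6, 2.0])                       # stand-in for surface_to_camera_ray
M = np.stack([row_axis, col_axis, -ray_dir], axis=1)
det_M = np.linalg.det(M)
solution = []
for k in range(3):
    Mk = M.copy()
    Mk[:, k] = b                                    # replace column k with the right-hand side
    solution.append(np.linalg.det(Mk) / det_M)
A, B, C = solution
assert np.allclose(M @ np.array([A, B, C]), b)
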
def _feedback_flow(config: Configuration) -> None:
    '''
    The "feedback" flow iteratively infers the SVBRDF parameters of a texture, renders it, and feeds the output of the
    rendering back into the network.  The purpose of this flow is to test the robustness of an SVBRDF autoencoder.

    Args:
        config: Configuration specifying the parameters of the flow.
    '''
    with torch.no_grad():
        autoencoder, svbrdf, camera, (feedback_lights, feedback_viewer), (
            rendering_lights, rendering_viewer
        ), input_path, output_path, loops = config.load_feedback_flow()
        autoencoder.eval()

        # It is assumed that the dimensions of the input image will be accepted by the network.
        input_image = image.load(path=input_path, encoding='sRGB')
        num_texture_rows = input_image.size(0)
        num_texture_cols = input_image.size(1)
        input_distance = utils.create_radial_distance_field(
            num_rows=num_texture_rows, num_cols=num_texture_cols)

        # By convention, PyTorch expects Tensors to be in [B, D, R, C] format.
        input_batch = torch.cat([input_image, input_distance],
                                dim=2).unsqueeze(0).permute(0, 3, 1, 2)

        normals, svbrdf.parameters = SVBRDFAutoencoder.interpret(
            autoencoder.forward(input_batch))
        surface = utils.create_grid(num_rows=num_texture_rows,
                                    num_cols=num_texture_cols)

        for i in tqdm.tqdm(range(loops), desc='Feedback Looping'):
            # The slightly-awkward ordering of statements before and inside the loops ensures that |loops| can be set to zero.
            input_image = shader.shade(surface=surface,
                                       normals=normals,
                                       lights=feedback_lights,
                                       viewer=feedback_viewer,
                                       svbrdf=svbrdf)[0]
            input_batch = torch.cat([input_image, input_distance],
                                    dim=2).unsqueeze(0).permute(0, 3, 1, 2)
            normals, svbrdf.parameters = SVBRDFAutoencoder.interpret(
                autoencoder.forward(input_batch))
        _shade_render_save(normals=normals,
                           svbrdf=svbrdf,
                           lights=rendering_lights,
                           viewer=rendering_viewer,
                           camera=camera,
                           path=output_path)
Example 8
    def __init__(self, dims: Dict, path: str, layout: Dict,
                 textures: List[Texture], transforms: List[Transform],
                 svbrdf: SVBRDF, lights: List[Light], viewer: Viewer) -> None:
        '''
        Constructs a new Dataset with the given dimensions, path, layout, textures, SVBRDF, Lights, and Viewer.

        Args:
            dims: Dimensions of the Dataset textures and crops.
            path: Path to the root directory of the Dataset.
            layout: Filesystem layout of each element in the Dataset.
            textures: Texture descriptions which comprise the Dataset.
            transforms: Transforms to be applied to a sample from the Dataset.
            svbrdf: SVBRDF associated with the parameter maps in the Dataset.
            lights: Lights used to shade a texture.
            viewer: Viewer used to shade a texture.
        '''
        for key in ('Texture', 'Crop'):
            assert key in dims, f'Dimensions dictionary is missing key "{key}".'
        for key in ('Normals', 'Parameters'):
            assert key in layout, f'Layout dictionary is missing key "{key}".'
        for i, parameter in enumerate(layout['Parameters']):
            assert 'Type' in parameter, f'Parameter {i} is missing key "Type" in layout dictionary.'
            assert 'Name' in parameter, f'Parameter {i} is missing key "Name" in layout dictionary.'
        self._dims = dims
        self._path = path
        self._layout = layout
        self._textures = textures
        self._transforms = transforms
        self._svbrdf = svbrdf
        self._lights = lights
        self._viewer = viewer
        # A rendering surface is needed to generate flash-lit images for consumption by an SVBRDF autoencoder network.
        # Similarly, a radial distance field (indicating the distance from each point on the surface to the center of
        # the surface) enables the network to discriminate between flash-lit and non-flash-lit regions.
        num_crop_rows = self._dims['Crop'][0]
        num_crop_cols = self._dims['Crop'][1]
        self._surface = utils.create_grid(num_rows=num_crop_rows,
                                          num_cols=num_crop_cols)
        self._radial_distance_field = utils.create_radial_distance_field(
            num_rows=num_crop_rows, num_cols=num_crop_cols).unsqueeze(0)
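
For illustration, a hedged sketch of the radial distance field described in the comment above, assuming a unit-square surface centred at (0.5, 0.5); the actual utils.create_radial_distance_field may differ in scaling or layout.

import torch

def radial_distance_field_sketch(num_rows: int, num_cols: int) -> torch.Tensor:
    # Distance from each grid point to the centre of the surface; shape [R, C, 1].
    rows = torch.linspace(0, 1, num_rows)
    cols = torch.linspace(0, 1, num_cols)
    y, x = torch.meshgrid(rows, cols, indexing='ij')
    return torch.sqrt((x - 0.5) ** 2 + (y - 0.5) ** 2).unsqueeze(-1)
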
def compute_radiance(network_normals: Tensor, network_svbrdf: SVBRDF,
                     dataset_normals: Tensor,
                     dataset_svbrdf: SVBRDF) -> Tuple[Tensor, Tensor]:
    '''
    Computes the radiance from the given normals and SVBRDFs with respect to a random point Light and Viewer.

    Args:
        network_normals: Tensor [B, R, C, 3] of SVBRDF autoencoder normals.
        network_svbrdf: SVBRDF with embedded SVBRDF autoencoder parameters.
        dataset_normals: Tensor [B, R, C, 3] of ground-truth normals.
        dataset_svbrdf: SVBRDF with embedded ground-truth parameters.

    Returns:
        Tuple containing the SVBRDF autoencoder and Dataset radiance Tensors.
    '''
    # There is no harm in sharing the same drawing canvas for both the network and dataset renderings.
    texture_rows = dataset_normals.size(1)
    texture_cols = dataset_normals.size(2)
    surface = utils.create_grid(num_rows=texture_rows, num_cols=texture_cols)

    # The Light and Viewer are sampled from a cosine-weighted distribution following the Single-Image SVBRDF Capture
    # with a Rendering-Aware Deep Network paper.
    origin = torch.tensor([0.5, 0.5, 0.0], device=utils.get_device_name())
    lights = [
        PunctualLight(position=utils.sample_cosine_hemisphere(origin),
                      lumens=torch.rand(1).expand(3) * 2 + 0.5)
    ]
    viewer = PerspectiveViewer(position=utils.sample_cosine_hemisphere(origin))

    network_radiance = shader.shade(surface=surface,
                                    normals=network_normals,
                                    lights=lights,
                                    viewer=viewer,
                                    svbrdf=network_svbrdf)
    dataset_radiance = shader.shade(surface=surface,
                                    normals=dataset_normals,
                                    lights=lights,
                                    viewer=viewer,
                                    svbrdf=dataset_svbrdf)
    return network_radiance, dataset_radiance
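
The cosine-weighted sampling mentioned in the comment can be sketched with Malley's method (a uniform disk sample lifted onto the hemisphere). This is only a guess at the intent of utils.sample_cosine_hemisphere; the project's actual sampling radius and conventions are not shown here.

import math
import torch

def sample_cosine_hemisphere_sketch(origin: torch.Tensor) -> torch.Tensor:
    # Uniform point on the unit disk, lifted onto the hemisphere: pdf proportional to cos(theta).
    u1, u2 = torch.rand(2)
    r, phi = torch.sqrt(u1), 2 * math.pi * u2
    offset = torch.stack([r * torch.cos(phi), r * torch.sin(phi), torch.sqrt(1 - u1)])
    return origin + offset
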
Example 10
             saver.restore(sess, tf.train.latest_checkpoint(model_path))
             graph = tf.get_default_graph()
             x = graph.get_tensor_by_name("x:0")
             if len(data) % batch == 0:
                 for i in range(len(data) // batch):
                     minibatch = data[i * batch:(i + 1) * batch]
                     whoreco_result(minibatch, i * batch)
             elif len(data) < batch:
                 minibatch = data[0:len(data)]
                 partreco_result(minibatch, 0)
             else:
                 whoRange = len(data) // batch
                 partRange = len(data) % batch
                 for i in range(whoRange):
                     minibatch = data[i * batch:(i + 1) * batch]
                     whoreco_result(minibatch, i * batch)
                 minibatch = data[whoRange * batch:len(data)]
                 partreco_result(minibatch, whoRange * batch)
             result_output, crsave = selectResult(result_output, crsave)
             os.remove(ab_rootPath)
             filetime, generateTime, title = utils.create_grid(
                 crsave, result_output, path, gridH, gridC, station)
             utils.create_bin(crsave, result_output, title, filetime,
                              generateTime, station, gridH, gridC,
                              radarCount, startLon, startLat, endLon,
                              endLat, XReso, YReso)
 except:
     os.remove(ab_rootPath)
     pass
 record_count += 1
 print(record_count)
Example 11
        opt.zero_grad()
        masked_img = batch["masked_image"].to(device).float()
        mask = batch["mask"].to(device).float()
        image = batch["image"].to(device)
        pred = NET(masked_img, mask)

        loss.prepare_loss_calculation(pred, image, mask)
        loss_hole = loss.calculate_loss_hole()
        loss_valid = loss.calculate_loss_valid()
        perceptual_loss = loss.calculate_perceptual_loss()
        style_loss_out = loss.calculate_style_out_loss()
        style_loss_comp = loss.calculate_style_comp_loss()
        tv_loss = loss.calculate_tv_loss()
        actual_loss = loss_valid + 6*loss_hole + 0.05*perceptual_loss + \
                      120*(style_loss_out + style_loss_comp) + 0.1*tv_loss

        if GLOBAL_STEP % 3000 == 0:
            print(actual_loss)

            grid = create_grid(masked_img, pred)
            write_to_tensorboard(writer, grid, actual_loss, GLOBAL_STEP)

        write_to_tensorboard(writer, None, actual_loss, GLOBAL_STEP)
        actual_loss.backward()
        opt.step()
        torch.cuda.empty_cache()
        GLOBAL_STEP += 1

    model_path = os.path.join(checkpoint_path, str(epoch))
    torch.save(NET.state_dict(), model_path)
Example 12
def main(yearT,
         extraStr='v11_1',
         dx=100000,
         data_path=reanalysis_raw_path + 'ERA5/',
         out_path=forcing_save_path + 'Temp/ERA5/',
         fig_path=figure_path + 'Temp/ERA5/',
         anc_data_path='../../anc_data/'):

    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    print(xptsG)
    print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    print(dxStr)

    if not os.path.exists(out_path):
        os.makedirs(out_path)

    if not os.path.exists(fig_path):
        os.makedirs(fig_path)

    region_mask, xptsI, yptsI, _, _ = cF.get_region_mask_pyproj(anc_data_path,
                                                                proj,
                                                                xypts_return=1)
    region_maskG = griddata((xptsI.flatten(), yptsI.flatten()),
                            region_mask.flatten(), (xptsG, yptsG),
                            method='nearest')

    varStr = 't2m'

    xptsM, yptsM, temp2mYear = get_ERA5_temps(proj, data_path, yearT)

    hotdays_duration = xr.apply_ufunc(get_cumulative_hotdays,
                                      temp2mYear['t2m'].compute(),
                                      input_core_dims=[["time"]],
                                      vectorize=True)

    print(xptsM.flatten().shape)
    print(yptsM.flatten().shape)
    print(temp2mYear)
    print(hotdays_duration.values.flatten().shape)

    t2mdurG = griddata((xptsM.flatten(), yptsM.flatten()),
                       hotdays_duration.values.flatten(), (xptsG, yptsG),
                       method='linear')
    #t2mdurG[where(region_maskG>10)]=0.
    #t2mdur=t2mdur.astype('f2')

    cF.plot_gridded_cartopy(lonG,
                            latG,
                            t2mdurG,
                            proj=ccrs.NorthPolarStereo(central_longitude=-45),
                            out=fig_path + '/duration' + str(yearT) + extraStr,
                            date_string=str(yearT),
                            extra=extraStr,
                            varStr='hot days',
                            units_lab=r'>0',
                            minval=0,
                            maxval=100,
                            cmap_1=plt.cm.viridis)

    #monthStr='%02d' %(month+1)
    t2mdurG.dump(out_path + '/ERA5_duration' + dxStr + '-' + str(yearT) +
                 extraStr)
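
For reference, a small self-contained sketch of the xr.apply_ufunc pattern used above: the "time" core dimension is consumed once per grid cell, so a function written for a 1-D series gets mapped over the whole field. The stand-in function below just counts days above freezing; the real get_cumulative_hotdays is not shown here.

import numpy as np
import xarray as xr

def count_days_above_freezing(series):
    # Stand-in reduction over one grid cell's daily 2 m temperatures (K).
    return np.count_nonzero(series > 273.15)

t2m = xr.DataArray(np.random.default_rng(0).normal(270, 5, size=(365, 4, 4)),
                   dims=("time", "y", "x"))
duration = xr.apply_ufunc(count_days_above_freezing, t2m,
                          input_core_dims=[["time"]],   # consume the time axis
                          vectorize=True)               # apply once per (y, x) cell
print(duration.shape)  # (4, 4)
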
Example 13
def main(year,
         startMonth=0,
         endMonth=11,
         extraStr='v11',
         dx=100000,
         data_path=nsidc_raw_path,
         out_path=forcing_save_path,
         fig_path=figure_path + 'IceDrift/NSIDCv4/',
         anc_data_path='../../AncData/'):

    print(data_path)
    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    # create source and target cartopy projections
    srcProj = cF.EASE_North()
    newProj = cF.P3413()

    print(xptsG)
    print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    print(dxStr)

    numDays = cF.getLeapYr(year)
    if (numDays > 365):
        monIndex = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    else:
        monIndex = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

    if not os.path.exists(out_path + '/' + dxStr + '/IceDrift/NSIDCv4/' +
                          str(year)):
        os.makedirs(out_path + '/' + dxStr + '/IceDrift/NSIDCv4/' + str(year))

    if not os.path.exists(fig_path):
        os.makedirs(fig_path)

    for month in range(startMonth, endMonth + 1):
        print(month)
        numDays = monIndex[month]

        for x in range(numDays):
            dayT = sum(monIndex[0:month]) + x
            dayStr = '%03d' % dayT
            print(dayStr)

            print('Drift, year:', year, 'day:', dayStr)
            xeasedrift, yeasedrift, lont, latt = cF.get_nsidc_driftv4(
                data_path, year, dayT, 'daily')
            u_rot, v_rot = newProj.transform_vectors(srcProj, lont, latt,
                                                     xeasedrift, yeasedrift)
            drift_day_xy = np.stack((u_rot, v_rot))

            xpts, ypts = proj(lont, latt)
            drift_xyG = cF.int_smooth_drifts_v2(xptsG,
                                                yptsG,
                                                xpts,
                                                ypts,
                                                latt,
                                                drift_day_xy,
                                                sigma_factor=0.5)

            #drift_day_xy[1] = vy
            print(drift_xyG)

            cF.plot_drift_cartopy(lonG,
                                  latG,
                                  xptsG,
                                  yptsG,
                                  drift_xyG[0],
                                  drift_xyG[1],
                                  np.sqrt(drift_xyG[0]**2 + drift_xyG[1]**2),
                                  out=fig_path + str(year) + '_d' + dayStr +
                                  dxStr + extraStr,
                                  units_lab='m/s',
                                  units_vec=r'm s$^{-1}$',
                                  minval=0,
                                  maxval=0.5,
                                  vector_val=0.1,
                                  date_string=str(year) + '_d' + dayStr,
                                  month_string='',
                                  varStr='NSIDCv4 ice drift ',
                                  cbar_type='max',
                                  cmap_1=plt.cm.viridis)

            drift_xyG.dump(out_path + '/' + dxStr + '/IceDrift/NSIDCv4/' +
                           str(year) + '/NSIDCv4_driftG' + dxStr + '-' +
                           str(year) + '_d' + dayStr + extraStr)
Example 14
def main(year,
         startMonth=8,
         endMonth=11,
         dx=100000,
         extraStr='v11_1',
         data_path=reanalysis_raw_path + 'ERA5/',
         out_path=forcing_save_path + 'Precip/ERA5/',
         fig_path=figure_path + 'Precip/ERA5/',
         anc_data_path='../../anc_data/'):

    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    print(xptsG)
    print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    print(dxStr)

    region_mask, xptsI, yptsI, _, _ = cF.get_region_mask_pyproj(anc_data_path,
                                                                proj,
                                                                xypts_return=1)
    region_maskG = griddata((xptsI.flatten(), yptsI.flatten()),
                            region_mask.flatten(), (xptsG, yptsG),
                            method='nearest')

    varStr = 'sf'

    if not os.path.exists(fig_path):
        os.makedirs(fig_path)

    yearT = year

    numDays = cF.getLeapYr(year)
    if (numDays > 365):
        monIndex = [0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366]
    else:
        monIndex = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365]

    if not os.path.exists(out_path + '/' + str(year)):
        os.makedirs(out_path + '/' + str(year))

    startDay = monIndex[startMonth]

    if (endMonth > 11):
        endDay = monIndex[endMonth + 1 - 12] + monIndex[-1] - 1
    else:
        endDay = monIndex[endMonth + 1]

    calc_weights = 1  # start as one to calculate weightings then gets set as zero for future files

    for dayT in range(startDay, endDay):

        dayStr = '%03d' % dayT
        month = np.where(dayT - np.array(monIndex) >= 0)[0][-1]
        monStr = '%02d' % (month + 1)
        dayinmonth = dayT - monIndex[month]
        print('Precip day:', dayT, dayinmonth)

        #in  kg/m2 per day
        xptsM, yptsM, lonsM, latsM, Precip = cF.get_ERA5_precip_days_pyproj(
            proj,
            data_path,
            str(yearT),
            monStr,
            dayinmonth,
            lowerlatlim=30,
            varStr=varStr)

        # if it's the first day, calculate weights
        if calc_weights == 1:
            # calculate Delaunay triangulation interpolation weightings for first file of the year
            print('calculating interpolation weightings')
            ptM_arr = np.array([xptsM.flatten(), yptsM.flatten()]).T
            tri = Delaunay(ptM_arr)  # delaunay triangulation
            calc_weights = 0

        # grid using linearNDInterpolator with triangulation calculated above
        # (faster than griddata but produces identical output)
        interp = LinearNDInterpolator(tri, Precip.flatten())
        PrecipG = interp((xptsG, yptsG))

        cF.plot_gridded_cartopy(
            lonG,
            latG,
            PrecipG,
            proj=ccrs.NorthPolarStereo(central_longitude=-45),
            out=fig_path + '/ERA5' + varStr + dxStr + '-' + str(yearT) + '_d' +
            dayStr + extraStr,
            date_string=str(yearT),
            month_string=str(dayT),
            extra=extraStr,
            varStr='ERA5 snowfall ',
            units_lab=r'kg/m2',
            minval=0,
            maxval=10,
            cmap_1=plt.cm.viridis)

        PrecipG.dump(out_path + str(yearT) + '/ERA5' + varStr + dxStr + '-' +
                     str(yearT) + '_d' + dayStr + extraStr)
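
The "calculate weights once" comments above rely on the fact that the expensive step in linear interpolation of scattered points is the triangulation, not the evaluation. A small standalone sketch of that pattern with made-up data (the ERA5 grids themselves are not reproduced here):

import numpy as np
from scipy.spatial import Delaunay
from scipy.interpolate import LinearNDInterpolator, griddata

rng = np.random.default_rng(0)
src_pts = rng.random((500, 2))                       # stand-in source locations
grid_x, grid_y = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))

tri = Delaunay(src_pts)                              # expensive step, done once
for day in range(3):                                 # stand-in for the daily loop
    values = rng.random(500)                         # stand-in for one day's field
    interp = LinearNDInterpolator(tri, values)
    field_fast = interp((grid_x, grid_y))
    # griddata redoes the triangulation on every call but gives the same result
    field_slow = griddata(src_pts, values, (grid_x, grid_y), method='linear')
    assert np.allclose(field_fast, field_slow, equal_nan=True)
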
Example 15
def main(yearT,
         extraStr='v11_1',
         dx=100000,
         data_path=reanalysis_raw_path + 'ERA5/',
         out_path=forcing_save_path + 'InitialConditions/ERA5/',
         fig_path=figure_path + 'InitialConditions/ERA5/',
         anc_data_path='../../anc_data/'):

    xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
    print(xptsG)
    print(yptsG)

    dxStr = str(int(dx / 1000)) + 'km'
    print(dxStr)

    if not os.path.exists(out_path):
        os.makedirs(out_path)

    if not os.path.exists(fig_path):
        os.makedirs(fig_path)

    region_mask, xptsI, yptsI, _, _ = cF.get_region_mask_pyproj(anc_data_path,
                                                                proj,
                                                                xypts_return=1)
    region_maskG = griddata((xptsI.flatten(), yptsI.flatten()),
                            region_mask.flatten(), (xptsG, yptsG),
                            method='nearest')

    reanalysis = 'ERA5'
    varStr = 't2m'

    iceconc_path = forcing_save_path + '/IceConc/CDR/'
    temp_path = forcing_save_path + '/Temp/ERA5/'

    t2mdurGAll = []
    for y in range(1980, 1991 + 1, 1):
        if (y == 1987):
            continue
        t2mdurGT = np.load(temp_path + 'duration' + dxStr + '-' + str(y) +
                           extraStr,
                           allow_pickle=True)
        t2mdurGAll.append(t2mdurGT)

    t2mdurGclim = ma.mean(t2mdurGAll, axis=0)
    print(t2mdurGclim.shape)
    w99 = cF.getWarren(lonG, latG, 7)

    for yearT in range(2021, 2021 + 1, 1):
        if (yearT == 1987):
            continue

        t2mdurGT = np.load(temp_path + 'duration' + dxStr + '-' + str(yearT) +
                           extraStr,
                           allow_pickle=True)
        print(t2mdurGT.shape)
        W99yrT = w99 * (t2mdurGclim / t2mdurGT)
        W99yrT[np.where(latG < 70)] = 0
        W99yrT[np.where(region_maskG > 8.2)] = 0
        W99yrT[np.where(region_maskG <= 7.8)] = 0
        W99yrT[np.where(W99yrT < 0)] = 0
        W99yrT[np.where(W99yrT > 10)] = 10

        day = 226
        dayStr = str(day)  #226 is middle of August

        iceConcDayG = np.load(iceconc_path + str(yearT) + '/iceConcG_CDR' +
                              dxStr + '-' + str(yearT) + '_d' + dayStr +
                              extraStr,
                              allow_pickle=True)
        W99yrT[np.where(iceConcDayG < 0.15)] = 0

        W99yrT = gaussian_filter(W99yrT, sigma=1)

        # Convert to meters
        W99yrT = W99yrT / 100.

        cF.plot_gridded_cartopy(
            lonG,
            latG,
            W99yrT,
            proj=ccrs.NorthPolarStereo(central_longitude=-45),
            out=fig_path + '/initial_conditions' + str(yearT) + dxStr +
            extraStr,
            date_string=str(yearT),
            extra=extraStr,
            varStr='Snow depth ',
            units_lab=r'm',
            minval=0,
            maxval=0.12,
            cmap_1=plt.cm.viridis)

        W99yrT.dump(out_path + 'ICsnow' + dxStr + '-' + str(yearT) + extraStr)
Example 16
from valueIteration import do_several_value_iterations
from policyIteration import do_several_policy_iterations
from utils import create_grid

file_name = 'i1.txt'

# create a new Grid
rlt = create_grid(file_name)  # [size, discount_factor, noises, states]
grid_size = rlt[0]
discount_factor = rlt[1]
noises = rlt[2]
states = rlt[3]

do_several_value_iterations(discount_factor, noises, states, grid_size)

rlt = create_grid(file_name)  # [size, discount_factor, noises, states]
grid_size = rlt[0]
discount_factor = rlt[1]
noises = rlt[2]
states = rlt[3]
do_several_policy_iterations(discount_factor, noises, states, grid_size)
Example 17
imsize = (256, 256)
center = (128, 128)
gauss_peak = 0
dist_from_core = 24
gauss_bmaj = 10
gauss_e = 1.
gauss_bpa = 0.
gauss_peak_jet = 0.0
dist_from_core_jet = 24
gauss_bmaj_jet = 10
gauss_e_jet = 1.
gauss_bpa_jet = 0.
cut = 0.000001
transverse = 'gauss'

x, y = create_grid(imsize)
x -= center[0]
y -= center[1]

max_flux = float(max_flux)
along = np.where(x > 0, 0, 0)
if transverse == 'linear':
    perp = -(2 * max_flux / width) * abs(y)
elif transverse == 'quadratic':
    perp = -(max_flux / (width / 2)**2.) * y**2.
elif transverse == 'sqrt':
    perp = -(max_flux / np.sqrt(width / 2.)) * np.sqrt(abs(y))
elif transverse == 'gauss':
    gauss_width = 0.3 * x
    perp = np.where(x > 0.1,
                    max_flux * np.exp(-y**2 / (2. * gauss_width**2)) / x, 0)
Example 18
train_loader, test_loader, N_train, N_test, z = data.train_test_split_latents(net,
                                                                              experiment_parameters,
                                                                              x_train,
                                                                              x_test,
                                                                              batch_size=batch_size)

if experiment_parameters["load_land"]:
    mu = torch.Tensor(np.load(join_path(model_dir, 'land_mu.npy'))).to(device).requires_grad_(True)
    A = torch.Tensor(np.load(join_path(model_dir, 'land_std.npy'))).to(device).requires_grad_(True)
else:
    mu = stats.sturm_mean(net, z.to(device), num_steps=5).unsqueeze(0)

# meshgrid creating
meshsize = 100 if experiment_parameters["sampled"] else 20
Mxy, dv = utils.create_grid(z, meshsize)
# curves = {}  # for init curves, but they seem useless for now

if experiment_parameters["sampled"]:
    with torch.no_grad():
        grid_prob, grid_metric, grid_metric_sum, grid_save = land.LAND_grid_prob(grid=Mxy,
                                                                                 model=net,
                                                                                 batch_size=1,
                                                                                 device=device)
else:
    grid_metric_sum = None


if not experiment_parameters["load_land"]:
    mus, average_loglik, stds = [], [], []
    for i in range(10):
Example 19
def main(year, startMonth=3, endMonth=3, extraStr='v11_1', dx=100000, data_path=cdr_raw_path, out_path=forcing_save_path, fig_path=figure_path+'IceConc/CDR/', anc_data_path='../../anc_data/'):
		
	xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
	print(xptsG)
	print(yptsG)

	dxStr=str(int(dx/1000))+'km'
	print(dxStr)

	region_mask, xptsI, yptsI, lonsI, latsI = cF.get_region_mask_pyproj(anc_data_path, proj, xypts_return=1)
	region_maskG = griddata((xptsI.flatten(), yptsI.flatten()), region_mask.flatten(), (xptsG, yptsG), method='nearest')

	product='CDR'

	numDaysYr=cF.getLeapYr(year)
	if (numDaysYr>365):
		monIndex = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
	else:
		monIndex = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

	if not os.path.exists(out_path+'/'+str(year)):
		os.makedirs(out_path+'/'+str(year))

	if not os.path.exists(fig_path):
		os.makedirs(fig_path)
		
	calc_weights = 1 # start as one to calculate weightings then gets set as zero for future files

	for month in range(startMonth, endMonth+1):
		print(month)
		mstr='%02d' %(month+1)
		# Get pole hole	
		pmask=cF.get_pmask(year, month)

		numDays=monIndex[month]
		
		# should return array with nans not masked as needed for regridding.
		for x in range(numDays):
			dayT=sum(monIndex[0:month])+x
			daySumStr='%03d' %(dayT)
			dayMonStr='%02d' %(x+1)
			print('day month string', dayMonStr)
			
			try:
				# Try final first
				fileT=glob(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')[0]
				print(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')
				print('final data')
			except:
				try:
					# Try nrt
					fileT=glob(data_path+'/nrt/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')[0]
					print(data_path+'/nrt/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')
					print('nrt data')
				except:	
					try:
						dayMonStr='%03d' %(dayT-1)
						fileT=glob(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')[0]
						print(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')
						print('previous day final data')
					except:
						try:
							dayMonStr='%03d' %(dayT+1)
							fileT=glob(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')[0]
							print(data_path+'/final/'+str(year)+'/'+mstr+'/*'+str(year)+mstr+dayMonStr+'*.nc')
							print('following day final data')
						except:
							print('no conc')
							# previously was pass but this meant it would just use the previous loop data below
							continue

			iceConcDay = cF.getCDRconcproj(fileT, mask=0, maxConc=1, lowerConc=1)
			print(iceConcDay.shape)

			# if it's the first day, calculate weights
			if calc_weights == 1:

				# calculate Delaunay triangulation interpolation weightings for first file of the year
				print('calculating interpolation weightings')

				# Use region mask which is on the same grid!
				ptM_arr = np.array([xptsI.flatten(),yptsI.flatten()]).T
				tri = Delaunay(ptM_arr) # delaunay triangulation
				calc_weights = 0

			
			iceConcDay[np.where(region_mask>10)]=np.nan

			#iceConcDayG = griddata((xpts0.flatten(), ypts0.flatten()), iceConcDay.flatten(), (xptsG, yptsG), method='linear')
			# grid using linearNDInterpolator with triangulation calculated above 
			# (faster than griddata but produces identical output)
			interp = LinearNDInterpolator(tri,iceConcDay.flatten())
			iceConcDayG = interp((xptsG,yptsG))

			iceConcDayG[np.where(iceConcDayG<0.15)]=0
			iceConcDayG[np.where(iceConcDayG>1)]=1

			iceConcDayG[np.where(region_maskG>10)]=np.nan

			cF.plot_gridded_cartopy(lonG, latG, iceConcDayG, proj=ccrs.NorthPolarStereo(central_longitude=-45), out=fig_path+'iceConcG_-'+str(year)+mstr+dayMonStr+dxStr+extraStr, 
				date_string=str(year), month_string=mstr+dayMonStr, extra=extraStr, varStr='CDR ice conc', units_lab='', minval=0, maxval=1, cmap_1=plt.cm.viridis)
		
			iceConcDayG.dump(out_path+str(year)+'/iceConcG_CDR'+dxStr+'-'+str(year)+'_d'+daySumStr+extraStr)
Example 20
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp2d
import sys
sys.path.append('../')
import utils as cF
import cartopy.crs as ccrs
import pandas as pd
import xarray as xr

ancDataPath = '../../anc_data/'

dx = 100000  # adjust to change resolution
dxStr = str(int(dx / 1000)) + 'km'

xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)

print('grid generated')
print(latG.shape)

lat_len = latG.shape[0]

lat_60_idx = np.argmin(np.abs(latG[:, lat_len // 2] -
                              60))  # index of lat closest to 60

# check which side of the centre the limit is on
if lat_60_idx < lat_len // 2:
    lim_low = lat_60_idx
    lim_high = lat_len - lat_60_idx
else:
    lim_low = lat_len - lat_60_idx
Example 21
def main(year1, month1, day1, year2, month2, day2, outPathT='.', forcingPathT='.', anc_data_pathT='../anc_data/', figPathT='../Figures/', 
	precipVar='ERA5', windVar='ERA5', driftVar='OSISAF', concVar='CDR', icVar='ERAI', densityTypeT='variable', 
	outStr='', extraStr='', IC=2, windPackFactorT=0.1, windPackThreshT=5., leadLossFactorT=0.1, atmLossFactorT=2.2e-8, dynamicsInc=1, leadlossInc=1, 
	windpackInc=1, atmlossInc=0, saveData=1, plotBudgets=1, plotdaily=1, saveFolder='', dx=50000,scaleCS=False):
	""" 

	Main model function

	Args:
		The various model configuration parameters

	"""

	#------- Create map projection
	xptsG, yptsG, latG, lonG, proj = cF.create_grid(dxRes=dx)
	nx=xptsG.shape[0]
	ny=xptsG.shape[1]

	dxStr=str(int(dx/1000))+'km'
	print(nx, ny, dxStr)

	# Assign some global parameters
	global dataPath, forcingPath, outPath, ancDataPath
	
	outPath=outPathT+dxStr+'/'
	forcingPath=forcingPathT+dxStr+'/'
	ancDataPath=anc_data_pathT
	print('OutPath:', outPath)
	print('forcingPath:', forcingPath)
	print('ancDataPath:', ancDataPath)

	# Assign density of the two snow layers
	global snowDensityFresh, snowDensityOld, minSnowD, minConc, leadLossFactor, atmLossFactor, windPackThresh, windPackFactor, deltaT
	snowDensityFresh=200. # density of fresh snow layer
	snowDensityOld=350. # density of old snow layer
	minSnowD=0.02 # minimum snow depth for a density estimate
	minConc=0.15 # mask budget values with a concentration below this value

	deltaT=60.*60.*24. # time interval (seconds in a day)

	region_mask, xptsI, yptsI = cF.get_region_mask_pyproj(anc_data_pathT, proj, xypts_return=1)
	region_maskG = griddata((xptsI.flatten(), yptsI.flatten()), region_mask.flatten(), (xptsG, yptsG), method='nearest')

	leadLossFactor=leadLossFactorT # Snow loss to leads coefficient
	windPackThresh=windPackThreshT # Minimum winds needed for wind packing
	windPackFactor=windPackFactorT # Fraction of snow packed into old snow layer
	atmLossFactor=atmLossFactorT # Snow loss to atmosphere coefficient

	#---------- Current year
	yearCurrent=year1
	
	#--------- Get time period info
	startDay, numDays, numDaysYear1, dateOut= cF.getDays(year1, month1, day1, year2, month2, day2)
	print (startDay, numDays, numDaysYear1, dateOut)

	# make this into a small function
	dates=[]
	for x in range(0, numDays):
		#print x
		date = datetime.datetime(year1, month1+1, day1+1) + datetime.timedelta(x)
		#print (int(date.strftime('%Y%m%d')))
		dates.append(int(date.strftime('%Y%m%d')))
	#print(dates)
	
	CSstr = ''
	if scaleCS:
		# load scaling factors; assumes scaling factors are in same directory as NESOSIM.py
		monthlyScalingFactors = xr.open_dataset('{}scale_coeffs_{}_{}_v2.nc'.format(ancDataPath, precipVar, dxStr))['scale_factors']
		CSstr = 'CSscaled'

	#------ create output strings and file paths -----------
	saveStr= precipVar+CSstr+'sf'+windVar+'winds'+driftVar+'drifts'+concVar+'sic'+'rho'+densityTypeT+'_IC'+str(IC)+'_DYN'+str(dynamicsInc)+'_WP'+str(windpackInc)+'_LL'+str(leadlossInc)+'_AL'+str(atmlossInc)+'_WPF'+str(windPackFactorT)+'_WPT'+str(windPackThreshT)+'_LLF'+str(leadLossFactorT)+'-'+dxStr+extraStr+outStr+'-'+dateOut
	saveStrNoDate=precipVar+CSstr+'sf'+windVar+'winds'+driftVar+'drifts'+concVar+'sic'+'rho'+densityTypeT+'_IC'+str(IC)+'_DYN'+str(dynamicsInc)+'_WP'+str(windpackInc)+'_LL'+str(leadlossInc)+'_AL'+str(atmlossInc)+'_WPF'+str(windPackFactorT)+'_WPT'+str(windPackThreshT)+'_LLF'+str(leadLossFactorT)+'-'+dxStr+extraStr+outStr
	
	print ('Saving to:', saveStr)
	 #'../../DataOutput/'

	savePath=outPath+saveFolder+'/'+saveStrNoDate
	# Declare empty arrays for compiling budgets
	if not os.path.exists(savePath+'/budgets/'):
		os.makedirs(savePath+'/budgets/')
	if not os.path.exists(savePath+'/final/'):
		os.makedirs(savePath+'/final/')

	global figpath
	figpath=figPathT+'/Diagnostic/'+dxStr+'/'+saveStrNoDate+'/'
	if not os.path.exists(figpath):
		os.makedirs(figpath)
	if not os.path.exists(figpath+'/daily_snow_depths/'):
		os.makedirs(figpath+'/daily_snow_depths/')

	precipDays, iceConcDays, windDays, tempDays, snowDepths, density, snowDiv, snowAdv, snowAcc, snowOcean, snowWindPack, snowWindPackLoss, snowWindPackGain, snowLead, snowAtm = genEmptyArrays(numDays, nx, ny)

	print('IC:', IC)
	if (IC>0):
		if (IC==1):
			# August Warren climatology snow depths
			ICSnowDepth = np.load(forcingPath+'InitialConditions/AugSnow'+dxStr, allow_pickle=True)
			print('Initialize with August Warren climatology')
		
		elif (IC==2):
			# Petty initial conditions
			try:
				ICSnowDepth = np.load(forcingPath+'InitialConditions/'+icVar+'/ICsnow'+dxStr+'-'+str(year1)+extraStr, allow_pickle=True)
				print('Initialize with new v1.1 scaled initial conditions')
				print(np.amax(ICSnowDepth))
			except:
				print('No initial conditions file available')

		iceConcDayG, precipDayG, driftGdayG, windDayG, tempDayG =loadData(year1, startDay, precipVar, windVar, concVar, driftVar, dxStr, extraStr)
		ICSnowDepth[np.where(iceConcDayG<minConc)]=0

		#--------Split the initial snow depth over both layers
		snowDepths[0, 0]=ICSnowDepth*0.5
		snowDepths[0, 1]=ICSnowDepth*0.5

	#pF.plotSnow(m, xptsG, yptsG, densityT, date_string=str(startDay-1), out=figpath+'/Snow/2layer/densityD'+driftP+extraStr+reanalysisP+varStr+'_sy'+str(year1)+'d'+str(startDay)+outStr+'T0', units_lab=r'kg/m3', minval=180, maxval=360, base_mask=0, norm=0, cmap_1=cm.viridis)

	# Loop over days 
	for x in range(numDays-1):	
		day = x+startDay
		
		if (day>=numDaysYear1):
			# If day goes beyond the number of days in initial year, jump to the next year
			day=day-numDaysYear1
			yearCurrent=year2
		
		print ('Day of year:', day)
		print ('Date:', dates[x])
		
		#-------- Load daily data 
		iceConcDayG, precipDayG, driftGdayG, windDayG, tempDayG =loadData(yearCurrent, day, precipVar, windVar, concVar, driftVar, dxStr, extraStr)
		
		#-------- Apply CloudSat scaling if used
		if scaleCS:
			currentMonth = doyToMonth(day, yearCurrent) # get current month
			scalingFactor = monthlyScalingFactors.loc[currentMonth,:,:] # get scaling factor for current month
			# apply scaling to current day's precipitation
			precipDayG = applyScaling(precipDayG, scalingFactor,scaling_type='mul').values

		#-------- Calculate snow budgets
		calcBudget(xptsG, yptsG, snowDepths, iceConcDayG, precipDayG, driftGdayG, windDayG, tempDayG,
			density, precipDays, iceConcDays, windDays, tempDays, snowAcc, snowOcean, snowAdv, 
			snowDiv, snowLead, snowAtm, snowWindPackLoss, snowWindPackGain, snowWindPack, region_maskG, dx, x, day,
			densityType=densityTypeT, dynamicsInc=dynamicsInc, leadlossInc=leadlossInc, windpackInc=windpackInc, atmlossInc=atmlossInc)
		
		if (plotdaily==1):
			cF.plot_gridded_cartopy(lonG, latG, snowDepths[x+1, 0]+snowDepths[x+1, 1], proj=ccrs.NorthPolarStereo(central_longitude=-45), date_string='', out=figpath+'daily_snow_depths/snowTot_'+saveStrNoDate+str(x), units_lab='m', varStr='Snow depth', minval=0., maxval=0.6)
	
	#------ Load last data 
	iceConcDayG, precipDayG, _, windDayG, tempDayG =loadData(yearCurrent, day+1, precipVar, windVar, concVar, driftVar, dxStr, extraStr)
	precipDays[x+1]=precipDayG
	iceConcDays[x+1]=iceConcDayG
	windDays[x+1]=windDayG
	tempDays[x+1]=tempDayG
	
	if (saveData==1):
		# Output snow budget terms to netcdf datafiles
		cF.OutputSnowModelRaw(savePath, saveStr, snowDepths, density, precipDays, iceConcDays, windDays, snowAcc, snowOcean, snowAdv, snowDiv, snowLead, snowAtm, snowWindPack)
		cF.OutputSnowModelFinal(savePath, 'NESOSIMv11_'+dateOut, lonG, latG, xptsG, yptsG, snowDepths[:, 0]+snowDepths[:, 1], (snowDepths[:, 0]+snowDepths[:, 1])/iceConcDays, density, iceConcDays, precipDays, windDays, tempDays, dates)

	if (plotBudgets==1):
		# Plot final snow budget terms 
		cF.plot_budgets_cartopy(lonG, latG, precipDayG, windDayG, snowDepths[x+1], snowOcean[x+1], snowAcc[x+1], snowDiv[x+1], \
		snowAdv[x+1], snowLead[x+1], snowAtm[x+1], snowWindPack[x+1], snowWindPackLoss[x+1], snowWindPackGain[x+1], density[x+1], dates[-1], figpath, totalOutStr=saveStr)
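
As a footnote to the "make this into a small function" comment inside main() above, a minimal sketch of that date-list helper (same zero-based month1/day1 convention as the loop it would replace; not part of the original code):

import datetime

def build_date_ints(year1, month1, day1, numDays):
    # Returns [YYYYMMDD, ...] as integers for numDays consecutive days from the model start date.
    start = datetime.datetime(year1, month1 + 1, day1 + 1)
    return [int((start + datetime.timedelta(x)).strftime('%Y%m%d')) for x in range(numDays)]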