Example #1
def process_one_scene(scene_files,
                      out_path,
                      use_iband_res=False,
                      engine='h5netcdf',
                      all_channels=False,
                      pps_channels=False,
                      orbit_n=0):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='viirs_sdr', filenames=scene_files)

    MY_MBAND = MBAND_DEFAULT
    MY_IBAND_I = IBAND_DEFAULT_I
    MY_IBAND_M = IBAND_DEFAULT_M

    if all_channels:
        MY_MBAND = MBANDS
        MY_IBAND_I = IBANDS
        MY_IBAND_M = MBANDS
    if pps_channels:
        MY_MBAND = MBAND_PPS
        MY_IBAND_I = IBAND_PPS_I
        MY_IBAND_M = IBAND_PPS_M

    if use_iband_res:
        scn_.load(MY_IBAND_I + ANGLE_NAMES + ['i_latitude', 'i_longitude'],
                  resolution=371)
        scn_.load(MY_IBAND_M, resolution=742)
        scn_ = scn_.resample(resampler='native')
    else:
        scn_.load(MY_MBAND + ANGLE_NAMES + ['m_latitude', 'm_longitude'],
                  resolution=742)

    # one ir channel
    irch = scn_['M15']

    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='viirs', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_,
                                                     band=irch,
                                                     sensor='viirs'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_viirs(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
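A minimal driver for this function might look like the sketch below; the glob pattern, output directory, and orbit number are placeholders and not part of the original example.

if __name__ == '__main__':
    from glob import glob

    # Hypothetical input: all SDR granules belonging to one VIIRS scene.
    scene_files = glob('/data/viirs_sdr/SV*_npp_d20190910_t1038*.h5')
    process_one_scene(scene_files, '/data/pps_l1c',
                      use_iband_res=False, orbit_n=12345)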
Example #2
def process_one_scene(scene_files, out_path, engine='h5netcdf',
                      all_channels=False, pps_channels=False):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(
        reader='slstr_l1b',
        filenames=scene_files)

    MY_BANDNAMES = BANDNAMES_DEFAULT
    if all_channels:
        MY_BANDNAMES = BANDNAMES
    if pps_channels:
        MY_BANDNAMES = BANDNAMES_PPS

    scn_.load(MY_BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES)

    # Everything should be on the same grid, to be saved as PPS level 1c
    scn_ = scn_.resample(resampler="native")

    # one ir channel
    irch = scn_['S8']

    # Set header and band attributes
    set_header_and_band_attrs(scn_)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='slstr', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch, sensor='slstr'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_slstr(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time()-tic))
Example #3
    def read(self,
             fname,
             datavars,
             gvars,
             metadata,
             chans=None,
             sector_definition=None):

        # Use filename field for filename_datetime if it is available.
        dfn = DataFileName(os.path.basename(glob(os.path.join(fname, '*'))[0]))
        if dfn:
            sdfn = dfn.create_standard()
            metadata['top']['filename_datetime'] = sdfn.datetime

        metadata['top']['start_datetime'] = sdfn.datetime
        metadata['top']['end_datetime'] = sdfn.datetime
        metadata['top']['dataprovider'] = 'nesdisstar'
        metadata['top']['platform_name'] = sdfn.satname
        metadata['top']['source_name'] = 'seviri'
        # MUST be set on readers that sector at read time.
        # Affects how reading/processing is done in driver.py
        metadata['top']['sector_definition'] = sector_definition
        metadata['top']['SECTOR_ON_READ'] = True

        si = SatSensorInfo(metadata['top']['platform_name'],
                           metadata['top']['source_name'])
        if not si:
            from ..scifileexceptions import SciFileError
            raise SciFileError(
                'Unrecognized platform and source name combination: ' +
                metadata['top']['platform_name'] + ' ' +
                metadata['top']['source_name'])

        # chans == [] specifies we don't want to read ANY data, just metadata.
        # chans == None specifies that we are not specifying a channel list,
        #               and thus want ALL channels.
        if chans == []:
            # If NO CHANNELS were specifically requested, just return at this
            # point with the metadata fields populated. A dummy SciFile dataset
            # will be created with only metadata. This is for checking what
            # platform/source combination we are using, etc.
            return

        outdir = os.path.join(gpaths['LOCALSCRATCH'],
                              os.path.dirname(sdfn.name))
        self.decompress_msg(fname, outdir, chans)
        try:
            global_data = Scene(platform_name="Meteosat-8",
                                sensor="seviri",
                                reader="hrit_msg",
                                start_time=sdfn.datetime,
                                base_dir=outdir)
        except TypeError:
            global_data = Scene(
                filenames=glob(os.path.join(outdir, '*')),
                reader="hrit_msg",
                filter_parameters={'start_time': sdfn.datetime})
        metadata['top']['start_datetime'] = global_data.start_time
        metadata['top']['end_datetime'] = global_data.end_time

        # Loop through each dataset name found in the dataset_info property above.
        for dsname in self.dataset_info.keys():
            # Loop through the variables found in the current dataset
            # The dataset_info dictionary maps the geoips varname to the
            # varname found in the original datafile
            for geoipsvarname, spvarname in self.dataset_info[dsname].items():
                # If we requested specific channels, and the current channel
                # is not in the list, skip this variable.
                if chans and geoipsvarname not in chans:
                    continue
                # Read the current channel data into the datavars dictionary
                log.info('    Initializing ' + dsname + ' channel "' +
                         spvarname + '" from file into SciFile channel: "' +
                         geoipsvarname + '"...')
                global_data.load([spvarname])
                # Read spvarname from the original datafile into datavars[dsname][geoipsvarname]
        ad = sector_definition.area_definition
        log.info('    Sectoring data to ' + ad.name + ' ...')
        sectored_data = global_data.resample(ad)
        for spvarname in sectored_data.datasets.keys():
            for dsname in self.dataset_info.keys():
                for geoipsvarname in self.dataset_info[dsname].keys():
                    if self.dataset_info[dsname][
                            geoipsvarname] == spvarname.name:
                        if 'Longitude' not in gvars[dsname].keys():
                            log.info('    Saving Longitude to gvars')
                            gvars[dsname]['Longitude'] = np.ma.array(
                                ad.get_lonlats()[0])
                        if 'Latitude' not in gvars[dsname].keys():
                            log.info('    Saving Latitude to gvars')
                            gvars[dsname]['Latitude'] = np.ma.array(
                                ad.get_lonlats()[1])
                        if 'SunZenith' not in gvars[dsname].keys():
                            from geoips.scifile.solar_angle_calc import satnav
                            log.info(
                                '        Using satnav, can only calculate Sun Zenith angles'
                            )
                            gvars[dsname]['SunZenith'] = satnav(
                                'SunZenith', metadata['top']['start_datetime'],
                                gvars[dsname]['Longitude'],
                                gvars[dsname]['Latitude'])
                        self.set_variable_metadata(metadata, dsname,
                                                   geoipsvarname)
                        try:
                            datavars[dsname][geoipsvarname] = np.ma.array(
                                sectored_data.datasets[spvarname.name].data,
                                mask=sectored_data.datasets[spvarname.name].mask)
                            log.warning('Sectored variable %s ' %
                                        (spvarname.name))
                        except AttributeError:
                            log.warning(
                                'Variable %s does not contain a mask, masking invalid values! Might take longer'
                                % (spvarname.name))
                            datavars[dsname][geoipsvarname] = np.ma.masked_invalid(
                                sectored_data.datasets[spvarname.name].data)
Example #4
from satpy.scene import Scene
#from satpy.utils import debug_on
# debug_on()

if __name__ == '__main__':

    scn = Scene(
        sensor='viirs',
        satid='NPP',
        filenames=[
            "/home/a000680/data/osisaf/S-OSI_-FRA_-NPP_-NARSST_FIELD-201609081300Z.nc"],
        reader='ghrsst_osisaf'
    )

    scn.load(['sea_surface_temperature'])
    lcd = scn.resample('euro4', radius_of_influence=2000)

    sstdata = lcd['sea_surface_temperature'][:]
    import numpy as np
    arr = np.ma.where(np.less_equal(sstdata, 0), 0, sstdata - 273.15)

    # Convert sst to numbers between 0 and 28, corresponding to the lut:
    data = np.ma.where(np.less(arr, 0), 28, 28.0 - arr)
    data = np.ma.where(np.greater(arr, 23.0), 4, data).round().astype('uint8')

    from trollimage.image import Image
    from satpy.imageo import palettes
    palette = palettes.sstlut_osisaf_metno()

    img = Image(data, mode='P', palette=palette)
    img.show()
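If the colourised SST image should also end up on disk rather than only in a viewer window, trollimage's Image can write it out directly; the filename below is purely illustrative.

    # Hypothetical extra step: also write the palette image as a PNG.
    img.save('osisaf_sst_euro4_201609081300.png')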
Example #5
cot = inputs[8]
reff = inputs[9]
cwp = inputs[10]
lat = inputs[11]
lon = inputs[12]

msg_con_quick = image.ImageContainerQuick(ir108.squeeze(), area_def)
area_con_quick = msg_con_quick.resample(euro_areadef)
result_data_quick = area_con_quick.image_data

# Create satpy scene
testscene = Scene(platform_name="msg",
                  sensor="seviri",
                  start_time=datetime(2013, 11, 12, 8, 30),
                  end_time=datetime(2013, 11, 12, 8, 45),
                  area=area_def)
array_kwargs = {'area': area_def}

testscene['ir108'] = Dataset(ir108.squeeze(), **array_kwargs)
print(testscene['ir108'])
testscene.show(
        'ir108',
        overlay={'coast_dir': '/home/mastho/data/', 'color': 'gray'})
resampscene = testscene.resample('germ')
print(resampscene.shape)

# Define custom fog colormap
fogcol = Colormap((0., (250 / 255.0, 200 / 255.0, 40 / 255.0)),
                  (1., (1.0, 1.0, 229 / 255.0)))
maskcol = (250 / 255.0, 200 / 255.0, 40 / 255.0)
Example #6
        dayno = dt.timetuple().tm_yday
        lat = float(fields[8])
        lon = float(fields[9])

        # make up the grid to resample into
        lats = np.arange(lat - 1, lat + 1,
                         0.01)  # approx 1km resolution, 200km extent
        lons = np.arange(lon - 1, lon + 1,
                         0.01)  # approx 1km resolution, 200km extent
        lons, lats = np.meshgrid(lons, lats)
        grid_def = geometry.GridDefinition(lons=lons, lats=lats)

        # copy 11-micron band (C14) to local disk
        # See: https://www.goes-r.gov/education/ABI-bands-quick-info.html
        gcs_pattern = 'gs://gcp-public-data-goes-16/ABI-L1b-RadF/{0}/{1}/{2}/*C14*_s{0}{1}{2}00*'.format(
            dt.year, dayno, dt.hour)
        outfile = '{}/tmp_{}{}{}'.format(tmpdir, dt.year, dayno, dt.hour)
        os.makedirs(tmpdir)
        copy_command = 'gsutil -m cp {} {}'.format(gcs_pattern, outfile)
        subprocess.check_call(copy_command.split())

        # create image
        scene = Scene(filenames=outfile, reader="abi_l1b")
        scene = scene.resample(grid_def)
        outfile = '{}/ir_{}{}{}.png'.format(outdir, dt.year, dayno, dt.hour)
        scene.save_dataset('overview', outfile)

        # cleanup
        shutil.rmtree(tmpdir)
        exit
Example #7
def msg1NDVI(dateSnap, avail_times, fldrs):
    """
    What does this function do?
    This definition/function is meant for computing NDVI from SEVIRI data

    Ref: https://nbviewer.jupyter.org/github/pytroll/pytroll-examples/blob/master/satpy/hrit_msg_tutorial.ipynb

    :param dateSnap:
    :param avail_times:
    :param fldrs:
    :return: NDVI
    """

    # Start the logic
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from satpy.dataset import combine_metadata
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_2, embellish, imResize

    debug_on()

    print("\n \t \t \t STARTING THE msg1NDVI run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        files = glob.glob(datDir + 'H-000-MSG1*' + dateSnap + tt + '-*')
        print(">>>>>>>>>>> Testing 123: <<<<<<<<<<<<<<<\n")
        print(files)

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        #  start the NDVI computation
        scn.load(['VIS006', 0.6])
        scn.load(['VIS008', 0.8])
        ndvi = (scn[0.8] - scn[0.6]) / (scn[0.8] + scn[0.6])
        ndvi.attrs = combine_metadata(scn[0.6], scn[0.8])
        scn['ndvi'] = ndvi

        composite = 'ndvi'
        prodStr = 'ndvi'
        capStr = 'NDVI'

        # resample the data to Indian region
        indScn = scn.resample('IndiaSC')

        #  save the data
        # # Save as netCDF data ---- TO BE IMPLEMENTED ----
        outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
        nc_write_sat_level_2(indScn, outImgStr1, prodStr)

        # Save as Full Resolution GeoTIFF files
        outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
        indScn.save_dataset(composite, filename = outImgStr2, writer = 'geotiff')
        # Add graphics
        # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
        # img2.save(outImgStr2)

        # Save the data as resized png files
        outImgStr3 = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
        indScn.save_dataset(composite, filename = outImgStr3, writer = "simple_image")
        outImgStr3 = imResize(outImgStr3)
        # Add graphics
        img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
        img3.save(outImgStr3)
        print("msg1NDVI() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))
Example #8
# Save the unedited, uncorrected, raw swath image
# -----------------------------------------------
scn.save_dataset('true_color', 'test_image_true3.png')

# Set up latitude and longitude bounds for the image
# --------------------------------------------------
lat_lims = [39.0, 42.5]
lon_lims = [-123., -119.]

# Set the map projection and center the data
# ------------------------------------------
my_area = scn['true_color'].attrs['area'].compute_optimal_bb_area(
    {'proj': 'lcc', 'lon_0': lon_lims[0], 'lat_0': lat_lims[0],
     'lat_1': lat_lims[0], 'lat_2': lat_lims[0]})
new_scn = scn.resample(my_area)

# Enhance the image for plotting
# ------------------------------
var = get_enhanced_image(new_scn['true_color']).data
var = var.transpose('y', 'x', 'bands')

# Extract the map projection from the data for plotting
# -----------------------------------------------------
crs = new_scn['true_color'].attrs['area'].to_cartopy_crs()

# Plot the true-color data
# ------------------------
plt.close('all')
ax = plt.axes(projection=crs)
ax.imshow(var.data, transform=crs, extent=(var.x[0], var.x[-1], var.y[-1], var.y[0]))
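The fragment stops in the middle of the plotting step; one possible way to finish it, assuming the cartopy GeoAxes created above, is to add coastlines and write the figure to disk (both lines are additions, not part of the original).

# Hypothetical finish: coastlines on the GeoAxes, then save the figure.
ax.coastlines(resolution='50m', color='yellow')
plt.savefig('goes_true_color_lcc.png', dpi=200, bbox_inches='tight')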
Example #9
def msg1Proc1_5(dateSnap, avail_times, fldrs):
    """
    What does this definition do?
    This script processes the raw MSG-1 Level 1.5 data to produce radiance/reflectance image
    files in netCDF-4, geoTIFF & PNG file formats.

    :param dateSnap:
    :param avail_times:  A single string NOT an array
    :param fldrs:
    :return:
    """
    #- Start coding
    # import necessary modules
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_1_5, embellish, imResize

    # Start the logic
    debug_on()
    print("\n \t \t \t STARTING THE msg1Proc1_5 run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        try:
            # Start for-loop-1
            print("..Started processing for time: %s" % tt)
            searchStr = datDir + 'H-000-MSG1*' + dateSnap + tt + '-*'
            files = glob.glob(searchStr)
            #  for testing
            print(">>>>>>>>>> For Testing <<<<<<<<<<")
            print("datDir is set to %s: " % datDir)
            print("Search string is %s" % searchStr)
            print(files)

            # Start reading filename in satpy
            scn = Scene(filenames=files, reader='hrit_msg')

            # Get the dataset names in the scene
            allChnls = scn.all_dataset_names()
            allChnls.remove('HRV')          # due to higher resolution

            # Save the individual channels (except HRV) as separate gray-scale GeoTIFF files..
            for ii in allChnls:
                try:
                    str(ii).split()
                    print("Working on channel: %s" % ii)
                    scn.load(str(ii).split())
                    indImg = scn.resample('IndiaSC')

                    # Save as netCDF data
                    outImgStr1 = outDir + 'ind_MSG1-Band_' + ii + '_' + dateSnap + '_' + tt + '.nc'
                    nc_write_sat_level_1_5(indImg, outImgStr1, ii)

                    # Save as Full Resolution GeoTIFF files
                    outImgStr2 = geoTdir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.tiff'
                    indImg.save_dataset(ii, filename = outImgStr2, writer = 'geotiff')
                    # Add graphics
                    # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, ii, dateSnap, tt)
                    # img2.save(outImgStr2)

                    # Save the data as resized png files
                    outImgStr3 = webDir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.png'
                    indImg.save_dataset(ii, filename = outImgStr3, writer = "simple_image")
                    outImgStr3 = imResize(outImgStr3)
                    # Add graphics
                    img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, ii, dateSnap, tt)
                    img3.save(outImgStr3)

                    # unload the read channel data
                    scn.unload(str(ii).split())
                    print("Finished processing for channel: %s " % ii)
                except:
                    print("Something went wrong with this Channel: %s" % ii)
                    continue
                # end try-except block
            #end for-loop
            print("Finished processing for time-stamp: %s" % tt)
        except:
            print("Something went wrong with this time: %s" % tt)
            continue
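A hypothetical call of msg1Proc1_5; every path is made up, and the HRIT filename convention used in the glob above suggests a dateSnap like '20180311' and a time slot like '1045'.

# Hypothetical driver call with placeholder folders (7-tuple as unpacked above).
fldrs = ('/msg1/', '/msg1/data/', '/msg1/nc/', '/msg1/log/',
         '/msg1/web/', '/msg1/gtif/', '/opt/GSHHS/')
msg1Proc1_5('20180311', '1045', fldrs)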
Example #10
def msg1RGBProc(dateSnap, avail_times, fldrs):
    """
    What does this definition do?
    This script processes the raw MSG-1 data into RGB Data Products in netCDF-4, geoTIFF &
    png file formats

    :param dateSnap:
    :param avail_times: A single string NOT an array
    :param fldrs:
    :return:
    """
    #-Start coding
    # start the logic
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_1_5, embellish, imResize

    # Start the logic
    debug_on()
    print("\n \t \t \t STARTING THE msg1RGBProc run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        files = glob.glob(datDir + 'H-000-MSG1*' + dateSnap + tt + '-*')

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        # loop into available composites
        for composite in ['natural', 'ir_overview', 'night_fog', 'convection', 'dust', 'airmass', 'cloud_top_temperature', 'cloud_top_height', 'cloudtype', 'cloud_top_phase', 'cloud_top_pressure', 'cloudmask']:
            if (composite == 'natural'):
                prodStr = 'NAT'
                capStr = 'Quasi True Colour'
            elif (composite == 'night_fog'):
                prodStr = 'NFog'
                capStr = 'Night Fog'
            elif (composite == 'convection'):
                prodStr = 'CON'  # Problematic
                capStr = 'Convection Activity'
            elif (composite == 'cloud_optical_thickness'):
                prodStr = 'COP'  # 2 much Problematic
                capStr = 'Cloud Optical Thickness'
            elif (composite == 'realistic_colors'):
                prodStr = 'REAL'  # problematic
                capStr = 'Realistic RGB Colors'
            elif (composite == 'ir_overview'):
                prodStr = 'IR'
                capStr = 'Infra-Red'
            elif (composite == 'cloud_top_temperature'):
                prodStr = 'CTT'  # problematic
                capStr = 'Cloud Top Temperature'
            elif (composite == 'airmass'):
                prodStr = 'airM'
                capStr = 'Air Mass'
            elif (composite == 'dust'):
                prodStr = 'dust'
                capStr = 'DUST'
            elif (composite == 'cloud_top_height'):
                prodStr = 'CTH'
                capStr = 'Cloud Top Height'
            elif (composite == 'cloudtype'):
                prodStr = 'CType'
                capStr = 'Cloud Type'
            elif (composite == 'cloud_top_pressure'):
                prodStr = 'CTP'
                capStr = 'Cloud Top Pressure'
            elif (composite == 'cloud_top_phase'):
                prodStr = 'CTPh'
                capStr = 'Cloud Top Phase'
            elif (composite == 'cloudmask'):
                prodStr = 'CMask'
                capStr = 'Cloud Mask'
            # end if condition

            try:
                # Load the scene
                scn.load([composite])

                # India Specific Scene
                indScn = scn.resample("IndiaSC")
                indScn.load([composite])

                # # Save as netCDF data ---- TO BE IMPLEMENTED ----
                # outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
                # # indImg.save_datasets(writer = 'cf', filename = outImgStr1)
                # nc_write_sat_level_1_5(indScn, outImgStr1, prodStr)

                # Save as Full Resolution GeoTIFF files
                outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
                indScn.save_dataset(composite, filename = outImgStr2, writer = 'geotiff')
                # Add graphics
                # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
                # img2.save(outImgStr2)

                # Save the data as resized png files
                outImgStr3 = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
                indScn.save_dataset(composite, filename = outImgStr3, writer = "simple_image")
                outImgStr3 = imResize(outImgStr3)
                # Add graphics
                img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
                img3.save(outImgStr3)

                # unload the read channel data
                scn.unload([composite])
                indScn.unload([composite])
                print("Finished processing for RGB Composite: %s " % composite)
            except:
                print("Something went wrong with this RGB composite: %s" % composite)
                continue
            # end try-except block
        # end for-loop

        #Finished time slots
        finTmStmps = [tt]
        print("\n.Reading Finished Time slots as: %s" % finTmStmps)
        finTmsFile = logDir + "finishedTimeSlots_" + dateSnap + ".txt"
        fp = open(finTmsFile, 'a+')
        for item in finTmStmps:
            fp.write("%s \n" % item)
        # end for loop to write
        fp.close()
        # end for-loop
    # end-for-loop
    print("msg1RGBProc() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))
Example #11
    #             int(sat_pos_time.strftime('%H')),
    #             int(sat_pos_time.strftime('%M')),
    #             0)
    #print("---")
    #print(orb.get_lonlatalt(dtobj))
    #print("---")

    #lonlat = orb.get_lonlatalt(dtobj)

    #if lonlat[0] >= -10. and lonlat[0] <= 20. and lonlat[1] >= 40 and lonlat[1] <= 60:

    granule_pass = Pass("Metop-"+satname, glbl.start_time, glbl.end_time, instrument=glbl['natural_color'].sensor)
    if granule_pass.area_coverage(europe) > 0 :
       print("Region over Switzerland, making CCS4 domain...")
       #local_data = glbl.resample("ccs4large")
       local_data = glbl.resample(europe)
       local_data.save_dataset('night_fog', outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg")
       local_data.save_dataset('natural_color', outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg")
       cw.add_coastlines_to_file(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg", europe, resolution='l', level=1, outline=(255, 255, 255))
       cw.add_coastlines_to_file(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg", europe, resolution='l', level=1, outline=(255, 255, 255))
       cw.add_borders_to_file(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg", europe, outline=(255, 255, 255),resolution='i')
       cw.add_borders_to_file(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg", europe, outline=(255, 255, 255),resolution='i')
       if os.path.getsize(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg") < 170000:
           os.remove(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg")
       if os.path.getsize(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg") < 170000:
           os.remove(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg")

    print("Making world map...")
    #local_data = glbl.resample("world_plat_1350_675")
    local_data = glbl.resample(world)
    local_data.save_dataset('night_fog', outDir+"METOP-"+satname+"_fog-world_"+st+".jpg")
Example #12
from glob import glob
from satpy.scene import Scene
from satpy.utils import debug_on
from pycoast import ContourWriterAGG
import aggdraw
import PIL
from PIL import Image, ImageFont, ImageDraw
from mpop.projector import get_area_def

debug_on()
fname="msg4-alps-snow.png"
my_area="europe_center"
# Load data by filenames
files = glob("data/H-*")
scn = Scene(reader="hrit_msg", filenames=files)
scn.load(["natural"])
lscn = scn.resample(my_area)
# Save RGB geotiff
lscn.save_dataset("natural", filename=fname)

cw = ContourWriterAGG('/opt/pytroll/shapes')
europe = get_area_def(my_area)
cw.add_coastlines_to_file(fname, europe, resolution='l', level=1, outline=(255, 255, 255))
cw.add_borders_to_file(fname, europe, outline=(255, 255, 255),resolution='i')

img = Image.open(fname)
draw = ImageDraw.Draw(img)
print(img.size)
draw.rectangle([(0, 0), (img.size[0], 25)], fill=(255,165,0,200))
font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", 18)
textSizeName = draw.textsize("Meteosat 11", font=font)
textSizeDate = draw.textsize("2018-03-11 10:45 UTC", font=font)
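The snippet ends after measuring the two text strings; a possible continuation, not in the original, draws them onto the orange banner and saves the annotated image.

# Hypothetical continuation: place the labels and write the file back.
draw.text((5, 3), "Meteosat 11", fill='black', font=font)
draw.text((img.size[0] - textSizeDate[0] - 5, 3),
          "2018-03-11 10:45 UTC", fill='black', font=font)
img.save(fname)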
Example #13
from satpy.scene import Scene
#from satpy.utils import debug_on
# debug_on()

if __name__ == '__main__':

    scn = Scene(
        sensor='viirs',
        satid='NPP',
        filenames=[
            "/home/a000680/data/osisaf/S-OSI_-FRA_-NPP_-NARSST_FIELD-201609081300Z.nc"
        ],
        reader='ghrsst_osisaf')

    scn.load(['sea_surface_temperature'])
    lcd = scn.resample('euro4', radius_of_influence=2000)

    sstdata = lcd['sea_surface_temperature'][:]
    import numpy as np
    arr = np.ma.where(np.less_equal(sstdata, 0), 0, sstdata - 273.15)

    # Convert sst to numbers between 0 and 28, corresponding to the lut:
    data = np.ma.where(np.less(arr, 0), 28, 28.0 - arr)
    data = np.ma.where(np.greater(arr, 23.0), 4, data).round().astype('uint8')

    from trollimage.image import Image
    from satpy.imageo import palettes
    palette = palettes.sstlut_osisaf_metno()

    img = Image(data, mode='P', palette=palette)
    img.show()
Example #14
def process_one_scan(tslot_files,
                     out_path,
                     process_buggy_satellite_zenith_angles=False):
    """ Make level 1c files in PPS-format """
    tic = time.time()
    image_num = 0  # name of first dataset is image0
    #if len(tslot_files) != 8 * len(BANDNAMES) + 2:
    #    raise Exception("Some data is missing")
    platform_shortname = p__.parse(os.path.basename(
        tslot_files[0]))['platform_shortname']
    start_time = p__.parse(os.path.basename(tslot_files[0]))['start_time']
    platform_name = PLATFORM_SHORTNAMES[platform_shortname]
    #Load channel data for one scene and set some attributes
    coefs = get_calibration_for_time(platform=platform_shortname,
                                     time=start_time)

    scn_ = Scene(reader='seviri_l1b_hrit',
                 filenames=tslot_files,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': coefs
                 })
    scn_.attrs['platform_name'] = platform_name

    #SEVIRI data only
    if scn_.attrs['sensor'] == {'seviri'}:
        sensor = 'seviri'
        scn_.load(BANDNAMES)
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        scn_[band].attrs['id_tag'] = idtag
        scn_[band].attrs['description'] = 'SEVIRI ' + str(band)
        scn_[band].attrs['sun_earth_distance_correction_applied'] = 'False'
        scn_[band].attrs['sun_earth_distance_correction_factor'] = 1.0
        scn_[band].attrs['sun_zenith_angle_correction_applied'] = 'False'
        scn_[band].attrs['name'] = "image{:d}".format(image_num)
        scn_[band].attrs['coordinates'] = 'lon lat'
        image_num += 1

    #correct area
    area_corr = pyresample.geometry.AreaDefinition(
        'seviri-corrected', 'Corrected SEVIRI L1.5 grid (since Dec 2017)',
        'geosmsg', {
            'a': 6378169.00,
            'b': 6356583.80,
            'h': 35785831.0,
            'lon_0': 0.0,
            'proj': 'geos',
            'units': 'm'
        }, 3712, 3712, (5567248.28340708, 5570248.686685662,
                        -5570248.686685662, -5567248.28340708))
    if not scn_['IR_108'].attrs['georef_offset_corrected']:
        scn_ = scn_.resample(area_corr)
        print(scn_['IR_108'].attrs['georef_offset_corrected'])

    #import pdb;pdb.set_trace()
    # Set some header attributes:
    scn_.attrs['platform'] = platform_name
    scn_.attrs['instrument'] = sensor.upper()
    scn_.attrs['source'] = "seviri2pps.py"
    scn_.attrs['orbit_number'] = "99999"
    #scn_.attrs['orbit'] = "99999"
    nowutc = datetime.utcnow()
    scn_.attrs['date_created'] = nowutc.strftime("%Y-%m-%dT%H:%M:%SZ")
    #Find lat/lon data
    irch = scn_['IR_108']
    lons, lats = irch.attrs['area'].get_lonlats()
    lons[lons > 360] = -999.0
    lons[lons < -360] = -999.0
    lats[lats > 360] = -999.0
    lats[lats < -360] = -999.0

    #Find angles data
    sunalt, suna = get_alt_az(irch.attrs['start_time'],
                              *irch.attrs['area'].get_lonlats())
    suna = np.rad2deg(suna)
    sunz = sun_zenith_angle(irch.attrs['start_time'],
                            *irch.attrs['area'].get_lonlats())

    # if:
    #   Buggy data is requested => buggy data is prepared!
    # elif:
    #   1) get_observer_look() gives the wrong answer
    #      for satellite altitude in m, AND
    #   2) get_observer_look() gives the correct answer
    #      for satellite altitude in km, AND
    #   3) the satellite altitude is given in m:
    #    => the satellite altitude needs to be converted to km.
    # else:
    #    => there have been updates to SatPy and this script
    #       needs to be modified.
    if process_buggy_satellite_zenith_angles:
        print(" Making buggy satellite zenith angels on purpose!")
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    elif (get_observer_look(0, 0, 36000 * 1000, datetime.utcnow(),
                            np.array([16]), np.array([58]), np.array(
                                [0]))[1] > 30
          and get_observer_look(0, 0, 36000, datetime.utcnow(), np.array([16]),
                                np.array([58]), np.array([0]))[1] < 23
          and irch.attrs['orbital_parameters']['satellite_actual_altitude'] >
          38000):
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            0.001 *
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    else:
        raise UnexpectedSatpyVersion(
            "You might have a newer version of satpy/pyorbital that "
            "handles units. In that case the m => km conversion might "
            "be unneeded and wrong.")

    satz = 90 - satel
    azidiff = make_azidiff_angle(sata, suna, -32767)
    # Add lat/lon and angles datasets to the scene object
    my_coords = scn_['IR_108'].coords
    my_coords['time'] = irch.attrs['start_time']
    scn_['lat'] = xr.DataArray(da.from_array(lats, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={
                                   'y': scn_['IR_108']['y'],
                                   'x': scn_['IR_108']['x']
                               })
    scn_['lat'].attrs['long_name'] = 'latitude coordinate'
    scn_['lat'].attrs['standard_name'] = 'latitude'
    scn_['lat'].attrs['units'] = 'degrees_north'
    scn_['lat'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lat'].attrs['end_time'] = irch.attrs['end_time']
    scn_['lon'] = xr.DataArray(da.from_array(lons, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={
                                   'y': scn_['IR_108']['y'],
                                   'x': scn_['IR_108']['x']
                               })
    scn_['lon'].attrs['long_name'] = 'longitude coordinate'
    scn_['lon'].attrs['standard_name'] = 'longitude'
    scn_['lon'].attrs['units'] = 'degrees_east'
    scn_['lon'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lon'].attrs['end_time'] = irch.attrs['end_time']
    #sunzenith
    scn_['sunzenith'] = xr.DataArray(da.from_array(sunz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'],
                                     coords=my_coords)
    scn_['sunzenith'].attrs['id_tag'] = 'sunzenith'
    scn_['sunzenith'].attrs['long_name'] = 'sun zenith angle'
    scn_['sunzenith'].attrs['standard_name'] = 'solar_zenith_angle'
    scn_['sunzenith'].attrs['valid_range'] = [0, 18000]
    scn_['sunzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    #satzenith
    scn_['satzenith'] = xr.DataArray(da.from_array(satz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'],
                                     coords=my_coords)
    scn_['satzenith'].attrs['id_tag'] = 'satzenith'
    scn_['satzenith'].attrs['long_name'] = 'satellite zenith angle'
    scn_['satzenith'].attrs['standard_name'] = 'platform_zenith_angle'
    scn_['satzenith'].attrs['valid_range'] = [0, 9000]
    scn_['satzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    #azidiff
    scn_['azimuthdiff'] = xr.DataArray(da.from_array(azidiff[:, :],
                                                     chunks=(53, 3712)),
                                       dims=['y', 'x'],
                                       coords=my_coords)
    scn_['azimuthdiff'].attrs['id_tag'] = 'azimuthdiff'
    #scn_['azimuthdiff'].attrs['standard_name'] = (
    #    'angle_of_rotation_from_solar_azimuth_to_platform_azimuth')
    scn_['azimuthdiff'].attrs['long_name'] = 'absolute azimuth difference angle'
    scn_['azimuthdiff'].attrs['valid_range'] = [0, 18000]
    scn_['azimuthdiff'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    for angle in ['azimuthdiff', 'satzenith', 'sunzenith']:
        scn_[angle].attrs['units'] = 'degree'
        for attr in irch.attrs.keys():
            if attr in [
                    "start_time", "end_time", "navigation",
                    "georef_offset_corrected", "projection"
            ]:
                scn_[angle].attrs[attr] = irch.attrs[attr]

    #Get filename
    start_time = scn_['IR_108'].attrs['start_time']
    end_time = scn_['IR_108'].attrs['end_time']
    filename = os.path.join(
        out_path, "S_NWC_seviri_{:s}_{:s}_{:s}Z_{:s}Z.nc".format(
            platform_name.lower().replace('-', ''), "99999",
            start_time.strftime('%Y%m%dT%H%M%S%f')[:-5],
            end_time.strftime('%Y%m%dT%H%M%S%f')[:-5]))

    #Encoding for channels
    save_info = {}
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        name = scn_[band].attrs['name']
        scn_[band].attrs.pop('area', None)
        # Add time coordinate. To make cfwriter aware that we want 3D data.
        my_coords = scn_[band].coords
        my_coords['time'] = irch.attrs['start_time']

        if 'tb' in idtag:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                '_FillValue': -32767,
                'zlib': True,
                'complevel': 4,
                'add_offset': 273.15
            }
        else:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                'zlib': True,
                'complevel': 4,
                '_FillValue': -32767,
                'add_offset': 0.0
            }
    #Encoding for angles and lat/lon
    for name in ['image11', 'image12', 'image13']:
        save_info[name] = {
            'dtype': 'int16',
            'scale_factor': 0.01,
            'zlib': True,
            'complevel': 4,
            '_FillValue': -32767,
            'add_offset': 0.0
        }

    for name in ['lon', 'lat']:
        save_info[name] = {
            'dtype': 'float32',
            'zlib': True,
            'complevel': 4,
            '_FillValue': -999.0
        }
    header_attrs = scn_.attrs.copy()
    header_attrs['start_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['start_time'].timetuple())
    header_attrs['end_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['end_time'].timetuple())
    header_attrs['sensor'] = sensor.lower()
    header_attrs.pop('platform_name', None)

    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=header_attrs,
                       engine='netcdf4',
                       encoding=save_info,
                       include_lonlats=False,
                       pretty=True,
                       flatten_attrs=True,
                       exclude_attrs=['raw_metadata'])
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))  #About 40 seconds
    return filename
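A hypothetical invocation of process_one_scan; the HRIT file list below is a placeholder standing in for one full SEVIRI repeat cycle.

if __name__ == '__main__':
    from glob import glob

    tslot_files = glob('/data/hrit/H-000-MSG1__-MSG1________-*-201803111045-*')
    process_one_scan(tslot_files, '/data/pps_l1c')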
Example #15
Change lines 21 and 22 to define the input and output files, change the
region in line 92, and run the code
"""

from satpy.scene import Scene
import glob
import os

os.chdir(r"C:\Users\timhe\Documents\VIIRStest")
filenames = glob.glob(
    "GIMGO-SVI05_npp_d20190910_t1038162_e1043566_b40776_c20190912092651273026_noac_ops*"
)

global_scene = Scene(reader="viirs_sdr", filenames=filenames)
nebraska_scene = global_scene.resample('nebraska', resampler='nearest')

nebraska_scene.save_datasets()

from satpy import MultiScene

mscn = MultiScene([global_scene])
mscn.load(['I05'])

my_area = global_scene['I05'].load().attrs['area'].compute_optimal_bb_area({
    'proj':
    'lcc',
    'lon_0':
    -95.,
    'lat_0':
    25.,
Example #16
def msg1NDVI(dateSnap, avail_times, fldrs):
    """
    What does this function do?
    This definition/function is meant for computing NDVI from SEVIRI data

    Ref: https://nbviewer.jupyter.org/github/pytroll/pytroll-examples/blob/master/satpy/hrit_msg_tutorial.ipynb

    :param dateSnap:
    :param avail_times:
    :param fldrs:
    :return: NDVI
    """

    # Start the logic
    import os, sys, glob
    #from satpy.utils import debug_on
    from satpy.scene import Scene
    from satpy.dataset import combine_metadata
    from datetime import datetime
    from trollimage.colormap import greys, greens
    from trollimage.image import Image
    from myDefinitions import nc_write_sat_level_2, embellish, imResize

    #debug_on()

    print("\n \t \t \t STARTING THE msg1NDVI run @ time: %s \t \t \t \n \n" %
          str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, msg1Src, exeDir, GSHHS_ROOT, tmpDir = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)
    print("\n.msg1Src directory is set to: %s" % msg1Src)
    print("\n.exeDir directory is set to: %s" % exeDir)
    print("\n.GSHHS directory is set to: %s" % GSHHS_ROOT)
    print("\n.tmpDir directory is set to: %s" % tmpDir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        searchStr = datDir + 'H-000-MSG1*' + dateSnap + tt + '-*'
        print("\n \t \t Testing 123: \n \n ")
        print(searchStr)
        files = glob.glob(searchStr)
        #print("\n Testing 123: \n")
        #print(files)

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        #  start the NDVI computation
        scn.load(['VIS006', 0.6])
        scn.load(['VIS008', 0.8])
        ndvi = (scn[0.8] - scn[0.6]) / (scn[0.8] + scn[0.6])
        ndvi.attrs = combine_metadata(scn[0.6], scn[0.8])
        scn['ndvi'] = ndvi

        composite = 'ndvi'
        prodStr = 'NDVI'
        capStr = 'NDVI'

        # resample the data to Indian region
        indScn = scn.resample('India_SC')

        #  save the data
        # # # Save as netCDF data ---- TO BE IMPLEMENTED ----
        # outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
        # nc_write_sat_level_2(indScn, outImgStr1, prodStr)
        #
        # # Save as Full Resolution GeoTIFF files
        # outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
        # indScn.save_dataset(composite, filename = outImgStr2, writer = 'geotiff')
        # # Add graphics
        # # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
        # # img2.save(outImgStr2)

        # Save the data as resized png files
        outImgStr3 = tmpDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
        outImgStr3w = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'

        #  Apply color palette from trollimage
        ndvi_data = indScn['ndvi'].compute().data
        ndvi_img = Image(ndvi_data, mode="L")
        # greys.set_range(ndvi_data.min(), -0.00001)
        # greens.set_range(0,ndvi_data.max())
        greys.set_range(-0.8, -0.00001)
        greens.set_range(0, 0.8)
        my_cm = greys + greens
        ndvi_img.colorize(my_cm)
        ndvi_img.save(outImgStr3)
        # indScn.save_dataset(composite, filename = outImgStr3, writer = "simple_image")
        outImgStr3 = imResize(outImgStr3)
        # Add graphics
        img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
        img3.save(outImgStr3)

        #  move the tmp files to proper web area
        mv2WebCmd = 'mv ' + outImgStr3 + ' ' + outImgStr3w
        os.system(mv2WebCmd)

        print(
            "msg1NDVI() says: Finished with processing of time-slot - %s - at: %s "
            % (tt, str(datetime.now())))
Example #17
def msg1Proc1_5(dateSnap, avail_times, fldrs):
    """
    What does this definition do?
    This script processes the raw MSG-1 Level 1.5 data to produce radiance/reflectance image
    files in netCDF-4, geoTIFF & PNG file formats.

    :param dateSnap:
    :param avail_times:  A single string NOT an array
    :param fldrs:
    :return:
    """
    #- Start coding
    # import necessary modules
    import os, sys, glob
    #from satpy.utils import debug_on
    from satpy.scene import Scene
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_1_5, embellish, imResize

    # Start the logic
    #debug_on()
    print(
        "\n \t \t \t STARTING THE msg1Proc1_5 run @ time: %s \t \t \t \n \n" %
        str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, msg1Src, exeDir, GSHHS_ROOT, tmpDir = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)
    print("\n.msg1Src directory is set to: %s" % msg1Src)
    print("\n.exeDir directory is set to: %s" % exeDir)
    print("\n.GSHHS directory is set to: %s" % GSHHS_ROOT)
    print("\n.tmpDir directory is set to: %s" % tmpDir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        try:
            # Start for-loop-1
            print("..Started processing for time: %s" % tt)
            searchStr = datDir + 'H-000-MSG1*' + dateSnap + tt + '-*'
            # searchStr = msg1Src + 'H-000-MSG1*' + dateSnap + tt + '*'

            files = glob.glob(searchStr)

            # Start reading filename in satpy
            scn = Scene(filenames=files, reader='hrit_msg')

            available_comps = scn.available_composite_names()
            channels_inverted = [s for s in available_comps if "_inv" in s]

            # add the remaining 3 non-inverted channels & 3d channel
            #allChnls = channels_inverted  + ["IR_016", "VIS006", "VIS008", "ir108_3d"]
            allChnls = channels_inverted + ["ir108_3d"]

            # Save the individual channels (except HRV) as separate gray-scale GeoTIFF files..
            for ii in allChnls:
                try:
                    str(ii).split()
                    print("Working on channel: %s" % ii)
                    scn.load(str(ii).split())
                    indImg = scn.resample('India_SC')

                    # # Save as netCDF data
                    # outImgStr1 = outDir + 'ind_MSG1-Band_' + ii + '_' + dateSnap + '_' + tt + '.nc'
                    # nc_write_sat_level_1_5(indImg, outImgStr1, ii)

                    # # Save as Full Resolution GeoTIFF files
                    # outImgStr2 = geoTdir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.tiff'
                    # indImg.save_dataset(ii, filename = outImgStr2, writer = 'geotiff')

                    # Add graphics
                    # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, ii, dateSnap, tt)
                    # img2.save(outImgStr2)

                    # Save the data as resized png files
                    outImgStr3 = tmpDir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.png'
                    outImgStr3w = webDir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.png'
                    indImg.save_dataset(ii,
                                        filename=outImgStr3,
                                        writer="simple_image")
                    outImgStr3 = imResize(outImgStr3)

                    # Add graphics
                    img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, ii,
                                     dateSnap, tt)
                    img3.save(outImgStr3)

                    #  move the tmp files to proper web area
                    mv2WebCmd = 'mv ' + outImgStr3 + ' ' + outImgStr3w
                    os.system(mv2WebCmd)

                    # unload the read channel data
                    scn.unload(str(ii).split())
                    print("Finished processing for channel: %s " % ii)
                except:
                    print("Something went wrong with this Channel: %s" % ii)
                    continue
                # end try-except block
            #end for-loop
            print("Finished processing for time-stamp: %s" % tt)
        except:
            print("Something went wrong with this time: %s" % tt)
            continue