Example #1
0
def wrapper(date):
    print date
    test1 = datetime.datetime.now()

    # Pull out cloud mask data to be regridded and added to the nc file
    # glob is used for a wildcard
    try:
        clouddata = pygrib.open(
            glob.glob(
                '/soge-home/projects/seviri_dust/raw_seviri_data/cloudmask_grib/MSG*-'
                'SEVI-MSGCLMK-0100-0100-' + date.strftime("%Y%m%d%H%M%S") +
                '*')[0])
        grb = clouddata.select()[0]
        cloudmaskarray = grb.values[:, ::-1]
        cloudlats, cloudlons = grb.latlons()

        cloudmaskarray[cloudmaskarray >= 3] = np.nan
        cloudlats[cloudlats > 90] = np.nan
        cloudlons[cloudlons > 180] = np.nan

        target_area = utils.load_area(
            '/soge-home/projects/seviri_dust/areas.def', 'NorthAfrica')
        lons, lats = target_area.get_lonlats()

        # Generate a regular lat/lon grid for the cloud mask
        #regular_lons = np.linspace(np.min(lons), np.max(lons), lons.shape[1])
        #regular_lats = np.linspace(np.min(lats), np.max(lats), lats.shape[0])

        # Regrid cloud mask data to the same grid as the BT data
        cloudmask_regridded = pinkdust.regrid_data(cloudlons,
                                                   cloudlats,
                                                   lons,
                                                   lats,
                                                   cloudmaskarray,
                                                   mesh=True)

        f = tables.open_file(
            '/soge-home/projects/seviri_dust/raw_seviri_data'
            '/intermediary_files/cloudmask/cloudmask_' +
            date.strftime('%Y%m%d%H%M%S.hdf'), 'w')
        atom = tables.Atom.from_dtype(cloudmask_regridded.dtype)
        filters = tables.Filters(complib='blosc', complevel=5)
        ds = f.create_carray(f.root,
                             'data',
                             atom,
                             cloudmask_regridded.shape,
                             filters=filters)
        ds[:] = cloudmask_regridded
        f.close()
    except:
        with open(
                '/soge-home/projects/seviri_dust'
                '/raw_seviri_data/bt_native/missing_msgnative_dates'
                '.txt', 'a') as my_file:
            my_file.write('\n')
            my_file.write(date.strftime('%Y%m%d%H%M%S') + ' clouds')
Example #2
0
def sub_wrapper(day_datetimes, j):
    print day_datetimes[j]
    try:
        clouddata = Dataset('/soge-home/data/satellite/meteosat/seviri/15-min/'
                            'native/cloudmask'
                            '/nc/' + day_datetimes[j].strftime("%B").upper() +
                            str(day_datetimes[j].year) +
                            '_CLOUDS/eumetsat.cloud.' +
                            day_datetimes[j].strftime("%Y%m%d%H%M") + '.nc')
        cloudmaskarray = clouddata.variables['cmask'][:]
        cloudmask_regridded = pinkdust.regrid_data(cloudlons,
                                                   cloudlats,
                                                   lons,
                                                   lats,
                                                   cloudmaskarray,
                                                   mesh=True)
    except:
        print 'Missing cloud mask data for', \
            day_datetimes[j]
        cloudmask_regridded = np.zeros((lats.shape[0], lons.shape[1]))
        cloudmask_regridded[:] = np.nan

    # Now, instead of writing to day array, you write to hdf
    f = tables.open_file(
        '/soge-home/projects/seviri_dust/raw_seviri_data'
        '/intermediary_files/cloudmask_' +
        day_datetimes[j].strftime('%Y%m%d%H%M%S.hdf'), 'w')
    atom = tables.Atom.from_dtype(cloudmask_regridded.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    ds = f.create_carray(f.root,
                         'data',
                         atom,
                         cloudmask_regridded.shape,
                         filters=filters)
    ds[:] = cloudmask_regridded
    f.close()

    print 'Wrote', day_datetimes[j]
Example #3
0
def sub_wrapper(day_datetimes, j):
    print day_datetimes[j]
    try:
        clouddata = pygrib.open(
            glob.glob(
                '/soge-home/projects/seviri_dust/raw_seviri_data/cloudmask_grib/MSG*-'
                'SEVI-MSGCLMK-0100-0100-' +
                day_datetimes[j].strftime("%Y%m%d%H%M%S") + '*')[0])
        grb = clouddata.select()[0]
        cloudmaskarray = grb.values[:, ::-1]
        cloudmask_regridded = pinkdust.regrid_data(cloudlons,
                                                   cloudlats,
                                                   lons,
                                                   lats,
                                                   cloudmaskarray,
                                                   mesh=True)
    except:
        print 'Missing cloud mask data for', \
            day_datetimes[j]
        cloudmask_regridded = np.zeros((lats.shape[0], lons.shape[1]))
        cloudmask_regridded[:] = np.nan

    # Now, instead of writing to day array, you write to hdf
    f = tables.open_file(
        '/soge-home/projects/seviri_dust/raw_seviri_data'
        '/intermediary_files/cloudmask_' +
        day_datetimes[j].strftime('%Y%m%d%H%M%S.hdf'), 'w')
    atom = tables.Atom.from_dtype(cloudmask_regridded.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    ds = f.create_carray(f.root,
                         'data',
                         atom,
                         cloudmask_regridded.shape,
                         filters=filters)
    ds[:] = cloudmask_regridded
    f.close()

    print 'Wrote', day_datetimes[j]
Example #4
0
def cloud_mask_mw(i):
    """
    Moving window cloud masking to be used by multiprocessing
    :return:
    """

    #print '1'

    date = oneday_datetimes[i]
    window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                              day_lower, hour_lower,
                                              minute_lower) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                              day_upper, hour_upper,
                                              minute_upper) \
                            + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([window_datetime_lower.year,
                                  window_datetime_upper.year,
                                  window_datetime_lower.month,
                                  window_datetime_upper.month,
                                  window_datetime_lower.day,
                                  window_datetime_upper.day,
                                  date.hour,
                                  date.hour,
                                  date.minute,
                                  date.minute])
    datetimes_7dayw = utilities.get_daily_datetime_objects(
        time_params_7dayw)

    bt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                         ianlats.shape[0],
                         ianlons.shape[0]))

    #print '2'

    # bt_15day[:] = np.nan

    cloudscreenedbt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                                      ianlats.shape[0],
                                      ianlons.shape[0]))

    # cloudscreenedbt_15day[:] = np.nan

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
        'cloud_masked_bt_15d_' + oneday_datetimes[i].strftime(
            "%Y_%H_%M") + '.hdf', 'w')
    atom = tables.Atom.from_dtype(cloudscreenedbt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    cs = f.create_carray(f.root, 'data', atom,
                         cloudscreenedbt_15day.shape,
                         filters=filters)

    pickup = False
    pickup_i = 0

    #print '3'

    if pickup == False:
        g = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_' +
            oneday_datetimes[i].strftime(
                "%Y_%H_%M") + '.hdf', 'w')
        atom = tables.Atom.from_dtype(bt_15day.dtype)
        filters = tables.Filters(complib='blosc', complevel=5)
        bts = g.create_carray(g.root, 'data', atom,
                              bt_15day.shape,
                              filters=filters)
    else:
        g = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_' +
            oneday_datetimes[i].strftime(
                "%Y_%H_%M") + '.hdf', 'a')

    #print '4 - loop starting'

    # Loop through each day of the time window for this time of day
    for j in np.arange(pickup_i, len(datetimes_7dayw)):
    #for j in np.arange(pickup_i, pickup_i+1):
    #for j in np.arange(pickup_i, 1):
        print str(oneday_datetimes[i].hour) + str(
            oneday_datetimes[i].minute), 'day ', j+1
        date_w = datetimes_7dayw[j]

        #print '5'

        # Extract BT data for this timestep
        filename = '/ouce-home/projects/seviri_dust/raw_seviri_data/bt_nc/' \
                    + str(date_w.strftime(
            '%B')) + str(date_w.year) + \
                   '/BT_' + str(
            date_w.strftime('%Y')) + str(date_w.strftime('%m')) + \
                   str(date_w.strftime('%d'))+ '.nc'

        try:
            btdata = Dataset(filename, 'r')
        except:
            print 'Found no BT data for ' + filename
            continue

        #print '6'

        # Extract cloud mask data for this timestep
        cloudfilename = \
            '/ouce-home/projects/seviri_dust/raw_seviri_data/cloudmask_nc/' \
                        + str(date_w.strftime('%B')) + str(date_w.year) +\
                        '/cloudmask_' \
                        + str(date_w.strftime('%Y')) \
                        + str(date_w.strftime('%m')) \
                        + str(date_w.strftime('%d')) + '.nc'
        try:
            clouddata = Dataset(cloudfilename, 'r')
        except:
            print 'Found no cloud data for ' + cloudfilename
            continue

        #print '7'

        # Apply cloud screening
        cloudscreenedbt_15day_array, bt087, bt108, \
        bt120 = sdf.cloud_screen_daily(btdata, clouddata, date_w)

        regcs_array = np.zeros((len(cloudscreenedbt_15day_array),
                                ianlats.shape[
            0], ianlons.shape[0]))

        #print '8'

        for k in np.arange(0, len(cloudscreenedbt_15day_array)):
            regcs = pinkdust.regrid_data(tchlons, tchlats, ianlons, ianlats,
                                         cloudscreenedbt_15day_array[k],
                                                        mesh=False)
            regcs_array[k] = regcs

        #print '9'

        regbt087 = pinkdust.regrid_data(tchlons, tchlats, ianlons, ianlats,
                                        bt087, mesh=False)
        regbt108 = pinkdust.regrid_data(tchlons, tchlats, ianlons, ianlats,
                                        bt108, mesh=False)
        regbt120 = pinkdust.regrid_data(tchlons, tchlats, ianlons, ianlats,
                                        bt120, mesh=False)

        #print '10'

        cs[j] = regcs_array
        bts[j, 0] = regbt087
        bts[j, 1] = regbt108
        bts[j, 2] = regbt120
        btdata.close()

        #print g.root.data[0]
        #print f.root.data[0]

        #print '11'

    # Save cloud masked data for this time of day to file

    f.close()

    # Save cloud masked data for this time of day to file

    g.close()
Example #5
0
def cloud_mask_mw(i):
    """
    Moving window cloud masking to be used by multiprocessing
    :return:
    """

    date = oneday_datetimes[i]
    window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                              day_lower, hour_lower,
                                              minute_lower) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                              day_upper, hour_upper,
                                              minute_upper) \
                            + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([
        window_datetime_lower.year, window_datetime_upper.year,
        window_datetime_lower.month, window_datetime_upper.month,
        window_datetime_lower.day, window_datetime_upper.day, date.hour,
        date.hour, date.minute, date.minute
    ])
    datetimes_7dayw = utilities.get_daily_datetime_objects(time_params_7dayw)

    bt_15day = np.zeros(
        (datetimes_7dayw.shape[0], 3, ianlats.shape[0], ianlons.shape[0]))

    #bt_15day[:] = np.nan

    cloudscreenedbt_15day = np.zeros(
        (datetimes_7dayw.shape[0], 3, ianlats.shape[0], ianlons.shape[0]))

    #cloudscreenedbt_15day[:] = np.nan

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
        'cloud_masked_bt_15d_' + oneday_datetimes[i].strftime("%Y_%H_%M") +
        '.hdf', 'w')
    atom = tables.Atom.from_dtype(cloudscreenedbt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    cs = f.create_carray(f.root,
                         'data',
                         atom,
                         cloudscreenedbt_15day.shape,
                         filters=filters)

    g = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_' +
        oneday_datetimes[i].strftime("%Y_%H_%M") + '.hdf', 'w')
    atom = tables.Atom.from_dtype(bt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    bts = g.create_carray(g.root,
                          'data',
                          atom,
                          bt_15day.shape,
                          filters=filters)

    # Loop through each day of the time window for this time of day
    for j in np.arange(0, len(datetimes_7dayw)):
        print str(oneday_datetimes[i].hour) + str(
            oneday_datetimes[i].minute), 'day ', j + 1
        date_w = datetimes_7dayw[j]

        # Extract BT data for this timestep
        filename = '/ouce-home/data/satellite/meteosat/seviri/15-min/' \
                   'native/bt/nc' \
                   '/' + str(date_w.strftime(
            '%B').upper()) + str(date_w.year) + \
                   '/H-000-MSG2__-MSG2________-' \
                   'IR_BrightnessTemperatures___-000005___-' + str(
            date_w.strftime('%Y')) + str(date_w.strftime('%m')) + \
                   str(date_w.strftime('%d')) + str(date_w.strftime('%H')) \
                   + str(date_w.strftime('%M')) + '-__.nc'

        try:
            btdata = Dataset(filename, 'r')
        except:
            print 'Found no BT data for ' + filename
            continue

        # Extract cloud mask data for this timestep
        cloudfilename = '/ouce-home/data/satellite/meteosat/' \
                        'seviri/15-min/' \
                        'native/cloudmask/nc' \
                        '/' + str(date_w.strftime('%B').upper()) \
                        + str(date_w.year) + '_CLOUDS/eumetsat.cloud.' \
                        + str(date_w.strftime('%Y')) \
                        + str(date_w.strftime('%m')) \
                        + str(date_w.strftime('%d')) + str(
            date_w.strftime('%H')) \
                        + str(date_w.strftime('%M')) + '.nc'
        try:
            clouddata = Dataset(cloudfilename, 'r')
        except:
            print 'Found no cloud data for ' + cloudfilename
            continue

        # Apply cloud screening
        cloudscreenedbt_15day_array, bt087, bt108, \
        bt120 = sdf.cloud_screen(btdata, clouddata)

        regcs_array = np.zeros((len(cloudscreenedbt_15day_array),
                                ianlats.shape[0], ianlons.shape[0]))

        for k in np.arange(0, len(cloudscreenedbt_15day_array)):
            regcs = pinkdust.regrid_data(lons,
                                         lats,
                                         ianlons,
                                         ianlats,
                                         cloudscreenedbt_15day_array[k],
                                         mesh=False)
            regcs_array[k] = regcs

        regbt087 = pinkdust.regrid_data(lons,
                                        lats,
                                        ianlons,
                                        ianlats,
                                        bt087,
                                        mesh=False)
        regbt108 = pinkdust.regrid_data(lons,
                                        lats,
                                        ianlons,
                                        ianlats,
                                        bt108,
                                        mesh=False)
        regbt120 = pinkdust.regrid_data(lons,
                                        lats,
                                        ianlons,
                                        ianlats,
                                        bt120,
                                        mesh=False)

        cs[j] = regcs_array
        bts[j, 0] = regbt087
        bts[j, 1] = regbt108
        bts[j, 2] = regbt120
        btdata.close()

    # Save cloud masked data for this time of day to file

    f.close()

    # Save cloud masked data for this time of day to file

    g.close()
Example #6
0
def detect_cpo(btdiff_2_anom_prev,
               btdiff_2_anom_prev_2,
               btdiff_2_anom_prev_3,
               datetimes,
               datestrings,
               date_i,
               lons,
               lats,
               cloud_lons,
               cloud_lats,
               daily_clouds=False,
               double_digits=False,
               mesh=False,
               daily_bt=False):

    used_ids = []

    runtime = datetimes[date_i] - datetimes[0]
    #print '\n' + datestrings[date_i] + '\n'
    totaltest = datetime.datetime.now()

    found_file = True

    if daily_bt == False:
        if os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                          '0.03x0.03/bt'
                          '/nc/' + datetimes[date_i].strftime("%B").upper() +
                          str(datetimes[date_i].year) + '/H-000-MSG2__'
                          '-MSG2________-'
                          'IR_BrightnessTemperatures___'
                          '-000005___-' + datestrings[date_i] + '-__.nc'):
            bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                         '0.03x0.03/bt'
                         '/nc/' + datetimes[date_i].strftime("%B").upper() +
                         str(datetimes[date_i].year) + '/H-000-MSG2__'
                         '-MSG2________-'
                         'IR_BrightnessTemperatures___'
                         '-000005___-' + datestrings[date_i] + '-__.nc')
            found_file = True
        elif os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                            '0.03x0.03/bt'
                            '/nc/' + datetimes[date_i].strftime("%B").upper() +
                            str(datetimes[date_i].year) + '/H-000-MSG1__'
                            '-MSG1________-'
                            'IR_BrightnessTemperatures___'
                            '-000005___-' + datestrings[date_i] + '-__.nc'):
            bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                         '0.03x0.03/bt'
                         '/nc/' + datetimes[date_i].strftime("%B").upper() +
                         str(datetimes[date_i].year) + '/H-000-MSG1__'
                         '-MSG1________-'
                         'IR_BrightnessTemperatures___'
                         '-000005___-' + datestrings[date_i] + '-__.nc')
            found_file = True
        else:
            found_file = False

    if daily_clouds:
        try:
            cloudmask = Dataset('/soge-home/projects/seviri_dust/raw_seviri_'
                                'data/cloudmask_nc/' +
                                datetimes[date_i].strftime("%B%Y") +
                                '/cloudmask_' +
                                datetimes[date_i].strftime("%Y%m%d") + '.nc')
            cloudmask_times = num2date(cloudmask.variables['time'][:],
                                       cloudmask.variables['time'].units)
            cloudmask_times = np.asarray([
                datetime.datetime(j.year, j.month, j.day, j.hour, j.minute)
                for j in cloudmask_times
            ])

            cloudmask_bool = cloudmask_times == datetimes[date_i]
            print np.all(cloudmask.variables['cloud_mask'][0] == \
                  cloudmask.variables['cloud_mask'][30])
            clouds_now = cloudmask.variables['cloud_mask'][cloudmask_bool][0]
            found_file = True
        except:
            print 'Found no cloud mask file!'
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
    else:
        try:
            cloudmask = Dataset(
                '/soge-home/data/satellite/meteosat/seviri/15-min/'
                '0.03x0.03/cloudmask'
                '/nc/' + datetimes[date_i].strftime("%B").upper() +
                str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.' +
                datestrings[date_i] + '.nc')
            clouds_now = cloudmask.variables['cmask'][:][0]
            cloud_lons = cloudmask.variables['lon'][:]
            cloud_lats = cloudmask.variables['lat'][:]
        except:
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
            print 'Found no cloud mask file!'

    if found_file:

        if daily_bt == False:
            bt087 = bt.variables['bt087'][:][0]
            bt12 = bt.variables['bt120'][:][0]
            orig_lons = bt.variables['longitude'][:]
            orig_lats = bt.variables['latitude'][:]
            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        else:
            orig_lons = lons
            orig_lats = lats

        # print bt12.shape
        # print clouds_now.shape

        window_datetime_lower = datetime.datetime(datetimes[0].year,
                                                  datetimes[0].month,
                                                  datetimes[0].day,
                                                  datetimes[0].hour,
                                                  datetimes[0].minute) \
                                - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                                  datetimes[-1].month,
                                                  datetimes[-1].day,
                                                  datetimes[-1].hour,
                                                  datetimes[-1].minute) \
                                + datetime.timedelta(days=7)

        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([
            window_datetime_lower.year, window_datetime_upper.year,
            window_datetime_lower.month, window_datetime_upper.month,
            window_datetime_lower.day, window_datetime_upper.day,
            datetimes[date_i].hour, datetimes[date_i].hour,
            datetimes[date_i].minute, datetimes[date_i].minute
        ])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        indices = np.arange(0, len(datetimes_7dayw))
        lower_ind = datetimes_7dayw == BT_15_day_lower_bound
        lower_ind = indices[lower_ind][0]
        upper_ind = datetimes_7dayw == BT_15_day_upper_bound
        upper_ind = indices[upper_ind][0]
        current_ind = datetimes_7dayw == datetimes[date_i]
        current_ind = indices[current_ind][0]

        if double_digits:
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M') + '.hdf')
            BT_15_days = f.root.data[lower_ind:upper_ind]
            bt_data = f.root.data[current_ind]
            f.close()
        else:
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + str(datetimes[date_i].year) + '_' +
                str(datetimes[date_i].hour) + '_' +
                str(datetimes[date_i].minute) + '.hdf')
            BT_15_days = f.root.data[lower_ind:upper_ind]
            bt_data = f.root.data[current_ind]
            f.close()

        if daily_bt:
            bt087 = bt_data[0]
            bt12 = bt_data[2]

        bt_15day_087 = BT_15_days[:, 0]
        #bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]

        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        #bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

        btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

        if mesh:
            cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)

        clouds_now_regridded = pinkdust.regrid_data(cloud_lons,
                                                    cloud_lats,
                                                    orig_lons,
                                                    orig_lats,
                                                    clouds_now,
                                                    mesh=True)
        """

        bt087_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                               cloud_lons,
                                               cloud_lats, bt087, mesh=True)

        bt12_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                              cloud_lons,
                                              cloud_lats, bt12, mesh=True)

        btdiff_2_15daymean_regridded = pinkdust.regrid_data(orig_lons,
                                                            orig_lats,
                                                            cloud_lons,
                                                            cloud_lats,
                                                            btdiff_2_15daymean, mesh=True)

        """

        btdiff_2 = bt12 - bt087
        btdiff_2_anom = btdiff_2 - btdiff_2_15daymean

        if btdiff_2_anom_prev_3 != None:

            btdiff_2_anom_diff = btdiff_2_anom - btdiff_2_anom_prev_3
            btdiff_2_anom_diff += \
                (btdiff_2_anom - btdiff_2_anom_prev_2)
            btdiff_2_anom_diff += \
                (btdiff_2_anom - btdiff_2_anom_prev)
        else:
            btdiff_2_anom_diff = np.zeros((btdiff_2_anom.shape))

        if date_i == 0:
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        elif date_i == 1:
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        elif date_i == 2:
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        elif date_i > 2:
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)

        if daily_clouds:
            clouds_now_regridded = clouds_now_regridded > 1

        lat_grad, lon_grad = np.gradient(btdiff_2_anom)
        total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
        convolution = scipy.signal.convolve2d(clouds_now_regridded,
                                              np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0
        total_grad[clouds_now == 1] = np.nan

        ### PASS I ###
        # In the FIRST PASS the LORD sayeth unto the image, 'Let all
        # whose BTD is below -10K be classified as CPOs, and remove the
        # tiny ones'
        # And those who fulfilled this condition were classified,
        # and it was good

        convolution = scipy.signal.convolve2d(clouds_now,
                                              np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0

        btdiff_2_anom_diff_um = deepcopy(btdiff_2_anom_diff)
        btdiff_2_anom_diff[clouds_now > 0] = np.nan

        cpo_mask_pass_1 = btdiff_2_anom_diff < -7

        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)

        sizes = np.bincount(label_objects.ravel())
        # Set clusters smaller than size 20 to zero
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # In the SECOND PASS the LORD sayeth unto the image, 'Let all
        # those included in the first pass which contain pixels which
        # are below -15K be classified'
        # And those who fulfilled this condition were classified,
        # and it was better

        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)

        # Label the image and get all connected elements
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)

        # Then loop through each labeled blob and find if one of the
        # pixels has a -15K. If it does, the whole blob is a freaking CPO.
        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]

        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_pass_1 == blob_ids[i]
            # First check if this region is within the original
            # time-gradient identified region (i.e. not one introduced
            # with the new generous gradient checker)
            if np.any(btdiff_2_anom_diff[target_region == 1] < -15):
                # Next check if there is a generous gradient contained
                # within this region somewhere
                pass
            else:
                cpo_mask_pass_2[target_region == 1] = 0

        # For identified CPO regions, undo the convolution on the cloud
        # mask

        cpo_mask_um = btdiff_2_anom_diff_um < -7
        # Label the image and get all connected elements
        cpo_mask_um, num = measurements.label(cpo_mask_um)

        # Then loop through each labeled blob and find if one of the
        # pixels has a -15K. If it does, the whole blob is a freaking CPO.
        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]

        if 1 in cpo_mask_pass_2:
            for i in np.arange(0, len(blob_ids)):
                target_region = cpo_mask_um == blob_ids[i]
                if 1 in cpo_mask_pass_2[target_region]:
                    #if np.any(cpo_mask_pass_2[target_region == 1] == 1):
                    cpo_mask_pass_2[target_region] = 1

        extent = (np.min(orig_lons), np.max(orig_lons), np.min(orig_lats),
                  np.max(orig_lats))
        m = Basemap(projection='cyl',
                    llcrnrlon=extent[0],
                    urcrnrlon=extent[1],
                    llcrnrlat=extent[2],
                    urcrnrlat=extent[3],
                    resolution='i')

        m.drawcoastlines(linewidth=0.5)
        m.drawcountries(linewidth=0.5)
        parallels = np.arange(10., 40, 2.)
        # labels = [left,right,top,bottom]
        m.drawparallels(parallels,
                        labels=[False, True, True, False],
                        linewidth=0.5)
        meridians = np.arange(-20., 17., 4.)
        m.drawmeridians(meridians,
                        labels=[True, False, False, True],
                        linewidth=0.5)

        min = 5
        max = -15

        levels = MaxNLocator(nbins=15).tick_values(min, max)

        cmap = cm.get_cmap('Blues_r')
        norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)

        #print np.unique(btdiff_2_anom_diff[np.isfinite(btdiff_2_anom_diff)])

        m.pcolormesh(orig_lons,
                     orig_lats,
                     btdiff_2_anom_diff,
                     cmap=cmap,
                     vmin=min,
                     vmax=max,
                     norm=norm)

        cbar = plt.colorbar(orientation='horizontal', fraction=0.056, pad=0.06)
        cbar.ax.set_xlabel('CPO mask pass 2')
        plt.tight_layout()
        plt.savefig('btdiff_' + datestrings[date_i] + '.png',
                    bbox_inches='tight')

        plt.close()

        return cpo_mask_pass_2, btdiff_2_anom_prev, btdiff_2_anom_prev_2,\
               btdiff_2_anom_prev_3

    else:
        if mesh:
            empty_arr = np.zeros((lats.shape[0], lons.shape[1]))
        else:
            empty_arr = np.zeros((lats.shape[0], lons.shape[0]))
        empty_arr[:] = np.nan
        return empty_arr, None, None, None
Example #7
0
                                                  for j in datetimes_7dayw])]

            bt_15day_087 = BT_15_days[:, 0]
            bt_15day_108 = BT_15_days[:, 1]
            bt_15day_120 = BT_15_days[:, 2]

            bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
            bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
            bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

            btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)

            bt087_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                                   cloud_lons, cloud_lats,
                                                   bt087)

            bt12_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                                  cloud_lons, cloud_lats, bt12)

            btdiff_2_15daymean_regridded = pinkdust.regrid_data(
                orig_lons, orig_lats, cloud_lons, cloud_lats,
                btdiff_2_15daymean)

            btdiff_2 = bt12_regridded - bt087_regridded
            btdiff_2_anom = btdiff_2 - btdiff_2_15daymean_regridded

            if btdiff_2_anom_prev_3 != None:

                btdiff_2_anom_diff = btdiff_2_anom - btdiff_2_anom_prev_3
Example #8
0
def wrapper(bt_120_108_anom_m_prev_1,
            bt_120_108_anom_m_prev_2,
            bt_120_108_anom_m_prev_3,
            bt_108_087_anom_m_prev_1,
            bt_108_087_anom_m_prev_2,
            bt_108_087_anom_m_prev_3,
            bt_120_087_anom_m_prev_1,
            bt_120_087_anom_m_prev_2,
            bt_120_087_anom_m_prev_3,
            datetimes,
            datestrings,
            date_i,
            lons,
            lats,
            cloud_lons,
            cloud_lats,
            daily_clouds=False,
            double_digits=False,
            mesh=False,
            daily_bt=False):
    """Detect cold pool outflow (CPO) regions for one SEVIRI timestep.

    Loads brightness temperatures (BTs), a cloud mask and an SDF dust mask
    for ``datetimes[date_i]``, computes cloud-masked BT channel-difference
    anomalies against a 15-day running mean, differences them against the
    previous three timesteps, and applies a two-pass blob classification
    (threshold + size filter, then a colder-core requirement) to build the
    CPO mask.

    Parameters
    ----------
    bt_*_anom_m_prev_1/2/3 : ndarray or None
        Cloud-masked anomaly fields from the previous three timesteps
        (``None`` until enough timesteps have been processed).
    datetimes, datestrings : sequences describing the run's timesteps.
    date_i : int
        Index of the timestep to process.
    lons, lats : target-grid coordinates (used only for the empty return).
    cloud_lons, cloud_lats : cloud-mask grid coordinates.
    daily_clouds, double_digits, mesh, daily_bt : bool processing switches.

    Returns
    -------
    On success a 13-tuple: ``(cpo_mask_pass_2,`` then each of the 120-108,
    108-087 and 120-087 masked anomaly fields with their three previous
    states``)``.  On failure (missing input files) a 4-tuple
    ``(all-NaN array, None, None, None)``.
    NOTE(review): the success and failure returns have different arity;
    callers must handle both shapes. Left unchanged here.

    NOTE(review): ``debug`` and ``show_all_indicators`` are read from
    module-level globals defined elsewhere in this file.
    """
    found_file = True
    bt = None  # netCDF BT dataset; set below when daily_bt is False

    if not daily_bt:
        # Try MSG2 first, then fall back to MSG1. The two original branches
        # differed only in the satellite name embedded in the path.
        for sat in ('MSG2', 'MSG1'):
            bt_path = ('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                       '0.03x0.03/bt'
                       '/nc/' + datetimes[date_i].strftime("%B").upper() +
                       str(datetimes[date_i].year) + '/H-000-' + sat +
                       '__-' + sat + '________-'
                       'IR_BrightnessTemperatures___'
                       '-000005___-' + datestrings[date_i] + '-__.nc')
            if os.path.isfile(bt_path):
                bt = Dataset(bt_path)
                found_file = True
                break
        else:
            # Neither satellite has a BT file for this timestep.
            found_file = False

    if daily_clouds:
        # Daily cloud-mask files: one netCDF per day; select this timestep.
        try:
            cloudmask = Dataset('/soge-home/projects/seviri_dust/raw_seviri_'
                                'data/cloudmask_nc/' +
                                datetimes[date_i].strftime("%B%Y") +
                                '/cloudmask_' +
                                datetimes[date_i].strftime("%Y%m%d") + '.nc')
            cloudmask_times = num2date(cloudmask.variables['time'][:],
                                       cloudmask.variables['time'].units)
            # Normalise to plain datetimes so equality against
            # datetimes[date_i] works regardless of the netCDF time class.
            cloudmask_times = np.asarray([
                datetime.datetime(j.year, j.month, j.day, j.hour, j.minute)
                for j in cloudmask_times
            ])
            cloudmask_bool = cloudmask_times == datetimes[date_i]
            clouds_now = cloudmask.variables['cloud_mask'][cloudmask_bool][0]
            # NOTE(review): this resets found_file to True even if the BT
            # file above was missing, in which case `bt` would be unbound
            # below — confirm the two inputs always coexist.
            found_file = True
        except Exception:
            print('Found no cloud mask file!')
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False

        sdf_root = '/soge-home/projects/seviri_dust/sdf/' \
                   + datetimes[date_i].strftime('%B') \
                   + str(datetimes[date_i].year) + '/'
        sdf_path = sdf_root + 'SDF_v2.' + datestrings[date_i] + '.nc'
        if os.path.isfile(sdf_path):
            sdf = Dataset(sdf_path)
            found_file = True
            sdf_now = sdf.variables['SDF'][:]
        else:
            print('No SDF file found for this date')
            found_file = False

    else:
        # 15-minute cloud-mask files (one per timestep) on their own grid,
        # which replaces the cloud_lons/cloud_lats passed in.
        try:
            cloudmask = Dataset(
                '/soge-home/data/satellite/meteosat/seviri/15-min/'
                '0.03x0.03/cloudmask'
                '/nc/' + datetimes[date_i].strftime("%B").upper() +
                str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.' +
                datestrings[date_i] + '.nc')
            clouds_now = cloudmask.variables['cmask'][:][0]
            cloud_lons = cloudmask.variables['lon'][:]
            cloud_lats = cloudmask.variables['lat'][:]
        except Exception:
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
            print('Found no cloud mask file!')

        sdf_root = '/soge-home/data_not_backed_up/satellite/meteosat' \
                   '/seviri/15' \
                   '-min/0.03x0.03/sdf/nc/' + \
                   datetimes[date_i].strftime('%B').upper() + \
                   str(datetimes[date_i].year) + '/SDF_v2/'
        sdf_path = sdf_root + 'SDF_v2.' + datestrings[date_i] + '.nc'
        if os.path.isfile(sdf_path):
            sdf = Dataset(sdf_path)
            found_file = True
            # Older SDF files store a single field; newer ones add a time
            # axis — presumably one step per file. TODO confirm.
            if 'time' in sdf.variables:
                sdf_now = sdf.variables['bt108'][0]
            else:
                sdf_now = sdf.variables['bt108'][:]
        else:
            print('No SDF file found for this date')
            found_file = False

    if found_file:

        if not daily_bt:
            # Instantaneous BTs come from the netCDF file found above.
            bt087 = bt.variables['bt087'][:][0]
            bt108 = bt.variables['bt108'][:][0]
            bt12 = bt.variables['bt120'][:][0]
            orig_lons = bt.variables['longitude'][:]
            orig_lats = bt.variables['latitude'][:]
            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        else:
            orig_lons = lons
            orig_lats = lats

        # The 15-day running window lives in a June-August archive padded by
        # 7 days on either side (times fixed at 00:45 / 23:45 file bounds).
        window_datetime_lower = datetime.datetime(datetimes[0].year,
                                                  6, 1, 0, 45) \
                                - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                                  8, 31, 23, 45) \
                                + datetime.timedelta(days=7)

        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(days=7)

        # Daily datetime objects across the padded window, all at this
        # timestep's hour and minute.
        time_params_7dayw = np.array([
            window_datetime_lower.year, window_datetime_upper.year,
            window_datetime_lower.month, window_datetime_upper.month,
            window_datetime_lower.day, window_datetime_upper.day,
            datetimes[date_i].hour, datetimes[date_i].hour,
            datetimes[date_i].minute, datetimes[date_i].minute
        ])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        # Archive indices of the window bounds and of the current timestep.
        indices = np.arange(0, len(datetimes_7dayw))
        lower_ind = indices[datetimes_7dayw == BT_15_day_lower_bound][0]
        upper_ind = indices[datetimes_7dayw == BT_15_day_upper_bound][0]
        current_ind = indices[datetimes_7dayw == datetimes[date_i]][0]

        # Read the 15-day slab and the current day's BTs from the
        # intermediary HDF archive. The two naming schemes differ only in
        # zero-padding of hour/minute; the reads were identical.
        if double_digits:
            hdf_path = ('/soge-home/projects/seviri_dust/sdf/'
                        'intermediary_files'
                        '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M') +
                        '.hdf')
        else:
            hdf_path = ('/soge-home/projects/seviri_dust/sdf/'
                        'intermediary_files'
                        '/bt_15d_' + str(datetimes[date_i].year) + '_' +
                        str(datetimes[date_i].hour) + '_' +
                        str(datetimes[date_i].minute) + '.hdf')
        f = tables.open_file(hdf_path)
        try:
            BT_15_days = f.root.data[lower_ind:upper_ind]
            bt_data = f.root.data[current_ind]
        finally:
            # Close even if the reads raise (original leaked the handle).
            f.close()

        if daily_bt:
            bt087 = bt_data[0]
            bt108 = bt_data[1]
            bt12 = bt_data[2]

        # Channel index convention in the archive: 0=8.7um, 1=10.8um,
        # 2=12.0um.
        bt_15day_087 = BT_15_days[:, 0]
        bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]

        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

        btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

        # Instantaneous channel differences and their anomalies from the
        # 15-day mean differences.
        bt_108_087 = bt108 - bt087
        bt_120_108 = bt12 - bt108
        bt_120_087 = bt12 - bt087

        bt_108_087_mean = bt_15day_108_mean - bt_15day_087_mean
        bt_120_108_mean = bt_15day_120_mean - bt_15day_108_mean
        bt_120_087_mean = bt_15day_120_mean - bt_15day_087_mean

        bt_108_087_anom = bt_108_087 - bt_108_087_mean
        bt_120_108_anom = bt_120_108 - bt_120_108_mean
        bt_120_087_anom = bt_120_087 - bt_120_087_mean

        if mesh:
            cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)

        # Put the cloud mask on the BT grid.
        clouds_now_regridded = pinkdust.regrid_data(cloud_lons,
                                                    cloud_lats,
                                                    orig_lons,
                                                    orig_lats,
                                                    clouds_now,
                                                    mesh=True)

        bt_108_087_anom_m = deepcopy(bt_108_087_anom)
        bt_120_087_anom_m = deepcopy(bt_120_087_anom)
        bt_120_108_anom_m = deepcopy(bt_120_108_anom)

        # Dust (SDF == 1) overrides cloud: dusty pixels are not masked out.
        clouds_now_regridded[sdf_now == 1] = 0

        bt_108_087_anom_m[clouds_now_regridded == 1] = np.nan
        bt_120_087_anom_m[clouds_now_regridded == 1] = np.nan
        bt_120_108_anom_m[clouds_now_regridded == 1] = np.nan

        # BUGFIX: the original tested `!= None`, which on a numpy array is
        # an elementwise comparison and makes `if` raise ValueError once the
        # previous anomaly fields are arrays. Identity is the correct test.
        if bt_108_087_anom_m_prev_1 is not None:

            # Sum of backward differences against the previous three
            # timesteps, per anomaly field.
            arra = (bt_120_087_anom_m - bt_120_087_anom_m_prev_3) + (
                bt_120_087_anom_m - bt_120_087_anom_m_prev_2) + (
                    bt_120_087_anom_m - bt_120_087_anom_m_prev_1)

            arrb = (bt_120_108_anom_m - bt_120_108_anom_m_prev_3) + (
                bt_120_108_anom_m - bt_120_108_anom_m_prev_2) + (
                    bt_120_108_anom_m - bt_120_108_anom_m_prev_1)

            arrc = (bt_108_087_anom_m - bt_108_087_anom_m_prev_3) + (
                bt_108_087_anom_m - bt_108_087_anom_m_prev_2) + (
                    bt_108_087_anom_m - bt_108_087_anom_m_prev_1)

            detected_bt = (arrc - arrb) + (arra - arrb)

        else:
            detected_bt = np.zeros(bt_108_087_anom.shape)

        # Roll the previous-timestep state forward for the next call.
        if date_i == 0:
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)
        elif date_i == 1:
            bt_120_087_anom_m_prev_2 = deepcopy(bt_120_087_anom_m_prev_1)
            bt_120_108_anom_m_prev_2 = deepcopy(bt_120_108_anom_m_prev_1)
            bt_108_087_anom_m_prev_2 = deepcopy(bt_108_087_anom_m_prev_1)
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)
        else:
            # date_i >= 2 (the original == 2 and > 2 branches were
            # byte-identical, so they are merged here).
            bt_120_087_anom_m_prev_3 = deepcopy(bt_120_087_anom_m_prev_2)
            bt_120_108_anom_m_prev_3 = deepcopy(bt_120_108_anom_m_prev_2)
            bt_108_087_anom_m_prev_3 = deepcopy(bt_108_087_anom_m_prev_2)
            bt_120_087_anom_m_prev_2 = deepcopy(bt_120_087_anom_m_prev_1)
            bt_120_108_anom_m_prev_2 = deepcopy(bt_120_108_anom_m_prev_1)
            bt_108_087_anom_m_prev_2 = deepcopy(bt_108_087_anom_m_prev_1)
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)

        if daily_clouds:
            # Daily cloud masks hold values up to 2; binarise to cloudy>1.
            clouds_now_regridded = clouds_now_regridded > 1

        ### PASS I ###
        # Classify pixels whose cumulative anomaly drop is below -6 K as
        # candidate CPOs, then remove tiny clusters.
        # NOTE: Why cloud mask here? What if there was a strong cloud
        # gradient in the previous three timesteps which disappeared in this
        # one but still pushed us over the threshold? It wouldn't be cloud
        # masked.
        detected_bt_um = deepcopy(detected_bt)  # unmasked copy, used below
        #if cloud_mask:
        #   detected_bt[clouds_now > 0] = np.nan

        cpo_mask_pass_1 = detected_bt < -6

        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)

        sizes = np.bincount(label_objects.ravel())
        # Drop clusters of 20 pixels or fewer; label 0 is background.
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # Keep only pass-1 blobs containing at least one pixel below -20 K.
        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)

        # Label the image and get all connected elements.
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)

        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]

        for blob_id in blob_ids:
            target_region = cpo_mask_pass_1 == blob_id
            # Blobs with no sufficiently cold core are removed.
            if not np.any(detected_bt[target_region == 1] < -20):
                cpo_mask_pass_2[target_region == 1] = 0

        # Grow surviving CPOs back out to the footprint of the unmasked
        # detection field (undo the cloud-mask clipping on accepted blobs).
        cpo_mask_um = detected_bt_um < -6
        cpo_mask_um, num = measurements.label(cpo_mask_um)

        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]

        if 1 in cpo_mask_pass_2:
            for blob_id in blob_ids:
                target_region = cpo_mask_um == blob_id
                if 1 in cpo_mask_pass_2[target_region]:
                    cpo_mask_pass_2[target_region] = 1

        def _draw_map():
            # Cylindrical map over the BT grid with coastlines, borders and
            # labelled graticules; shared by the diagnostic plots below.
            extent = (np.min(orig_lons), np.max(orig_lons),
                      np.min(orig_lats), np.max(orig_lats))
            m = Basemap(projection='cyl',
                        llcrnrlon=extent[0],
                        urcrnrlon=extent[1],
                        llcrnrlat=extent[2],
                        urcrnrlat=extent[3],
                        resolution='i')
            m.drawcoastlines(linewidth=0.5)
            m.drawcountries(linewidth=0.5)
            parallels = np.arange(10., 40, 2.)
            # labels = [left,right,top,bottom]
            m.drawparallels(parallels,
                            labels=[False, True, True, False],
                            linewidth=0.5)
            meridians = np.arange(-20., 17., 4.)
            m.drawmeridians(meridians,
                            labels=[True, False, False, True],
                            linewidth=0.5)
            return m

        if debug:
            # Diagnostic plot of the detection field.
            m = _draw_map()
            vmin = -20
            vmax = 5
            levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
            cmap = cm.get_cmap('Blues_r')
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
            m.pcolormesh(orig_lons,
                         orig_lats,
                         detected_bt,
                         cmap=cmap,
                         vmin=vmin,
                         vmax=vmax,
                         norm=norm)
            cbar = plt.colorbar(orientation='horizontal',
                                fraction=0.056,
                                pad=0.06)
            cbar.ax.set_xlabel('BTdiff 2 anom diff')
            plt.tight_layout()
            plt.savefig('BTdiff_2_anom_diff' + datestrings[date_i] + '.png',
                        bbox_inches='tight')
            plt.close()

            # Diagnostic plot of the raw 10.8um BT.
            m = _draw_map()
            vmin = 180
            vmax = 320
            levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
            cmap = cm.get_cmap('Blues_r')
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
            m.pcolormesh(orig_lons,
                         orig_lats,
                         bt108,
                         cmap=cmap,
                         vmin=vmin,
                         vmax=vmax,
                         norm=norm)
            cbar = plt.colorbar(orientation='horizontal',
                                fraction=0.056,
                                pad=0.06)
            cbar.ax.set_xlabel('BT 10.8')
            plt.tight_layout()
            plt.savefig('BT108_' + datestrings[date_i] + '.png',
                        bbox_inches='tight')
            plt.close()

        if show_all_indicators:
            # Plot every candidate CPO indicator field. The channel
            # differences and their anomalies computed above are reused
            # unchanged (the original recomputed identical values here).
            bt_087_anom = bt087 - bt_15day_087_mean
            bt_108_anom = bt108 - bt_15day_108_mean
            bt_120_anom = bt12 - bt_15day_120_mean

            def _grad_mag(field):
                # Magnitude of the spatial gradient of `field`.
                lat_grad, lon_grad = np.gradient(field)
                return np.sqrt(lat_grad ** 2 + lon_grad ** 2)

            grad_108_087_anom = _grad_mag(bt_108_087_anom)
            grad_120_108_anom = _grad_mag(bt_120_108_anom)
            grad_120_087_anom = _grad_mag(bt_120_087_anom)
            grad_087_anom = _grad_mag(bt_087_anom)
            grad_108_anom = _grad_mag(bt_108_anom)
            grad_120_anom = _grad_mag(bt_120_anom)

            indicators = [
                bt087, bt108, bt12, bt_108_087_anom, bt_120_108_anom,
                bt_120_087_anom, grad_108_087_anom, grad_120_108_anom,
                grad_120_087_anom, grad_087_anom, grad_108_anom, grad_120_anom
            ]

            m = _draw_map()

            # Per-indicator colour ranges and output-file labels.
            mins = [180, 180, 180, -13, -13, -13, 0, 0, 0, 0, 0, 0]
            maxs = [320, 320, 320, 6, 6, 6, 9, 9, 9, 50, 50, 50]
            labels = [
                'bt087', 'bt108', 'bt120', 'bt_108_087_anom',
                'bt_120_108_anom', 'bt_120_087_anom', 'grad_108_087_anom',
                'grad_120_108_anom', 'grad_120_087_anom', 'grad_087_anom',
                'grad_108_anom', 'grad_120_anom'
            ]

            for indicator, vmin, vmax, label in zip(indicators, mins, maxs,
                                                    labels):
                levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
                cmap = cm.get_cmap('Blues_r')
                norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
                plot = m.pcolormesh(orig_lons,
                                    orig_lats,
                                    indicator,
                                    cmap=cmap,
                                    vmin=vmin,
                                    vmax=vmax,
                                    norm=norm)
                cbar = plt.colorbar(orientation='horizontal',
                                    fraction=0.056,
                                    pad=0.06)
                cbar.ax.set_xlabel(label)
                plt.tight_layout()
                plt.savefig(label + '_' + datestrings[date_i] + '.png',
                            bbox_inches='tight')
                # Reuse the same map for the next indicator.
                cbar.remove()
                plot.remove()

        return cpo_mask_pass_2, bt_120_108_anom_m, \
               bt_120_108_anom_m_prev_1, bt_120_108_anom_m_prev_2, \
               bt_120_108_anom_m_prev_3, bt_108_087_anom_m,\
               bt_108_087_anom_m_prev_1, bt_108_087_anom_m_prev_2, \
               bt_108_087_anom_m_prev_3, bt_120_087_anom_m,\
               bt_120_087_anom_m_prev_1, bt_120_087_anom_m_prev_2, \
               bt_120_087_anom_m_prev_3
    else:
        # No usable input: return an all-NaN field on the target grid.
        if mesh:
            empty_arr = np.zeros((lats.shape[0], lons.shape[1]))
        else:
            empty_arr = np.zeros((lats.shape[0], lons.shape[0]))
        empty_arr[:] = np.nan
        # NOTE(review): arity differs from the success return (4 vs 13).
        return empty_arr, None, None, None
# Пример #9 (Example #9) — scraped-example separator; original score line: 0
def wrapper(i):
    """Accumulate cloud-occurrence counts for June-August of year ``i``.

    Iterates every 15-minute timestep of JJA for the given year, reads the
    appropriate cloud-mask file and increments the module-level
    ``data_array`` counter wherever cloud is present.  At ``date_i == 100``
    it snapshots ``data_array`` to disk and renders a frequency map.

    NOTE(review): relies on module-level globals defined elsewhere in this
    file — ``years``, ``year_cm_roots``, ``root2``, ``root2_lons``,
    ``root2_lats``, ``target_lons``, ``target_lats`` and ``data_array`` —
    confirm they are initialised before calling.
    """

    # Bounds for JJA of the single year i, at 15-minute resolution.
    year_lower = i
    year_upper = i
    month_lower = 6
    month_upper = 8
    day_lower = 1
    day_upper = 31
    hour_lower = 0
    hour_upper = 23
    minute_lower = 0
    minute_upper = 45

    time_params = np.array([year_lower, year_upper, month_lower,
                            month_upper, day_lower, day_upper,
                            hour_lower, hour_upper, minute_lower,
                            minute_upper])

    datetimes = utilities.get_datetime_objects(time_params)

    for date_i in np.arange(0, len(datetimes)):
        date = datetimes[date_i]
        print date
        # Pick the cloud-mask root directory configured for this year.
        year_bool = np.asarray(years == date.year)
        root = np.asarray(year_cm_roots)[year_bool][0]
        if root == root2:
            # We're using TCH's daily cloudmask files. These are values from
            #  0-2 (I think), and need selecting and regridding.
            try:
                clouddata = Dataset(root+date.strftime("%B%Y")+'/cloudmask_' + datetimes[
                        date_i].strftime("%Y%m%d") + '.nc')
                cloudmask_times = num2date(clouddata.variables['time'][:],
                                           clouddata.variables['time'].units)
                # Normalise to plain datetimes so equality with `date` works
                # regardless of the netCDF time class.
                cloudmask_times = np.asarray([dt.datetime(j.year, j.month,
                                                          j.day, j.hour,
                                                          j.minute) for j
                                              in cloudmask_times])
                cloudmask_bool = cloudmask_times == date
                clouds_data = clouddata.variables['cloud_mask'][cloudmask_bool]
                clouds_now = clouds_data#[cloudmask_bool]

                # Regrid onto the target grid; values > 1 are counted as
                # cloud — presumably 2 means confidently cloudy. TODO confirm.
                clouds_now_regridded = pinkdust.regrid_data(root2_lons, root2_lats,
                                                            target_lons,
                                                            target_lats,
                                                            clouds_now, mesh=True)
                data_array[clouds_now_regridded > 1] += 1
            except:
                # Best-effort: missing timesteps are skipped, not fatal.
                print 'No cloud data for', date

        else:
            # We're using Ian's original cloud mask files. These are just a
            # binary one or zero and won't be regridded.
            try:
                clouddata = Dataset(root+
                    datetimes[date_i].strftime("%B").upper(
                    ) + str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.'
                    + datetimes[
                        date_i].strftime("%Y%m%d%H%M") + '.nc')
                clouds_now = clouddata.variables['cmask'][:][0]
                data_array[clouds_now == 1] += 1
            except:
                # Best-effort: missing timesteps are skipped, not fatal.
                print 'No cloud data for', date

        # Periodic checkpoint: save the counts and plot a frequency map.
        # NOTE(review): fires only at exactly date_i == 100, not every 100
        # steps — confirm this is intentional.
        if date_i == 100:
            print data_array
            np.save('cloudcover_array', data_array)

            extent = (
            np.min(target_lons), np.max(target_lons), np.min(target_lats),
            np.max(target_lats))
            m = Basemap(projection='cyl', llcrnrlon=extent[0],
                        urcrnrlon=extent[1],
                        llcrnrlat=extent[2], urcrnrlat=extent[3],
                        resolution='i')

            m.drawcoastlines(linewidth=0.5)
            m.drawcountries(linewidth=0.5)
            parallels = np.arange(10., 40, 2.)
            # labels = [left,right,top,bottom]
            m.drawparallels(parallels, labels=[False, True, True, False],
                            linewidth=0.5)
            meridians = np.arange(-20., 17., 4.)
            m.drawmeridians(meridians, labels=[True, False, False, True],
                            linewidth=0.5)

            # Colour range for the occurrence counts.
            min = 0
            max = 70000

            levels = MaxNLocator(nbins=15).tick_values(min, max)

            # discrete_cmap = utilities.cmap_discretize(cm.RdYlBu_r, 10)
            cmap = cm.get_cmap('RdYlBu_r')
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)

            # Mask NaNs so they render as background rather than colours.
            data_array = np.ma.masked_where(np.isnan(data_array),
                                            data_array)

            m.pcolormesh(target_lons, target_lats, data_array,
                         cmap=cmap, norm=norm, vmin=min, vmax=max)

            cbar = plt.colorbar(orientation='horizontal', fraction=0.056,
                                pad=0.06)
            cbar.ax.set_xlabel('Counts of cloud occurrence')
            plt.tight_layout()
            plt.savefig('CPO_multiyear_cloud_frequency_2004_2012.png',
                        bbox_inches='tight')

            plt.close()
# Пример #10 (Example #10) — scraped-example separator; original score line: 0
def wrapper(datetimes,
            datestrings,
            date_i,
            lons,
            lats,
            cloud_lons,
            cloud_lats,
            daily_clouds=False,
            double_digits=False,
            mesh=False,
            daily_bt=False):
    """Load SEVIRI brightness temperatures, a cloud mask and an SDF field
    for the timestep ``datetimes[date_i]`` and derive dust indicators.

    Parameters
    ----------
    datetimes : sequence of datetime.datetime
        All timesteps being processed; element ``date_i`` is the current one.
    datestrings : sequence of str
        Timestamp strings (used to build file names, e.g. '%Y%m%d%H%M%S' style
        -- presumably matching ``datetimes`` one-to-one; verify against caller).
    date_i : int
        Index of the current timestep into ``datetimes``/``datestrings``.
    lons, lats : ndarray
        Target BT grid coordinates; only used directly when ``daily_bt`` is True.
    cloud_lons, cloud_lats : ndarray
        Cloud-mask grid coordinates (1-D if ``mesh`` is True, since they are
        meshgridded below; otherwise assumed already 2-D).
    daily_clouds : bool
        If True, read the cloud mask from daily netCDF files under
        /soge-home/projects/seviri_dust; otherwise from per-timestep
        eumetsat.cloud files.
    double_digits : bool
        Selects which filename convention is used for the 15-day BT
        intermediary HDF file (zero-padded '%Y_%H_%M' vs unpadded ints).
    mesh : bool
        If True, ``cloud_lons``/``cloud_lats`` are 1-D and get meshgridded.
    daily_bt : bool
        If True, BTs come from the 15-day intermediary HDF instead of the
        per-timestep netCDF files.

    Returns
    -------
    tuple
        (bt087, bt108, bt12, anomaly fields, gradient fields, masked anomaly
        fields, orig_lons, orig_lats, clouds_now_regridded, sdf_now).
        NOTE(review): several returned names (e.g. grad_* and *_anom_m) are
        only bound inside the ``show_all_indicators`` branch, and ``sdf_now``
        only when an SDF file was found -- if those conditions do not hold the
        return raises NameError/UnboundLocalError. Returns None implicitly
        when ``found_file`` is False.
    """

    # NOTE(review): used_ids, runtime and totaltest are computed but never
    # used in this function.
    used_ids = []

    runtime = datetimes[date_i] - datetimes[0]
    # print '\n' + datestrings[date_i] + '\n'
    totaltest = datetime.datetime.now()

    found_file = True

    # --- Brightness temperatures: try the MSG2 file first, then MSG1 ---
    if daily_bt == False:
        if os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                          '0.03x0.03/bt'
                          '/nc/' + datetimes[date_i].strftime("%B").upper() +
                          str(datetimes[date_i].year) + '/H-000-MSG2__'
                          '-MSG2________-'
                          'IR_BrightnessTemperatures___'
                          '-000005___-' + datestrings[date_i] + '-__.nc'):
            bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                         '0.03x0.03/bt'
                         '/nc/' + datetimes[date_i].strftime("%B").upper() +
                         str(datetimes[date_i].year) + '/H-000-MSG2__'
                         '-MSG2________-'
                         'IR_BrightnessTemperatures___'
                         '-000005___-' + datestrings[date_i] + '-__.nc')
            found_file = True
        elif os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                            '0.03x0.03/bt'
                            '/nc/' + datetimes[date_i].strftime("%B").upper() +
                            str(datetimes[date_i].year) + '/H-000-MSG1__'
                            '-MSG1________-'
                            'IR_BrightnessTemperatures___'
                            '-000005___-' + datestrings[date_i] + '-__.nc'):
            bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                         '0.03x0.03/bt'
                         '/nc/' + datetimes[date_i].strftime("%B").upper() +
                         str(datetimes[date_i].year) + '/H-000-MSG1__'
                         '-MSG1________-'
                         'IR_BrightnessTemperatures___'
                         '-000005___-' + datestrings[date_i] + '-__.nc')
            found_file = True
        else:
            found_file = False

    # --- Cloud mask + SDF: two alternative file layouts ---
    if daily_clouds:
        try:
            # Daily cloud-mask netCDF holds all timesteps of one day; select
            # the slice whose time matches the current timestep exactly.
            cloudmask = Dataset('/soge-home/projects/seviri_dust/raw_seviri_'
                                'data/cloudmask_nc/' +
                                datetimes[date_i].strftime("%B%Y") +
                                '/cloudmask_' +
                                datetimes[date_i].strftime("%Y%m%d") + '.nc')
            cloudmask_times = num2date(cloudmask.variables['time'][:],
                                       cloudmask.variables['time'].units)
            # Rebuild plain datetimes (num2date may return cftime-like
            # objects) so equality against datetimes[date_i] works.
            cloudmask_times = np.asarray([
                datetime.datetime(j.year, j.month, j.day, j.hour, j.minute)
                for j in cloudmask_times
            ])

            cloudmask_bool = cloudmask_times == datetimes[date_i]
            clouds_now = cloudmask.variables['cloud_mask'][cloudmask_bool][0]
            found_file = True
        except:
            # NOTE(review): bare except also hides programming errors; on any
            # failure the mask silently degrades to "no cloud anywhere".
            print 'Found no cloud mask file!'
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False

        sdf_root = '/soge-home/projects/seviri_dust/sdf/' \
                   + datetimes[date_i].strftime('%B') \
                   + str(datetimes[date_i].year) + '/'

        if os.path.isfile(sdf_root + 'SDF_v2.' + \
                                  datestrings[date_i] + '.nc'):

            sdf = Dataset(
                sdf_root + 'SDF_v2.' + \
                datestrings[date_i] + '.nc')
            found_file = True
            # print sdf
            sdf_now = sdf.variables['SDF'][:]
        else:
            print 'No SDF file found for this date'
            found_file = False

    else:
        try:
            # Per-timestep eumetsat cloud file; lon/lat are read from the
            # file itself, overriding the arguments.
            cloudmask = Dataset(
                '/soge-home/data/satellite/meteosat/seviri/15-min/'
                '0.03x0.03/cloudmask'
                '/nc/' + datetimes[date_i].strftime("%B").upper() +
                str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.' +
                datestrings[date_i] + '.nc')
            clouds_now = cloudmask.variables['cmask'][:][0]
            cloud_lons = cloudmask.variables['lon'][:]
            cloud_lats = cloudmask.variables['lat'][:]
        except:
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
            print 'Found no cloud mask file!'

        sdf_root = '/soge-home/data_not_backed_up/satellite/meteosat' \
                   '/seviri/15' \
                   '-min/0.03x0.03/sdf/nc/' + datetimes[date_i].strftime(
            '%B').upper() + str(datetimes[date_i].year) + '/SDF_v2/'
        if os.path.isfile(sdf_root + 'SDF_v2.' + \
                                  datestrings[date_i] + '.nc'):

            sdf = Dataset(
                sdf_root + 'SDF_v2.' + \
                datestrings[date_i] + '.nc')
            found_file = True
            # print sdf
            # Some SDF files carry a leading time dimension; pick accordingly.
            if 'time' in sdf.variables:
                sdf_now = sdf.variables['bt108'][0]
            else:
                sdf_now = sdf.variables['bt108'][:]
        else:
            print 'No SDF file found for this date'
            found_file = False

    if found_file:

        if daily_bt == False:
            # [:][0] drops the time dimension of each BT variable.
            bt087 = bt.variables['bt087'][:][0]
            bt108 = bt.variables['bt108'][:][0]
            bt12 = bt.variables['bt120'][:][0]
            orig_lons = bt.variables['longitude'][:]
            orig_lats = bt.variables['latitude'][:]
            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        else:
            orig_lons = lons
            orig_lats = lats

        # print bt12.shape
        # print clouds_now.shape

        # 15-day rolling window: June 1 minus 7 days through Aug 31 plus
        # 7 days, at the current time of day (hard-coded summer season).
        window_datetime_lower = datetime.datetime(datetimes[0].year,
                                                  6,
                                                  1,
                                                  0,
                                                  45) \
                                - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                                  8,
                                                  31,
                                                  23,
                                                  45) \
                                + datetime.timedelta(days=7)

        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([
            window_datetime_lower.year, window_datetime_upper.year,
            window_datetime_lower.month, window_datetime_upper.month,
            window_datetime_lower.day, window_datetime_upper.day,
            datetimes[date_i].hour, datetimes[date_i].hour,
            datetimes[date_i].minute, datetimes[date_i].minute
        ])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        # Map window bounds and the current timestep to row indices of the
        # intermediary HDF array (one row per day at this time of day).
        indices = np.arange(0, len(datetimes_7dayw))
        lower_ind = datetimes_7dayw == BT_15_day_lower_bound
        lower_ind = indices[lower_ind][0]
        upper_ind = datetimes_7dayw == BT_15_day_upper_bound
        upper_ind = indices[upper_ind][0]
        current_ind = datetimes_7dayw == datetimes[date_i]
        current_ind = indices[current_ind][0]

        if double_digits:
            # Zero-padded filename convention, e.g. bt_15d_2010_09_05.hdf.
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M') + '.hdf')
            BT_15_days = f.root.data[lower_ind:upper_ind]
            bt_data = f.root.data[current_ind]
            f.close()
        else:
            # Unpadded ints, e.g. bt_15d_2010_9_5.hdf.
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + str(datetimes[date_i].year) + '_' +
                str(datetimes[date_i].hour) + '_' +
                str(datetimes[date_i].minute) + '.hdf')
            BT_15_days = f.root.data[lower_ind:upper_ind]
            bt_data = f.root.data[current_ind]
            f.close()

        if daily_bt:
            # Channel order in the intermediary file: 8.7, 10.8, 12.0 um.
            bt087 = bt_data[0]
            bt108 = bt_data[1]
            bt12 = bt_data[2]

        bt_15day_087 = BT_15_days[:, 0]
        bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]

        # Climatological (15-day) mean per pixel, ignoring NaNs.
        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

        if mesh:
            cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)

        # Put the cloud mask onto the BT grid.
        clouds_now_regridded = pinkdust.regrid_data(cloud_lons,
                                                    cloud_lats,
                                                    orig_lons,
                                                    orig_lats,
                                                    clouds_now,
                                                    mesh=True)

        if daily_clouds:
            # Interpolation smears category values; >1 re-binarises the mask.
            clouds_now_regridded = clouds_now_regridded > 1

        # NOTE(review): show_all_indicators is a module-level global, not a
        # parameter; if it is False the return below raises, since the
        # indicator fields are only bound inside this branch.
        if show_all_indicators:

            # Channel differences for the current timestep...
            bt_108_087 = bt108 - bt087
            bt_120_108 = bt12 - bt108
            bt_120_087 = bt12 - bt087

            # ...and for the 15-day means.
            bt_108_087_mean = bt_15day_108_mean - bt_15day_087_mean
            bt_120_108_mean = bt_15day_120_mean - bt_15day_108_mean
            bt_120_087_mean = bt_15day_120_mean - bt_15day_087_mean

            # Anomalies: current difference minus its 15-day mean.
            bt_108_087_anom = bt_108_087 - bt_108_087_mean
            bt_120_108_anom = bt_120_108 - bt_120_108_mean
            bt_120_087_anom = bt_120_087 - bt_120_087_mean

            bt_087_anom = bt087 - bt_15day_087_mean
            bt_108_anom = bt108 - bt_15day_108_mean
            bt_120_anom = bt12 - bt_15day_120_mean

            # Gradient magnitude of each anomaly field (Euclidean norm of
            # the per-axis gradients).
            lat_grad, lon_grad = np.gradient(bt_108_087_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_108_087_anom = deepcopy(total_grad)

            lat_grad, lon_grad = np.gradient(bt_120_108_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_120_108_anom = deepcopy(total_grad)

            lat_grad, lon_grad = np.gradient(bt_120_087_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_120_087_anom = deepcopy(total_grad)

            lat_grad, lon_grad = np.gradient(bt_087_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_087_anom = deepcopy(total_grad)

            lat_grad, lon_grad = np.gradient(bt_108_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_108_anom = deepcopy(total_grad)

            lat_grad, lon_grad = np.gradient(bt_120_anom)
            total_grad = np.sqrt(lat_grad**2 + lon_grad**2)
            grad_120_anom = deepcopy(total_grad)

            # Copies that will additionally have cloudy pixels masked out.
            bt_108_087_anom_m = deepcopy(bt_108_087_anom)
            bt_120_087_anom_m = deepcopy(bt_120_087_anom)
            bt_120_108_anom_m = deepcopy(bt_120_108_anom)

            # Dust (SDF==1) overrides the cloud flag so dust pixels survive
            # the masking below.
            clouds_now_regridded[sdf_now == 1] = 0

            bt_108_087_anom_m[clouds_now_regridded == 1] = np.nan
            bt_120_087_anom_m[clouds_now_regridded == 1] = np.nan
            bt_120_108_anom_m[clouds_now_regridded == 1] = np.nan

        return bt087, bt108, bt12, bt_108_087_anom, bt_120_087_anom, \
               bt_120_108_anom, grad_087_anom, grad_108_anom, grad_120_anom,\
               grad_108_087_anom, grad_120_108_anom, grad_120_087_anom, \
               bt_108_087_anom_m, bt_120_087_anom_m, bt_120_108_anom_m, \
               orig_lons, orig_lats, clouds_now_regridded, sdf_now
Пример #11
0
def detect_cpo(btdiff_2_anom_prev, btdiff_2_anom_prev_2,
               btdiff_2_anom_prev_3, datetimes, datestrings, date_i):
    """Detect cold pool outflows (CPOs) at timestep ``datetimes[date_i]``
    from the time-differenced 12.0-8.7 um BT anomaly.

    Parameters
    ----------
    btdiff_2_anom_prev, btdiff_2_anom_prev_2, btdiff_2_anom_prev_3 :
        Anomaly fields from the previous one/two/three timesteps (None until
        enough history has accumulated); they are rotated and returned so the
        caller can thread them through successive calls.
    datetimes : sequence of datetime.datetime
        All timesteps being processed.
    datestrings : sequence of str
        Timestamp strings used in the data file names.
    date_i : int
        Index of the current timestep.

    Returns
    -------
    tuple
        (cpo_mask_pass_2, btdiff_2_anom_prev, btdiff_2_anom_prev_2,
        btdiff_2_anom_prev_3).  Returns None implicitly when no cloud-mask
        file was found (found_file False).
    """

    # Get lats and lons
    # A fixed sample SDF file is opened purely to obtain the BT grid.
    sdf_test = Dataset(
        '/soge-home/data_not_backed_up/satellite/meteosat/seviri'
        '/15-min/0.03x0.03/sdf/nc/JUNE2010/SDF_v2/SDF_v2.'
        '201006031500.nc')

    lons, lats = np.meshgrid(sdf_test.variables['longitude'][:],
                             sdf_test.variables['latitude'][:])

    # Get cloud lats and lons
    # Likewise a fixed sample cloud file provides the cloud-mask grid.
    cloud_test = Dataset(
            '/soge-home/data/satellite/meteosat/seviri/15-min/'
            '0.03x0.03/cloudmask'
            '/nc/'
            +
            'JUNE2010_CLOUDS/eumetsat.cloud.'
            + '201006031500.nc')
    cloud_lons = cloud_test.variables['lon'][:]
    cloud_lats = cloud_test.variables['lat'][:]
    cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)
    # Mask out fill coordinates (off-disc pixels are stored as huge values).
    lonmask = lons > 360
    latmask = lats > 90
    lons = np.ma.array(lons, mask=lonmask)
    lats = np.ma.array(lats, mask=latmask)
    # NOTE(review): used_ids, runtime and totaltest are never used below.
    used_ids = []

    runtime = datetimes[date_i] - datetimes[0]
    #print '\n' + datestrings[date_i] + '\n'
    totaltest = datetime.datetime.now()

    found_file = True

    # Try the MSG2 BT file first, then fall back to MSG1.
    # NOTE(review): if neither exists, ``bt`` stays unbound while found_file
    # remains True, so the access below raises NameError.
    if os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
        '0.03x0.03/bt'
        '/nc/'
        +
        datetimes[date_i].strftime("%B").upper(
        ) + str(datetimes[date_i].year) + '/H-000-MSG2__'
                                          '-MSG2________-'
                                          'IR_BrightnessTemperatures___'
                                          '-000005___-'
        + datestrings[date_i] +
        '-__.nc'):
        bt = Dataset(
            '/ouce-home/data/satellite/meteosat/seviri/15-min/'
            '0.03x0.03/bt'
            '/nc/'
            +
            datetimes[date_i].strftime("%B").upper(
            ) + str(datetimes[date_i].year) + '/H-000-MSG2__'
                                              '-MSG2________-'
                                              'IR_BrightnessTemperatures___'
                                              '-000005___-'
            + datestrings[date_i] +
            '-__.nc')
    elif os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/'
        '0.03x0.03/bt'
        '/nc/'
        +
        datetimes[date_i].strftime("%B").upper(
        ) + str(datetimes[date_i].year) + '/H-000-MSG1__'
                                          '-MSG1________-'
                                          'IR_BrightnessTemperatures___'
                                          '-000005___-'
        + datestrings[date_i] +
        '-__.nc'):
        bt = Dataset(
            '/ouce-home/data/satellite/meteosat/seviri/15-min/'
            '0.03x0.03/bt'
            '/nc/'
            +
            datetimes[date_i].strftime("%B").upper(
            ) + str(datetimes[date_i].year) + '/H-000-MSG1__'
                                              '-MSG1________-'
                                              'IR_BrightnessTemperatures___'
                                              '-000005___-'
            + datestrings[date_i] +
            '-__.nc')

    try:
        # Per-timestep cloud mask; grid coordinates come from the file.
        cloudmask = Dataset(
            '/soge-home/data/satellite/meteosat/seviri/15-min/'
            '0.03x0.03/cloudmask'
            '/nc/'
            +
            datetimes[date_i].strftime("%B").upper(
            ) + str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.'
            + datestrings[date_i] + '.nc')
        clouds_now = cloudmask.variables['cmask'][:][0]
        cloud_lons = cloudmask.variables['lon'][:]
        cloud_lats = cloudmask.variables['lat'][:]
    except:
        # NOTE(review): bare except also hides programming errors; falls back
        # to an all-clear mask and skips the timestep via found_file.
        clouds_now = np.zeros(cloud_lons.shape)
        found_file = False

    if found_file:
        # Produce 12-10.8 imagery
        # [:][0] drops the time dimension of each BT variable.
        bt087 = bt.variables['bt087'][:][0]
        bt12 = bt.variables['bt120'][:][0]
        bt108 = bt.variables['bt108'][:][0]

        orig_lons = bt.variables['longitude'][:]
        orig_lats = bt.variables['latitude'][:]

        # print bt12.shape
        # print clouds_now.shape

        # Load the precomputed 15-day BT stack for this year/month/time of
        # day (one row per day, channels along axis 1).
        f = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_' +
            str(datetimes[date_i].year) + '_' + str(
                datetimes[date_i].month) + '_' +
            str(datetimes[date_i].hour) + '_' + str(
                datetimes[date_i].minute) + '.hdf')
        arrobj = f.get_node('/data')
        bt_15day = arrobj.read()
        f.close()

        # The stored stack spans the full processing window padded by a week
        # on either side; select the +/- 7-day slice around the current day.
        window_datetime_lower = datetime.datetime(datetimes[0].year,
                                                  datetimes[0].month,
                                                  datetimes[0].day,
                                                  datetimes[0].hour,
                                                  datetimes[0].minute) \
                                - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                                  datetimes[-1].month,
                                                  datetimes[-1].day,
                                                  datetimes[-1].hour,
                                                  datetimes[-1].minute) \
                                + datetime.timedelta(days=7)

        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(
            days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(
            days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([window_datetime_lower.year,
                                      window_datetime_upper.year,
                                      window_datetime_lower.month,
                                      window_datetime_upper.month,
                                      window_datetime_lower.day,
                                      window_datetime_upper.day,
                                      datetimes[date_i].hour,
                                      datetimes[date_i].hour,
                                      datetimes[date_i].minute,
                                      datetimes[date_i].minute])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        # Boolean-select the rows falling inside the 15-day window.
        BT_15_days = \
            bt_15day[np.asarray([j >= BT_15_day_lower_bound
                                 and j <= BT_15_day_upper_bound
                                 for j in datetimes_7dayw])]

        bt_15day_087 = BT_15_days[:, 0]
        #bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]

        # Per-pixel 15-day means, ignoring NaNs.
        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        #bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

        # Climatological 12.0 - 8.7 um difference.
        btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

        orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)

        # Regrid BTs and the climatological difference onto the cloud grid.
        bt087_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                               cloud_lons,
                                               cloud_lats, bt087)

        bt12_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                              cloud_lons,
                                              cloud_lats, bt12)

        btdiff_2_15daymean_regridded = pinkdust.regrid_data(orig_lons,
                                                            orig_lats,
                                                            cloud_lons,
                                                            cloud_lats,
                                                            btdiff_2_15daymean)

        # Current-minus-climatology anomaly of the 12.0-8.7 difference.
        btdiff_2 = bt12_regridded - bt087_regridded
        btdiff_2_anom = btdiff_2 - btdiff_2_15daymean_regridded

        btdiff_2_anom[clouds_now > 0] = np.nan

        # NOTE(review): ``!= None`` on an ndarray is an elementwise
        # comparison, not an identity test -- ``is not None`` is presumably
        # the intent once history is populated; confirm against caller.
        if btdiff_2_anom_prev_3 != None:
            # Get the difference between this timestep and the one two
            # timesteps before
            btdiff_2_anom_diff = btdiff_2_anom - btdiff_2_anom_prev_3
            orig_btdiff_2_anom_diff = deepcopy(btdiff_2_anom_diff)
            # Accumulate differences against the two intervening timesteps
            # as well, emphasising sustained temporal drops.
            btdiff_2_anom_diff += \
                orig_btdiff_2_anom_diff - btdiff_2_anom_prev_2
            btdiff_2_anom_diff += \
                orig_btdiff_2_anom_diff - btdiff_2_anom_prev
        else:
            btdiff_2_anom_diff = np.zeros((btdiff_2_anom.shape))
        # Rotate the history buffers (prev_3 <- prev_2 <- prev <- current).
        if date_i == 0:
            btdiff_2_anom_prev = btdiff_2_anom
        elif date_i == 1:
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = btdiff_2_anom
        elif date_i == 2:
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        elif date_i > 2:
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)

        # Spatial gradient magnitude of the anomaly (computed but only
        # masked below; not used in the passes that follow).
        lat_grad, lon_grad = np.gradient(btdiff_2_anom)
        total_grad = np.sqrt(lat_grad ** 2 + lon_grad ** 2)
        # Dilate the cloud mask with a 5x5 box so pixels adjacent to cloud
        # are treated as cloudy.
        convolution = scipy.signal.convolve2d(clouds_now,
                                              np.ones((5,
                                                       5)),
                                              mode='same')
        clouds_now = convolution > 0
        total_grad[clouds_now == 1] = np.nan

        ### PASS I ###
        # In the FIRST PASS the LORD sayeth unto the image, 'Let all
        # whose BTD is below -10K be classified as CPOs, and remove the
        # tiny ones'
        # And those who fulfilled this condition were classified,
        # and it was good

        # Second 5x5 dilation, applied to the already-dilated mask.
        convolution = scipy.signal.convolve2d(clouds_now,
                                              np.ones((5,
                                                       5)),
                                              mode='same')
        clouds_now = convolution > 0

        # Keep an unmasked ('um') copy before blanking cloudy pixels.
        btdiff_2_anom_diff_um = deepcopy(btdiff_2_anom_diff)
        btdiff_2_anom_diff[clouds_now > 0] = np.nan

        # Threshold at -7 K, then drop connected components of <= 20 pixels.
        cpo_mask_pass_1 = btdiff_2_anom_diff < -7
        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)
        sizes = np.bincount(label_objects.ravel())
        # Set clusters smaller than size 20 to zero
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # In the SECOND PASS the LORD sayeth unto the image, 'Let all
        # those included in the first pass which contain pixels which
        # are below -15K be classified'
        # And those who fulfilled this condition were classified,
        # and it was better

        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)

        # Label the image and get all connected elements
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)

        # Then loop through each labeled blob and find if one of the
        # pixels has a -15K. If it does, the whole blob is a freaking CPO.
        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]

        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_pass_1 == blob_ids[i]
            # First check if this region is within the original
            # time-gradient identified region (i.e. not one introduced
            # with the new generous gradient checker)
            if np.any(btdiff_2_anom_diff[target_region == 1] < -15):
                # Next check if there is a generous gradient contained
                # within this region somewhere
                pass
            else:
                cpo_mask_pass_2[target_region == 1] = 0

        # For identified CPO regions, undo the convolution on the cloud
        # mask

        # Re-threshold the unmasked field and keep any blob overlapping a
        # confirmed pass-2 region, restoring pixels lost to cloud dilation.
        cpo_mask_um = btdiff_2_anom_diff_um < -7
        # Label the image and get all connected elements
        cpo_mask_um, num = measurements.label(cpo_mask_um)

        # Then loop through each labeled blob and find if one of the
        # pixels has a -15K. If it does, the whole blob is a freaking CPO.
        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]

        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_um == blob_ids[i]
            if np.any(cpo_mask_pass_2[target_region == 1] == 1):
                cpo_mask_pass_2[target_region == 1] = 1

        return cpo_mask_pass_2, btdiff_2_anom_prev, btdiff_2_anom_prev_2,\
               btdiff_2_anom_prev_3