Example no. 1
from sentinelhub import BBox, CRS, CustomUrlParam, MimeType, WmsRequest
from s2cloudless import CloudMaskRequest

# INSTANCE_ID (the Sentinel Hub instance id) is assumed to be defined at module level


def get_cloud(lefttoplon, lefttoplat, rightbtmlon, rightbtmlat, time):
    """
    Return cloud masks given a bounding box and time
    """
    if abs(lefttoplon) > 180 or abs(rightbtmlon) > 180:
        print("wrong longitude")
        return None
    if abs(lefttoplat) > 90 or abs(rightbtmlat) > 90:
        print("wrong latitude")
        return None

    bands_script = 'return [B01,B02,B04,B05,B08,B8A,B09,B10,B11,B12]'
    desired_coords_wgs84 = [lefttoplon, lefttoplat, rightbtmlon, rightbtmlat]
    desired_bbox = BBox(bbox=desired_coords_wgs84, crs=CRS.WGS84)

    wms_bands_request = WmsRequest(
        layer='TRUE_COLOR',
        custom_url_params={CustomUrlParam.EVALSCRIPT: bands_script},
        bbox=desired_bbox,
        time=time,
        width=100,
        height=100,
        image_format=MimeType.TIFF_d32f,
        instance_id=INSTANCE_ID)

    all_cloud_masks = CloudMaskRequest(ogc_request=wms_bands_request,
                                       threshold=0.4)
    cloud_dates = all_cloud_masks.get_dates()
    cloud_masks = all_cloud_masks.get_cloud_masks(threshold=0.4)

    return cloud_masks, cloud_dates
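
A minimal usage sketch, assuming a valid Sentinel Hub INSTANCE_ID is configured; the bounding box and date range below are placeholders, not values from the example:

result = get_cloud(-74.10, 4.70, -74.00, 4.60, time=('2019-01-01', '2019-01-31'))
if result is not None:
    masks, dates = result
    print('{} acquisitions, cloud-mask array of shape {}'.format(len(dates), masks.shape))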
Example no. 2
import datetime as dt

import numpy as np


def test_cloud_mask_request(input_params, stats, config, subtests):
    """ Integration tests for the CloudMaskRequest class, which interacts with the Sentinel Hub service
    """
    request = CloudMaskRequest(config=config, **input_params)

    masks = request.get_cloud_masks()
    _test_numpy_data(subtests,
                     masks,
                     exp_shape=stats['mask_shape'],
                     exp_dtype=np.int8,
                     exp_min=stats['clm_min'],
                     exp_max=stats['clm_max'],
                     exp_mean=stats['clm_mean'],
                     exp_median=stats['clm_median'],
                     delta=1e-4)

    prob_masks = request.get_probability_masks(non_valid_value=-50)
    _test_numpy_data(subtests,
                     prob_masks,
                     exp_shape=stats['mask_shape'],
                     exp_dtype=np.float64,
                     exp_min=stats['clp_min'],
                     exp_max=stats['clp_max'],
                     exp_mean=stats['clp_mean'],
                     exp_median=stats['clp_median'],
                     delta=1e-4)

    timestamps = request.get_timestamps()
    assert isinstance(timestamps, list)
    assert len(timestamps) == stats['mask_shape'][0]
    assert all(isinstance(timestamp, dt.datetime) for timestamp in timestamps)

    data = request.get_data()
    band_num = 13 if request.cloud_detector.all_bands else 10
    assert data.shape == stats['mask_shape'] + (band_num, )
    assert data.dtype == np.float32

    data_mask = request.get_data_mask()
    assert data_mask.shape == stats['mask_shape']
    assert data_mask.dtype == bool
Example no. 3
# Show the cloud probabilities for each image by date within the analysis range
fig = plt.figure(figsize=(15, 10))
n_cols = 4
n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))
for idx, [prob, mask, data] in enumerate(all_cloud_masks):
    ax = fig.add_subplot(n_rows, n_cols, idx + 1)
    image = wms_true_color_imgs[idx]
    Cloudless_tools.overlay_cloud_mask(image, mask, factor=1, fig=fig)
plt.tight_layout()
plt.savefig('output_clouds/' + analysis_area + '/real_and_cloud.png')
# Show the cloud masks for each image by date within the analysis range
fig = plt.figure(figsize=(15, 10))
n_cols = 4
n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))
for idx, cloud_mask in enumerate(
        all_cloud_masks.get_cloud_masks(
            threshold=0.35)):  # same call is repeated in the coverage computation below
    ax = fig.add_subplot(n_rows, n_cols, idx + 1)
    Cloudless_tools.plot_cloud_mask(cloud_mask, fig=fig)
plt.tight_layout()
plt.savefig('output_clouds/' + analysis_area + '/cloud_masks.png')
# Compute and extract the images whose cloud coverage is below x%
cld_per_idx = []
each_cld_mask = all_cloud_masks.get_cloud_masks(
    threshold=0.35)  # same call as in the plotting loop above
for cloud_mask in each_cld_mask:
    # fraction of pixels flagged as cloud
    cloud_perc = np.count_nonzero(cloud_mask == 1) / cloud_mask.size
    cld_per_idx.append(float(cloud_perc))
x = pd.DataFrame(cld_per_idx) < 0.6  # keep images with less than 60% cloud coverage
valid_dates = pd.DataFrame(all_cloud_masks.get_dates())[x[0]]
print("[INFO] valid dates ... {:f})".format(valid_dates))
def extract_surface_water_area_per_frame(dam_id, dam_poly, dam_bbox, date,
                                         resx, resy):
    """
    Run water detection algorithm for a single timestamp.
    """
    measurement = get_new_measurement_entry(dam_id, date,
                                            WaterDetectionSensor.S2_NDWI,
                                            S2_WATER_DETECTOR_VERSION)

    # initialise requests
    try:
        wcs_ndwi_request = WcsRequest(layer='NDWI',
                                      bbox=dam_bbox,
                                      time=date.strftime('%Y-%m-%d'),
                                      maxcc=S2_MAX_CC,
                                      resx=f'{resx}m',
                                      resy=f'{resy}m',
                                      image_format=MimeType.TIFF_d32f,
                                      time_difference=timedelta(hours=2),
                                      custom_url_params={
                                          CustomUrlParam.SHOWLOGO: False,
                                          CustomUrlParam.TRANSPARENT: True
                                      })

        cloudresx, cloudresy = get_optimal_cloud_resolution(resx, resy)
        wcs_bands_request = WcsRequest(layer='NDWI',
                                       bbox=dam_bbox,
                                       time=date.strftime('%Y-%m-%d'),
                                       maxcc=S2_MAX_CC,
                                       resx=f'{cloudresx}m',
                                       resy=f'{cloudresy}m',
                                       image_format=MimeType.TIFF_d32f,
                                       time_difference=timedelta(hours=2),
                                       custom_url_params={
                                           CustomUrlParam.EVALSCRIPT:
                                           S2_CLOUD_BANDS_SCRIPT_V3
                                       })

    except (RuntimeError, DownloadFailedException):
        set_measurement_status(measurement,
                               WaterDetectionStatus.SH_REQUEST_ERROR)
        return measurement

    # download NDWI
    try:
        ndwi = np.asarray(wcs_ndwi_request.get_data())
    except (DownloadFailedException, ImageDecodingError):
        set_measurement_status(measurement,
                               WaterDetectionStatus.SH_REQUEST_ERROR)
        return measurement

    if len(ndwi) == 0:
        set_measurement_status(measurement, WaterDetectionStatus.SH_NO_DATA)
        return measurement

    # check that the image has a sufficient fraction of valid pixels
    valid_pxs_frac = np.count_nonzero(ndwi[..., 1]) / np.size(ndwi[..., 1])
    if valid_pxs_frac < S2_MIN_VALID_FRACTION:
        del ndwi
        set_measurement_status(measurement, WaterDetectionStatus.INVALID_DATA)
        return measurement

    # run cloud detection
    try:
        all_cloud_masks = CloudMaskRequest(ogc_request=wcs_bands_request,
                                           threshold=0.4)
        cloud_mask = all_cloud_masks.get_cloud_masks()
    except (DownloadFailedException, ImageDecodingError):
        set_measurement_status(measurement,
                               WaterDetectionStatus.SH_REQUEST_ERROR)
        return measurement

    if len(cloud_mask) == 0:
        set_measurement_status(measurement,
                               WaterDetectionStatus.SH_NO_CLOUD_DATA)
        return measurement

    # check cloud coverage
    cloud_cov = np.count_nonzero(cloud_mask) / np.size(cloud_mask)
    if cloud_cov > S2_MAX_CLOUD_COVERAGE:
        del cloud_mask, all_cloud_masks
        set_measurement_status(measurement, WaterDetectionStatus.TOO_CLOUDY)
        return measurement

    measurement.CLOUD_COVERAGE = cloud_cov
    try:
        # run water detection algorithm
        result = get_water_level_optical(date,
                                         ndwi[0, ..., 0],
                                         dam_poly,
                                         dam_bbox,
                                         simplify=True)

        set_measurement_status(measurement,
                               WaterDetectionStatus.MEASUREMENT_VALID)
        measurement.SURF_WATER_LEVEL = result['water_level']
        measurement.GEOMETRY = result['geometry'].wkt
        measurement.ALG_STATUS = result['alg_status']

        del result
    except AttributeError:
        set_measurement_status(measurement,
                               WaterDetectionStatus.INVALID_POLYGON)

    del ndwi, cloud_mask, all_cloud_masks, wcs_ndwi_request, wcs_bands_request

    return measurement
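
A hypothetical invocation, assuming a dam polygon in WGS84; the constants and helpers referenced inside the function (S2_MAX_CC, get_new_measurement_entry, ...) come from the surrounding module:

from datetime import datetime
from shapely.geometry import Polygon
from sentinelhub import BBox, CRS

dam_poly = Polygon([(46.16, -16.15), (46.51, -16.15), (46.51, -15.58), (46.16, -15.58)])
dam_bbox = BBox(bbox=dam_poly.bounds, crs=CRS.WGS84)
measurement = extract_surface_water_area_per_frame(dam_id=1, dam_poly=dam_poly, dam_bbox=dam_bbox,
                                                   date=datetime(2019, 6, 1), resx=10, resy=10)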
Example no. 5
# cloud_detector, wms_bands, wms_bands_request, wms_true_color_imgs and cloud_probs
# are assumed to be defined in earlier notebook cells
cloud_masks = cloud_detector.get_cloud_masks(np.array(wms_bands))
image_idx = 0
overlay_cloud_mask(wms_true_color_imgs[image_idx], cloud_masks[image_idx])
plot_probability_map(wms_true_color_imgs[image_idx], cloud_probs[image_idx])
plot_cloud_mask(cloud_masks[image_idx])

all_cloud_masks = CloudMaskRequest(ogc_request=wms_bands_request,
                                   threshold=0.1)
fig = plt.figure(figsize=(15, 10))
n_cols = 4
n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))

for idx, [prob, mask, data] in enumerate(all_cloud_masks):
    ax = fig.add_subplot(n_rows, n_cols, idx + 1)
    image = wms_true_color_imgs[idx]
    overlay_cloud_mask(image, mask, factor=1, fig=fig)

plt.tight_layout()

all_cloud_masks.get_dates()

fig = plt.figure(figsize=(15, 10))
n_cols = 4
n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))

for idx, cloud_mask in enumerate(
        all_cloud_masks.get_cloud_masks(threshold=0.7)):
    ax = fig.add_subplot(n_rows, n_cols, idx + 1)
    plot_cloud_mask(cloud_mask, fig=fig)

plt.tight_layout()
Example no. 6
class SentinelHubTimelapse(object):
    """
    Class for creating timelapses with Sentinel-2 images using Sentinel Hub's Python library.
    """

    def __init__(self, project_name, bbox, time_interval, instance_id, full_size=(1920, 1080), preview_size=(455, 256),
                 cloud_mask_res=('60m', '60m'), use_atmcor=True, layer='TRUE_COLOR',
                 time_difference=datetime.timedelta(seconds=-1)):

        self.project_name = project_name
        self.preview_request = WmsRequest(data_folder=project_name + '/previews', layer=layer, bbox=bbox,
                                          time=time_interval, width=preview_size[0], height=preview_size[1],
                                          maxcc=1.0, image_format=MimeType.PNG, instance_id=instance_id,
                                          custom_url_params={CustomUrlParam.TRANSPARENT: True},
                                          time_difference=time_difference)

        self.fullres_request = WcsRequest(data_folder=project_name + '/fullres', layer=layer, bbox=bbox,
                                          time=time_interval, resx='10m', resy='10m',
                                          maxcc=1.0, image_format=MimeType.PNG, instance_id=instance_id,
                                          custom_url_params={CustomUrlParam.TRANSPARENT: True,
                                              CustomUrlParam.ATMFILTER: 'ATMCOR'} if use_atmcor else {CustomUrlParam.TRANSPARENT: True},
                                          time_difference=time_difference)

        wcs_request = WcsRequest(layer=layer, bbox=bbox, time=time_interval,
                                 resx=cloud_mask_res[0], resy=cloud_mask_res[1], maxcc=1.0,
                                 image_format=MimeType.TIFF_d32f, instance_id=instance_id,
                                 time_difference=time_difference, custom_url_params={CustomUrlParam.EVALSCRIPT:
                                                                                     MODEL_EVALSCRIPT})

        self.cloud_mask_request = CloudMaskRequest(wcs_request)

        self.transparency_data = None
        self.preview_transparency_data = None
        self.invalid_coverage = None

        self.dates = self.preview_request.get_dates()
        if not self.dates:
            raise ValueError('Input parameters are not valid. No Sentinel-2 images were found.')

        if self.dates != self.fullres_request.get_dates():
            raise ValueError('Lists of previews and full resolution images do not match.')

        if self.dates != self.cloud_mask_request.get_dates():
            raise ValueError('Lists of previews and cloud masks do not match.')

        self.mask = np.zeros((len(self.dates),), dtype=np.uint8)
        self.cloud_masks = None
        self.cloud_coverage = None

        self.full_res_data = None
        self.previews = None
        self.full_size = full_size
        self.timelapse = None

        LOGGER.info('Found %d images of %s between %s and %s.', len(self.dates), project_name,
                    time_interval[0], time_interval[1])

        LOGGER.info('\nStart by downloading the previews to check that\n'
                    'the BBOX is correct, the images are useful, etc.\n'
                    'Execute the get_previews() method on your object.\n')

    def get_previews(self, redownload=False):
        """
        Downloads previews and stores them in a numpy array, unless they were already downloaded and saved to disk.
        Set `redownload` to True to force downloading the previews again.
        """

        self.previews = np.asarray(self.preview_request.get_data(save_data=True, redownload=redownload))
        self.preview_transparency_data = self.previews[:,:,:,-1]

        LOGGER.info('%d previews have been downloaded and stored to numpy array of shape %s.', self.previews.shape[0],
                    self.previews.shape)

    def save_fullres_images(self, redownload=False):
        """
        Downloads and saves the full-resolution images used to produce the timelapse. Note that images for all
        available dates within the specified time interval are downloaded, even if some of them are later masked
        out, e.g. due to too high cloud coverage.
        """
        
        data4d = np.asarray(self.fullres_request.get_data(save_data=True, redownload=redownload))
        self.full_res_data = data4d[:,:,:,:-1]
        self.transparency_data = data4d[:,:,:,-1]

    def plot_preview(self, within_range=None, filename=None):
        """
        Plots all previews if within_range is None, or only previews in a given range.
        """
        within_range = CommonUtil.get_within_range(within_range, self.previews.shape[0])
        self._plot_image(self.previews[within_range[0]: within_range[1]] / 255., factor=1, filename=filename)

    def plot_cloud_masks(self, within_range=None, filename=None):
        """
        Plots all cloud masks if within_range is None, or only masks in a given range.
        """
        within_range = CommonUtil.get_within_range(within_range, self.cloud_masks.shape[0])
        self._plot_image(self.cloud_masks[within_range[0]: within_range[1]],
                         factor=1, cmap=plt.cm.binary, filename=filename)

    def _plot_image(self, data, factor=2.5, cmap=None, filename=None):
        rows = data.shape[0] // 5 + (1 if data.shape[0] % 5 else 0)
        aspect_ratio = (1.0 * data.shape[1]) / data.shape[2]
        fig, axs = plt.subplots(nrows=rows, ncols=5, figsize=(15, 3 * rows * aspect_ratio))
        for index, ax in enumerate(axs.flatten()):
            if index < data.shape[0] and index < len(self.dates):
                caption = str(index) + ': ' + self.dates[index].strftime('%Y-%m-%d')
                if self.cloud_coverage is not None:
                    caption = caption + '(' + "{0:2.0f}".format(self.cloud_coverage[index] * 100.0) + '%)'

                ax.set_axis_off()
                ax.imshow(data[index] * factor, cmap=cmap, vmin=0.0, vmax=1.0)
                ax.text(0, -2, caption, fontsize=12, color='r' if self.mask[index] else 'g')
            else:
                ax.set_axis_off()

        if filename:
            plt.savefig(self.project_name + '/' + filename, bbox_inches='tight')

    def _load_cloud_masks(self):
        """
        Loads masks from disk, if they already exist.
        """
        cloud_masks_filename = self.project_name + '/cloudmasks/cloudmasks.npy'

        if not os.path.isfile(cloud_masks_filename):
            return False

        with open(cloud_masks_filename, 'rb') as fp:
            self.cloud_masks = np.load(fp)
        return True

    def _save_cloud_masks(self):
        """
        Saves masks to disk.
        """
        cloud_masks_filename = self.project_name + '/cloudmasks/cloudmasks.npy'

        if not os.path.exists(self.project_name + '/cloudmasks'):
            os.makedirs(self.project_name + '/cloudmasks')

        with open(cloud_masks_filename, 'wb') as fp:
            np.save(fp, self.cloud_masks)

    def _run_cloud_detection(self, rerun, threshold):
        """
        Determines cloud masks for each acquisition.
        """
        loaded = self._load_cloud_masks()
        if loaded and not rerun:
            LOGGER.info('Nothing to do. Masks are loaded.')
        else:
            LOGGER.info('Downloading cloud data and running cloud detection. This may take a while.')
            self.cloud_masks = self.cloud_mask_request.get_cloud_masks(threshold=threshold)
            self._save_cloud_masks()

    def mask_cloudy_images(self, rerun=False, max_cloud_coverage=0.1, threshold=None):
        """
        Marks images whose cloud coverage exceeds ``max_cloud_coverage``. Those
        won't be used in timelapse.

        :param rerun: Whether to rerun cloud detector
        :type rerun: bool
        :param max_cloud_coverage: Limit on the cloud coverage of images forming timelapse, 0 <= maxcc <= 1.
        :type max_cloud_coverage: float
        :param threshold:  A float from [0,1] specifying cloud threshold
        :type threshold: float or None
        """
        self._run_cloud_detection(rerun, threshold)

        self.cloud_coverage = np.asarray([self._get_coverage(mask) for mask in self.cloud_masks])

        for index in range(0, len(self.mask)):
            if self.cloud_coverage[index] > max_cloud_coverage:
                self.mask[index] = 1



    def mask_invalid_images(self, max_invalid_coverage=0.1):
        """
        Marks images whose invalid area coverage exceeds ``max_invalid_coverage``. Those
        won't be used in timelapse.

        :param max_invalid_coverage: Limit on the invalid area coverage of images forming timelapse, 0 <= maxic <= 1.
        :type max_invalid_coverage: float
        """

        # low-res and hi-res images/cloud masks may differ, just to be safe
        coverage_fullres = np.asarray([1.0-self._get_coverage(mask) for mask in self.transparency_data])
        coverage_preview = np.asarray([1.0-self._get_coverage(mask) for mask in self.preview_transparency_data])

        self.invalid_coverage = np.array([max(x,y) for x,y in zip(coverage_fullres, coverage_preview)])
        
        for index in range(0, len(self.mask)):
            if self.invalid_coverage[index] > max_invalid_coverage:
                self.mask[index] = 1

    def mask_images(self, idx):
        """
        Manually mask images with the given indexes.
        """
        for index in idx:
            self.mask[index] = 1

    def unmask_images(self, idx):
        """
        Manually unmask images with the given indexes.
        """
        for index in idx:
            self.mask[index] = 0

    def create_date_stamps(self):
        """
        Create date stamps to be included to gif.
        """
        filtered = list(compress(self.dates, list(np.logical_not(self.mask))))

        if not os.path.exists(self.project_name + '/datestamps'):
            os.makedirs(self.project_name + '/datestamps')

        for date in filtered:
            TimestampUtil.create_date_stamp(date, filtered[0], filtered[-1],
                                            self.project_name + '/datestamps/' + date.strftime(
                                                "%Y-%m-%dT%H-%M-%S") + '.png')

    def create_timelapse(self, scale_factor=0.3):
        """
        Adds date stamps to full res images and stores them in timelapse subdirectory.
        """
        filtered = list(compress(self.dates, list(np.logical_not(self.mask))))

        if not os.path.exists(self.project_name + '/timelapse'):
            os.makedirs(self.project_name + '/timelapse')

        self.timelapse = [TimestampUtil.add_date_stamp(self._get_filename('fullres', date.strftime("%Y-%m-%dT%H-%M-%S")),
                                         self.project_name + '/timelapse/' + date.strftime(
                                             "%Y-%m-%dT%H-%M-%S") + '.png',
                                         self._get_filename('datestamps', date.strftime("%Y-%m-%dT%H-%M-%S")),
                                         scale_factor=scale_factor) for date in filtered]

    @staticmethod
    def _get_coverage(mask):
        coverage_pixels = np.count_nonzero(mask)
        return 1.0 * coverage_pixels / mask.size

    @staticmethod
    def _iso_to_datetime(date):
        """ Convert ISO 8601 time format to datetime format

        This function converts a date in ISO format, e.g. 2017-09-14 to a datetime instance, e.g.
        datetime.datetime(2017,9,14,0,0)

        :param date: date in ISO 8601 format
        :type date: str
        :return: datetime instance
        :rtype: datetime
        """
        chunks = list(map(int, date.split('T')[0].split('-')))
        return datetime.datetime(chunks[0], chunks[1], chunks[2])

    @staticmethod
    def _datetime_to_iso(date, only_date=True):
        """ Convert datetime format to ISO 8601 time format

        This function converts a date in datetime instance, e.g. datetime.datetime(2017,9,14,0,0) to ISO format,
        e.g. 2017-09-14

        :param date: datetime instance to convert
        :type date: datetime
        :param only_date: whether to return date only or also time information. Default is `True`
        :type only_date: bool
        :return: date in ISO 8601 format
        :rtype: str
        """
        if only_date:
            return date.isoformat().split('T')[0]
        return date.isoformat()

    @staticmethod
    def _diff_month(start_dt, end_dt):
        return (end_dt.year - start_dt.year) * 12 + end_dt.month - start_dt.month + 1

    @staticmethod
    def _get_month_list(start_dt, end_dt):
        month_names = {1: 'J', 2: 'F', 3: 'M', 4: 'A', 5: 'M', 6: 'J', 7: 'J', 8: 'A', 9: 'S', 10: 'O', 11: 'N',
                       12: 'D'}

        total_months = SentinelHubTimelapse._diff_month(start_dt, end_dt)
        all_months = list(rrule(MONTHLY, count=total_months, dtstart=start_dt))
        return [month_names[date.month] for date in all_months]

    def _get_filename(self, subdir, date):
        for filename in glob.glob(self.project_name + '/' + subdir + '/*'):
            if date in filename:
                return filename

        return None

    def _get_timelapse_images(self):
        if self.timelapse is None:
            data = np.array(self.fullres_request.get_data())[:,:,:,:-1]
            return [data[idx] for idx, _ in enumerate(data) if self.mask[idx] == 0]
        return self.timelapse

    def make_video(self, filename='timelapse.avi', fps=2, is_color=True, n_repeat=1):
        """
        Creates and saves a video of the timelapse (``timelapse.avi`` by default).
        :param filename: name of the output video file
        :type filename: str
        :param fps: frames per second
        :type fps: int
        :param is_color: whether the frames are written as color images
        :type is_color: bool
        :param n_repeat: number of times the frame sequence is repeated
        :type n_repeat: int
        """

        images = np.array([image[:,:,[2,1,0]] for image in self._get_timelapse_images()])

        if None in self.full_size:
            self.full_size = (int(images.shape[2]),int(images.shape[1]))

        fourcc = cv2.VideoWriter_fourcc(*"mp4v")
        video = cv2.VideoWriter(os.path.join(self.project_name, filename), fourcc, float(fps), self.full_size,
                                is_color)
                
        for _ in range(n_repeat):
            for image in images:
                video.write(image)
        video.write(images[-1])

        video.release()
        cv2.destroyAllWindows()

    def make_gif(self, filename='timelapse.gif', fps=3):
        """
        Creates and saves a GIF animation from timelapse into ``timelapse.gif``
        :param fps: frames per second
        :type fps: int
        """

        frames = []
        for image in self._get_timelapse_images():
            frames.append(Image.fromarray(image))

        # PIL's GIF writer expects a per-frame duration in milliseconds rather than fps
        frames[0].save(os.path.join(self.project_name, filename), save_all=True, append_images=frames[1:],
                       duration=int(1000 / fps), loop=0, optimize=False)
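
A sketch of an end-to-end run of the class above; the project name, bounding box, time interval and INSTANCE_ID are placeholders:

from sentinelhub import BBox, CRS

INSTANCE_ID = '<your-instance-id>'
bbox = BBox(bbox=[-5.05, 48.00, -5.00, 48.05], crs=CRS.WGS84)
timelapse = SentinelHubTimelapse('my_project', bbox, ('2018-01-01', '2018-06-30'), INSTANCE_ID)
timelapse.get_previews()                              # quick look at the available acquisitions
timelapse.save_fullres_images()                       # download the full-resolution frames
timelapse.mask_cloudy_images(max_cloud_coverage=0.1)  # drop acquisitions that are too cloudy
timelapse.mask_invalid_images()                       # drop acquisitions with too much no-data
timelapse.create_date_stamps()
timelapse.create_timelapse()
timelapse.make_gif('timelapse.gif', fps=3)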
Example no. 7
    def cloud_process(bounding_box, Date_Ini, Date_Fin, x_width, y_height,
                      analysis_area, clouds_folder, lote_aoi, municipio,
                      departamento):
        INSTANCE_ID = '3a63d637-11ad-493a-b921-91be7c4da68d'  #From Sentinel HUB Python Instance ID /change to dynamic user input
        LAYER_NAME = 'TRUE-COLOR-S2-L1C'  # e.g. TRUE-COLOR-S2-L1C
        # Get the images by date (within the range) inside the box of interest
        wms_true_color_request = WmsRequest(
            layer=LAYER_NAME,
            bbox=bounding_box,
            time=(Date_Ini, Date_Fin),  # change to the dates of interest
            width=x_width,
            height=y_height,
            image_format=MimeType.PNG,
            time_difference=datetime.timedelta(hours=2),
            instance_id=INSTANCE_ID)
        wms_true_color_imgs = wms_true_color_request.get_data()
        #Cloudless_tools.plot_previews(np.asarray(wms_true_color_imgs), wms_true_color_request.get_dates(), cols=4, figsize=(15, 10))

        # count zeros to estimate how empty each image is
        count_of_zeros = []
        for n in range(0, len(wms_true_color_imgs)):
            # zeros / 4 channels * width * height (pixels)
            count_of_zeros.append(
                (np.count_nonzero(wms_true_color_imgs[n] == 0)) /
                (4 * wms_true_color_imgs[n][:, :, 0].shape[0] *
                 wms_true_color_imgs[n][:, :, 0].shape[1]))

        # Compute cloud probabilities and obtain cloud masks
        bands_script = 'return [B01,B02,B04,B05,B08,B8A,B09,B10,B11,B12]'
        wms_bands_request = WmsRequest(
            layer=LAYER_NAME,
            custom_url_params={
                CustomUrlParam.EVALSCRIPT: bands_script,
                CustomUrlParam.ATMFILTER: 'NONE'
            },
            bbox=bounding_box,
            time=(Date_Ini, Date_Fin),
            width=x_width,
            height=y_height,
            image_format=MimeType.TIFF_d32f,
            time_difference=datetime.timedelta(hours=2),
            instance_id=INSTANCE_ID)
        wms_bands = wms_bands_request.get_data()
        #wms_bands_request.get_filename_list()
        #wms_bands_request.get_url_list()
        #wms_bands_request.get_dates()
        cloud_detector = S2PixelCloudDetector(
            threshold=0.35, average_over=8,
            dilation_size=3)  #change threshold to test
        #cloud_probs = cloud_detector.get_cloud_probability_maps(np.array(wms_bands))
        cloud_masks = cloud_detector.get_cloud_masks(np.array(wms_bands))
        all_cloud_masks = CloudMaskRequest(ogc_request=wms_bands_request,
                                           threshold=0.35)
        #cloud_masks = all_cloud_masks.get_cloud_masks()

        # Show the cloud probabilities for each image by date within the analysis range
        n_cols = 4
        n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))
        fig = plt.figure(figsize=(n_cols * 4,
                                  n_rows * 3))  #, constrained_layout=False
        for idx, [prob, mask, data] in enumerate(all_cloud_masks):
            ax = fig.add_subplot(n_rows, n_cols, idx + 1)
            image = wms_true_color_imgs[idx]
            Cloudless_tools.overlay_cloud_mask(image, mask, factor=1, fig=fig)
        plt.tight_layout()
        plt.savefig(clouds_folder + analysis_area + '/real_and_cloud.png')

        # Show the cloud masks for each image by date within the analysis range
        n_cols = 4
        n_rows = int(np.ceil(len(wms_true_color_imgs) / n_cols))
        fig = plt.figure(figsize=(n_cols * 4, n_rows * 3))
        #each_cld_mask = all_cloud_masks.get_cloud_masks(threshold=0.35)
        cld_per_idx = []
        for idx, cloud_mask in enumerate(
                all_cloud_masks.get_cloud_masks(threshold=0.35)):
            ax = fig.add_subplot(n_rows, n_cols, idx + 1)
            # where the image has no data, mark the pixel as cloud so it is not counted as clear
            cloud_mask[wms_true_color_imgs[idx][:, :, 0] == 0] = 1
            Cloudless_tools.plot_cloud_mask(cloud_mask, fig=fig)
            # fraction of pixels flagged as cloud
            cloud_perc = np.count_nonzero(cloud_mask == 1) / cloud_mask.size
            cld_per_idx.append(float(cloud_perc))
        plt.tight_layout()
        plt.savefig(clouds_folder + analysis_area + '/cloud_masks.png')

        # Compute and extract the images whose cloud coverage is below x%
        x = pd.DataFrame(
            cld_per_idx
        ) < 0.6  # less than 60% cloud coverage // or the lot is visible, TO ADD
        all_dates = pd.DataFrame(all_cloud_masks.get_dates())
        valid_dates = all_dates[x[0]]
        all_dates['year-month'] = all_dates[0].dt.to_period('M')
        all_dates['cld_percent'] = cld_per_idx
        all_dates['empty_percent'] = count_of_zeros
        all_dates = all_dates.rename(columns={0: 'dates'})
        #summary
        '''
        summary_clds = all_dates[['year-month','cld_percent','dates']].groupby('year-month').agg({'dates':lambda x: x.diff().mean(), 'cld_percent': ['count', lambda x: (x<0.6).sum(), lambda x: x.mean(), 'min']}) \
            .reset_index()
        '''
        def f_mi(x):
            d = []
            d.append(x['dates'].diff().mean())
            d.append(x['cld_percent'].count())
            d.append((x['cld_percent'] < 0.6).sum())
            d.append(x['cld_percent'].mean())
            d.append(x['cld_percent'].min())
            d.append(x[x['cld_percent'] < 0.6]['dates'].max())
            d.append(x[x['cld_percent'] < 0.6]['dates'].min())
            d.append(x['empty_percent'].max())
            d.append(x['empty_percent'].min())
            return pd.Series(d,
                             index=[
                                 'time_between_pass', 'count_pass',
                                 'clear_images', 'mean_cloud_cover',
                                 'min_cloud_cover', 'last_good_date',
                                 'first_good_date', 'max_empty_space',
                                 'min_empty_space'
                             ])  #

        summary_clds = all_dates.groupby('year-month').apply(f_mi)
        summary_clds['centroid_x'], summary_clds['centroid_y'], summary_clds[
            'terrain_name'], summary_clds['terrain_code'], summary_clds[
                'municipio'], summary_clds['departamento'] = lote_aoi['x'][
                    0], lote_aoi['y'][0], lote_aoi['name'][
                        0], analysis_area, municipio, departamento
        #export data
        summary_clds.to_csv(clouds_folder + analysis_area +
                            '/Analisis_nubes.csv',
                            index=True,
                            header=True)

        #filter clouds dataframe with only valid dates
        clouds_data = cloud_masks[x[0]]
        minIndex = cld_per_idx.index(min(cld_per_idx))
        best_date = valid_dates[valid_dates.index == minIndex]
        best_date = best_date.iloc[0, 0]

        # Show the cloud masks for each image with a valid date
        n_cols = 4
        n_rows = int(np.ceil(len(clouds_data) / n_cols))
        fig = plt.figure(figsize=(n_cols * 4, n_rows * 3))
        for idx, cloud_mask in enumerate(clouds_data):
            ax = fig.add_subplot(n_rows, n_cols, idx + 1)
            Cloudless_tools.plot_cloud_mask(cloud_mask, fig=fig)
        plt.tight_layout()
        plt.savefig(clouds_folder + analysis_area + '/cloud_masks_valid.png')

        clear_pct = len(valid_dates) / len(cld_per_idx)
        number_cld_analysis = len(cld_per_idx)
        return best_date, valid_dates, clouds_data, clear_pct, number_cld_analysis
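
A hypothetical call of cloud_process; the bounding box, dates, output folder and lote_aoi frame are placeholders, and the call is written as a plain function call even though the method is defined inside a class here:

import pandas as pd
from sentinelhub import BBox, CRS

bbox = BBox(bbox=[-75.60, 6.15, -75.55, 6.20], crs=CRS.WGS84)
lote_aoi = pd.DataFrame({'x': [-75.575], 'y': [6.175], 'name': ['lote_1']})
best_date, valid_dates, clouds_data, clear_pct, n_imgs = cloud_process(
    bbox, '2020-01-01', '2020-03-31', 512, 512,
    'lote_1', 'output_clouds/', lote_aoi, 'Medellin', 'Antioquia')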