Example #1
def get_txt_files(all_files, request=None):
    # Download txt files into the rawdata folder
    dt_config = get_pars_from_ini('../config/config.ini')
    if all_files:
        # Fetch only the files that are not yet present locally
        url_files = get_all_names(folder=dt_config['Paths']['path_url_light'], ext='txt')
        local_files = os.listdir('../data/rawdata/')
        new_files = list(set(url_files) - set(local_files))
    else:
        # Fetch only the explicitly requested files
        new_files = request
    get_all_files(new_files, dt_config['Paths']['path_url_light'], dt_config['Paths']['path_txt_light_out'])
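
# A minimal usage sketch for get_txt_files, assuming the config file and the helper
# functions above exist (the file name in the request list is hypothetical):
get_txt_files(all_files=True)  # fetch every remote file missing from ../data/rawdata/
get_txt_files(all_files=False, request=['linet_201809271200.txt'])  # fetch an explicit list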
Example #2
def fn_make_summary(years, months, resolution):
    dt_config = get_pars_from_ini('../config/config.ini')

    if resolution == 'Y':
        for year in years:
            year_files = os.listdir(dt_config['Paths']['path_txt_light_out'])
            # The file date is taken from the second '_'-separated token of each file name
            ls_files = [x.split('_')[1][:4] for x in year_files]
            ls_index = [i for i, j in enumerate(ls_files) if j == str(year)]

            exist = os.path.isfile('../data/summary/year/raw_lightning_{}.csv'.format(year))
            if not exist:
                fn_make_summary_light(files=[os.listdir('../data/rawdata/')[i] for i in ls_index],
                                      outputfile='../data/summary/year/raw_lightning_{}.csv'.format(year))

    elif resolution == 'M':
        for year in years:
            year_files = os.listdir(dt_config['Paths']['path_txt_light_out'])
            ls_files = [x.split('_')[1][:6] for x in year_files]

            for m in months:
                ls_month_index = [i for i, j in enumerate(ls_files) if j == '{}{:02d}'.format(year, m)]
                if not ls_month_index:
                    print('there is no data for that date')
                else:
                    exist = os.path.isfile('../data/summary/month/raw_lightning_{}_{:02d}.csv'.format(year, m))
                    if not exist:
                        fn_make_summary_light(files=[os.listdir('../data/rawdata/')[i] for i in ls_month_index],
                                              outputfile='../data/summary/month/raw_lightning_{}_{:02d}.csv'.format(year, m))

    elif resolution == 'D':
        for year in years:
            year_files = os.listdir(dt_config['Paths']['path_txt_light_out'])
            ls_files = [x.split('_')[1][:6] for x in year_files]

            for m in months:
                ls_month_index = [i for i, j in enumerate(ls_files) if j == '{}{:02d}'.format(year, m)]

                for d in ls_month_index:
                    day_file = os.listdir('../data/rawdata/')[d]
                    outputfile = '../data/summary/day/raw_lightning_{}.csv'.format(day_file.split('_')[1])
                    if not os.path.isfile(outputfile):
                        fn_make_summary_light(files=[day_file], outputfile=outputfile)
def fn_maps(df_data, loc, resolution, name):
    dt_config = get_pars_from_ini('../config/config.ini')

    if loc == 'BOG':
        ext = fn_get_shape_extent('../gis/Bog_Localidades.shp')
        pixel = dt_config['QuerySetUp']['bog_res']
    elif loc == 'COL':
        ext = fn_get_shape_extent('../gis/Colombia_Continental.shp')
        pixel = dt_config['QuerySetUp']['col_res']
    else:
        # Only 'BOG' and 'COL' are supported; any other value will fail below
        ext = None
        pixel = None

    # Pad the shapefile extent by 0.1 degrees on every side
    xmin = ext['xmin'] - 0.1
    xmax = ext['xmax'] + 0.1
    ymin = ext['ymin'] - 0.1
    ymax = ext['ymax'] + 0.1

    raster_origin = (xmin - (pixel / 2), ymax + (pixel / 2))
    ncols = int((xmax - xmin) / pixel) + 1
    nrows = int((ymax - ymin) / pixel) + 1

    # Grid the strike locations into a 2D count matrix
    counts, y, x = np.histogram2d(df_data.Latitude, df_data.Longitude, bins=(nrows, ncols),
                                  range=([ymin, ymax], [xmin, xmax]))
    # density = counts / (pixel ** 2)
    density = counts / 25.

    # np.histogram2d returns the count matrix with rows in ascending latitude order,
    # so the array is flipped along the rows before writing the north-up raster
    fn_array2raster(newRasterfn='../rasters/count/{}/CDT_{}_{}.tif'.format(resolution, loc, name),
                    rasterOrigin=raster_origin, pixelWidth=pixel, pixelHeight=-pixel,
                    array=np.flip(counts, axis=0), gdtype=gdal.GDT_Int16)
    fn_array2raster(newRasterfn='../rasters/density/{}/DDT_{}_{}.tif'.format(resolution, loc, name),
                    rasterOrigin=raster_origin, pixelWidth=pixel, pixelHeight=-pixel,
                    array=np.flip(density, axis=0), gdtype=gdal.GDT_Float32)
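
# The gridding step in fn_maps is a plain np.histogram2d call. Below is a self-contained
# sketch of just that step, with made-up coordinates and a hypothetical 0.1-degree grid
# (unrelated to the resolutions stored in config.ini):
import numpy as np

lats = np.array([4.60, 4.65, 4.70, 4.61])
lons = np.array([-74.10, -74.05, -74.12, -74.11])
pixel = 0.1
ymin, ymax, xmin, xmax = 4.5, 4.8, -74.2, -74.0

nrows = int((ymax - ymin) / pixel) + 1
ncols = int((xmax - xmin) / pixel) + 1

# Rows follow latitude and columns follow longitude, as in the call above
counts, yedges, xedges = np.histogram2d(lats, lons, bins=(nrows, ncols),
                                        range=([ymin, ymax], [xmin, xmax]))

# histogram2d orders the rows by ascending latitude, so flip them to get a
# north-up grid before writing a raster
print(np.flip(counts, axis=0))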
def fn_density_maps(years, months, resolution, loc):
    dt_config = get_pars_from_ini('../config/config.ini')

    if resolution == 'Y':
        for year in years:
            all_data = fn_read_summary_csv(path_file='../data/summary/year/lightning_{}.csv'.format(year))
            if loc == 'BOG':
                mask_shape = '../gis/Bog_Localidades.shp'
                max_error = dt_config['LightFilters']['error_bog']
            elif loc == 'COL':
                mask_shape = '../gis/Colombia_Continental.shp'
                max_error = dt_config['LightFilters']['error_col']
            else:
                mask_shape = None
            data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
            data_filter = fn_filter_light(raw=data)
            fn_maps(data_filter, loc, resolution, year)

    elif resolution == 'M':
        months = ['{:02d}'.format(i) for i in months]
        comb_months = ['_'.join(x) for x in itertools.product(map(str, years), months)]
        for month in comb_months:
            all_data = fn_read_summary_csv(path_file='../data/summary/month/raw_lightning_{}.csv'.format(month))
            if loc == 'BOG':
                mask_shape = '../gis/Bog_Localidades.shp'
                max_error = dt_config['LightFilters']['error_bog']
            elif loc == 'COL':
                mask_shape = '../gis/Colombia_Continental.shp'
                max_error = dt_config['LightFilters']['error_col']
            else:
                mask_shape = None
                max_error = None
            data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
            data_filter = fn_filter_light(raw=data, max_amp=dt_config['LightFilters']['amperage'], max_error=max_error)
            fn_maps(data_filter, loc, resolution, month)

    elif resolution == 'D':
        days = ['{:02d}'.format(i) for i in np.arange(1, 32)]
        months = ['{:02d}'.format(i) for i in months]
        comb_days = [''.join(x) for x in itertools.product(map(str, years), months, days)]
        for day in comb_days:
            try:
                all_data = fn_read_summary_csv(path_file='../data/summary/day/raw_lightning_{}1200.csv'.format(day))

                if loc == 'BOG':
                    mask_shape = '../gis/Bog_Localidades.shp'
                    max_error = dt_config['LightFilters']['error_bog']
                elif loc == 'COL':
                    mask_shape = '../gis/Colombia_Continental.shp'
                    max_error = dt_config['LightFilters']['error_col']
                else:
                    mask_shape = None
                    max_error = None

                data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
                data_filter = fn_filter_light(raw=data, max_amp=dt_config['LightFilters']['amperage'], max_error=max_error)
                fn_maps(data_filter, loc, resolution, day)
            except IOError:
                print('cannot open: {}'.format(day))
Example #5
def connect_pg(login='******'):
    # Build a PostgreSQL connection URL from the credentials stored in the ini file
    dt_db_login = get_pars_from_ini()
    host = dt_db_login[login]['host']
    port = dt_db_login[login]['port']
    dbname = dt_db_login[login]['dbname']
    user = dt_db_login[login]['user']
    password = dt_db_login[login]['pass']
    conn_pg = f"postgresql://{user}:{password}@{host}:{port}/{dbname}"

    return conn_pg
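
# The returned string is a standard PostgreSQL URL, so it can be handed to any client that
# accepts one. A minimal usage sketch with SQLAlchemy (SQLAlchemy, the 'stations' login
# section and the table name are assumptions, not something shown in these examples):
import pandas as pd
from sqlalchemy import create_engine

engine = create_engine(connect_pg(login='stations'))
df = pd.read_sql('SELECT * FROM lightning_events LIMIT 10', con=engine)
print(df.head())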
Example #6
def bart_upload(name_file):
    # Upload the generated KML to the FTP server configured in the 'bart' section
    dt_bart = get_pars_from_ini('../config/config.ini')['bart']
    ftp_server = dt_bart['ftp_server']
    ftp_user = dt_bart['ftp_user']
    ftp_key = dt_bart['ftp_key']
    ftp_root = dt_bart['ftp_root']

    source_file = '../results/kml/pronos/{}.kml'.format(name_file)
    target_file = '{}.kml'.format(name_file)

    try:
        s = ftplib.FTP(ftp_server, ftp_user, ftp_key)
        try:
            with open(source_file, 'rb') as f:
                s.cwd(ftp_root)
                s.storbinary('STOR ' + target_file, f)
            s.quit()
        except IOError:
            print('Could not find the file ' + source_file)
    except ftplib.all_errors:
        print('Could not connect to the server ' + ftp_server)
Example #7
def gen_kml(points, other_info, icon, name, light):
    kml = simplekml.Kml(open=1)
    logo_path = get_pars_from_ini('../config/config.ini')['Paths']['logo_path']

    if light:
        # print len(points.index)
        for event in points.index:
            pnt = kml.newpoint()
            pnt.name = str(event + 1)

            pnt.description = 'Presentado: {} <br />' \
                              'Localidad: {} <br />Periodo Observado: {} <br />' \
                              'Red Linet/Keraunos Suministrado IDEAM <br /><br />' \
                              '<img src="{}" alt="picture" width="151" height="25" align="left" />' \
                              '<br /><br />'.format(points.loc[event, 'TIEMPO'], points.loc[event, 'LOCALIDAD'], other_info, logo_path)

            pnt.coords = [(points.loc[event,
                                      'LONGITUD'], points.loc[event,
                                                              'LATITUD'])]
            pnt.style.labelstyle.scale = 1
            pnt.style.iconstyle.scale = 1
            pnt.style.iconstyle.icon.href = icon

        # Save the KML
        kml.save('../results/{}.kml'.format(name))
    else:
        pnt = kml.newpoint()
        pnt.name = 'Sin Eventos'

        pnt.description = 'Novedad: {} <br />' \
                          'Red Linet/Keraunos Suministrado IDEAM <br /><br />' \
                          '<img src="{}" alt="picture" width="151" height="25" align="left" />' \
                          '<br /><br />'.format(other_info, logo_path)

        pnt.coords = [(points.loc[0, 'LONGITUD'], points.loc[0, 'LATITUD'])]
        pnt.style.labelstyle.scale = 1
        pnt.style.iconstyle.scale = 1
        pnt.style.iconstyle.icon.href = icon

        # Save the KML
        kml.save('../results/{}.kml'.format(name))
        print('Done')
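
# A minimal usage sketch for gen_kml; the DataFrame below is made up, but its column names
# (TIEMPO, LOCALIDAD, LONGITUD, LATITUD) match what the loop above reads, and the icon URL
# is only a placeholder:
import pandas as pd

df_events = pd.DataFrame({
    'TIEMPO': ['2018-09-27 14:05', '2018-09-27 14:12'],
    'LOCALIDAD': ['Suba', 'Kennedy'],
    'LONGITUD': [-74.08, -74.15],
    'LATITUD': [4.74, 4.63],
})

gen_kml(points=df_events,
        other_info='14:00 - 15:00',
        icon='http://example.com/bolt.png',
        name='20180927_Current',
        light=True)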
Example #8
def main():
    dt_config = get_pars_from_ini('../config/config.ini')
    get_all_files(ls_names=['pronosticos_bogota.db'],
                  path_src=dt_config['Paths']['bd_pronos_bogota'],
                  path_out='../data/pronos/')

    str_date = datetime.datetime.today().strftime('%Y-%m-%d')
    str_date = '2018-09-27'  # fixed date overriding the value computed above

    # In trial mode, the script only uses the forecasts entered by the jchavarro
    # user (the trial user) to build the kml files. In operation mode, it only
    # uses the forecasts from the official meteorologists.
    mode = 'operation'  # or 'trial' for evaluations

    releases_pronos = ['Mañana', 'Tarde', 'Noche', 'Madrugada']
    df_pronos = get_pronos(releases=releases_pronos,
                           str_date=str_date,
                           mode=mode)

    for j in releases_pronos:
        gen_kml(df_data=df_pronos[df_pronos['Jornada'] == j.decode('utf8')],
                forecast=j)
Example #9
def get_pronos(releases, str_date):
    pars = get_pars_from_ini('../config/config.ini')

    path_db = '../data/pronos/pronosticos_bogota.db'
    conn = sqlite3.connect(path_db)  # open the connection
    cursor = conn.cursor()

    ls_frames = []
    for release in releases:

        query = """
            select *
            from pronosticos2
            where Fecha like '{}%'
            and Jornada = '{}'
            order by Zona
            """.format(str_date, release)

        results = cursor.execute(query)
        columns = [i[0] for i in results.description]
        data = results.fetchall()

        # Collect every requested release so the caller gets a single DataFrame
        ls_frames.append(pd.DataFrame(data, columns=columns))

    return pd.concat(ls_frames, ignore_index=True)
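
# A minimal usage sketch for get_pronos (the date is hypothetical; the release names and the
# 'Zona'/'Jornada' columns come from the query above):
df_pronos = get_pronos(releases=['Mañana', 'Tarde'], str_date='2018-09-27')
print(df_pronos[['Zona', 'Jornada']].head())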
Example #10
def main():

    dt_config = get_pars_from_ini('../config/config.ini')
    get_all_files(ls_names=['pronosticos_bogota.db'],
                  path_src=dt_config['Paths']['bd_pronos_bogota'],
                  path_out='../data/pronos/')

    # hour = datetime.datetime.now().hour
    hour = 8  # fixed value; the live lookup above is commented out
    # str_date = datetime.datetime.today().strftime('%Y-%m-%d')
    str_date = '2018-05-24'  # fixed date; the live lookup above is commented out

    if hour == 8:
        releases_pronos = ['Mañana', 'Tarde']

    elif hour == 18:
        releases_pronos = ['Noche', 'Madrugada']

    else:
        print('Forecast issuance outside the official hours')
        sys.exit(0)

    get_pronos(releases=releases_pronos, str_date=str_date)
Example #11
def fn_density_maps(years, months, days, hours, resolution, loc):
    dt_config = get_pars_from_ini('../config/config.ini')

    if resolution == 'Y':
        for year in years:
            all_data = fn_read_summary_csv(path_file='../data/summary/year/raw_lightning_{}.csv'.format(year))
            if loc == 'BOG':
                mask_shape = '../gis/Bog_Localidades.shp'
                max_error = dt_config['LightFilters']['error_bog']
            elif loc == 'COL':
                mask_shape = '../gis/Colombia_Continental.shp'
                max_error = dt_config['LightFilters']['error_col']
            else:
                mask_shape = None
                max_error = None

            # Skip years whose rasters already exist
            exist = os.path.isfile('../results/rasters/count/{}/{}/CDT_{}_{}.tif'.format(resolution, loc, loc, year))

            if not exist:
                data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
                data_filter = fn_filter_light(raw=data, max_error=max_error)
                fn_maps(data_filter, loc, resolution, year)

    elif resolution == 'M':
        months = ['{:02d}'.format(i) for i in months]
        combos = ['_'.join(x) for x in itertools.product(map(str, years), months)]
        for month in combos:
            all_data = fn_read_summary_csv(path_file='../data/summary/month/raw_lightning_{}.csv'.format(month))
            if loc == 'BOG':
                mask_shape = '../gis/Bog_Localidades.shp'
                max_error = dt_config['LightFilters']['error_bog']
            elif loc == 'COL':
                mask_shape = '../gis/Colombia_Continental.shp'
                max_error = dt_config['LightFilters']['error_col']
            else:
                mask_shape = None
                max_error = None

            # Skip months whose rasters already exist
            exist = os.path.isfile('../results/rasters/count/{}/{}/CDT_{}_{}.tif'.format(resolution, loc, loc, month))

            if not exist:
                data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
                data_filter = fn_filter_light(raw=data, max_amp=dt_config['LightFilters']['amperage'], max_error=max_error)
                fn_maps(data_filter, loc, resolution, month)

    elif resolution == 'D':
        if days is None:
            days = ['{:02d}'.format(i) for i in np.arange(1, 32)]

        months = ['{:02d}'.format(i) for i in months]
        combos = [''.join(x) for x in itertools.product(map(str, years), months, days)]
        for day in combos:
            print(day)
            try:
                all_data = fn_read_summary_csv(path_file='../data/summary/day/raw_lightning_{}1200.csv'.format(day))

                if loc == 'BOG':
                    mask_shape = '../gis/Bog_Localidades.shp'
                    max_error = dt_config['LightFilters']['error_bog']
                elif loc == 'COL':
                    mask_shape = '../gis/Colombia_Continental.shp'
                    max_error = dt_config['LightFilters']['error_col']
                else:
                    mask_shape = None
                    max_error = None

                # Skip days whose rasters already exist
                exist = os.path.isfile('../results/rasters/count/{}/{}/CDT_{}_{}.tif'.format(resolution, loc, loc, day))

                if not exist:
                    data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
                    data_filter = fn_filter_light(raw=data, max_amp=dt_config['LightFilters']['amperage'], max_error=max_error)
                    fn_maps(data_filter, loc, resolution, day)
            except IOError:
                print('cannot open: {}'.format(day))

    elif resolution == 'H':
        if months is None:
            months = ['{:02d}'.format(i) for i in np.arange(1, 13)]

        if days is None:
            days = ['{:02d}'.format(i) for i in np.arange(1, 32)]

        if hours is None:
            # Local hours ordered 07, 08, ..., 23, 00, ..., 06
            hours = ['{:02d}'.format(i) for i in np.roll(np.arange(0, 24), shift=17)]

        combos = [''.join(x) for x in itertools.product(map(str, years), months, days)]

        for day in combos:
            print(day)
            try:
                all_data = fn_read_summary_csv(path_file='../data/summary/day/raw_lightning_{}1200.csv'.format(day))
                # Shift the index from UTC to local time (UTC-5)
                all_data.index = all_data.index - pd.Timedelta(hours=5)

                if loc == 'BOG':
                    mask_shape = '../gis/Bog_Localidades.shp'
                    max_error = dt_config['LightFilters']['error_bog']
                elif loc == 'COL':
                    mask_shape = '../gis/Colombia_Continental.shp'
                    max_error = dt_config['LightFilters']['error_col']
                else:
                    mask_shape = None
                    max_error = None

                data = fn_mask_data(raw=all_data, ext=fn_get_shape_extent(mask_shape))
                data_filter = fn_filter_light(raw=data, max_amp=dt_config['LightFilters']['amperage'], max_error=max_error)

                if data_filter.empty:
                    # No events after filtering: still write an (empty) raster for every hour
                    for hour in hours:
                        if int(hour) < 7:
                            # Local hours 00-06 belong to the next calendar day
                            pd_day = pd.Timestamp(day) + pd.Timedelta(days=1)
                            day_adj = '{}{:02d}{:02d}'.format(pd_day.year, pd_day.month, pd_day.day)
                        else:
                            day_adj = day
                        # Skip hours whose rasters already exist
                        exist = os.path.isfile('../results/rasters/count/{}/{}/CDT_{}_{}{}.tif'.format(resolution, loc, loc, day_adj, hour))
                        if not exist:
                            hourly_data = data_filter
                            fn_maps(hourly_data, loc, resolution, '{}{}00'.format(day_adj, hour))

                else:
                    data_filter['Hour'] = data_filter.index.hour
                    data_filter['day'] = data_filter.index.day

                    for hour in hours:
                        if int(hour) < 7:
                            # Local hours 00-06 belong to the next calendar day
                            pd_day = pd.Timestamp(day) + pd.Timedelta(days=1)
                            day_adj = '{}{:02d}{:02d}'.format(pd_day.year, pd_day.month, pd_day.day)
                        else:
                            day_adj = day
                        # Skip hours whose rasters already exist
                        exist = os.path.isfile('../results/rasters/count/{}/{}/CDT_{}_{}{}.tif'.format(resolution, loc, loc, day_adj, hour))
                        if not exist:
                            hourly_data = data_filter[data_filter['Hour'] == int(hour)]
                            fn_maps(hourly_data, loc, resolution, '{}{}00'.format(day_adj, hour))
            except IOError:
                print('cannot open: {}'.format(day))
Example #12
def main(kml_date):
    # kml_date is expected in 'YYYY-MM-DD' form
    year = kml_date[:4]
    month = kml_date[5:7]
    day = kml_date[8:10]

    dt_config = get_pars_from_ini('../config/config.ini')
    path_url = dt_config['Paths']['path_url_kml']

    dict_files = {
        'acum': 'EVENTOS_LOCALIDADES_BOGOTA.xlsx',
        '24H': 'EVENTOS_LOCALIDADES_BOGOTA24.xlsx'
    }

    dict_nameout = {
        'acum': '{}{}{}_Current'.format(year, month, day),
        '24H': '{}{}{}_Yesterday'.format(year, month, day)
    }

    for i in dict_files:

        path = '{}{}/{}/{}/Bogota/{}'.format(path_url, year, month, day,
                                             dict_files[i])

        if i == 'acum':
            path_txt = '{}{}/{}/{}/Bogota/{}'.format(path_url, year, month,
                                                     day, 'LapsoCurrent.txt')
            for line in urllib2.urlopen(path_txt):
                per_obs = line
        else:
            path_txt = '{}{}/{}/{}/Bogota/{}'.format(path_url, year, month,
                                                     day, 'Lapso24Hrs.txt')
            for line in urllib2.urlopen(path_txt):
                per_obs = line

        try:
            df_data = pd.ExcelFile(path).parse(sheet_name='Sheet1')
            df_data['LOCALIDAD'] = df_data['LOCALIDAD'].str.replace(
                u'\xd1', 'N')
            df_data.sort_values(by='TIEMPO', inplace=True)
            df_data.reset_index(drop=True, inplace=True)

            gen_kml(points=df_data,
                    other_info=per_obs,
                    icon=dt_config['Paths']['icon'],
                    name=dict_nameout[i],
                    light=True)

        except IOError as e:

            # The spreadsheet is missing when no electrical activity was recorded,
            # so build a single placeholder point over Bogotá instead.

            dict_none = {
                'DESCRIPCION': '{} {}'.format(year, month),
                'LATITUD': 4.6748,
                'LONGITUD': -74.1135,
            }

            df_data = pd.DataFrame.from_dict(data=dict_none, orient='index').T

            per_obs_desc = ''

            if i == 'acum':
                path_txt = '{}{}/{}/{}/Bogota/{}'.format(
                    path_url, year, month, day, 'LapsoCurrent.txt')
                for line in urllib2.urlopen(path_txt):
                    per_obs = line
                path_txt_desc = '{}{}/{}/{}/Bogota/{}'.format(
                    path_url, year, month, day, 'BOGOTA_T3.txt')
                for line_desc in urllib2.urlopen(path_txt_desc):
                    per_obs_desc = line_desc
                text_info = '{} {}'.format(per_obs, per_obs_desc).replace(
                    u'\xe9'.encode('utf-8'), 'e')
            else:
                path_txt = '{}{}/{}/{}/Bogota/{}'.format(
                    path_url, year, month, day, 'Lapso24Hrs.txt')
                for line in urllib2.urlopen(path_txt):
                    per_obs = line
                path_txt_desc = '{}{}/{}/{}/Bogota/{}'.format(
                    path_url, year, month, day, 'BOGOTA_24.txt')
                for line_desc in urllib2.urlopen(path_txt_desc):
                    per_obs_desc = per_obs_desc + line_desc
                text_info = '{} {}'.format(per_obs, per_obs_desc).replace(
                    u'\xe9'.encode('utf-8'), 'e')

            gen_kml(points=df_data,
                    other_info=text_info,
                    icon=dt_config['Paths']['logo_path'],
                    name=dict_nameout[i],
                    light=False)
Example #13
import os
import sys
import sqlite3

import pandas as pd
from rasterstats import point_query, zonal_stats
from shapely.geometry import Point
from rasterio.errors import RasterioIOError

from config_utils import get_pars_from_ini
from constants import dt_pt_ranges

# TODO: Add the precipitation statistics for each zone to the forecasts database

exec_prefix = sys.exec_prefix
gdal_data = '{}/share/gdal/'.format(exec_prefix)
os.environ['GDAL_DATA'] = gdal_data

dt_extents = get_pars_from_ini('../config/zones.ini')
dt_config = get_pars_from_ini('../config/config.ini')
dt_colors = get_pars_from_ini('../config/plots.ini')
dt_paths = dt_config['Paths']

# path_results = dt_paths['path_results']
path_goes = dt_paths['path_goes']
path_gis = dt_paths['path_gis']
path_eval = dt_paths['path_eval']

path_raster = '../rasters/GOES13_v1'
# path_results = 'http://172.16.1.237/almacen/externo/estaciones/interpolacion'
path_results = '../results/estaciones'

dt_months = {
    1: 'Ene',
    2: 'Feb',
    3: 'Mar',
    4: 'Abr',
    5: 'May',
    6: 'Jun',
    7: 'Jul',
    8: 'Ago',
    9: 'Sep',
    10: 'Oct',
    11: 'Nov',
    12: 'Dic'
}
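
# get_pars_from_ini itself is not shown in any of these examples; judging by how its result is
# indexed (e.g. dt_config['Paths']['path_goes']), it returns a nested dict of sections and keys.
# The sketch below, built on configparser, is only an assumption about what such a helper could
# look like, not the project's actual implementation:
import configparser

def get_pars_from_ini(file_name='../config/config.ini'):
    """Read an .ini file into a nested dict: {section: {key: value}} (illustrative sketch)."""
    parser = configparser.ConfigParser()
    parser.optionxform = str  # keep key case exactly as written in the file
    parser.read(file_name)

    out = {}
    for section in parser.sections():
        out[section] = {}
        for key, raw in parser.items(section):
            # Coerce numeric values where possible; keep everything else as a string
            try:
                out[section][key] = int(raw)
            except ValueError:
                try:
                    out[section][key] = float(raw)
                except ValueError:
                    out[section][key] = raw
    return out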
Example #14
def gen_kml(df_data, forecast):
    desc_zones = {
        1: 'Suba, Engativá',
        2: 'Usaquén',
        3: 'Chapinero, B. unidos, Teusaquillo, Pte. Aranda, Mártires, R. Uribe, A. Nariño, Santa Fe, Candelaria, San Cristobal',
        4: 'Fontibón, Kennedy, Bosa',
        5: 'Ciudad Bolívar, Tunjuelito, Usme'
    }

    kml = simplekml.Kml(open=1)
    logo_path = get_pars_from_ini(
        '../config/config.ini')['Paths']['logo_pronos']
    df_coords = get_geometry()
    df_meteo = get_meteorologo()
    df_icons = get_icons()

    if not df_data.empty:
        pronos = df_data.copy()
        for zone in pronos.Zona:
            pnt = kml.newpoint()
            pnt.name = df_coords[df_coords.Cod_Zona ==
                                 zone].Descripcion.values[0]

            dominio = desc_zones[zone]
            meteo = df_meteo[df_meteo.Id == int(
                pronos.Meterologo.values[0])].Nombre.values[0]
            edition_time = pronos[pronos.Zona == zone].Fecha_Update.values[0]
            tmin = pronos[pronos.Zona == zone].TS_Min.values[0]
            tmax = pronos[pronos.Zona == zone].TS_Max.values[0]
            tiempo = df_icons[df_icons.Cod_Icono == pronos[
                pronos.Zona == zone].Codigo_PT.values[0]].Descripcion.values[0]
            condiciones = pronos[pronos.Zona == zone].Hidro_Meteo.values[0]

            if len(condiciones) == 0:
                pnt.description = 'Dominio: {} <br><br> ' \
                                  'Condiciones Actuales: <br><br>' \
                                  'Tiempo Esperado: {} <br>' \
                                  'Temp. Mínima °C: {} <br>' \
                                  'Temp. Máxima °C: {} <br>' \
                                  'Meteorólogo: {} <br>' \
                                  'Fecha Edición: {} <br>' \
                                  '<br><br>' \
                                  '<img src="{}" alt="picture" width="145" height="40" align="right" >' \
                                  '<br>' \
                                  '<br>'.format(dominio, tiempo.encode('utf8'), tmin, tmax, meteo.encode('utf8'), edition_time, logo_path).decode('utf8')
            else:
                pnt.description = 'Dominio: {} <br><br> ' \
                                  'Condiciones Actuales: {} <br><br>' \
                                  'Tiempo Esperado: {} <br>' \
                                  'Temp. Mínima °C: {} <br>' \
                                  'Temp. Máxima °C: {} <br>' \
                                  'Meteorólogo: {} <br>' \
                                  'Fecha Edición: {} <br>' \
                                  '<br><br>' \
                                  '<img src="{}" alt="picture" width="145" height="40" align="right" >' \
                                  '<br>' \
                                  '<br>'.format(dominio, condiciones.encode('utf8'), tiempo.encode('utf8'), tmin, tmax, meteo.encode('utf8'), edition_time, logo_path).decode('utf8')

            # Parse the 'POINT(lon lat)' geometry string for this zone
            geom = df_coords[df_coords.Cod_Zona == zone].Geometria.values[0]
            tokens = geom.replace('POINT(', '')[:-1].split(' ')
            pnt.coords = [(float(tokens[0]), float(tokens[1]))]

            pnt.style.labelstyle.scale = 1.0
            pnt.style.iconstyle.scale = 2.0
            pnt.style.iconstyle.icon.href = df_icons[
                df_icons.Cod_Icono == pronos[
                    pronos.Zona == zone].Codigo_PT.values[0]].url.values[0]
        # Save the KML ('Mañana' is written out as Manana.kml to keep the file name ASCII)
        if u'Ma\xf1ana' == u'{}'.format(forecast.decode('utf-8')):
            kml.save(u'../results/kml/pronos/Manana.kml')
            # kml.save('{}/Manana.kml'.format(out_path))
        else:
            kml.save(u'../results/kml/pronos/{}.kml'.format(
                forecast.decode('utf8')))
            # kml.save('{}/{}.kml'.format(out_path, forecast.decode('utf8')))
        print(forecast)
        bart_upload(forecast)

    else:

        print('There are no forecast records in the database')
        sys.exit(0)