Example #1
def weather_cube(firepaths, list_of_params, dimensions, n_params=6):
    json_folder = check_jsonstats_folder("JsonStats")
    stats_json = f"Weather_Stats.json"
    jpath = os.path.join(json_folder, stats_json)
    my_file_json = Path(jpath)
    if my_file_json.is_file():
        with open(jpath) as json_file:
            json_dict = json.load(json_file)
    else:
        get_dataset = get_datasetpaths()[0]
        json_dict = get_weather_stats_dict(get_dataset, jpath, list_of_params)

    hyper_cube = np.zeros((dimensions + (n_params * len(firepaths), )))
    count_dim = 0
    for index, folder in enumerate(firepaths):
        z_axis_begin = index * n_params
        original_params_list = read_weather_params(firepaths, list_of_params)
        normalized_params_list = [[] for _param in range(n_params)]
        # to swap the order: aux_cube = np.zeros(((n_params,) + (256, 256)))
        aux_cube = np.zeros((dimensions + (n_params, )))
        for i, param in enumerate(list_of_params):
            mean = json_dict[param][0]
            std = json_dict[param][1]
            normalized_params_list[i] = (original_params_list[i][index] -
                                         mean) / std
            matrix = np.ones(dimensions) * normalized_params_list[i]
            aux_cube[:, :, i] = matrix
            count_dim += 1

        hyper_cube[:, :, z_axis_begin:count_dim] = aux_cube

    return hyper_cube
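A minimal sketch of the per-parameter normalisation used above; the stats dict and raw readings below are hypothetical stand-ins for the values loaded from Weather_Stats.json and returned by read_weather_params().

import numpy as np

dimensions = (256, 256)
stats = {"temp": (15.0, 8.0), "wind_spd": (3.0, 2.0)}  # hypothetical {param: (mean, std)}
raw = {"temp": 31.0, "wind_spd": 5.5}                  # one fire's raw readings (made up)

planes = []
for param, value in raw.items():
    mean, std = stats[param]
    normalized = (value - mean) / std
    # each scalar becomes a constant (256, 256) plane, as in aux_cube above
    planes.append(np.ones(dimensions) * normalized)

cube = np.stack(planes, axis=-1)  # shape (256, 256, 2), one plane per parameter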
Example #2
def create_cosv2_shp():
    full_path = os.getcwd()
    os.chdir(Path(full_path).parents[0])
    path = r"COS2018_v1/COS2018_v1.shp"
    my_file_cos = Path(path)
    if not my_file_cos.is_file():
        sys.exit(
            "The file COS2018_v1.shp was not found at the specified path!"
        )

    print("A gerar o novo shapefile\n"
          "O processo pode demorar alguns minutos...")
    df = gpd.read_file(path)
    json_folder = check_jsonstats_folder("JsonCos")

    class_json = os.path.join(json_folder, "Old_Class_Cos.json")

    # Check the unique values in the shapefile's COS2018_n4 column
    unique = df.COS2018_n4.unique()

    # Build a list with those values
    cos2018n4_list = list(unique)

    # Number of elements in the list
    n_elements = len(cos2018n4_list)

    # Build a list of classes (0 - 82), one for each COS2018_n4 value
    values_class = list(range(n_elements))

    # Build a dictionary with the format dict = {
    #   "COS2018_n4": "Class"
    # }
    iterclass = zip(cos2018n4_list, values_class)
    classdictionary = dict(iterclass)
    with open(class_json, 'w') as output:
        json.dump(classdictionary, output, indent=4)

    # Build a new shapefile with an extra 'Class' column holding the class for each 'COS2018_n4' value
    for key in classdictionary.keys():
        df.loc[df['COS2018_n4'] == str(key), 'Class'] = classdictionary[key]

    df.to_file('newcos18_v2')
    convert_cosv2_epsg()
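As a design note, the per-key loop above can also be expressed with pandas' Series.map, which applies the same {COS2018_n4: Class} dictionary in a single pass. A small sketch with hypothetical sample data:

import pandas as pd

df = pd.DataFrame({"COS2018_n4": ["1.1.1.1", "2.3.1.1", "1.1.1.1"]})  # made-up codes
classdictionary = {"1.1.1.1": 0, "2.3.1.1": 1}

# every row gets the class mapped from its COS2018_n4 value
df["Class"] = df["COS2018_n4"].map(classdictionary)
print(df)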
Example #3
def normalize_dem(dempath):
    dem = io.imread(os.path.join(dempath, "dem.tif"))
    json_folder = check_jsonstats_folder("JsonStats")
    stats_json = f"Dems_Stats.json"
    jpath = os.path.join(json_folder, stats_json)
    my_file_json = Path(jpath)
    if my_file_json.is_file():
        with open(jpath) as json_file:
            js_dict = json.load(json_file)
        global_mean = js_dict["mean"]
        global_std = js_dict["std"]

    else:
        get_dataset = get_datasetpaths()[0]
        global_mean, global_std = get_dems_mean_std(get_dataset, jpath)

    norm_dem = (dem - global_mean) / global_std
    return norm_dem
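A tiny sketch of the same z-score normalisation, with hypothetical elevations and hypothetical dataset-wide statistics standing in for Dems_Stats.json:

import numpy as np

dem = np.array([[120.0, 340.0], [890.0, 15.0]])  # elevations in metres (made up)
global_mean, global_std = 300.0, 250.0           # dataset statistics (made up)

# subtract the global mean and divide by the global standard deviation
norm_dem = (dem - global_mean) / global_std
print(norm_dem)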
Example #4
def get_newclass_dict():
    json_folder = check_jsonstats_folder("JsonCos")
    old_classes_json = os.path.join(json_folder, "Old_Class_Cos.json")
    new_classes_json = os.path.join(json_folder, "New_Class_Cos.json")
    my_new_file_json = Path(new_classes_json)
    if my_new_file_json.is_file():
        with open(new_classes_json) as json_file:
            cos_new_dict = json.load(json_file)
    else:
        with open(old_classes_json) as json_file:
            cos_old_dict = json.load(json_file)
        cos_list = [('1', ), ('2', ), ('3', ),
                    ('4.1.1.1', '4.1.1.2', '4.1.1.6'), ('4.1.1.4', ),
                    ('4.1.1.5', '4.1.1.3', '4.1.1.7'), ('5.1.1.5', ),
                    ('5.1.1.1', '5.1.1.2'),
                    ('5.1.1.3', '5.1.1.4', '5.1.1.6', '5.1.1.7'),
                    ('5.1.2.1', ), ('5.1.2.2', '5.1.2.3'), ('6', '7.1.3.1'),
                    ('7.1.1.1', '7.1.1.2', '7.1.2.1', '8', '9')]

        cos_new_dict = {}
        new_class = 0

        for cos_tuple in cos_list:
            for cos_code in cos_tuple:
                for old_key in cos_old_dict.keys():
                    if str(old_key).startswith(str(cos_code)):
                        cos_new_dict.update({cos_old_dict[old_key]: new_class})
            new_class += 1

        with open(new_classes_json, 'w') as output:
            json.dump(cos_new_dict, output, indent=4)

    return cos_new_dict
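A small sketch of the regrouping rule above, using made-up COS codes: every old code whose string starts with a prefix listed in a tuple is mapped to that tuple's (zero-based) new class.

import json

cos_old_dict = {"1.2.3.4": 0, "4.1.1.1": 5, "4.1.1.2": 6, "5.1.2.2": 9}  # hypothetical
cos_list = [("1", ), ("4.1.1.1", "4.1.1.2"), ("5.1.2.2", )]

cos_new_dict = {}
for new_class, cos_tuple in enumerate(cos_list):
    for cos_code in cos_tuple:
        for old_key, old_class in cos_old_dict.items():
            if old_key.startswith(cos_code):
                cos_new_dict[old_class] = new_class

print(json.dumps(cos_new_dict, indent=4))  # {"0": 0, "5": 1, "6": 1, "9": 2}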
Example #5
def weatherapi(year="2017"):
    # required variables and paths
    api_key = "af22cf6a216a427fb88685100b43d048"
    pathfolder = "Dataset" + str(year)
    path = os.path.join("Ardidas" + str(year) + "final",
                        "ardidas" + str(year[-2:]) + ".shp")
    sf = shapefile.Reader(path)
    json_folder = check_jsonstats_folder("JsonApis")
    verify_filename = "LastIndex_WeatherApi" + " " + str(year) + ".json"
    jverifypath = os.path.join(json_folder, verify_filename)
    json_requests = "Requests" + "_forKey_ " + f"{api_key}" + ".json"
    jrequestspath = os.path.join(json_folder, json_requests)
    timefile = "Last_time" + "_forKey_ " + f"{api_key}" + ".json"
    jtimefilepath = os.path.join(json_folder, timefile)
    jsonstring = "Last Iteration" + " " + str(year)
    counter_requests = "Counter"
    lastdate = "Lastdate"
    verify = False
    # check for the control JSONs and read their info
    # they track the request count, the timestamp when the limit was hit, and the last index at the time of stopping
    my_file_index = Path(jverifypath)
    my_file_requests = Path(jrequestspath)
    my_file_lasttime = Path(jtimefilepath)

    if my_file_requests.is_file():
        with open(jrequestspath, "r") as file:
            data_r = file.read()
        file_dict = json.loads(data_r)
        number_requests = file_dict[counter_requests]
    else:
        number_requests = 0

    if my_file_lasttime.is_file():
        with open(jtimefilepath, "r") as file:
            data_t = file.read()
        file_dict = json.loads(data_t)
        last_date = file_dict[lastdate]
        last_date_obj = datetime.strptime(last_date, "%d/%m/%Y %H:%M:%S")
        delta = datetime.now() - last_date_obj
        delta_in_sec = delta.total_seconds()
        delta_in_hours = divmod(delta_in_sec, 3600)[0]
        if delta_in_hours < 24:
            sys.exit(
                "The waiting period before making requests to the API has not elapsed yet!"
            )

    if my_file_index.is_file():

        with open(jverifypath, "r") as file:
            data = file.read()

        file_dict = json.loads(data)

        index = file_dict[jsonstring]

    else:
        index = 0

    api = Api(api_key)

    # loop over the entire dataset
    for i in range(index, len(sf)):

        if my_file_requests.is_file():
            verify = verify_last_request_date(jrequestspath)
            update_verify_jsontime(number_requests, jtimefilepath, verify)

        shape = sf.shapeRecord(i)

        pathjson = os.path.join(pathfolder, str(i))
        filename = "Weather.json"
        filepath = os.path.join(pathjson, filename)

        longitude, latitude = centerpointshape(shape)

        if int(year) == 2017:
            DHInicio = shape.record.DHInicio
            DHFim = shape.record.DHFim

        else:
            DHInicio = shape.record.data_inici
            DHFim = shape.record.data_fim

        # format the date extracted from the shapefile
        date_format_str = '%Y-%m-%d %H:%M:%S.%f'
        start = datetime.strptime(DHInicio, date_format_str)
        final = datetime.strptime(DHFim, date_format_str)

        # calculations to select which type of request to make
        delta = final - start
        delta_in_sec = delta.total_seconds()
        delta_in_hours = divmod(delta_in_sec, 3600)[0]
        dias_completos = math.floor(delta_in_hours / 24)
        num_horas_restantes = delta_in_hours - (24 * dias_completos)

        initial_datetime = start + timedelta(hours=-1)
        initial_date = initial_datetime.strftime("%Y-%m-%d:%H")

        # first case
        if delta_in_hours >= 24:
            # handle the full days
            for k in range(1, dias_completos + 1):
                new_end = initial_datetime + timedelta(days=1)
                new_end_date = new_end.strftime("%Y-%m-%d:%H")
                filename = os.path.join(pathjson, f"{k}.json")

                weather_request(api, latitude, longitude, initial_date,
                                new_end_date, filename)
                number_requests += 1

                initial_datetime = new_end
                initial_date = initial_datetime.strftime("%Y-%m-%d:%H")

                updatejsonrequest(number_requests, jrequestspath)
                update_verify_jsontime(number_requests, jtimefilepath, verify)

            # handle the remaining hours
            if num_horas_restantes != 0:
                new_initial = start + timedelta(days=dias_completos, hours=-1)
                new_initial_date = new_initial.strftime("%Y-%m-%d:%H")

                temporal_dif = final - new_initial
                temporal_dif_hours = temporal_dif.total_seconds() / 3600
                if temporal_dif_hours < 24:
                    new_end = initial_datetime + timedelta(
                        hours=math.floor(temporal_dif_hours) + 1)
                else:
                    new_end = initial_datetime + timedelta(
                        hours=math.floor(temporal_dif_hours))

                new_end_date = new_end.strftime("%Y-%m-%d:%H")

                filename = os.path.join(pathjson, f"{dias_completos + 1}.json")
                weather_request(api, latitude, longitude, new_initial_date,
                                new_end_date, filename)
                number_requests += 1

                updatejsonrequest(number_requests, jrequestspath)
                update_verify_jsontime(number_requests, jtimefilepath, verify)

            # Merge the JSONs and then delete the temporary files
            listfiles = []
            for file in os.listdir(pathjson):
                if re.match(r"[0-9]+.json", file):
                    fpath = os.path.join(pathjson, file)
                    listfiles.append(fpath)

            merge_json(listfiles, pathjson)
            del_files(listfiles)

        # second case (at the limit of lasting 24 hours)
        elif 22 <= delta_in_hours < 24:
            final_plus_1day = start + timedelta(days=1, hours=-1)
            end_date = final_plus_1day.strftime("%Y-%m-%d:%H")

            weather_request(api, latitude, longitude, initial_date, end_date,
                            filepath)
            number_requests += 1

        # third case (last possibility)
        else:
            final_plus_1hour = final + timedelta(hours=1)
            end_date = final_plus_1hour.strftime("%Y-%m-%d:%H")

            weather_request(api, latitude, longitude, initial_date, end_date,
                            filepath)
            number_requests += 1

        # build the dictionaries for the index-control JSON
        json_dict = {jsonstring: i}
        json_dict_exception = {jsonstring: 0}
        """ 
            Escrita de ficheiro json para guardar index onde o programa parou ou reiniciar index se terminou o ciclo.
            Por questão de segurança guarda o penúltimo indice para assegurar que a parte meteorológica do dataset 
        é gerada na sua totalidade e corretamente.
        
        """
        if i < len(sf) - 1:
            with open(jverifypath, 'w') as output:
                json.dump(json_dict, output, indent=4)
        else:
            with open(jverifypath, 'w') as output:
                json.dump(json_dict_exception, output, indent=4)

        updatejsonrequest(number_requests, jrequestspath)
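A minimal sketch of the window-splitting idea in weatherapi(): a burn interval longer than 24 hours is cut into day-sized windows, each of which would become one API request. The dates are made up, and weather_request plus the control JSONs are omitted.

from datetime import datetime, timedelta

start = datetime(2017, 8, 12, 14, 30)  # hypothetical fire start
final = datetime(2017, 8, 15, 9, 0)    # hypothetical fire end

windows = []
cursor = start + timedelta(hours=-1)   # same one-hour margin used above
while cursor < final:
    # advance one day at a time, clamping the last window just past the end
    window_end = min(cursor + timedelta(days=1), final + timedelta(hours=1))
    windows.append((cursor.strftime("%Y-%m-%d:%H"),
                    window_end.strftime("%Y-%m-%d:%H")))
    cursor = window_end

for begin, end in windows:
    print(begin, "->", end)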
Example #6
def apirequest(year="2017"):
    full_path = os.getcwd()
    os.chdir(Path(full_path).parents[0])
    downloadfolder = r"Produtos_Sentinel"
    try:
        if not os.path.exists(downloadfolder):
            os.makedirs(downloadfolder)
    except OSError:
        print("Erro: Criar Pasta " + downloadfolder)

    api = SentinelAPI('pfmacf', '4mcTUobqdf',
                      'https://scihub.copernicus.eu/dhus')
    path = "Dataset" + str(year)
    my_dir_path = Path(path)
    if not my_dir_path.is_dir():
        switcherfunc = {
            0: convertshapeepsg,
            1: createindividualshp,
            2: datatxt,
            3: getgeojson
        }
        for func in switcherfunc:
            _func_exec = switcherfunc[func](year)

    num_sub_folders = os.listdir(path)
    json_folder = check_jsonstats_folder("JsonApis")
    index_filename = "LastIndex_SentinelApi" + " " + str(year) + ".json"
    jpath = os.path.join(json_folder, index_filename)
    jsonstring = "Last Iteration" + " " + str(year)
    unique_txt = r"Unique_Products.txt"
    open(unique_txt, 'a').close()
    contador_vazios = 0
    my_file_index = Path(jpath)

    if my_file_index.is_file():

        with open(index_filename, "r") as file:
            data = file.read()

        file_dict = json.loads(data)

        index = file_dict[jsonstring]

    else:
        index = 0

    for i in range(index, len(num_sub_folders)):

        pathfinal = os.path.join(path, str(i))
        pathtxt = os.path.join(pathfinal, "Data.txt")
        pathgeojson = os.path.join(pathfinal, "bbox4326.geojson")

        data = linecache.getline(pathtxt, 2)
        date_fire = data.rstrip('\n').split(" ")

        ano = int(date_fire[2].split("-")[0])
        mes = int(date_fire[2].split("-")[1])
        dia = int(date_fire[2].split("-")[2])
        search_date = [ano, mes, dia]

        products = get_footprint(api, pathgeojson, search_date, 14)
        print('\nNumber of products found = ', (len(products)))
        print('\nIndex = ', i)
        if not len(products):
            products = get_footprint(api, pathgeojson, search_date, 30)
            if not len(products):
                products = get_footprint(api, pathgeojson, search_date, 60)
                if not len(products):
                    filename = "Sem_Produtos.txt"
                    filepath = os.path.join(pathfinal, filename)
                    write_to_file(filepath, mode="w", text="Invalido")
                    contador_vazios += 1
                    continue

        # convert to a DataFrame
        products_df = api.to_dataframe(products)
        products_geojson = api.to_geojson(products)
        valid_list = validate_footprints(products_geojson, pathfinal, i, year)

        if valid_list:

            product_id, product_title = get_final_product(
                valid_list, products_df, pathfinal)
            product_info = api.get_product_odata(product_id)
            with open(unique_txt, "r+", encoding="utf-8") as file:
                line_found = any(product_title in line for line in file)
            if not line_found:
                write_to_file(unique_txt, mode="a", text=product_title)
                open("Products_to_Download.txt", 'a').close()
                check_availability_download(product_info, product_title,
                                            product_id, api, downloadfolder)

        else:
            filename = "Sem_Produtos.txt"
            filepath = os.path.join(pathfinal, filename)
            write_to_file(filepath, mode="w", text="Invalido")
            contador_vazios += 1

        json_dict = {jsonstring: i}
        json_dict_exception = {jsonstring: 0}

        if i < len(num_sub_folders) - 1:
            with open(jpath, 'w') as output:
                json.dump(json_dict, output, indent=4)
        else:
            with open(jpath, 'w') as output:
                json.dump(json_dict_exception, output, indent=4)

    print("Contagem de incendios sem Produto: ", contador_vazios)