# Imports needed by the functions below. Project-local helpers
# (settings_object, wrap_shape, wrap_geoserver, wrap_sld, wrap_atlas,
# rasterstore, log_time, PG_DATABASE, POLYGON, etc.) are assumed to be
# imported elsewhere in this module.
import json
import os
import sys
from glob import glob
from time import sleep

from osgeo import ogr
from tqdm import tqdm


def batch_upload(inifile):
    """ Batch uploads rasters (paths or uuids) for one rasterstore """
    setting = settings_object(inifile)
    setting.set_project()
    setting.count_uuid_sections()

    # set logging
    set_log_config(setting.ini_location)
    sys.stdout = logger(setting.ini_location)
    print(setting.directory)

    in_paths = []
    for pattern in ("*.tif", "*.vrt", "*.asc", "*.json"):
        in_paths.extend(glob(os.path.join(setting.directory, pattern)))
    print(in_paths)

    present_paths = []
    for path in in_paths:
        file_name = get_file_name_from_path(path)
        if file_name.endswith(".json"):
            present_paths.append(path)
        elif file_name in setting.config.sections():
            present_paths.append(path)

    if setting.uuid_list:
        # uuid entries are passed along as ("uuid", <uuid>) tuples,
        # presumably resolved downstream by add_output_settings.
        present_paths.extend(("uuid", uuid) for uuid in setting.uuid_list)

    setting.store = rasterstore()
    print_list(present_paths, "Paths")

    failures = {}
    succes = {}
    for count, path in enumerate(present_paths):
        log_time("info", percentage(count, len(present_paths)), path, "l")
        try:
            setting.in_path = path
            setting = add_output_settings(setting)

            if not setting.skip:
                print_dictionary(setting.__dict__, "Settings")

                succes[setting.onderwerp] = upload(setting)

        except Exception as e:
            print(e)
            failures[setting.onderwerp] = e

    # log_time("info", "sleeping to decrease load on server....")
    # sleep(30)

    print_dictionary(succes, "Successes")
    print_dictionary(failures, "Failures")
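
# Usage sketch for the batch_upload above (the ini path is a hypothetical
# example; the expected ini layout is defined by settings_object elsewhere in
# this project):
#
#     batch_upload("rasters.ini")
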
def extract_external(externals, temp_dir):
    """ Writes a metadata json per external wms layer """

    extract_data_failures = []
    extract_data_succes = []

    log_time("info", "Extracting externals metadata")
    for extern in tqdm(externals):
        # Do the skip check before entering the try block: a continue inside
        # a try still triggers the finally clause, which would rewrite the
        # meta file we meant to skip.
        meta_path = os.path.join(temp_dir, extern["unique_name"] + ".json")
        if os.path.exists(meta_path):
            log_time("info", "Meta file exists, skipping", extern["name"])
            continue

        json_dict = {}
        try:
            log_time("info", "Processing external wms:", extern["name"])

        except Exception as e:
            extern["error"] = f"General error: {e}"
            extract_data_failures.append(extern)

        else:
            extern["subject"] = extern["name"]
            extract_data_succes.append(extern)

        finally:
            json_dict["atlas"] = extern
            with open(meta_path, "w") as outfile:
                json.dump(json_dict, outfile)

    return extract_data_succes, extract_data_failures
def upload(setting):
    """ Uploads one vector layer to postgres, geoserver and the lizard wms """
    log_time("info", setting.layer_name, "0. starting.....")

    if isinstance(setting.in_datasource, dict):
        _clear_connections_database(setting.in_datasource)

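    # The source vector is only wrapped when at least one upload step runs;
    # the correction and mask steps below assume it exists.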
    if not (setting.skip_gs_upload and setting.skip_pg_upload):
        vector = wrap_shape(setting.in_datasource, setting.in_layer)

    if not setting.skip_correction:
        log_time("info", setting.layer_name, "1. vector corrections")
        vector.correct(vector.layer, setting.layer_name, setting.epsg)
        setting.layer_name = vector.layer_name

    if not setting.skip_mask:
        log_time("info", setting.layer_name, "1.2 vector corrections - mask")
        vector_geom = vector_to_geom(setting.mask_path, setting.epsg)
        vector.clip(vector.layer, vector_geom)

    if (not setting.skip_mask) or (not setting.skip_correction):
        if vector.ds is None:
            log_time("error", setting.layer_name, "Datasource is none")
        elif vector.ds[0].GetFeatureCount() == 0:
            log_time("error", setting.layer_name, "vector feature count is 0")
        elif vector.ds[0][0] is None:
            log_time("error", setting.layer_name, "Feature is none")

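    # Drop vector fields that the SLD never references, so that only styled
    # fields reach the database and geoserver.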
    if not setting.skip_delete_excess_field_names:
        vector = wrap_shape(vector.ds)
        field_names = vector.get_all_field_names()
        field_names_lowered = [f.lower() for f in field_names]

        sld = wrap_sld(setting.in_sld_path, _type="path")
        sld.lower_all_property_names()
        sld_fields = sld.get_all_property_names()

        if sld_fields:
            for idx, field_name in enumerate(field_names_lowered):
                if field_name not in sld_fields:
                    vector.delete_fields([field_names[idx]])
                else:
                    log_time("info", f"Keeping '{field_name}' field in vector")

    if not setting.skip_pg_upload:
        log_time("info", setting.layer_name, "2. Upload shape to pg database.")

        pg_details = PG_DATABASE[setting.server_naam]
        _clear_connections_database(pg_details)

        if setting.database_name is not None:
            pg_details["database"] = setting.database_name

        pg_database = wrap_shape(pg_details)
        if setting.product_naam != "flooding" and setting.set_metadata:
            add_metadata_pgdatabase(setting, pg_database)

        schema_layers = [layer.split(".")[-1] for layer in pg_database.layers]
        pg_layer_present = setting.layer_name in schema_layers

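        # Copy the vector into postgres only when the layer is absent or
        # overwriting is explicitly enabled.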
        if not pg_layer_present or setting.overwrite_postgres:
            copy2pg_database(
                pg_database.ds,
                vector.ds,
                vector.layer,
                setting.layer_name,
                setting.schema_name,
            )

        else:
            log_time("info", setting.layer_name, "Layer already in database.")

        pg_database.get_layer(setting.layer_name.lower())
        pg_database.lower_all_field_names()

    if not setting.skip_gs_upload:
        log_time("info", setting.layer_name, "3. Create workspace.")
        server = setting.server
        server.create_workspace(setting.workspace_name)

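        # The postgis datastore points the workspace at the database tables
        # filled in step 2.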
        log_time("info", setting.layer_name, "4. Create store.")
        pg_details = PG_DATABASE[setting.server_naam]
        server.create_postgis_datastore(setting.store_name,
                                        setting.workspace_name, pg_details)

        log_time("info", setting.layer_name, "5. Publish featuretype.")
        server.publish_layer(
            setting.layer_name,
            setting.workspace_name,
            setting.overwrite_feature,
            setting.epsg,
            reload=True,
        )

        if setting.use_existing_geoserver_sld:
            log_time("info", setting.layer_name, "6-9. Setting existing sld.")
            server.set_sld_for_layer(workspace_name=None,
                                     style_name=setting.existing_sld,
                                     use_custom=True)
        else:
            log_time("info", setting.layer_name,
                     "6. Load Style Layer Descriptor.")

            sld = wrap_sld(setting.in_sld_path, _type="path")
            if not setting.skip_sld_check:
                log_time("info", setting.layer_name, "7. Check sld.")

                # lower all and cut field names to esri shape standards
                sld.lower_all_property_names()
                sld.cut_len_all_property_names(_len=10)

            log_time("info", setting.layer_name, "8. Upload sld.")
            style_name = setting.layer_name + "_style"
            server.upload_sld(style_name, setting.workspace_name,
                              sld.get_xml(), setting.overwrite_sld)

            log_time("info", "9. Connect sld to layer.")
            server.set_sld_for_layer()

        # log_time("info", setting.layer_name, "10. Add to abstract.")
        # if setting.overwrite_abstract:
        #     server.write_abstract(setting.abstract_data)

        # log_time("info", setting.layer_name, "11. Add to title.")
        # if setting.overwrite_title:
        #     server.write_title(setting.title_data)

    if not setting.skip_lizard_wms_layer:
        log_time("info", setting.layer_name, "12. Add wms layer.")
        if not setting.skip_gs_upload:
            gs_wms_server = setting.server
            gs_wms_server.get_layer(setting.slug)
            setting.wmslayer.configuration["wms_url"] = setting.wms_url
            download_url = setting.wmslayer.get_download_url(
                setting.wms_url, setting.slug)
            setting.wmslayer.configuration["download_url"] = download_url
            setting.wmslayer.configuration["slug"] = setting.slug[:64]

        else:
            gs_wms_server = wrap_geoserver(setting.wmsserver)
            gs_wms_server.get_layer(setting.wmsslug)

        latlon_bbox = gs_wms_server.layer_latlon_bbox
        setting.wmslayer.configuration["spatial_bounds"] = {
            "south": latlon_bbox[2],
            "west": latlon_bbox[0],
            "north": latlon_bbox[3],
            "east": latlon_bbox[1],
        }
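        # layer_latlon_bbox is assumed to be ordered (min_lon, max_lon,
        # min_lat, max_lat), hence the index mapping above.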

        setting.wmslayer.create(setting.wmslayer.configuration, overwrite=True)

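    # Hand the wms endpoint and slug back to batch_upload, which records them
    # per subject.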
    log_time("info", setting.layer_name, "13. Returning wms, slug")
    return setting.wms_url, setting.slug
def batch_upload(inifile):
    """ Batch uploads shapes for one geoserver """
    setting = settings_object(inifile)

    # set logging
    set_log_config(setting.ini_location)
    sys.stdout = logger(setting.ini_location)

    # get vectors; the combined case must be checked first, otherwise it is
    # unreachable
    if setting.use_directory and setting.use_postgis:
        setting.set_directory()
        setting.set_postgis()
        in_paths, subjects = get_paths_and_subjects(setting, "directory")
        pg_in_paths, pg_subjects = get_paths_and_subjects(setting, "postgis")

        in_paths = in_paths + pg_in_paths
        subjects = subjects + pg_subjects

    elif setting.use_directory:
        setting.set_directory()
        in_paths, subjects = get_paths_and_subjects(setting, "directory")

    elif setting.use_postgis:
        setting.set_postgis()
        in_paths, subjects = get_paths_and_subjects(setting, "postgis")

    else:
        print("set either use_directory or use_postgis")
        return

    print_list(subjects, "Subjects")
    print_list(in_paths, "Paths")

    failures = {}
    succes = {}
    for count, (in_path, subject) in enumerate(zip(in_paths, subjects)):

        setting = add_output_settings(setting, subject, in_path)

        if not setting.skip:
            log_time("info", percentage(count, len(in_paths)), subject, "l")
            setting.server = wrap_geoserver(setting.server_naam)
            print_dictionary(setting.__dict__, "Layer settings")

            pg_details = PG_DATABASE[setting.server_naam]
            _clear_connections_database(pg_details)

            try:
                succes[setting.subject] = upload(setting)

            except Exception as e:
                print(e)
                failures[setting.subject] = e

            finally:
                _clear_connections_database(pg_details)

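            # Brief pause between layers to limit the load on the geoserver
            # and database.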
            log_time("info", "sleeping to decrease load on server....")
            sleep(2)
        else:
            log_time("info", "Skipping", subject, "l")

    print_dictionary(succes, "Successes")
    print_dictionary(failures, "Failures")
def extract_atlas(atlas_name, wd, download, resolution=10):
    """ Extracts vector, raster and external wms data for one atlas """

    os.chdir(wd)

    vector_dir = mk_dir("extract_vector", path=wd)
    raster_dir = mk_dir("extract_raster", path=wd)
    external_dir = mk_dir("extract_external", path=wd)

    data = get_atlas_data(atlas_name)
    unique_data = unique(data)
    vectors = unique_data["vector"]
    rasters = unique_data["raster"]
    if "external" in unique_data.keys():
        externals = unique_data["external"]
    else:
        externals = None

    log_time("info", "Raster directory:", raster_dir)
    log_time("info", "Vector directory:", vector_dir)
    log_time("info", "Exteral wms directory:", external_dir)

    log_time("info", "Amount of vectors: {}".format(len(vectors)))
    log_time("info", "Amount of rasters: {}".format(len(rasters)))
    if externals:
        log_time("info", "Amount of external wms: {}".format(len(externals)))
    else:
        log_time("info", "Found no external wms")
        
    atlas = wrap_atlas(atlas_name)
    clip_geom = atlas.get_boundaring_polygon(atlas_name, "boundary", write=False)
    clip_geom_nl = atlas.get_boundaring_polygon(atlas_name, "boundary",
                                                write=False, epsg=28992)
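    # The boundary is fetched twice: once in the default projection and once
    # in RD New (EPSG:28992), presumably for the Dutch national rasters.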

    # extract vector data from their respective sources
    extract_vector_succes, extract_vector_failures = extract_vectors(
        vectors, vector_dir, atlas_name, clip_geom, download
    )
    extract_raster_succes, extract_raster_failures = extract_rasters(
        rasters, raster_dir, atlas_name, False, resolution, clip_geom, clip_geom_nl
    )
    if externals:
        extract_ext_succes, extract_ext_failures = extract_external(
            externals, external_dir
        )
    else:
        extract_ext_failures = None

    return (
        vectors,
        rasters,
        externals,
        extract_ext_failures,
        extract_raster_failures,
        extract_vector_failures,
    )
def extract_rasters(rasters, temp_dir, atlas_name, download=False,
                    resolution=10, clip_geom=None, clip_geom_nl=None):
    """ Fetches rasterstore metadata for each raster and writes it to json """

    raster_failures = []
    raster_succes = []

    store = rasterstore()

    # Collect rasterstore metadata for each raster
    for raster in rasters:
        log_time("info", "Processing raster:", raster["name"])
        log_time("info", "Unique name:", raster["unique_name"])

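        # One <unique_name>.json file is written per raster, bundling the
        # rasterstore config with the atlas entry (see the finally clause).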
        json_dict = {}
        subject = raster["name"].lower().replace(" ", "_")

        meta_path = os.path.join(temp_dir, raster["unique_name"] + ".json")
        if os.path.exists(meta_path):
            log_time("info", "Meta file exists, skipping", subject)
            continue

        # config is initialised up front so the finally clause below cannot
        # hit a NameError (or reuse a stale value) when the lookup fails.
        config = None
        try:
            uuid = store.get_uuid_by_slug(raster["slug"])
            config = store.get_store(uuid)
            
            # if download:
            #     e = clip_geom.GetEnvelope()
            #     nl = clip_geom_nl.GetEnvelope()
            #     bounds = {"west": e[0],
            #               "east": e[1],
            #               "north": e[3],
            #               "south": e[2]}
            #
            #     width = int((nl[1] - nl[0]) / resolution)
            #     height = int((nl[3] - nl[2]) / resolution)
            #
            #     while (width * height) > 1000000000:
            #         resolution = resolution + 0.5
            #         log_time('warning',
            #                  'maximum download support is 1000000000')
            #         log_time('warning',
            #                  f'Lowering resolution to {resolution}')
            #         width = int((nl[1] - nl[0]) / resolution)
            #         height = int((nl[3] - nl[2]) / resolution)
            #
            #     pathname = os.path.join(temp_dir,
            #                             meta_path.replace(".json", ".tif"))
            #     download_lizard_raster(uuid,
            #                            "EPSG:28992",
            #                            bounds=bounds,
            #                            width=width,
            #                            height=height,
            #                            pathname=pathname)

        except ValueError as e:
            raster["error"] = "Does this store exist? {} {}".format(raster["slug"], e)
            raster_failures.append(raster)

        except StoreNotFound as e:
            raster["error"] = "Does this store exist in lizard? {} {}".format(
                raster["slug"], e
            )
            raster_failures.append(raster)

        else:
            raster_succes.append(raster)

        finally:
            json_dict["rasterstore"] = config
            json_dict["atlas"] = raster
            with open(meta_path, "w") as outfile:
                json.dump(json_dict, outfile)

    return raster_succes, raster_failures
def extract_vectors(vectors, temp_dir, organisation, clip_geom, download=False,
                    bound_error=False):
    """ Downloads vector data and slds per layer and writes metadata json """

    extract_data_failures = []
    extract_data_succes = []
    bbox = clip_geom.GetEnvelope()
    wkt = POLYGON.format(x1=bbox[0], x2=bbox[1], y1=bbox[2], y2=bbox[3])
    bbox_geom = ogr.CreateGeometryFromWkt(wkt)
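    # The envelope polygon approximates the atlas area; wms_in_extent uses it
    # below to decide whether the raw data has to be downloaded.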

    log_time("info", "Set geoserver connections")
    gs_dict = set_geoserver_connections(vectors)

    log_time("info", "Extracting vector data")
    for vector in vectors:
        # Do the skip check before entering the try block: a continue inside
        # a try still triggers the finally clause, which would rewrite the
        # meta file we meant to skip.
        meta_path = os.path.join(temp_dir, vector["unique_name"] + ".json")
        if os.path.exists(meta_path):
            log_time("info", "Meta file exists, skipping", vector["name"])
            continue

        json_dict = {}
        try:
            log_time("info", "Processing vector:", vector["name"])
            log_time("info", "Unique name:", vector["unique_name"])

            subject = get_subject_from_name(vector["layername"],
                                            vector["workspace"])

            if not wms_in_extent(vector, bbox_geom) or download:
                log_time(
                    "info",
                    "Wms layer bbox outside atlas area or download forced,"
                    " retrieving raw data",
                )
                download_vector(
                    vector, temp_dir, meta_path.replace(".json", ".geojson"), *bbox,
                )

                retrieve_sld(vector, gs_dict, meta_path.replace(".json", ".sld"))

                # log_time("info",'Checking feature count outside geometry')
                # count = feature_count_outside_geometry(meta_path.replace(
                #                                                   ".json",
                #                                                   ".shp"),
                #                                                   clip_geom)
                #if count > 0:
                if bound_error:    
                    raise VectorOutsideArea(f"Outside atlas area")

        except DownloadFailure as e:
            vector["error"] = "Download failure, message:{}".format(e)
            extract_data_failures.append(vector)

        except MissingSLD as e:
            vector["error"] = "missing sld body layer not in geoserver, {}".format(e)
            extract_data_failures.append(vector)

        except VectorOutsideArea as e:
            vector["error"] = "Vector outside ara, message:{}".format(e)
            extract_data_failures.append(vector)

        except RuntimeError as e:
            vector["error"] = "Vector has extract error {}".format(e)
            extract_data_failures.append(vector)

        except AttributeError as e:
            vector["error"] = "missing sld body layer not in geoserver, {}".format(e)
            extract_data_failures.append(vector)

        except json.JSONDecodeError as e:
            vector["error"] = "Vector has json error{}".format(e)
            extract_data_failures.append(vector)

        else:
            vector["subject"] = subject
            extract_data_succes.append(vector)

        finally:
            json_dict["atlas"] = vector
            with open(meta_path, "w") as outfile:
                json.dump(json_dict, outfile)

    return extract_data_succes, extract_data_failures
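

if __name__ == "__main__":
    # Minimal command-line sketch, assuming this module is invoked directly
    # with an ini file prepared for batch_upload (a hypothetical entry point,
    # not part of the original interface).
    import argparse

    parser = argparse.ArgumentParser(description="Batch upload for one geoserver")
    parser.add_argument("inifile", help="path to the batch upload ini file")
    args = parser.parse_args()
    batch_upload(args.inifile)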