def batch_upload(inifile):
    """ Batch uploads rasters to one rasterstore """
    setting = settings_object(inifile)
    setting.set_project()
    setting.count_uuid_sections()

    # set logging
    set_log_config(setting.ini_location)
    sys.stdout = logger(setting.ini_location)

    in_paths = []
    for extension in ("tif", "vrt", "asc", "json"):
        in_paths.extend(glob(setting.directory + "/*." + extension))

    present_paths = []
    for path in in_paths:
        file_name = get_file_name_from_path(path)
        # json metadata always goes along; other files only when configured
        if file_name.split(".")[-1] == "json":
            present_paths.append(path)
        elif file_name in setting.config.sections():
            present_paths.append(path)

    if setting.uuid_list:
        present_paths.extend(("uuid", uuid) for uuid in setting.uuid_list)

    setting.store = rasterstore()
    print_list(present_paths, "Paths")

    failures = {}
    succes = {}
    for count, path in enumerate(present_paths):
        log_time("info", percentage(count, len(present_paths)), path, "l")
        try:
            setting.in_path = path
            setting = add_output_settings(setting)

            if not setting.skip:
                print_dictionary(setting.__dict__, "Settings")
                succes[setting.onderwerp] = upload(setting)

        except Exception as e:
            print(e)
            failures[setting.onderwerp] = e

    print_dictionary(succes, "Success")
    print_dictionary(failures, "Failures")
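
get_file_name_from_path is not shown in these examples; judging from the extension check above, it appears to return the basename of a path including its extension. A plausible sketch, not part of the original example:

import os

def get_file_name_from_path(path):
    # plausible sketch of the helper used above: '/data/depth.tif' -> 'depth.tif'
    return os.path.basename(path)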
Example #2
def extract_external(externals, temp_dir):

    extract_data_failures = []
    extract_data_succes = []

    log_time("info", "Extracting externals metadata")
    for extern in tqdm(externals):
        meta_path_exists = False
        try:
            log_time("info", "Processing external wms:", extern["name"])
            json_dict = {}

            meta_path = os.path.join(temp_dir, extern["unique_name"] + ".json")

            if os.path.exists(meta_path):
                log_time("info", "Meta file exists, skipping", extern["name"])
                meta_path_exists = True
                continue

        except Exception as e:
            extern["error"] = f"General error: {e}"
            extract_data_failures.append(extern)
        else:
            extern["subject"] = extern["name"]
            extract_data_succes.append(extern)

        finally:
            # the finally block also runs on the continue above, so guard
            # against overwriting a meta file that already exists
            if not meta_path_exists:
                json_dict["atlas"] = extern
                with open(meta_path, "w") as outfile:
                    json.dump(json_dict, outfile)

    return extract_data_succes, extract_data_failures
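
The meta-file check above is the same write-once pattern used by extract_rasters and extract_vectors later in this listing; a standalone sketch (write_meta_once is a hypothetical name):

import json
import os

def write_meta_once(temp_dir, unique_name, payload):
    # skip work that already produced a metadata file in an earlier run
    meta_path = os.path.join(temp_dir, unique_name + ".json")
    if not os.path.exists(meta_path):
        with open(meta_path, "w") as outfile:
            json.dump({"atlas": payload}, outfile)
    return meta_path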
Example #3
def create_atlasstores(
    raster_dir,
    clip_list,
    setting,
    use_nens=False,
    overwrite=False,
):

    raster_failures = []
    raster_succes = []

    # Start raster changes

    for meta_path in glob(raster_dir + "/*.json"):
        with open(meta_path) as meta_file:
            raster = json.load(meta_file)

        log_time("info", "Processing rasterstore:", raster["atlas"]["name"])

        store = rasterstore()

        # copy rasterstore
        config = raster["rasterstore"]

        # add clip to the copied rasterstore
        geoblock = uuid_store(config["uuid"])
        graph = geoblock_clip(geoblock, clip_list)
        config["source"] = {"graph": graph, "name": "endpoint"}

        # access modifier
        config["access_modifier"] = 0

        # name
        config["name"] = raster["atlas"]["name"]

        # supplier
        config["supplier"] = setting.eigen_naam

        # Description
        config["description"] = strip_information(
            raster["atlas"]["information"])

        # observation type, defaulting to "Waterdepth" when absent
        code = "Waterdepth"
        if config.get("observation_type") and "code" in config["observation_type"]:
            code = config["observation_type"]["code"]

        config["observation_type"] = code

        # organisation
        if use_nens:
            config["organisation"] = "61f5a464-c350-44c1-9bc7-d4b42d7f58cb"
        else:
            config["organisation"] = setting.organisatie_uuid

        # slug search name
        slug_name = config["name"].replace(" ", "-").lower()
        slug = "{}:{}".format(setting.organisatie, slug_name)

        # clip slug to 64 characters
        slug = slug[:64]

        # remove excess raster info (styles etc.)
        config = delete_excess_raster_info(config)

        # add datasets
        config["datasets"] = [setting.dataset]

        # add slug for search
        config["slug"] = slug

        # create stores
        try:
            store.create(config, overwrite=overwrite)

        except Exception as e:
            raster["error"] = e
            raster_failures.append(raster)
        else:
            raster_succes.append(raster)

    return raster_succes, raster_failures
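
The slug construction above is short enough to isolate; a sketch with a worked example (build_slug is a hypothetical helper name):

def build_slug(organisation, name, max_len=64):
    # '<organisation>:<name>', name lower-cased with spaces to dashes,
    # the whole slug clipped to 64 characters
    slug_name = name.replace(" ", "-").lower()
    return "{}:{}".format(organisation, slug_name)[:max_len]

# build_slug("demo-org", "Waterdepth T100") == "demo-org:waterdepth-t100"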
Example #4
def create_wmslayers(upload_dir, setting, bounds, use_nens=False):

    wmslayer_failures = []
    wmslayer_succes = []

    for meta_path in glob(upload_dir + "/*.json"):
        try:

            wmslayer = wmslayers()
            with open(meta_path) as meta_file:
                meta_data = json.load(meta_file)

            name = meta_data["atlas"]["name"]

            log_time("info", f"Creating wms layer for {name}")

            wms_info, result_exists = wmslayer.get_layer(name)

            if result_exists:
                wmslayer.delete(wms_info["uuid"])

            if use_nens:
                setting.organisatie_uuid = wmslayer.get_nens_id()

            # set description
            if "information" in meta_data["atlas"]:
                wmslayer_description = strip_information(
                    meta_data["atlas"]["information"])
            else:
                wmslayer_description = ""

            # set name of layer; names are clipped to 80 characters
            name = name[:80]

            if "extract_error" in meta_data:
                if "coverage" in meta_data["extract_error"]:
                    error = True
                elif "sld" in meta_data["extract_error"]:
                    error = True
                else:
                    error = False
            else:
                error = False

            if error:
                continue

            slug = meta_data["atlas"]["slug"].lower()
            url = meta_data["atlas"]["url"]

            # download link
            download_url = "{}?&request=GetFeature&typeName={}&OutputFormat=application/json".format(
                url.replace("wms", "wfs"), slug)

            # legend link
            legend_link = (
                "{}?REQUEST=GetLegendGraphic&VERSION=1.0.0&"
                "FORMAT=image/png&LAYER={}&LEGEND_OPTIONS="
                "forceRule:True;dx:0.2;dy:0.2;mx:0.2;my:0.2;"
                "fontName:Times%20New%20Roman;borderColor:#429A95;"
                "border:true;fontColor:#15EBB3;fontSize:18;dpi:180").format(
                    url, slug)
            bounding_box = {
                "south": bounds[2],
                "west": bounds[0],
                "north": bounds[3],
                "east": bounds[1],
            }

            configuration = {
                "name": name,
                "description": wmslayer_description,
                "slug": slug,
                "tiled": True,
                "wms_url": url,
                "access_modifier": 0,
                "supplier": setting.eigen_naam,
                "options": {
                    "transparent": "true"
                },
                "shared_with": [],
                "datasets": [setting.dataset],
                "organisation": setting.organisatie_uuid,
                "download_url": download_url,
                "spatial_bounds": bounding_box,
                "legend_url": legend_link,
                "get_feature_info_url": url,
                "get_feature_info": True,
            }

            meta_data["wmslayer"] = wmslayer.create(configuration,
                                                    overwrite=True)

        except Exception as e:
            print(e)
            meta_data["error"] = e
            wmslayer_failures.append(meta_data)

        else:
            wmslayer_succes.append(meta_data)

    return wmslayer_succes, wmslayer_failures
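
The bounds indexing above (bounds[0] west, bounds[1] east, bounds[2] south, bounds[3] north) appears to follow the OGR envelope order (min_x, max_x, min_y, max_y); a sketch that makes the mapping explicit (envelope_to_bounds is a hypothetical name):

def envelope_to_bounds(envelope):
    # OGR GetEnvelope() order: (min_x, max_x, min_y, max_y)
    min_x, max_x, min_y, max_y = envelope
    return {"west": min_x, "east": max_x, "south": min_y, "north": max_y}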
Example #5
def upload_ready_vectors(upload_dir,
                         clip_geom,
                         organisation,
                         dataset,
                         epsg=3857):

    upload_ready_succes = []
    upload_ready_failures = []

    log_time("info", "upload ready vectors")
    for meta_path in tqdm(glob(os.path.join("extract_vector", "*.json"))):

        vector_path = meta_path.replace(".json", ".shp")
        with open(meta_path) as meta_file:
            meta_data = json.load(meta_file)

        if "error" in meta_data["atlas"]:
            log_time("info",
                     "Skipped {} due to error message".format(meta_path))
            continue

        vector_name = get_subject_from_name(
            os.path.basename(vector_path).split(".")[0], organisation)

        vector_length = 62
        vector_name_new = vector_name[:vector_length]
        meta_data["vector_name"] = vector_name_new

        feature_store = True
        sld_store = True

        extract_error = meta_data.get("extract_error")
        if extract_error:
            if "coverage" in extract_error:
                feature_store = False
                sld_store = False
            elif "sld" in extract_error:
                sld_store = False
            log_time("info", "incomplete data", extract_error)

        # if feature_store and os.path.exists(vector_path):
        #     fs_vector = vector_wrap(os.path.join(os.getcwd(), vector_path))
        #     fs_vector.correct(fs_vector.layer, epsg=epsg)
        #     fs_vector.clip(fs_vector.layer, clip_geom)
        #     output_file = os.path.join(upload_dir, vector_name_new + ".shp")
        #     fs_vector.write(
        #         output_file, fs_vector.layer, layer_name=vector_name_new
        #     )

        # copy sld
        if sld_store:
            sld_in = meta_path.replace(".json", ".sld")
            sld_out = os.path.join(upload_dir, vector_name_new + ".sld")
            #copyfile(sld_in, sld_out)

        meta_out = os.path.join(upload_dir, vector_name_new + ".json")
        with open(meta_out, "w") as out_file:
            json.dump(meta_data, out_file)

        # collect processed vectors so the returned list is not always empty
        upload_ready_succes.append(meta_data)

    return upload_ready_succes, upload_ready_failures
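
The meta_path.replace(".json", ...) calls above break when ".json" occurs elsewhere in the path; a more robust sketch using os.path.splitext (sibling_path is a hypothetical name):

import os

def sibling_path(meta_path, new_ext):
    # swap only the final extension: .../layer.json -> .../layer.shp
    root, _ = os.path.splitext(meta_path)
    return root + new_ext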
Example #6
def summary(
    vectors,
    vector_extract_failures,
    wmslayer_failures,
    ready_failure,
    rasters,
    raster_extract_failures,
    rasterstore_failures,
    externals,
    externals_failures,
):
    log_time("info", "summary",
             "Found vectors in atlas {}".format(len(vectors)))
    for vector in vectors:
        print("\t" + vector["name"])

    log_time(
        "info",
        "summary",
        "Extract vector failures {}".format(len(vector_extract_failures)),
    )
    for vector in vector_extract_failures:
        print("\t{}:{}".format(vector["name"], vector["error"]))

    log_time("info", "summary",
             "Upload-ready vector failures {}".format(len(ready_failure)))
    for vector in ready_failure:
        print("\t{}:{}".format(vector["name"], vector["error"]))

    log_time("info", "summary",
             "Upload vector failures {}".format(len(wmslayer_failures)))
    for vector in wmslayer_failures:
        print("\t{}:{}".format(vector["name"], vector["error"]))

    log_time("info", "summary",
             "Found rasters in atlas {}".format(len(rasters)))
    for raster in rasters:
        print("\t" + raster["name"])

    log_time(
        "info",
        "summary",
        "Raster extract failures {}".format(len(raster_extract_failures)),
    )
    for raster in raster_extract_failures:
        print("\t{}:{}".format(raster["atlas"]["name"], raster["error"]))

    log_time(
        "info",
        "summary",
        "Upload to rasterstore failures {}".format(len(rasterstore_failures)),
    )
    for raster in rasterstore_failures:
        print("\t{}:{}".format(raster["rasterstore"]["name"], raster["error"]))

    log_time("info", "summary",
             "External wmslayers in atlas {}".format(len(externals)))
    for external in externals_failures:
        print("\t" + external["atlas"]["name"])
def upload(setting):
    log_time("info", setting.layer_name, "0. starting.....")

    if isinstance(setting.in_datasource, dict):
        _clear_connections_database(setting.in_datasource)

    if not (setting.skip_gs_upload and setting.skip_pg_upload):
        vector = wrap_shape(setting.in_datasource, setting.in_layer)

    if not setting.skip_correction:
        log_time("info", setting.layer_name, "1. vector corrections")
        vector.correct(
            vector.layer, setting.layer_name, setting.epsg
        )
        setting.layer_name = vector.layer_name

    if not setting.skip_mask:
        log_time("info", setting.layer_name, "1.2 vector corrections - mask")
        vector_geom = vector_to_geom(setting.mask_path, setting.epsg)
        vector.clip(vector.layer, vector_geom)

    if (not setting.skip_mask) or (not setting.skip_correction):
        if vector.ds is None:
            log_time("error", setting.layer_name, "Datasource is none")
        elif vector.ds[0].GetFeatureCount() == 0:
            log_time("error", setting.layer_name, "vector feature count is 0")
        elif vector.ds[0][0] is None:
            log_time("error", setting.layer_name, "Feature is none")

    if not setting.skip_delete_excess_field_names:
        vector = wrap_shape(vector.ds)
        field_names = vector.get_all_field_names()
        field_names_lowered = [f.lower() for f in field_names]

        sld = wrap_sld(setting.in_sld_path, _type="path")
        sld.lower_all_property_names()
        sld_fields = sld.get_all_property_names()
    
        if len(sld_fields) > 0:
            for idx, field_name in enumerate(field_names_lowered):
                if field_name not in sld_fields:
                    vector.delete_fields([field_names[idx]])
                else:
                    log_time("info", f"Keeping '{field_name}' field in vector")

    if not setting.skip_pg_upload:
        log_time("info", setting.layer_name, "2. Upload shape to pg database.")

        pg_details = PG_DATABASE[setting.server_naam]
        _clear_connections_database(pg_details)

        if setting.database_name is not None:
            pg_details["database"] = setting.database_name

        pg_database = wrap_shape(pg_details)
        if setting.product_naam != "flooding" and setting.set_metadata:
            add_metadata_pgdatabase(setting, pg_database)

        schema_layers = [layer.split(".")[-1] for layer in pg_database.layers]
        pg_layer_present = setting.layer_name in schema_layers

        if not pg_layer_present or setting.overwrite_postgres:
            copy2pg_database(
                pg_database.ds,
                vector.ds,
                vector.layer,
                setting.layer_name,
                setting.schema_name,
            )

        else:
            log_time("info", setting.layer_name, "Layer already in database.")

        pg_database.get_layer(setting.layer_name.lower())
        pg_database.lower_all_field_names()

    if not setting.skip_gs_upload:
        log_time("info", setting.layer_name, "3. Create workspace.")
        server = setting.server
        server.create_workspace(setting.workspace_name)

        log_time("info", setting.layer_name, "4. Create store.")
        pg_details = PG_DATABASE[setting.server_naam]
        server.create_postgis_datastore(
            setting.store_name, setting.workspace_name, pg_details
        )

        log_time("info", setting.layer_name, "5. Publish featuretype.")
        server.publish_layer(
            setting.layer_name,
            setting.workspace_name,
            setting.overwrite_feature,
            setting.epsg,
            reload=True,
        )

        if setting.use_existing_geoserver_sld:
            log_time("info", setting.layer_name, "6-9. Setting existing sld.")
            server.set_sld_for_layer(
                workspace_name=None, style_name=setting.existing_sld, use_custom=True
            )
        else:
            log_time("info", setting.layer_name, "6. Load Style Layer Descriptor.")

            sld = wrap_sld(setting.in_sld_path, _type="path")
            if not setting.skip_sld_check:
                log_time("info", setting.layer_name, "7. Check sld.")

                # lower all and cut field names to esri shape standards
                sld.lower_all_property_names()
                sld.cut_len_all_property_names(_len=10)

            log_time("info", setting.layer_name, "8. Upload sld.")
            style_name = setting.layer_name + "_style"
            server.upload_sld(
                style_name, setting.workspace_name, sld.get_xml(), setting.overwrite_sld
            )

            log_time("info", "9. Connect sld to layer.")
            server.set_sld_for_layer()

        log_time("info", setting.layer_name, "10. Add to abstract.")
        if setting.overwrite_abstract:
            server.write_abstract(setting.abstract_data)

        log_time("info", setting.layer_name, "11. Add to title.")
        if setting.overwrite_title:
            server.write_title(setting.title_data)

    if not setting.skip_lizard_wms_layer:
        log_time("info", setting.layer_name, "12. Add wms layer.")
        if not setting.skip_gs_upload:
            gs_wms_server = setting.server
            gs_wms_server.get_layer(setting.slug)
            setting.wmslayer.configuration["wms_url"] = setting.wms_url
            download_url = setting.wmslayer.get_download_url(
                setting.wms_url, setting.slug
            )
            setting.wmslayer.configuration["download_url"] = download_url
            setting.wmslayer.configuration["slug"] = setting.slug

        else:
            gs_wms_server = wrap_geoserver(setting.wmsserver)
            gs_wms_server.get_layer(setting.wmsslug)

        latlon_bbox = gs_wms_server.layer_latlon_bbox
        setting.wmslayer.configuration["spatial_bounds"] = {
            "south": latlon_bbox[2],
            "west": latlon_bbox[0],
            "north": latlon_bbox[3],
            "east": latlon_bbox[1],
        }

        setting.wmslayer.create(setting.wmslayer.configuration, overwrite=True)

    log_time("info", setting.layer_name, "13. Returning wms, slug")
    return setting.wms_url, setting.slug
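
The field-deletion step in upload() (under skip_delete_excess_field_names) keeps only fields that the SLD references, compared case-insensitively; a standalone sketch of that filter (fields_to_delete is a hypothetical name):

def fields_to_delete(field_names, sld_fields):
    # fields not referenced by any SLD property name, compared case-insensitively
    sld_lower = {name.lower() for name in sld_fields}
    return [name for name in field_names if name.lower() not in sld_lower]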
def batch_upload(inifile):
    """ Returns batch upload shapes for one geoserver """
    setting = settings_object(inifile)

    # set logging
    set_log_config(setting.ini_location)
    sys.stdout = logger(setting.ini_location)

    # get vectors; the combined option must be checked first or it is unreachable
    if setting.use_directory and setting.use_postgis:
        setting.set_directory()
        setting.set_postgis()
        in_paths, subjects = get_paths_and_subjects(setting, "directory")
        pg_in_paths, pg_subjects = get_paths_and_subjects(setting, "postgis")

        in_paths = in_paths + pg_in_paths
        subjects = subjects + pg_subjects

    elif setting.use_directory:
        setting.set_directory()
        in_paths, subjects = get_paths_and_subjects(setting, "directory")

    elif setting.use_postgis:
        setting.set_postgis()
        in_paths, subjects = get_paths_and_subjects(setting, "postgis")

    else:
        print("use either use_directory or use_postgis")
        return

    print_list(subjects, "Subjects")
    print_list(in_paths, "Paths")

    failures = {}
    succes = {}
    for count, (in_path, subject) in enumerate(zip(in_paths, subjects)):

        setting = add_output_settings(setting, subject, in_path)

        if not setting.skip:
            log_time("info", percentage(count, len(in_paths)), subject, "l")
            setting.server = wrap_geoserver(setting.server_naam)
            print_dictionary(setting.__dict__, "Layer settings")

            pg_details = PG_DATABASE[setting.server_naam]
            _clear_connections_database(pg_details)

            try:
                succes[setting.subject] = upload(setting)

            except Exception as e:
                print(e)
                failures[setting.subject] = e

            finally:
                _clear_connections_database(pg_details)

            log_time("info", "sleeping to decrease load on server....")
            sleep(2)
        else:
            log_time("info", "Skipping", subject, "l")

    print_dictionary(succes, "Succes")
    print_dictionary(failures, "Failures")
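
The clear-connections-before-and-after pattern around upload() can be expressed as a context manager; a sketch assuming _clear_connections_database from the surrounding module:

from contextlib import contextmanager

@contextmanager
def cleared_connections(pg_details):
    # clear on entry and, via finally, on any exit (success or exception)
    _clear_connections_database(pg_details)
    try:
        yield
    finally:
        _clear_connections_database(pg_details)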
Example #9
def extract_atlas(atlas_name, wd, download, resolution=10):
    """ Returns batch upload shapes for one geoserver """

    os.chdir(wd)

    vector_dir = mk_dir("extract_vector", path=wd)
    raster_dir = mk_dir("extract_raster", path=wd)
    external_dir = mk_dir("extract_external", path=wd)

    data = get_atlas_data(atlas_name)
    unique_data = unique(data)
    vectors = unique_data["vector"]
    rasters = unique_data["raster"]
    externals = unique_data["external"]

    log_time("info", "Raster directory:", raster_dir)
    log_time("info", "Vector directory:", vector_dir)
    log_time("info", "Exteral wms directory:", external_dir)

    log_time("info", "Amount of vectors: {}".format(len(vectors)))
    log_time("info", "Amount of rasters: {}".format(len(rasters)))
    log_time("info", "Amount of external wms: {}".format(len(externals)))

    atlas = wrap_atlas(atlas_name)
    clip_geom = atlas.get_boundaring_polygon(atlas_name, "boundary", write=False)
    clip_geom_nl = atlas.get_boundaring_polygon(atlas_name, "boundary",
                                                write=False, epsg=28992)

    # extract vector data from their respective sources
    extract_vector_succes, extract_vector_failures = extract_vectors(
        vectors, vector_dir, atlas_name, clip_geom, download
    )
    extract_raster_succes, extract_raster_failures = extract_rasters(
        rasters, raster_dir, atlas_name, False, resolution, clip_geom, clip_geom_nl
    )
    extract_ext_succes, extract_ext_failures = extract_external(
        externals, external_dir
    )

    return (
        vectors,
        rasters,
        externals,
        extract_ext_failures,
        extract_raster_failures,
        extract_vector_failures,
    )
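
mk_dir is not shown in these examples; a plausible sketch based on how it is called above:

import os

def mk_dir(name, path):
    # create (or reuse) a subdirectory of path and return its full location
    full_path = os.path.join(path, name)
    os.makedirs(full_path, exist_ok=True)
    return full_path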
Example #10
def extract_rasters(rasters, temp_dir, atlas_name, download=False,
                    resolution=10, clip_geom=None, clip_geom_nl=None):

    raster_failures = []
    raster_succes = []

    store = rasterstore()

    # Start raster changes
    for raster in rasters:
        log_time("info", "Processing raster:", raster["name"])
        log_time("info", "Unique name:", raster["unique_name"])

        json_dict = {}
        subject = "_".join(raster["name"].lower().split(" "))

        meta_path_exists = False
        meta_path = os.path.join(temp_dir, raster['unique_name'] + ".json")
        if os.path.exists(meta_path):
            log_time("info", "Meta file exists, skipping", subject)
            meta_path_exists = True
            continue

        config = None  # keep the finally block below from hitting an unbound name
        try:
            uuid = store.get_uuid_by_slug(raster["slug"])
            config = store.get_store(uuid)

            # if download:
            #     e = clip_geom.GetEnvelope()
            #     nl = clip_geom_nl.GetEnvelope()
            #     bounds = {"west": e[0], "east": e[1],
            #               "north": e[3], "south": e[2]}
            #
            #     width = int((nl[1] - nl[0]) / resolution)
            #     height = int((nl[3] - nl[2]) / resolution)
            #
            #     while (width * height) > 1000000000:
            #         resolution = resolution + 0.5
            #         log_time('warning',
            #                  'maximum download support is 1000000000')
            #         log_time('warning',
            #                  f'Lowering resolution to {resolution}')
            #         width = int((nl[1] - nl[0]) / resolution)
            #         height = int((nl[3] - nl[2]) / resolution)
            #
            #     pathname = os.path.join(temp_dir,
            #                             meta_path.replace(".json", ".tif"))
            #     download_lizard_raster(uuid, "EPSG:28992", bounds=bounds,
            #                            width=width, height=height,
            #                            pathname=pathname)

        except ValueError as e:
            raster["error"] = "Does this store exist? {} {}".format(raster["slug"], e)
            raster_failures.append(raster)

        except StoreNotFound as e:
            raster["error"] = "Does this store exist in lizard? {} {}".format(
                raster["slug"], e
            )
            raster_failures.append(raster)

        else:
            raster_succes.append(raster)

        finally:
            if not meta_path_exists:
                json_dict["rasterstore"] = config
                json_dict["atlas"] = raster
                with open(meta_path, "w") as outfile:
                    json.dump(json_dict, outfile)

    return raster_succes, raster_failures
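
The commented-out download branch above sizes the request from the envelope and coarsens the resolution until the pixel count fits the cap; a cleaned-up sketch of that calculation (raster_shape is a hypothetical name):

def raster_shape(envelope, resolution, max_pixels=1_000_000_000):
    # OGR envelope order: (min_x, max_x, min_y, max_y)
    min_x, max_x, min_y, max_y = envelope
    width = int((max_x - min_x) / resolution)
    height = int((max_y - min_y) / resolution)
    while width * height > max_pixels:
        resolution += 0.5  # coarsen until the request fits the cap
        width = int((max_x - min_x) / resolution)
        height = int((max_y - min_y) / resolution)
    return width, height, resolution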
Example #11
def extract_vectors(vectors, temp_dir, organisation, clip_geom,
                    download=False, bound_error=False):

    extract_data_failures = []
    extract_data_succes = []
    bbox = clip_geom.GetEnvelope()
    wkt = POLYGON.format(x1=bbox[0], x2=bbox[1], y1=bbox[2], y2=bbox[3])
    bbox_geom = ogr.CreateGeometryFromWkt(wkt)

    log_time("info", "Set geoserver connections")
    gs_dict = set_geoserver_connections(vectors)

    log_time("info", "Extracting vector data")
    for vector in vectors:
        try:
            log_time("info", "Processing vector:", vector["name"])
            log_time("info", "Unique name:", vector["unique_name"])
            
            json_dict = {}

            subject = get_subject_from_name(vector["layername"], vector["workspace"])

            meta_path_exists = False
            meta_path = os.path.join(temp_dir, vector['unique_name'] + ".json")
            if os.path.exists(meta_path):
                log_time("info", "Meta file exists, skipping", subject)
                meta_path_exists = True
                continue


            if not wms_in_extent(vector, bbox_geom) or download:
                log_time(
                    "info",
                    "Wms layer bbox outside area, retrieving raw data"
                    " or download = True",
                )
                download_vector(
                    vector, temp_dir, meta_path.replace(".json", ".geojson"), *bbox,
                )

                retrieve_sld(vector, gs_dict, meta_path.replace(".json", ".sld"))

                # log_time("info",'Checking feature count outside geometry')
                # count = feature_count_outside_geometry(meta_path.replace(
                #                                                   ".json",
                #                                                   ".shp"),
                #                                                   clip_geom)
                #if count > 0:
                if bound_error:    
                    raise VectorOutsideArea(f"Outside atlas area")

        except DownloadFailure as e:
            vector["error"] = "Download failure, message:{}".format(e)
            extract_data_failures.append(vector)

        except MissingSLD as e:
            vector["error"] = "missing sld body layer not in geoserver, {}".format(e)
            extract_data_failures.append(vector)

        except VectorOutsideArea as e:
            vector["error"] = "Vector outside ara, message:{}".format(e)
            extract_data_failures.append(vector)

        except RuntimeError as e:
            vector["error"] = "Vector has extract error {}".format(e)
            extract_data_failures.append(vector)

        except AttributeError as e:
            vector["error"] = "missing sld body layer not in geoserver, {}".format(e)
            extract_data_failures.append(vector)

        except json.JSONDecodeError as e:
            vector["error"] = "Vector has json error{}".format(e)
            extract_data_failures.append(vector)

        else:
            vector["subject"] = subject
            extract_data_succes.append(vector)

        finally:
            if not meta_path_exists:
                json_dict["atlas"] = vector
                with open(meta_path, "w") as outfile:
                    json.dump(json_dict, outfile)

    return extract_data_succes, extract_data_failures
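
The POLYGON WKT template used at the top of extract_vectors is defined elsewhere; a sketch with the template written out, assuming a rectangle built from the OGR envelope (envelope_to_geom is a hypothetical name):

from osgeo import ogr

def envelope_to_geom(clip_geom):
    # OGR envelope order: (min_x, max_x, min_y, max_y)
    x1, x2, y1, y2 = clip_geom.GetEnvelope()
    wkt = ("POLYGON (({x1} {y1}, {x2} {y1}, {x2} {y2}, "
           "{x1} {y2}, {x1} {y1}))").format(x1=x1, x2=x2, y1=y1, y2=y2)
    return ogr.CreateGeometryFromWkt(wkt)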