def main():
    """ Import and convert DSG colormaps """
    parser = argparse.ArgumentParser(
        description="Import and convert DSG colormaps"
    )
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()

    config_file = args.config_file
    config = common.read_config(config_file)

    settings_file = common.get_config_item(config, "settings")
    settings_file = common.get_full_path(settings_file)
    settings = common.read_config(settings_file)

    folder = settings["map_settings"]["colormaps_folder"]
    SUFFIX = ".clx"

    import_colormaps(folder, SUFFIX)
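# Usage note (the script name below is illustrative, not from the source):
#   python import_colormaps_script.py <webviz-4d-config.yaml>
# The script resolves the "settings" entry of the configuration file and imports
# all DSG colormap files (*.clx) found under map_settings.colormaps_folder.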
def main():
    """Display the tooltip info found in all well list (pickle) files in a folder

    Parameters
    ----------
    config_file : str
        The name of the WebViz-4D configuration file

    Returns
    -------
    None
    """
    description = "Check well list files"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()
    print(description)
    print(args)

    config_file = args.config_file
    config = common.read_config(config_file)

    wellfolder = common.get_config_item(config, "wellfolder")
    wellfolder = common.get_full_path(wellfolder)
    print("Reading well lists in", wellfolder)

    pickle_files = glob.glob(wellfolder + "/*.pkl")

    for pickle_file in pickle_files:
        with open(pickle_file, "rb") as file_object:
            info = pickle.load(file_object)

        print(pickle_file)
        data = info["data"]

        if len(data) > 0:
            for item in data:
                print(item["tooltip"])

        print("")
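# Note on the expected well list structure (inferred from the loop above): each
# pickle file is assumed to hold a dict with a "data" key containing a list of
# layer items, where each item carries a "tooltip" string that is printed here.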
def main():
    """ Create production data tables """
    description = "Create production data tables"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()
    print(description)
    print(args)

    config_file = args.config_file
    config = common.read_config(config_file)

    production_directory = common.get_config_item(config, "production_data")
    production_directory = common.get_full_path(production_directory)

    production_table_file = os.path.join(
        production_directory, "production_fluid_table.csv"
    )
    injection_table_file = os.path.join(
        production_directory, "injection_fluid_table.csv"
    )

    bore_oil_file = os.path.join(production_directory, "BORE_OIL_VOL.csv")
    bore_gas_file = os.path.join(production_directory, "BORE_GAS_VOL.csv")
    bore_water_file = os.path.join(production_directory, "BORE_WAT_VOL.csv")

    print("Loading oil volumes from file", bore_oil_file)
    bore_oil = pd.read_csv(bore_oil_file)

    print("Loading gas volumes from file", bore_gas_file)
    bore_gas = pd.read_csv(bore_gas_file)

    print("Loading water volumes from file", bore_water_file)
    bore_water = pd.read_csv(bore_water_file)

    with open(production_table_file, "w") as file_object:
        file_object.write("Well_name,4D_interval,Volumes,Fluid\n")

        fluid = "Oil_[Sm3]"
        write_data(bore_oil, fluid, 1, file_object)

        fluid = "Gas_[kSm3]"
        write_data(bore_gas, fluid, 1000, file_object)

        fluid = "Water_[Sm3]"
        write_data(bore_water, fluid, 1, file_object)

    print("Production volumes table stored to file", production_table_file)

    inject_gas_file = os.path.join(production_directory, "BORE_GI_VOL.csv")
    inject_water_file = os.path.join(production_directory, "BORE_WI_VOL.csv")

    print("Loading injected gas volumes from file", inject_gas_file)
    inject_gas = pd.read_csv(inject_gas_file)

    print("Loading injected water volumes from file", inject_water_file)
    inject_water = pd.read_csv(inject_water_file)

    with open(injection_table_file, "w") as file_object:
        file_object.write("Well_name,4D_interval,Volumes,Fluid\n")

        fluid = "Injected_Gas_[kSm3]"
        write_data(inject_gas, fluid, 1000, file_object)

        fluid = "Injected_Water_[Sm3]"
        write_data(inject_water, fluid, 1, file_object)

    print("Injection volumes table stored to file", injection_table_file)
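# Note: write_data is a helper defined elsewhere in this module (not shown here).
# Based on the header written above, each call is assumed to append one row per
# well and 4D interval on the form "Well_name,4D_interval,Volumes,Fluid", with
# the volumes scaled by the given factor (1 for Sm3, 1000 for kSm3).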
def main():
    # Main
    description = "Create well lists based on production data"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()
    print(description)
    print(args)

    config_file = args.config_file
    config = common.read_config(config_file)

    # Well and production data
    well_suffix = common.get_config_item(config, "well_suffix")
    map_suffix = common.get_config_item(config, "map_suffix")
    delimiter = common.get_config_item(config, "delimiter")
    metadata_file = common.get_config_item(config, "surface_metadata")

    well_directory = common.get_config_item(config, "wellfolder")
    well_directory = common.get_full_path(well_directory)

    prod_info_dir = common.get_config_item(config, "production_data")
    prod_info_dir = common.get_full_path(prod_info_dir)
    update_metadata_file = os.path.join(prod_info_dir, ".production_update.yaml")

    update_dates = common.get_update_dates(well_directory)
    production_update = update_dates["production_last_date"]
    print("Production data update", production_update)

    try:
        settings_file = common.get_config_item(config, "settings")
        settings_file = common.get_full_path(settings_file)
        settings = common.read_config(settings_file)
        interval = common.get_config_item(config, "default_interval")
    except Exception:
        settings_file = None
        settings = None
        interval = None

    shared_settings = config["shared_settings"]

    print("Extracting 4D intervals ...")
    metadata = get_metadata(shared_settings, map_suffix, delimiter, metadata_file)
    intervals_4d, incremental = get_all_intervals(metadata, "reverse")

    colors = common.get_well_colors(settings)

    prod_info_files = [os.path.join(prod_info_dir, OIL_PRODUCTION_FILE)]
    prod_info_files.append(os.path.join(prod_info_dir, GAS_INJECTION_FILE))
    prod_info_files.append(os.path.join(prod_info_dir, WATER_INJECTION_FILE))

    prod_info_list = []

    for prod_info_file in prod_info_files:
        print("Reading production info from file " + str(prod_info_file))
        prod_info = pd.read_csv(prod_info_file)
        prod_info.name = os.path.basename(str(prod_info_file))
        prod_info_list.append(prod_info)

    drilled_well_df, drilled_well_info, interval_df = well.load_all_wells(
        well_directory, well_suffix
    )
    drilled_well_info = add_production_volumes(drilled_well_info, prod_info_list)

    # well_info = WellDataFrame(drilled_well_info)
    wellbores = drilled_well_info["wellbore.name"].unique()

    print("Last production update", production_update)
    print("Looping through all 4D intervals ...")

    for interval_4d in intervals_4d:
        print("4D interval:", interval_4d)

        if interval_4d[0:10] <= production_update:
            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="production",
                label="Producers",
            )
            label = "production_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)

            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="production_start",
                label="Producers - started",
            )
            label = "production_start_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)

            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="production_completed",
                label="Producers - completed",
            )
            label = "production_completed_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)

            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="injection",
                label="Injectors",
            )
            label = "injection_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)

            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="injection_start",
                label="Injectors - started",
            )
            label = "injection_start_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)

            well_layer = make_new_well_layer(
                interval_4d,
                drilled_well_df,
                drilled_well_info,
                interval_df,
                prod_info_list,
                colors,
                selection="injection_completed",
                label="Injectors - completed",
            )
            label = "injection_completed_well_layer_"
            store_well_layer(well_layer, well_directory, label, interval_4d)
        else:
            print(" - no production data for this time interval")

    # Create a layer with all active wells for the last interval found in the
    # production data
    prod_headers = prod_info.columns
    last_header = prod_headers[-1]
    interval_4d = last_header

    well_layer = make_new_well_layer(
        interval_4d,
        drilled_well_df,
        drilled_well_info,
        interval_df,
        prod_info_list,
        colors,
        selection="active",
        label="Active wells",
    )

    if well_layer:
        label = "active_well_layer_"
        store_well_layer(well_layer, well_directory, label, interval_4d)
def main():
    """ Compile metadata from all wells and extract top reservoir depths """
    description = "Compile metadata from all wells and extract top reservoir depths"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()
    print(description)
    print(args)

    config_file = args.config_file
    config = common.read_config(config_file)

    try:
        well_directory = common.get_config_item(config, "wellfolder")
        well_directory = common.get_full_path(well_directory)
    except Exception:
        well_directory = None
        print("ERROR: No well directory ('wellfolder') found in the configuration file")
        print("Execution stopped")

    print("Well directory", well_directory)

    if well_directory:
        try:
            settings_file = common.get_config_item(config, "settings")
            settings_file = common.get_full_path(settings_file)
            settings = common.read_config(settings_file)
            surface_file = settings["depth_maps"]["top_reservoir"]
            surface = load_surface(surface_file)
        except Exception:
            surface_file = None
            surface = None
    else:
        print("ERROR: Well data not found in", well_directory)
        exit()

    print("Surface file", surface_file)

    WELLBORE_INFO_FILE = "wellbore_info.csv"
    INTERVALS_FILE = "intervals.csv"
    WELL_SUFFIX = ".w"

    wellbore_info, intervals = extract_metadata(well_directory)
    pd.set_option("display.max_rows", None)
    print(wellbore_info)

    wellbore_info = compile_data(surface, well_directory, wellbore_info, WELL_SUFFIX)
    wellbore_info.to_csv(os.path.join(well_directory, WELLBORE_INFO_FILE))
    intervals.to_csv(os.path.join(well_directory, INTERVALS_FILE))
    # print(intervals)

    print("Metadata stored to " + os.path.join(well_directory, WELLBORE_INFO_FILE))
    print("Completion intervals stored to " + os.path.join(well_directory, INTERVALS_FILE))

    # Planned wells are stored in subdirectories of the well directory; each
    # subdirectory gets its own wellbore_info.csv
    planned_wells_dir = [f.path for f in os.scandir(well_directory) if f.is_dir()]

    for folder in planned_wells_dir:
        wellbore_info = pd.DataFrame()
        wellbore_info = compile_data(surface, folder, wellbore_info, WELL_SUFFIX)
        wellbore_info.to_csv(os.path.join(folder, WELLBORE_INFO_FILE))
        print(wellbore_info)
        print("Metadata stored to " + os.path.join(folder, WELLBORE_INFO_FILE))
def main():
    # Reek data
    print("Reek")
    config_file = "./examples/reek_4d.yaml"
    config = common.read_config(config_file)
    print(config_file)
    print(config)

    wellfolder = common.get_config_item(config, "wellfolder")
    print("wellfolder", wellfolder)

    settings_file = common.get_config_item(config, "settings_file")
    print("settings_file", settings_file)

    settings_file = common.get_full_path(settings_file)
    print("settings_file", settings_file)
    print("")

    # Johan Sverdrup (Eli/Tonje)
    print("Johan Sverdrup - synthetic 4D maps")
    config_file = "configurations/js_test_eli_v2.yaml"
    config = common.read_config(config_file)
    shared_settings = config["shared_settings"]
    print(config_file)
    print(config)

    map_suffix = common.get_config_item(config, "map_suffix")
    delimiter = common.get_config_item(config, "delimiter")
    metadata_file = common.get_config_item(config, "surface_metadata")
    metadata = get_metadata(shared_settings, map_suffix, delimiter, metadata_file)
    print(metadata)

    all_intervals, incremental_intervals = get_all_intervals(metadata, "reverse")
    print("incremental_intervals")
    print(incremental_intervals)
    print("all_intervals")
    print(all_intervals)
    print("")

    # Johan Sverdrup (Simulation model)
    print("Johan Sverdrup - simulation model")
    config_file = "configurations/js_test.yaml"
    config = common.read_config(config_file)
    shared_settings = config["shared_settings"]
    print(config_file)
    print(config)

    map_suffix = common.get_config_item(config, "map_suffix")
    delimiter = common.get_config_item(config, "delimiter")
    metadata_file = common.get_config_item(config, "surface_metadata")
    metadata = get_metadata(shared_settings, map_suffix, delimiter, metadata_file)
    print(metadata)

    all_intervals, incremental_intervals = get_all_intervals(metadata, "reverse")
    print("incremental_intervals")
    print(incremental_intervals)
    print("all_intervals")
    print(all_intervals)
    print("")

    # Grane
    print("Grane")
    config_file = "configurations/config_template.yaml"
    print(config_file)
    config = common.read_config(config_file)
    shared_settings = config["shared_settings"]

    map_suffix = common.get_config_item(config, "map_suffix")
    print("map_suffix", map_suffix)
    delimiter = common.get_config_item(config, "delimiter")
    print("delimiter", delimiter)
    metadata_file = common.get_config_item(config, "surface_metadata")
    print("metadata_file", metadata_file)

    metadata = get_metadata(shared_settings, map_suffix, delimiter, metadata_file)
    print(metadata)

    all_intervals, incremental_intervals = get_all_intervals(metadata, "normal")
    print("incremental_intervals")
    print(incremental_intervals)
    print("all_intervals")
    print(all_intervals)
def main():
    # Main
    description = "Create a well overview file (.csv) with relevant metadata"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    args = parser.parse_args()
    print(description)
    print(args)

    config_file = args.config_file
    config = common.read_config(config_file)

    # Well and production data
    well_suffix = common.get_config_item(config, "well_suffix")
    map_suffix = common.get_config_item(config, "map_suffix")
    delimiter = common.get_config_item(config, "delimiter")
    metadata_file = common.get_config_item(config, "surface_metadata")

    well_directory = common.get_config_item(config, "wellfolder")
    well_directory = common.get_full_path(well_directory)

    prod_info_dir = common.get_config_item(config, "production_data")
    prod_info_dir = common.get_full_path(prod_info_dir)
    update_metadata_file = os.path.join(prod_info_dir, ".production_update.yaml")

    prod_info_files = [os.path.join(prod_info_dir, OIL_PRODUCTION_FILE)]
    prod_info_files.append(os.path.join(prod_info_dir, GAS_INJECTION_FILE))
    prod_info_files.append(os.path.join(prod_info_dir, WATER_INJECTION_FILE))

    prod_info_list = []

    for prod_info_file in prod_info_files:
        print("Reading production info from file " + str(prod_info_file))
        prod_info = pd.read_csv(prod_info_file)
        prod_info.name = os.path.basename(str(prod_info_file))
        prod_info_list.append(prod_info)

    _drilled_well_df, drilled_well_info, interval_df = well.load_all_wells(
        well_directory, well_suffix
    )
    print(interval_df)

    drilled_well_info = add_production_volumes(drilled_well_info, prod_info_list)

    # Use .copy() to avoid SettingWithCopyWarning when inserting columns below
    wellbore_overview = drilled_well_info[
        [
            "wellbore.name",
            "wellbore.well_name",
            "BORE_OIL_VOL.csv_PDM well name",
            "wellbore.drilling_end_date",
            "wellbore.type",
            "wellbore.fluids",
            "BORE_OIL_VOL.csv_Start date",
            "BORE_OIL_VOL.csv_Stop date",
            "BORE_GI_VOL.csv_Start date",
            "BORE_GI_VOL.csv_Stop date",
            "BORE_WI_VOL.csv_Start date",
            "BORE_WI_VOL.csv_Stop date",
        ]
    ].copy()
    print(wellbore_overview)

    wellbores = wellbore_overview["wellbore.name"].unique()
    top_completion = []
    end_completion = []

    for wellbore in wellbores:
        try:
            top_md = interval_df[interval_df["interval.wellbore"] == wellbore][
                "interval.mdTop"
            ].values[0]
        except Exception:
            top_md = None

        top_completion.append(top_md)

        try:
            base_md = interval_df[interval_df["interval.wellbore"] == wellbore][
                "interval.mdBottom"
            ].values[-1]
        except Exception:
            base_md = None

        print(wellbore, top_md, base_md)
        end_completion.append(base_md)

    wellbore_overview.insert(6, "Top Screen", top_completion)
    wellbore_overview.insert(7, "Base Screen", end_completion)

    wellbore_overview.rename(
        columns={
            "wellbore.name": "Wellbore",
            "wellbore.well_name": "Well",
            "wellbore.drilling_end_date": "Drilling ended",
            "wellbore.type": "Type",
            "wellbore.fluids": "Fluid(s)",
            "BORE_OIL_VOL.csv_PDM well name": "PDM Well",
            "BORE_OIL_VOL.csv_Start date": "Start oil prod.",
            "BORE_OIL_VOL.csv_Stop date": "End oil prod.",
            "BORE_GI_VOL.csv_Start date": "Start gas inj.",
            "BORE_GI_VOL.csv_Stop date": "End gas inj.",
            "BORE_WI_VOL.csv_Start date": "Start water inj.",
            "BORE_WI_VOL.csv_Stop date": "End water inj.",
        },
        inplace=True,
    )
    wellbore_overview.sort_values("Wellbore", inplace=True)
    print(wellbore_overview)

    csv_file = os.path.join(well_directory, "wellbore_overview.csv")
    wellbore_overview.to_csv(csv_file, index=False, float_format="%.1f")
    print("Wellbore overview saved to:", csv_file)
def main():
    """ Extract min-/max-values for all maps """
    parser = argparse.ArgumentParser(
        description="Extract min-/max-values for all maps"
    )
    parser.add_argument(
        "config_file", help="Enter path to the WebViz-4D configuration file"
    )
    parser.add_argument(
        "--mode",
        help="Full => all maps, Standard (default) => only one realization and iteration",
        default="Standard",
    )
    args = parser.parse_args()

    config_file = args.config_file
    mode = args.mode

    config = common.read_config(config_file)
    shared_settings = config["shared_settings"]
    map_suffix = common.get_config_item(config, "map_suffix")
    delimiter = common.get_config_item(config, "delimiter")
    metadata_file = common.get_config_item(config, "surface_metadata")

    settings_file = common.get_config_item(config, "settings")
    settings_file = common.get_full_path(settings_file)
    settings = common.read_config(settings_file)

    csv_file = settings["map_settings"]["colormaps_settings"]
    csv_file = common.get_full_path(csv_file)
    print(csv_file)

    if csv_file is not None and os.path.isfile(csv_file):
        old_map_df = pd.read_csv(csv_file)
        print(" - file loaded")
        print(old_map_df)
    else:
        old_map_df = None
        csv_file = settings["map_settings"]["colormaps_settings"]

    surface_metadata = _metadata.get_metadata(
        shared_settings, map_suffix, delimiter, metadata_file
    )
    print("surface_metadata")
    print(surface_metadata)

    surface_types = ["observations", "results"]
    mapping_dict = {"observations": "observed", "results": "simulated"}

    results_map_dir = mapping_dict["results"] + "_maps"

    if results_map_dir is not None:
        map_settings = shared_settings[results_map_dir]
        realization_names = map_settings["realization_names"]
        iteration_names = map_settings["ensemble_names"]
        selected_realization = realization_names[0].replace("*", "0")
        selected_iteration = iteration_names[0].replace("*", "0")

    map_types = []
    surface_names = []
    attributes = []
    intervals = []
    map_files = []
    min_values = []
    max_values = []
    lower_limits = []
    upper_limits = []

    headers = [
        "map type",
        "name",
        "attribute",
        "interval",
        "minimum value",
        "maximum value",
        "lower_limit",
        "upper_limit",
        "file_path",
    ]

    map_df = pd.DataFrame()

    surface_files = surface_metadata["filename"]
    surface_files = surface_files.replace("/.", "/").replace(".yaml", "")

    for _index, row in surface_metadata.iterrows():
        # print(row)
        map_type = row["map_type"]
        surface_name = row["data.name"]
        attribute = row["data.content"]
        interval = (
            row["data.time.t2"].replace("-", "")
            + "_"
            + row["data.time.t1"].replace("-", "")
        )
        surface_file = row["filename"]
        surface_file = surface_file.replace("/.", "/").replace(".yaml", "")
        # print(surface_file)
        print(map_type, surface_name, attribute, interval)

        if not mode == "Full":
            realization = row["fmu_id.realization"]
            iteration = row["fmu_id.ensemble"]
            # print(realization, iteration)

            if map_type == "results":
                if (
                    realization == selected_realization
                    and iteration == selected_iteration
                ):
                    surface = load_surface(surface_file)

                    map_types.append(map_type)
                    surface_names.append(surface_name)
                    attributes.append(attribute)
                    intervals.append(interval)

                    zvalues = get_surface_arr(surface)[2]
                    min_val = np.nanmin(zvalues)
                    max_val = np.nanmax(zvalues)
                    min_values.append(min_val)
                    max_values.append(max_val)
                    map_files.append(surface_file)

                    lower_limit, upper_limit = get_plot_limits(
                        old_map_df, map_type, surface_name, attribute, interval
                    )
                    lower_limits.append(lower_limit)
                    upper_limits.append(upper_limit)
            else:
                surface = load_surface(surface_file)

                map_types.append(map_type)
                surface_names.append(surface_name)
                attributes.append(attribute)
                intervals.append(interval)

                zvalues = get_surface_arr(surface)[2]
                min_val = np.nanmin(zvalues)
                max_val = np.nanmax(zvalues)
                min_values.append(min_val)
                max_values.append(max_val)
                map_files.append(surface_file)

                lower_limit, upper_limit = get_plot_limits(
                    old_map_df, map_type, surface_name, attribute, interval
                )
                lower_limits.append(lower_limit)
                upper_limits.append(upper_limit)
        else:
            surface = load_surface(surface_file)

            map_types.append(map_type)
            surface_names.append(surface_name)
            attributes.append(attribute)
            intervals.append(interval)

            zvalues = get_surface_arr(surface)[2]
            min_val = np.nanmin(zvalues)
            max_val = np.nanmax(zvalues)
            min_values.append(min_val)
            max_values.append(max_val)
            map_files.append(surface_file)

            lower_limit, upper_limit = get_plot_limits(
                old_map_df, map_type, surface_name, attribute, interval
            )
            lower_limits.append(lower_limit)
            upper_limits.append(upper_limit)

    map_df[headers[0]] = map_types
    map_df[headers[1]] = surface_names
    map_df[headers[2]] = attributes
    map_df[headers[3]] = intervals
    map_df[headers[4]] = min_values
    map_df[headers[5]] = max_values
    map_df[headers[6]] = lower_limits
    map_df[headers[7]] = upper_limits
    map_df[headers[8]] = map_files

    print(map_df)

    map_df.to_csv(csv_file, index=False)
    print("Data saved to ", csv_file)
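# Illustrative helper (not part of the original script): shows how the CSV
# written above could be read back to look up plot limits for a given map.
# Assumes pandas is imported as pd (as elsewhere in this module); the function
# name lookup_plot_limits is hypothetical. Column names match the "headers"
# list defined in main().
def lookup_plot_limits(csv_file, map_type, name, attribute, interval):
    """Return (lower_limit, upper_limit) for one map entry, or (None, None)."""
    map_df = pd.read_csv(csv_file)
    selected = map_df[
        (map_df["map type"] == map_type)
        & (map_df["name"] == name)
        & (map_df["attribute"] == attribute)
        & (map_df["interval"] == interval)
    ]

    if selected.empty:
        return None, None

    row = selected.iloc[0]
    return row["lower_limit"], row["upper_limit"]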
def __init__(
    self,
    app,
    wellfolder: Path = None,
    production_data: Path = None,
    map1_defaults: dict = None,
    well_suffix: str = ".w",
    map_suffix: str = ".gri",
    default_interval: str = None,
    settings: Path = None,
    delimiter: str = "--",
    surface_metadata: str = "surface_metadata.csv",
):
    super().__init__()

    self.shared_settings = app.webviz_settings["shared_settings"]
    self.fmu_directory = self.shared_settings["fmu_directory"]
    self.map_suffix = map_suffix
    self.delimiter = delimiter
    self.wellfolder = wellfolder
    self.observations = "observations"
    self.simulations = "results"
    self.config = None
    self.attribute_settings = {}
    self.surface_metadata = None
    self.well_base_layers = None
    self.fmu_info = self.fmu_directory
    self.well_update = ""
    self.production_update = ""
    self.number_of_maps = 1

    self.metadata = get_metadata(
        self.shared_settings, map_suffix, delimiter, surface_metadata
    )
    self.intervals, incremental = get_all_intervals(self.metadata, "reverse")

    if default_interval is None:
        default_interval = self.intervals[-1]

    self.surface_layer = None

    if settings:
        self.configuration = settings
        self.config = read_config(self.configuration)

        try:
            self.attribute_settings = self.config["map_settings"][
                "attribute_settings"
            ]
        except Exception:
            pass

        try:
            colormaps_folder = self.config["map_settings"]["colormaps_folder"]

            if colormaps_folder:
                colormaps_folder = get_full_path(colormaps_folder)
                print("Reading custom colormaps from:", colormaps_folder)
                load_custom_colormaps(colormaps_folder)
        except Exception:
            pass

        try:
            attribute_maps_file = self.config["map_settings"]["colormaps_settings"]
            attribute_maps_file = get_full_path(attribute_maps_file)
            self.surface_metadata = pd.read_csv(attribute_maps_file)
            print("Colormaps settings loaded from file", attribute_maps_file)
        except Exception:
            pass

    if map1_defaults is None:
        self.map_defaults = create_map_defaults(
            self.metadata, default_interval, self.observations, self.simulations
        )
    else:
        map1_defaults["interval"] = default_interval
        self.map_defaults = [map1_defaults]

    print("Default interval", default_interval)

    self.selected_interval = default_interval
    self.selected_name = None
    self.selected_attribute = None
    self.selected_ensemble = None
    self.selected_realization = None
    self.wellsuffix = ".w"

    self.well_base_layers = []
    self.colors = get_well_colors(self.config)

    if wellfolder and os.path.isdir(wellfolder):
        self.wellfolder = wellfolder
        update_dates = get_update_dates(wellfolder)
        self.well_update = update_dates["well_update_date"]
        self.production_update = update_dates["production_last_date"]

        (
            self.drilled_well_df,
            self.drilled_well_info,
            self.interval_df,
        ) = load_all_wells(wellfolder, self.wellsuffix)

        if self.drilled_well_df is not None:
            self.well_base_layers.append(
                make_new_well_layer(
                    self.selected_interval,
                    self.drilled_well_df,
                    self.drilled_well_info,
                )
            )
            self.well_base_layers.append(
                make_new_well_layer(
                    self.selected_interval,
                    self.drilled_well_df,
                    self.drilled_well_info,
                    colors=self.colors,
                    selection="reservoir_section",
                    label="Reservoir sections",
                )
            )

        # Planned wells are expected in subdirectories of the well folder
        planned_wells_dir = [f.path for f in os.scandir(wellfolder) if f.is_dir()]

        for folder in planned_wells_dir:
            planned_well_df, planned_well_info, dummy_df = load_all_wells(
                folder, self.wellsuffix
            )

            if planned_well_df is not None:
                self.well_base_layers.append(
                    make_new_well_layer(
                        self.selected_interval,
                        planned_well_df,
                        planned_well_info,
                        self.colors,
                        selection="planned",
                        label=os.path.basename(folder),
                    )
                )
    elif wellfolder and not os.path.isdir(wellfolder):
        print("ERROR: Folder", wellfolder, "doesn't exist. No wells loaded")

    self.selector = SurfaceSelector(
        app, self.metadata, self.intervals, self.map_defaults[0]
    )

    self.set_callbacks(app)