def test_save_kw(self):
    """Round-trip an INT keyword: fwrite to two files, reload one as a
    writable EclFile, save_kw it back, and verify the files stay identical."""
    with TestAreaContext("python/ecl_file/save_kw"):
        values = range(1000)
        kw = EclKW("MY_KEY", len(values), EclDataType.ECL_INT)
        for i, v in enumerate(values):
            kw[i] = v

        clean_dump = "my_clean_file"
        test_file = "my_dump_file"

        # Write the same keyword to both files (clean reference first).
        for target in (clean_dump, test_file):
            fortio = FortIO(target, FortIO.WRITE_MODE)
            kw.fwrite(fortio)
            fortio.close()

        self.assertFilesAreEqual(clean_dump, test_file)

        # Re-open writable, save the keyword in place; file must be unchanged.
        ecl_file = EclFile(test_file, flags=EclFileFlagEnum.ECL_FILE_WRITABLE)
        loaded_kw = ecl_file["MY_KEY"][0]
        self.assertTrue(kw.equal(loaded_kw))

        ecl_file.save_kw(loaded_kw)
        ecl_file.close()

        self.assertFilesAreEqual(clean_dump, test_file)

        # Final read-only load still matches the original keyword.
        ecl_file = EclFile(test_file)
        loaded_kw = ecl_file["MY_KEY"][0]
        self.assertTrue(kw.equal(loaded_kw))
def test_fortio_creation(self):
    """A FortIO handle can be opened in every mode on the same path, and
    closing a handle twice must not raise."""
    with TestAreaContext("python/fortio/create"):
        writer = FortIO("test", FortIO.WRITE_MODE)
        read_writer = FortIO("test", FortIO.READ_AND_WRITE_MODE)
        reader = FortIO("test", FortIO.READ_MODE)
        appender = FortIO("test", FortIO.APPEND_MODE)

        writer.close()
        writer.close()  # second close should be a harmless no-op
def test_ecl_file_indexed_read(self):
    """Indexed read of two INT keywords from an EclFile returns exactly the
    elements selected by the index map (TEST1[i] == i, TEST2[i] == i + 3)."""
    with TestAreaContext("ecl_file_indexed_read") as area:
        fortio = FortIO("ecl_file_index_test", mode=FortIO.WRITE_MODE)

        element_count = 100000
        kw_one = EclKW("TEST1", element_count, EclDataType.ECL_INT)
        kw_two = EclKW("TEST2", element_count, EclDataType.ECL_INT)
        for i in range(element_count):
            kw_one[i] = i
            kw_two[i] = i + 3

        kw_one.fwrite(fortio)
        kw_two.fwrite(fortio)
        fortio.close()

        ecl_file = EclFile("ecl_file_index_test")

        # A spread of indices (primes) across the element range.
        index_map = IntVector()
        for prime in (2, 3, 5, 7, 11, 13, 313, 1867, 5227, 7159, 12689,
                      18719, 32321, 37879, 54167, 77213, 88843, 99991):
            index_map.append(prime)

        buffer_size = len(index_map) * ctypes.sizeof(ctypes.c_int)
        char_buffer_1 = ctypes.create_string_buffer(buffer_size)
        char_buffer_2 = ctypes.create_string_buffer(buffer_size)

        self._eclFileIndexedRead(ecl_file, "TEST2", 0, index_map, char_buffer_2)
        self._eclFileIndexedRead(ecl_file, "TEST1", 0, index_map, char_buffer_1)

        int_buffer_1 = ctypes.cast(char_buffer_1, ctypes.POINTER(ctypes.c_int))
        int_buffer_2 = ctypes.cast(char_buffer_2, ctypes.POINTER(ctypes.c_int))

        for pos, expected in enumerate(index_map):
            self.assertEqual(expected, int_buffer_1[pos])
            self.assertEqual(expected, int_buffer_2[pos] - 3)
def test_fwrite(self):
    """Writing a loaded restart file back out reproduces the original bytes."""
    with TestAreaContext("python/ecl_file/fwrite"):
        restart_file = EclFile(self.test_file)
        fortio = FortIO("ECLIPSE.UNRST", FortIO.WRITE_MODE)
        restart_file.fwrite(fortio)
        fortio.close()
        restart_file.close()

        self.assertFilesAreEqual("ECLIPSE.UNRST", self.test_file)
def test_fwrite(self):
    """Writing a loaded restart file back out reproduces the original bytes."""
    with TestAreaContext("python/ecl_file/fwrite"):
        restart_file = EclFile(self.test_file)
        fortio = FortIO("ECLIPSE.UNRST", FortIO.WRITE_MODE)
        restart_file.fwrite(fortio)
        fortio.close()
        restart_file.close()

        self.assertFilesAreEqual("ECLIPSE.UNRST", self.test_file)
def test_ecl_kw_indexed_read(self):
    """fread of a written INT keyword matches element-by-element, and the
    indexed C-buffer read returns exactly the elements in the index map."""
    with TestAreaContext("ecl_kw_indexed_read") as area:
        fortio = FortIO("index_test", mode=FortIO.WRITE_MODE)

        element_count = 100000
        kw = EclKW("TEST", element_count, EclDataType.ECL_INT)
        for i in range(element_count):
            kw[i] = i

        kw.fwrite(fortio)
        fortio.close()

        # Full re-read: every element survives the round trip.
        fortio = FortIO("index_test", mode=FortIO.READ_MODE)
        reread_kw = EclKW.fread(fortio)
        for i in range(element_count):
            self.assertEqual(reread_kw[i], i)

        # A spread of indices (primes) across the element range.
        index_map = IntVector()
        for prime in (2, 3, 5, 7, 11, 13, 313, 1867, 5227, 7159, 12689,
                      18719, 32321, 37879, 54167, 77213, 88843, 99991):
            index_map.append(prime)

        char_buffer = ctypes.create_string_buffer(
            len(index_map) * ctypes.sizeof(ctypes.c_int)
        )
        self._freadIndexedData(
            fortio, 24, EclDataType.ECL_INT, element_count, index_map, char_buffer
        )

        int_buffer = ctypes.cast(char_buffer, ctypes.POINTER(ctypes.c_int))
        for pos, expected in enumerate(index_map):
            self.assertEqual(expected, int_buffer[pos])
def test_ecl_file_indexed_read(self):
    """Indexed read of two INT keywords from an EclFile returns exactly the
    elements selected by the index map (TEST1[i] == i, TEST2[i] == i + 3)."""
    with TestAreaContext("ecl_file_indexed_read") as area:
        fortio = FortIO("ecl_file_index_test", mode=FortIO.WRITE_MODE)

        element_count = 100000
        kw_one = EclKW("TEST1", element_count, EclDataType.ECL_INT)
        kw_two = EclKW("TEST2", element_count, EclDataType.ECL_INT)
        for i in range(element_count):
            kw_one[i] = i
            kw_two[i] = i + 3

        kw_one.fwrite(fortio)
        kw_two.fwrite(fortio)
        fortio.close()

        ecl_file = EclFile("ecl_file_index_test")

        # A spread of indices (primes) across the element range.
        index_map = IntVector()
        for prime in (2, 3, 5, 7, 11, 13, 313, 1867, 5227, 7159, 12689,
                      18719, 32321, 37879, 54167, 77213, 88843, 99991):
            index_map.append(prime)

        buffer_size = len(index_map) * ctypes.sizeof(ctypes.c_int)
        char_buffer_1 = ctypes.create_string_buffer(buffer_size)
        char_buffer_2 = ctypes.create_string_buffer(buffer_size)

        self._eclFileIndexedRead(ecl_file, "TEST2", 0, index_map, char_buffer_2)
        self._eclFileIndexedRead(ecl_file, "TEST1", 0, index_map, char_buffer_1)

        int_buffer_1 = ctypes.cast(char_buffer_1, ctypes.POINTER(ctypes.c_int))
        int_buffer_2 = ctypes.cast(char_buffer_2, ctypes.POINTER(ctypes.c_int))

        for pos, expected in enumerate(index_map):
            self.assertEqual(expected, int_buffer_1[pos])
            self.assertEqual(expected, int_buffer_2[pos] - 3)
def test_ecl_kw_indexed_read(self):
    """fread of a written INT keyword matches element-by-element, and the
    indexed C-buffer read returns exactly the elements in the index map."""
    with TestAreaContext("ecl_kw_indexed_read") as area:
        fortio = FortIO("index_test", mode=FortIO.WRITE_MODE)

        element_count = 100000
        kw = EclKW("TEST", element_count, EclDataType.ECL_INT)
        for i in range(element_count):
            kw[i] = i

        kw.fwrite(fortio)
        fortio.close()

        # Full re-read: every element survives the round trip.
        fortio = FortIO("index_test", mode=FortIO.READ_MODE)
        reread_kw = EclKW.fread(fortio)
        for i in range(element_count):
            self.assertEqual(reread_kw[i], i)

        # A spread of indices (primes) across the element range.
        index_map = IntVector()
        for prime in (2, 3, 5, 7, 11, 13, 313, 1867, 5227, 7159, 12689,
                      18719, 32321, 37879, 54167, 77213, 88843, 99991):
            index_map.append(prime)

        char_buffer = ctypes.create_string_buffer(
            len(index_map) * ctypes.sizeof(ctypes.c_int)
        )
        self._freadIndexedData(
            fortio, 24, EclDataType.ECL_INT, element_count, index_map, char_buffer
        )

        int_buffer = ctypes.cast(char_buffer, ctypes.POINTER(ctypes.c_int))
        for pos, expected in enumerate(index_map):
            self.assertEqual(expected, int_buffer[pos])
def test_kw_write(self):
    """A DOUBLE keyword survives: fwrite/fread, EclFile save_kw on a
    writable file, and an overwrite-in-place via READ_AND_WRITE_MODE."""
    with TestAreaContext("python/ecl_kw/writing"):
        data = [random.random() for _ in range(10000)]

        kw = EclKW("TEST", len(data), EclDataType.ECL_DOUBLE)
        for index, value in enumerate(data):
            kw[index] = value

        prefix = 'EclKW('
        self.assertEqual(prefix, repr(kw)[:len(prefix)])

        # Plain fwrite / fread round trip.
        fortio = FortIO("ECL_KW_TEST", FortIO.WRITE_MODE)
        kw.fwrite(fortio)
        fortio.close()

        fortio = FortIO("ECL_KW_TEST")
        kw2 = EclKW.fread(fortio)
        self.assertTrue(kw.equal(kw2))

        # save_kw on a writable EclFile keeps the data intact.
        ecl_file = EclFile("ECL_KW_TEST", flags=EclFileFlagEnum.ECL_FILE_WRITABLE)
        kw3 = ecl_file["TEST"][0]
        self.assertTrue(kw.equal(kw3))
        ecl_file.save_kw(kw3)
        ecl_file.close()

        # Read, seek back to the start, and overwrite in place.
        fortio = FortIO("ECL_KW_TEST", FortIO.READ_AND_WRITE_MODE)
        kw4 = EclKW.fread(fortio)
        self.assertTrue(kw.equal(kw4))
        fortio.seek(0)
        kw4.fwrite(fortio)
        fortio.close()

        ecl_file = EclFile("ECL_KW_TEST")
        kw5 = ecl_file["TEST"][0]
        self.assertTrue(kw.equal(kw5))
def sector_to_fluxnum(args):
    """
    Wrapper function that generates an ECL DATA file with single FLUXNUM
    based on user Region-of-Interest.

    This is the function that executes the different steps in the workflow
    for generating sector models in ECLIPSE. The sector models have the
    same resolution as the full-field model.
    """
    now = datetime.datetime.now()

    # Normalize the case path: absolute path with the extension stripped,
    # kept as a one-element list.
    args.ECLIPSE_CASE = os.path.abspath(args.ECLIPSE_CASE).split(".")[0:1]
    if not args.ECLIPSE_CASE:
        raise Exception("ERROR: Case does not exist", " ")

    # Root name for writing to target directory
    eclipse_case_root = os.path.basename(args.ECLIPSE_CASE[0])

    args.OUTPUT_CASE = args.OUTPUT_CASE.split(".")[0:1]
    if not args.OUTPUT_CASE:
        raise Exception("ERROR: Specify OUTPUT_NAME of final FLUX file", " ")

    if args.restart:
        args.restart = os.path.abspath(args.restart).split(".")[0:1]

    print("Reading grid ...")
    if args.egrid:
        args.egrid = os.path.abspath(args.egrid).split(".")[0:1]
        grid = EclGrid(f"{args.egrid[0]}.EGRID")
    else:
        grid = EclGrid(f"{args.ECLIPSE_CASE[0]}.EGRID")

    init = EclFile(f"{args.ECLIPSE_CASE[0]}.INIT")

    # Finding well completions
    completion_list, well_list = completions.get_completion_list(
        f"{args.ECLIPSE_CASE[0]}.DATA")

    # Check ROI arguments: either an i/j/k box or a FIPNUM region is required.
    if (args.i is None or args.j is None or args.k is None) and args.fipnum is None:
        raise Exception("ERROR: Region of interest not set correctly!")

    if args.fipfile:
        fluxnum_new = flux_obj.FluxnumFipnum(
            grid, init, args.i, args.j, args.k, args.fipnum, args.fipfile)
    else:
        fluxnum_new = flux_obj.FluxnumFipnum(
            grid, init, args.i, args.j, args.k, args.fipnum)

    print("Generating FLUXNUM ...")
    if args.fluxfile:
        print("From input file ...")
        fluxnum_new.set_fluxnum_kw_from_file(args.fluxfile)
    else:
        fluxnum_new.set_fluxnum_kw()

    print("Checking completions ...", " ")
    # Checks for well completions in multiple FLUXNUM regions
    print("Including wells ...")
    fluxnum_new.include_well_completions(completion_list, well_list)

    # Second iteration to check for wells completed in multiple cells
    print("Including wells ...")
    fluxnum_new.include_well_completions(completion_list, well_list)

    print("Writing FLUXNUM file ...")
    fluxnum_new_kw = fluxnum_new.get_fluxnum_kw()
    fluxnum_filename = f"FLUXNUM_FIPNUM_{args.fipnum}_{now.microsecond:d}.grdecl"
    with cwrap.open(fluxnum_filename, "w") as file_handle:
        fluxnum_new_kw.write_grdecl(file_handle)

    print("Writing DUMPFLUX DATA-file ...")
    new_data_file = datafile_obj.Datafile(f"{args.ECLIPSE_CASE[0]}.DATA")

    if new_data_file.has_KW("DUMPFLUX") or new_data_file.has_KW("USEFLUX"):
        raise Exception(
            "ERROR: FLUX keywords already present in input ECL_CASE")

    new_data_file.create_DUMPFLUX_file(fluxnum_filename)

    if args.test:
        # Test mode: reuse a pre-existing DUMPFLUX run instead of launching one.
        args.test = os.path.abspath(args.test).split(".")[0:1]
        if not os.path.isfile(f"{args.test[0]}.FLUX"):
            raise Exception("ERROR: FLUX file from DUMPFLUX run not created")
        # Needs the coordinates from the
        print("Generating new FLUX file...")
        grid_coarse = EclGrid(f"{args.test[0]}.EGRID")
        grid_fine = EclGrid(f"{args.test[0]}.EGRID")
        flux_fine = EclFile(f"{args.test[0]}.FLUX")
    else:
        print("Executing DUMPFLUX NOSIM run ...")
        if args.ecl_version:
            new_data_file.run_DUMPFLUX_nosim(args.ecl_version)
        else:
            new_data_file.run_DUMPFLUX_nosim()

        if not os.path.isfile(f"DUMPFLUX_{eclipse_case_root}.FLUX"):
            raise Exception("ERROR: FLUX file from DUMPFLUX run not created")

        # Needs the coordinates from the
        print("Generating new FLUX file...")
        grid_coarse = EclGrid(f"DUMPFLUX_{eclipse_case_root}.EGRID")
        grid_fine = EclGrid(f"DUMPFLUX_{eclipse_case_root}.EGRID")
        flux_fine = EclFile(f"DUMPFLUX_{eclipse_case_root}.FLUX")

    # Reads restart file
    if args.restart:
        rst_coarse = EclFile(f"{args.restart[0]}.UNRST")
    else:
        rst_coarse = EclFile(f"{args.ECLIPSE_CASE[0]}.UNRST")

    flux_object_fine = fluxfile_obj.Fluxfile(grid_fine, flux_fine)

    # Creating map
    f_c_map = fluxfile_obj.create_map_rst(
        flux_object_fine, grid_coarse, scale_i=1, scale_j=1, scale_k=1)

    # Importing elements
    # Open FortIO stream
    fortio = FortIO(f"{args.OUTPUT_CASE[0]}.FLUX",
                    mode=FortIO.WRITE_MODE)
    fluxfile_obj.write_new_fluxfile_from_rst(
        flux_object_fine, grid_coarse, rst_coarse, f_c_map, fortio)

    # Close FortIO stream
    fortio.close()

    # Writing USEFLUX suggestion
    print("Writing suggestion for USEFLUX DATA-file ...")
    new_data_file.create_USEFLUX_file(fluxnum_filename, args.OUTPUT_CASE[0])
    new_data_file.set_USEFLUX_header(args)