Example #1
class EnkfFsManager(BaseCClass):
    TYPE_NAME = "enkf_fs_manager"

    _get_current_fs = ResPrototype("enkf_fs_obj enkf_main_get_fs_ref(enkf_fs_manager)")
    _switch_fs =      ResPrototype("void enkf_main_set_fs(enkf_fs_manager, enkf_fs, char*)")
    _fs_exists =      ResPrototype("bool enkf_main_fs_exists(enkf_fs_manager, char*)")
    _alloc_caselist = ResPrototype("stringlist_obj enkf_main_alloc_caselist(enkf_fs_manager)")
    _ensemble_size  = ResPrototype("int enkf_main_get_ensemble_size(enkf_fs_manager)")

    _is_initialized =                        ResPrototype("bool enkf_main_is_initialized(enkf_fs_manager, bool_vector)")
    _is_case_initialized =                   ResPrototype("bool enkf_main_case_is_initialized(enkf_fs_manager, char*, bool_vector)")
    _initialize_from_scratch =               ResPrototype("void enkf_main_initialize_from_scratch(enkf_fs_manager, stringlist, ert_run_context)")
    _initialize_case_from_existing =         ResPrototype("void enkf_main_init_case_from_existing(enkf_fs_manager, enkf_fs, int, enkf_fs)")
    _custom_initialize_from_existing =       ResPrototype("void enkf_main_init_current_case_from_existing_custom(enkf_fs_manager, enkf_fs, int, stringlist, bool_vector)")
    _initialize_current_case_from_existing = ResPrototype("void enkf_main_init_current_case_from_existing(enkf_fs_manager, enkf_fs, int)")

    _alloc_readonly_state_map = ResPrototype("state_map_obj enkf_main_alloc_readonly_state_map(enkf_fs_manager, char*)")
    _alloc_readonly_time_map =  ResPrototype("time_map_obj enkf_main_alloc_readonly_time_map(enkf_fs_manager, char*)")

    DEFAULT_CAPACITY = 5

    def __init__(self, enkf_main, capacity=DEFAULT_CAPACITY):
        """
        @type enkf_main: res.enkf.EnKFMain
        @type capacity: int
        """
        # enkf_main should be an EnKFMain, get the _RealEnKFMain object
        real_enkf_main = enkf_main.parent()

        super(EnkfFsManager, self).__init__(
            real_enkf_main.from_param(real_enkf_main).value,
            parent=real_enkf_main,
            is_reference=True)

        self._fs_rotator = FileSystemRotator(capacity)
        self._mount_root = real_enkf_main.getMountPoint()

        self._fs_type = real_enkf_main.getModelConfig().getFSType()
        self._fs_arg = None

    def __del__(self):
        # This object is a reference, so free() won't be called on it
        # Any clean-up must be done here
        self.umount()
        super(EnkfFsManager, self).__del__()

    def _createFullCaseName(self, mount_root, case_name):
        return os.path.join(mount_root, case_name)



    # The return value from the getFileSystem will be a weak reference to the
    # underlying enkf_fs object. That implies that the fs manager must be in
    # scope for the return value to be valid.
    def getFileSystem(self, case_name, mount_root=None):
        """
        @rtype: EnkfFs
        """
        if mount_root is None:
            mount_root = self._mount_root

        full_case_name = self._createFullCaseName(mount_root, case_name)

        if full_case_name not in self._fs_rotator:
            if not EnkfFs.exists(full_case_name):
                if self._fs_rotator.atCapacity():
                    self._fs_rotator.dropOldestFileSystem()

                EnkfFs.createFileSystem(full_case_name, self._fs_type, self._fs_arg)

            new_fs = EnkfFs(full_case_name)
            self._fs_rotator.addFileSystem(new_fs, full_case_name)

        fs = self._fs_rotator[full_case_name]

        return fs

    def isCaseRunning(self, case_name, mount_root=None):
        """ Returns true if case is mounted and write_count > 0
        @rtype: bool
        """
        if self.isCaseMounted(case_name, mount_root):
            case_fs = self.getFileSystem(case_name, mount_root)
            return case_fs.is_running()
        return False


    def caseExists(self, case_name):
        """ @rtype: bool """
        return case_name in self.getCaseList()


    def caseHasData(self, case_name):
        """ @rtype: bool """
        case_has_data = False
        state_map = self.getStateMapForCase(case_name)

        for state in state_map:
            if state == RealizationStateEnum.STATE_HAS_DATA:
                case_has_data = True

        return case_has_data


    def getCurrentFileSystem(self):
        """ Returns the currently selected file system
        @rtype: EnkfFs
        """
        current_fs = self._get_current_fs()
        case_name = current_fs.getCaseName()
        full_name = self._createFullCaseName(self._mount_root, case_name)

        if full_name not in self._fs_rotator:
            self._fs_rotator.addFileSystem(current_fs, full_name)

        return self.getFileSystem(case_name, self._mount_root)

    def umount(self):
        self._fs_rotator.umountAll()


    def getFileSystemCount(self):
        return len(self._fs_rotator)


    def getEnsembleSize(self):
        """ @rtype: int """
        return self._ensemble_size()


    def switchFileSystem(self, file_system):
        """
        @type file_system: EnkfFs
        """
        self._switch_fs(file_system, None)


    def isCaseInitialized(self, case):
        return self._is_case_initialized(case, None)

    def isInitialized(self):
        """ @rtype: bool """
        return self._is_initialized(None)  # None bool_vector mask: check all realizations


    def getCaseList(self):
        """ @rtype: list[str] """
        caselist = [case for case in self._alloc_caselist()]
        return caselist
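
# Usage sketch for EnkfFsManager: a minimal, hedged illustration (not part of
# the original module). Assumptions: the res package is importable, "config.ert"
# is a hypothetical ERT configuration file, and EnKFMain.getEnkfFsManager() is
# the accessor that returns this manager.
from res.enkf import EnKFMain, ResConfig

res_config = ResConfig("config.ert")            # hypothetical config file
ert = EnKFMain(res_config)
fs_manager = ert.getEnkfFsManager()

# getFileSystem()/getCurrentFileSystem() hand out weak references to the
# underlying enkf_fs object, so keep fs_manager in scope while using them.
current_fs = fs_manager.getCurrentFileSystem()
print(fs_manager.getCaseList())
print("ensemble size:", fs_manager.getEnsembleSize())
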
Example #2
class ErtTemplates(BaseCClass):
    TYPE_NAME = "ert_templates"
    _alloc = ResPrototype(
        "void* ert_templates_alloc( subst_list, config_content )", bind=False
    )
    _alloc_default = ResPrototype(
        "void* ert_templates_alloc_default( subst_list )", bind=False
    )
    _free = ResPrototype("void ert_templates_free( ert_templates )")
    _alloc_list = ResPrototype("stringlist_ref ert_templates_alloc_list(ert_templates)")
    _get_template = ResPrototype(
        "ert_template_ref ert_templates_get_template(ert_templates, char*)"
    )
    _clear = ResPrototype("void ert_templates_clear(ert_templates)")
    _add_template = ResPrototype(
        "ert_template_ref ert_templates_add_template(ert_templates, char*, char*, char*, char*)"
    )
    _add_template_unbound = ResPrototype(
        "ert_template_ref ert_templates_add_template(ert_templates, char*, char*, char*, char*)",
        bind=False,
    )

    def __init__(self, parent_subst, config_content=None, config_dict=None):
        if not ((config_content is not None) ^ (config_dict is not None)):
            raise ValueError(
                "ErtTemplates must be instantiated with exactly one of config_content or config_dict"
            )

        if config_dict is not None:
            c_ptr = self._alloc_default(parent_subst)
            if c_ptr is None:
                raise ValueError("Failed to construct ErtTemplates instance")
            super(ErtTemplates, self).__init__(c_ptr)
            run_template = config_dict.get(ConfigKeys.RUN_TEMPLATE)
            if isinstance(run_template, list):
                for template_file_name, target_file, arguments in run_template:
                    path = config_dict.get(ConfigKeys.CONFIG_DIRECTORY)
                    if not isinstance(path, str):
                        raise ValueError(
                            "ErtTemplates requires {} to be set".format(
                                ConfigKeys.CONFIG_DIRECTORY
                            )
                        )
                    template_path = os.path.normpath(
                        os.path.join(path, template_file_name)
                    )
                    arguments_string = ", ".join(
                        ["{}={}".format(key, val) for key, val in arguments]
                    )
                    self._add_template(
                        None, template_path, target_file, arguments_string
                    )

        else:
            c_ptr = self._alloc(parent_subst, config_content)
            if c_ptr is None:
                raise ValueError("Failed to construct ErtTemplates instance")
            super(ErtTemplates, self).__init__(c_ptr)

    def getTemplateNames(self):
        """@rtype: StringList"""
        return self._alloc_list().setParent(self)

    def clear(self):
        self._clear()

    def get_template(self, key):
        """@rtype: ErtTemplate"""
        return self._get_template(key).setParent(self)

    def add_template(self, key, template_file, target_file, arg_string):
        """@rtype: ErtTemplate"""
        return self._add_template(
            key, template_file, target_file, arg_string
        ).setParent(self)

    def __eq__(self, other):
        if len(self.getTemplateNames()) != len(other.getTemplateNames()):
            return False
        if not all(
            name in self.getTemplateNames() for name in other.getTemplateNames()
        ):
            return False
        for name in self.getTemplateNames():
            if self.get_template(name) != other.get_template(name):
                return False
        return True

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "ErtTemplates({})".format(
            ", ".join(
                x + "=" + str(self.get_template(x)) for x in self.getTemplateNames()
            )
        )

    def free(self):
        self._free()
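
# Usage sketch for ErtTemplates: a minimal, hedged illustration (not part of
# the original module). Assumptions: ConfigKeys and SubstitutionList are
# importable as shown, and the directory/template/target names are hypothetical.
from res.enkf import ConfigKeys
from res.util.substitution_list import SubstitutionList

config_dict = {
    ConfigKeys.CONFIG_DIRECTORY: "/tmp/ert_case",                # hypothetical
    ConfigKeys.RUN_TEMPLATE: [
        ("template.tmpl", "target.txt", [("MAGIC_KEY", "42")]),  # hypothetical
    ],
}

templates = ErtTemplates(SubstitutionList(), config_dict=config_dict)
for name in templates.getTemplateNames():
    print(name, templates.get_template(name))
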
Example #3
class EnsemblePlotGenKW(BaseCClass):
    TYPE_NAME = "ensemble_plot_gen_kw"

    _alloc = ResPrototype("void* enkf_plot_gen_kw_alloc(enkf_config_node)",
                          bind=False)
    _size = ResPrototype(
        "int   enkf_plot_gen_kw_get_size(ensemble_plot_gen_kw)")
    _load = ResPrototype(
        "void  enkf_plot_gen_kw_load(ensemble_plot_gen_kw, enkf_fs, bool, int, bool_vector)"
    )
    _get = ResPrototype(
        "ensemble_plot_gen_kw_vector_ref enkf_plot_gen_kw_iget(ensemble_plot_gen_kw, int)"
    )
    _iget_key = ResPrototype(
        "char* enkf_plot_gen_kw_iget_key(ensemble_plot_gen_kw, int)")
    _get_keyword_count = ResPrototype(
        "int   enkf_plot_gen_kw_get_keyword_count(ensemble_plot_gen_kw)")
    _should_use_log_scale = ResPrototype(
        "bool  enkf_plot_gen_kw_should_use_log_scale(ensemble_plot_gen_kw, int)"
    )
    _free = ResPrototype("void  enkf_plot_gen_kw_free(ensemble_plot_gen_kw)")

    def __init__(self, ensemble_config_node, file_system, input_mask=None):
        assert isinstance(ensemble_config_node, EnkfConfigNode)
        assert ensemble_config_node.getImplementationType() == ErtImplType.GEN_KW

        c_pointer = self._alloc(ensemble_config_node)
        super(EnsemblePlotGenKW, self).__init__(c_pointer)

        self.__load(file_system, input_mask)

    def __load(self, file_system, input_mask=None):
        assert isinstance(file_system, EnkfFs)
        if input_mask is not None:
            assert isinstance(input_mask, BoolVector)

        self._load(file_system, True, 0, input_mask)

    def __len__(self):
        """ @rtype: int """
        return self._size()

    def __getitem__(self, index):
        """ @rtype: EnsemblePlotGenKWVector """
        return self._get(index)

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield self[cur]
            cur += 1

    def getKeyWordCount(self):
        """ @rtype: int """
        return self._get_keyword_count()

    def getKeyWordForIndex(self, index):
        """ @rtype: str """
        return self._iget_key(index)

    def getIndexForKeyword(self, keyword):
        """ @rtype: int """
        for index in range(self.getKeyWordCount()):
            kw = self.getKeyWordForIndex(index)
            if kw == keyword:
                return index
        return None

    def shouldUseLogScale(self, index):
        """ @rtype: bool """
        return bool(self._should_use_log_scale(index))

    def free(self):
        self._free()

    def __repr__(self):
        return 'EnsemblePlotGenKW(size = %d) %s' % (len(self), self._ad_str())
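
# Usage sketch for EnsemblePlotGenKW: a minimal, hedged illustration (not part
# of the original module). Assumptions: ert is an EnKFMain instance as in
# Example #1 and "SNAKE_OIL_PARAM" is a hypothetical GEN_KW key from the
# configuration.
config_node = ert.ensembleConfig().getNode("SNAKE_OIL_PARAM")
fs = ert.getEnkfFsManager().getCurrentFileSystem()

plot_data = EnsemblePlotGenKW(config_node, fs)
print("realizations loaded:", len(plot_data))
for index in range(plot_data.getKeyWordCount()):
    keyword = plot_data.getKeyWordForIndex(index)
    print(keyword, "log scale:", plot_data.shouldUseLogScale(index))
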
Example #4
class GenDataConfig(BaseCClass):
    TYPE_NAME = "gen_data_config"

    _alloc = ResPrototype(
        "void* gen_data_config_alloc_GEN_DATA_result( char* , gen_data_file_format_type)",
        bind=False,
    )
    _free = ResPrototype("void  gen_data_config_free( gen_data_config )")
    _get_output_format = ResPrototype(
        "gen_data_file_format_type gen_data_config_get_output_format(gen_data_config)"
    )
    _get_input_format = ResPrototype(
        "gen_data_file_format_type gen_data_config_get_input_format(gen_data_config)"
    )
    _get_template_file = ResPrototype(
        "char* gen_data_config_get_template_file(gen_data_config)")
    _get_template_key = ResPrototype(
        "char* gen_data_config_get_template_key(gen_data_config)")
    _get_initial_size = ResPrototype(
        "int   gen_data_config_get_initial_size(gen_data_config)")
    _has_report_step = ResPrototype(
        "bool  gen_data_config_has_report_step(gen_data_config, int)")
    _get_data_size = ResPrototype(
        "int   gen_data_config_get_data_size__(gen_data_config , int)")
    _get_key = ResPrototype("char* gen_data_config_get_key(gen_data_config)")
    _get_active_mask = ResPrototype(
        "bool_vector_ref gen_data_config_get_active_mask(gen_data_config)")
    _get_num_report_step = ResPrototype(
        "int   gen_data_config_num_report_step(gen_data_config)")
    _iget_report_step = ResPrototype(
        "int   gen_data_config_iget_report_step(gen_data_config, int)")

    def __init__(self, key, input_format=GenDataFileType.ASCII):
        # Can currently only create GEN_DATA instances which should be used
        # as result variables.
        c_pointer = self._alloc(key, input_format)
        super(GenDataConfig, self).__init__(c_pointer)

    def get_template_file(self):
        return self._get_template_file()

    def get_template_key(self):
        return self._get_template_key()

    def getDataSize(self, report_step):
        data_size = self._get_data_size(report_step)
        if data_size < 0:
            raise ValueError(
                "No data has been loaded for %s at report step:%d " %
                (self.getName(), report_step))
        else:
            return data_size

    def getActiveMask(self):
        return self._get_active_mask()

    def getName(self):
        return self.name()

    def name(self):
        return self._get_key()

    def get_initial_size(self):
        return self._get_initial_size()

    def getOutputFormat(self):
        return self._get_output_format()

    def getInputFormat(self):
        return self._get_input_format()

    def free(self):
        self._free()

    def __repr__(self):
        nm = self.name()
        tk = self.get_template_key()
        iz = self.get_initial_size()
        return "GenDataConfig(name = %s, template_key = %s, initial_size = %d) %s" % (
            nm,
            tk,
            iz,
            self._ad_str(),
        )

    def hasReportStep(self, report_step):
        """ @rtype: bool """
        return self._has_report_step(report_step)

    def getNumReportStep(self):
        """ @rtype: int """
        return self._get_num_report_step()

    def getReportStep(self, index):
        """ @rtype: int """
        return self._iget_report_step(index)

    def getReportSteps(self):
        """ @rtype: list of int """
        return [
            self.getReportStep(index)
            for index in range(self.getNumReportStep())
        ]

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        """ @rtype: bool"""
        if self.getName() != other.getName():
            return False

        if self.get_template_key() != other.get_template_key():
            return False

        if self.getInputFormat() != other.getInputFormat():
            return False

        if self.getOutputFormat() != other.getOutputFormat():
            return False

        if self.getReportSteps() != other.getReportSteps():
            return False

        return True
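
# Usage sketch for GenDataConfig: a minimal, hedged illustration (not part of
# the original module). Assumptions: GenDataFileType is importable from
# res.enkf.enums and "WPR_DIFF" is a hypothetical GEN_DATA result key.
from res.enkf.enums import GenDataFileType

gen_data = GenDataConfig("WPR_DIFF", input_format=GenDataFileType.ASCII)
print(gen_data.getName())
print("input format: ", gen_data.getInputFormat())
print("output format:", gen_data.getOutputFormat())
print("report steps: ", gen_data.getReportSteps())
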
Example #5
class EclConfig(BaseCClass):
    TYPE_NAME = "ecl_config"

    _alloc                  = ResPrototype("void* ecl_config_alloc(config_content)", bind = False)
    _free                   = ResPrototype("void  ecl_config_free( ecl_config )")
    _get_data_file          = ResPrototype("char* ecl_config_get_data_file(ecl_config)")
    _set_data_file          = ResPrototype("void  ecl_config_set_data_file(ecl_config , char*)")
    _validate_data_file     = ResPrototype("ui_return_obj ecl_config_validate_data_file(ecl_config , char*)")
    _get_gridfile           = ResPrototype("char* ecl_config_get_gridfile(ecl_config)")
    _set_gridfile           = ResPrototype("void  ecl_config_set_grid(ecl_config, char*)")
    _validate_gridfile      = ResPrototype("ui_return_obj ecl_config_validate_grid(ecl_config, char*)")
    _get_grid               = ResPrototype("ecl_grid_ref ecl_config_get_grid(ecl_config)")
    _get_schedule_file      = ResPrototype("char* ecl_config_get_schedule_file(ecl_config)")
    _set_schedule_file      = ResPrototype("void  ecl_config_set_schedule_file(ecl_config, char*, char*)")
    _validate_schedule_file = ResPrototype("ui_return_obj ecl_config_validate_schedule_file(ecl_config, char*)")
    _get_sched_file         = ResPrototype("sched_file_ref ecl_config_get_sched_file(ecl_config)")
    _get_init_section       = ResPrototype("char* ecl_config_get_init_section(ecl_config)")
    _set_init_section       = ResPrototype("void  ecl_config_set_init_section(ecl_config, char*)")
    _validate_init_section  = ResPrototype("ui_return_obj ecl_config_validate_init_section(ecl_config, char*)")
    _get_refcase_name       = ResPrototype("char* ecl_config_get_refcase_name(ecl_config)")
    _get_refcase            = ResPrototype("ecl_sum_ref ecl_config_get_refcase(ecl_config)")
    _load_refcase           = ResPrototype("void  ecl_config_load_refcase(ecl_config, char*)")
    _validate_refcase       = ResPrototype("ui_return_obj ecl_config_validate_refcase(ecl_config, char*)")
    _has_refcase            = ResPrototype("bool  ecl_config_has_refcase(ecl_config)")
    _get_depth_unit         = ResPrototype("char* ecl_config_get_depth_unit(ecl_config)")
    _get_pressure_unit      = ResPrototype("char* ecl_config_get_pressure_unit(ecl_config)")
    _get_start_date         = ResPrototype("time_t ecl_config_get_start_date(ecl_config)")
    _active                 = ResPrototype("bool ecl_config_active(ecl_config)")

    def __init__(self):
        c_ptr = self._alloc(None)
        if c_ptr:
            super(EclConfig, self).__init__(c_ptr)
        else:
            raise RuntimeError('Internal error: Failed constructing EclConfig!')

    def free(self):
        self._free()

    def getDataFile(self):
        return self._get_data_file()

    def setDataFile(self, datafile):
        self._set_data_file(datafile)

    def validateDataFile(self, datafile):
        """ @rtype: UIReturn """
        return self._validate_data_file(datafile)

    #-----------------------------------------------------------------

    def get_gridfile(self):
        """ @rtype: str """
        return self._get_gridfile()

    def set_gridfile(self, gridfile):
        self._set_gridfile(gridfile)

    def validateGridFile(self, gridfile):
        return self._validate_gridfile(gridfile)

    def getGrid(self):
        return self._get_grid()

    #-----------------------------------------------------------------

    def getScheduleFile(self):
        return self._get_schedule_file()

    def setScheduleFile(self, schedule_file, target_file=None):
        self._set_schedule_file(schedule_file, target_file)

    def validateScheduleFile(self, schedule_file):
        return self._validate_schedule_file(schedule_file)

    def get_sched_file(self):
        return self._get_sched_file()

    #-----------------------------------------------------------------

    def getInitSection(self):
        return self._get_init_section()

    def setInitSection(self, init_section):
        self._set_init_section(init_section)

    def validateInitSection(self, init_section):
        return self._validate_init_section(init_section)

    #-----------------------------------------------------------------

    def getRefcaseName(self):
        return self._get_refcase_name()

    def loadRefcase(self, refcase):
        self._load_refcase(refcase)

    def getRefcase(self):
        """ @rtype: EclSum """
        refcase = self._get_refcase()
        if refcase is not None:
            refcase.setParent(self)

        return refcase


    def validateRefcase(self, refcase):
        return self._validate_refcase(refcase)

    def hasRefcase(self):
        """ @rtype: bool """
        return self._has_refcase()

    #-----------------------------------------------------------------

    def getDepthUnit(self):
        return self._get_depth_unit()

    def getPressureUnit(self):
        return self._get_pressure_unit()

    #-----------------------------------------------------------------

    def getStartDate(self):
        return self._get_start_date()


    def active(self):
        """
        Has ECLIPSE been configured?
        """
        return self._active()
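
# Usage sketch for EclConfig: a minimal, hedged illustration (not part of the
# original module). Assumptions: the default constructor shown above is usable
# on its own, "ECLIPSE_CASE.DATA" is a hypothetical data file, and the returned
# UIReturn object is assumed to be truthy on success.
ecl_config = EclConfig()
print("ECLIPSE configured:", ecl_config.active())

ui_return = ecl_config.validateDataFile("ECLIPSE_CASE.DATA")
if ui_return:
    ecl_config.setDataFile("ECLIPSE_CASE.DATA")
    print("data file:", ecl_config.getDataFile())
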
Example #6
class LocalConfig(BaseCClass):
    """The LocalConfig class is created as a reference to an existing underlying C
    structure by the method EnkFMain.local_config(). When the pointer to the C
    local_config_type object has been properly wrapped we 'decorate' the Python
    object with references to the ensemble_config , observations and grid.

    This implies that the Python object LocalConfig is richer than the
    underlying C object local_config_type; the extra attributes are only used
    for validation.

    """

    TYPE_NAME = "local_config"

    _free = ResPrototype("void   local_config_free(local_config)")
    _clear = ResPrototype("void   local_config_clear(local_config)")
    _create_ministep = ResPrototype(
        "void   local_config_alloc_ministep(local_config, char*, analysis_module)"
    )
    _attach_ministep = ResPrototype(
        "void   local_updatestep_add_ministep(local_updatestep, local_ministep)",
        bind=False,
    )
    _create_obsdata = ResPrototype(
        "void   local_config_alloc_obsdata(local_config, char*)")
    _create_dataset = ResPrototype(
        "void   local_config_alloc_dataset(local_config, char*)")
    _has_obsdata = ResPrototype(
        "bool   local_config_has_obsdata(local_config, char*)")
    _has_dataset = ResPrototype(
        "bool   local_config_has_dataset(local_config, char*)")

    _get_updatestep = ResPrototype(
        "local_updatestep_ref local_config_get_updatestep(local_config)")
    _get_ministep = ResPrototype(
        "local_ministep_ref   local_config_get_ministep(local_config, char*)")
    _get_obsdata = ResPrototype(
        "local_obsdata_ref    local_config_get_obsdata(local_config, char*)")
    _copy_obsdata = ResPrototype(
        "local_obsdata_ref    local_config_alloc_obsdata_copy(local_config, char*, char*)"
    )
    _get_dataset = ResPrototype(
        "local_dataset_ref    local_config_get_dataset(local_config, char*)")
    _copy_dataset = ResPrototype(
        "local_dataset_ref    local_config_alloc_dataset_copy(local_config, char*, char*)"
    )
    _smry_fprintf = ResPrototype(
        "void local_config_summary_fprintf(local_config, char*)")

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def initAttributes(self, ensemble_config, obs, grid):
        self.ensemble_config = ensemble_config
        self.obs = obs
        self.grid = grid

    def __getObservations(self):
        return self.obs

    def __getEnsembleConfig(self):
        return self.ensemble_config

    def getGrid(self):
        # The grid can be None
        return self.grid

    def free(self):
        self._free()

    def clear(self):
        self._clear()

    def createMinistep(self, mini_step_key, analysis_module=None):
        """ @rtype: Ministep """
        assert isinstance(mini_step_key, str)
        if analysis_module:
            assert isinstance(analysis_module, AnalysisModule)
        self._create_ministep(mini_step_key, analysis_module)
        return self.getMinistep(mini_step_key)

    def createObsdata(self, obsdata_key):
        """ @rtype: Obsdata """
        assert isinstance(obsdata_key, str)
        if self._has_obsdata(obsdata_key):
            raise ValueError("Tried to add existing observation key:%s " %
                             obsdata_key)

        self._create_obsdata(obsdata_key)
        obsdata = self.getObsdata(obsdata_key)
        obsdata.initObservations(self.__getObservations())
        return obsdata

    def copyObsdata(self, src_key, target_key):
        """ @rtype: Obsdata """
        assert isinstance(src_key, str)
        assert isinstance(target_key, str)
        obsdata = self._copy_obsdata(src_key, target_key)
        obsdata.initObservations(self.__getObservations())
        return obsdata

    def createDataset(self, dataset_key):
        """ @rtype: Dataset """
        assert isinstance(dataset_key, str)
        if self._has_dataset(dataset_key):
            raise ValueError("Tried to add existing data key:%s " %
                             dataset_key)

        self._create_dataset(dataset_key)
        data = self.getDataset(dataset_key)
        data.initEnsembleConfig(self.__getEnsembleConfig())
        return data

    def copyDataset(self, src_key, target_key):
        """ @rtype: Dataset """
        assert isinstance(src_key, str)
        assert isinstance(target_key, str)
        data = self._copy_dataset(src_key, target_key)
        data.initEnsembleConfig(self.__getEnsembleConfig())
        return data

    def getUpdatestep(self):
        """ @rtype: UpdateStep """
        return self._get_updatestep()

    def getMinistep(self, mini_step_key):
        """ @rtype: Ministep """
        assert isinstance(mini_step_key, str)
        return self._get_ministep(mini_step_key)

    def getObsdata(self, obsdata_key):
        """ @rtype: Obsdata """
        assert isinstance(obsdata_key, str)
        return self._get_obsdata(obsdata_key)

    def getDataset(self, dataset_key):
        """ @rtype: Dataset """
        assert isinstance(dataset_key, str)
        return self._get_dataset(dataset_key)

    def attachMinistep(self, update_step, mini_step):
        assert isinstance(mini_step, LocalMinistep)
        assert isinstance(update_step, LocalUpdateStep)
        self._attach_ministep(update_step, mini_step)

    def writeSummaryFile(self, filename):
        """
        Writes a summary of the local config object
        The summary contains the Obsset with their respective
        number of observations and the Datasets with the number of active indices
        """
        assert isinstance(filename, str)
        self._smry_fprintf(filename)

    def __repr__(self):
        return self._create_repr()
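
# Usage sketch for LocalConfig: a minimal, hedged illustration (not part of
# the original module). Assumptions: ert is an EnKFMain instance as in
# Example #1, the accessor is EnKFMain.getLocalConfig() (the docstring above
# refers to it as local_config()), and the keys below are hypothetical.
local_config = ert.getLocalConfig()
local_config.clear()

obsdata = local_config.createObsdata("MY_OBSSET")
dataset = local_config.createDataset("MY_DATASET")
ministep = local_config.createMinistep("MY_MINISTEP")

# obsdata/dataset would normally be attached to the ministep via LocalMinistep
# methods (assumed API, not shown in this excerpt).
update_step = local_config.getUpdatestep()
local_config.attachMinistep(update_step, ministep)
local_config.writeSummaryFile("local_config_summary.txt")
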
Example #7
class SiteConfig(BaseCClass):
    TYPE_NAME = "site_config"
    _alloc = ResPrototype("void* site_config_alloc(config_content)", bind=False)
    _alloc_full = ResPrototype(
        "void* site_config_alloc_full(ext_joblist, env_varlist, int)", bind=False
    )
    _alloc_load_user_config = ResPrototype(
        "void* site_config_alloc_load_user_config(char*)", bind=False
    )
    _free = ResPrototype("void site_config_free( site_config )")
    _get_installed_jobs = ResPrototype(
        "ext_joblist_ref site_config_get_installed_jobs(site_config)"
    )
    _get_license_root_path = ResPrototype(
        "char* site_config_get_license_root_path(site_config)"
    )
    _set_license_root_path = ResPrototype(
        "void site_config_set_license_root_path(site_config, char*)"
    )
    _get_location = ResPrototype("char* site_config_get_location()", bind=False)
    _get_config_file = ResPrototype("char* site_config_get_config_file(site_config)")
    _get_umask = ResPrototype("int site_config_get_umask(site_config)")

    def __init__(self, user_config_file=None, config_content=None, config_dict=None):

        configs = sum(
            [
                1
                for x in [user_config_file, config_content, config_dict]
                if x is not None
            ]
        )

        if configs > 1:
            raise ValueError(
                "Attempting to construct SiteConfig with multiple config objects"
            )

        if configs == 0:
            raise ValueError(
                "Attempting to construct SiteConfig with no config objects"
            )

        c_ptr = None
        if user_config_file is not None:
            if not os.path.isfile(user_config_file):
                raise IOError('No such configuration file "%s".' % user_config_file)
            c_ptr = self._alloc_load_user_config(user_config_file)

        elif config_content is not None:
            c_ptr = self._alloc(config_content)

        elif config_dict is not None:
            __license_root_path = None
            if ConfigKeys.LICENSE_PATH in config_dict:
                license_root_path = config_dict.get(ConfigKeys.LICENSE_PATH)
                license_root_path_site = os.path.realpath(license_root_path)
                __license_root_path = os.path.join(
                    license_root_path_site, os.getenv("USER"), str(os.getpid())
                )

            # Create joblist
            ext_job_list = ExtJoblist()
            for job in config_dict.get(ConfigKeys.INSTALL_JOB, []):
                if not os.path.isfile(job[ConfigKeys.PATH]):
                    print(
                        "WARNING: Unable to locate job file {}".format(
                            job[ConfigKeys.PATH]
                        )
                    )
                    continue
                try:
                    new_job = ExtJob(
                        config_file=job[ConfigKeys.PATH],
                        private=False,
                        name=job[ConfigKeys.NAME],
                        license_root_path=__license_root_path,
                    )
                    new_job.convertToCReference(None)
                    ext_job_list.add_job(job[ConfigKeys.NAME], new_job)
                except (ValueError, OSError):
                    print(
                        "WARNING: Unable to create job from {}".format(
                            job[ConfigKeys.PATH]
                        )
                    )

            for job_path in config_dict.get(ConfigKeys.INSTALL_JOB_DIRECTORY, []):
                if not os.path.isdir(job_path):
                    print("WARNING: Unable to locate job directory {}".format(job_path))
                    continue
                files = os.listdir(job_path)
                for file_name in files:
                    full_path = os.path.join(job_path, file_name)
                    if os.path.isfile(full_path):
                        try:
                            new_job = ExtJob(
                                config_file=full_path,
                                private=False,
                                license_root_path=__license_root_path,
                            )
                            new_job.convertToCReference(None)
                            ext_job_list.add_job(new_job.name(), new_job)
                        except (ValueError, OSError):
                            print(
                                "WARNING: Unable to create job from {}".format(
                                    full_path
                                )
                            )

            ext_job_list.convertToCReference(None)

            # Create varlist
            env_var_list = EnvironmentVarlist()
            for (var, value) in config_dict.get(ConfigKeys.SETENV, []):
                env_var_list[var] = value

            env_var_list.convertToCReference(None)
            umask = config_dict.get(ConfigKeys.UMASK)

            c_ptr = self._alloc_full(ext_job_list, env_var_list, umask)

        if c_ptr is None:
            raise ValueError("Failed to construct SiteConfig instance.")

        super().__init__(c_ptr)

    def __repr__(self):
        return "Site Config {}".format(SiteConfig.getLocation())

    @property
    def config_file(self):
        return self._get_config_file()

    def get_installed_jobs(self):
        """@rtype: ExtJoblist"""
        return self._get_installed_jobs().setParent(self)

    def get_license_root_path(self):
        """@rtype: str"""
        return self._get_license_root_path()

    def set_license_root_path(self, path):
        self._set_license_root_path(path)

    @classmethod
    def getLocation(cls):
        """@rtype: str"""
        return cls._get_location()

    def free(self):
        self._free()

    @property
    def umask(self):
        return self._get_umask()

    def __eq__(self, other):
        if self.umask != other.umask:
            return False

        self_job_list = self.get_installed_jobs()
        other_job_list = other.get_installed_jobs()

        if set(other_job_list.getAvailableJobNames()) != set(
            self_job_list.getAvailableJobNames()
        ):
            return False

        if len(other_job_list.getAvailableJobNames()) != len(
            self_job_list.getAvailableJobNames()
        ):
            return False

        for job_name in other_job_list.getAvailableJobNames():

            if (
                other_job_list[job_name].get_config_file()
                != self_job_list[job_name].get_config_file()
            ):
                return False

            if (
                other_job_list[job_name].get_stderr_file()
                != self_job_list[job_name].get_stderr_file()
            ):
                return False

            if (
                other_job_list[job_name].get_stdout_file()
                != self_job_list[job_name].get_stdout_file()
            ):
                return False
        return True
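
# Usage sketch for SiteConfig: a minimal, hedged illustration (not part of the
# original module). Assumption: "site-config" is a hypothetical site
# configuration file path.
site_config = SiteConfig(user_config_file="site-config")
print("config file:", site_config.config_file)
print("umask:", site_config.umask)
print("installed jobs:",
      list(site_config.get_installed_jobs().getAvailableJobNames()))
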
Example #8
class LocalObsdata(BaseCClass):
    TYPE_NAME = "local_obsdata"

    _alloc = ResPrototype("void* local_obsdata_alloc(char*)", bind=False)
    _free = ResPrototype("void  local_obsdata_free(local_obsdata)")
    _size = ResPrototype("int   local_obsdata_get_size(local_obsdata)")
    _has_node = ResPrototype(
        "bool  local_obsdata_has_node(local_obsdata, char*)")
    _add_node = ResPrototype(
        "bool  local_obsdata_add_node(local_obsdata, local_obsdata_node)")
    _del_node = ResPrototype(
        "void  local_obsdata_del_node(local_obsdata, char*)")
    _clear = ResPrototype("void  local_dataset_clear(local_obsdata)")
    _name = ResPrototype("char* local_obsdata_get_name(local_obsdata)")
    _iget_node = ResPrototype(
        "local_obsdata_node_ref local_obsdata_iget(local_obsdata, int)")
    _get_node = ResPrototype(
        "local_obsdata_node_ref local_obsdata_get(local_obsdata, char*)")
    _copy_active_list = ResPrototype(
        "active_list_ref local_obsdata_get_copy_node_active_list(local_obsdata, char*)"
    )
    _active_list = ResPrototype(
        "active_list_ref local_obsdata_get_node_active_list(local_obsdata, char*)"
    )

    def __init__(self, name, obs=None):
        # The obs argument should be an EnkfObs instance; importing it here
        # directly would cause circular import problems. It is not really
        # optional, but it is made optional here to be able to give a decent
        # error message for old call sites which did not supply the obs argument.
        if obs is None:
            msg = """

The LocalObsdata constructor has recently changed; as a second
argument you should pass the EnkfObs instance with all the
observations. You can typically get this instance from the ert main
object as:

    obs = ert.getObservations()
    local_obs = LocalObsdata("YOUR-KEY", obs)

"""
            raise Exception(msg)

        assert isinstance(name, str)

        c_ptr = self._alloc(name)
        if c_ptr:
            super(LocalObsdata, self).__init__(c_ptr)
            self.initObservations(obs)
        else:
            raise ValueError(
                'Unable to construct LocalObsdata with name "%s" from given obs.'
                % name)

    def initObservations(self, obs):
        self.obs = obs

    def __len__(self):
        """@rtype: int"""
        return self._size()

    def __getitem__(self, key):
        """@rtype: LocalObsdataNode"""
        if isinstance(key, int):
            if key < 0:
                key += len(self)
            if 0 <= key < len(self):
                node_ = self._iget_node(key)
                node_.setParent(self)
                return node_
            else:
                raise IndexError("Invalid index, valid range is [0, %d)" %
                                 len(self))
        else:
            if key in self:
                node_ = self._get_node(key)
                node_.setParent(self)
                return node_
            else:
                raise KeyError('Unknown key "%s".' % key)

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield self[cur]
            cur += 1

    def __contains__(self, item):
        """@rtype: bool"""
        if isinstance(item, str):
            return self._has_node(item)
        elif isinstance(item, LocalObsdataNode):
            return self._has_node(item.getKey())

        return False

    def __delitem__(self, key):
        assert isinstance(key, str)
        if key in self:
            self._del_node(key)
        else:
            raise KeyError('Unknown key "%s".' % key)

    def addNode(self, key, add_all_timesteps=True):
        """@rtype: LocalObsdataNode"""
        assert isinstance(key, str)
        if key in self.obs:
            node = LocalObsdataNode(key, add_all_timesteps)
            if node not in self:
                node.convertToCReference(self)
                self._add_node(node)
                return node
            else:
                raise KeyError("Tried to add existing observation key:%s " %
                               key)
        else:
            raise KeyError(
                "The observation node: %s is not a recognized observation key" %
                key)

    def addNodeAndRange(self, key, step_1, step_2):
        """@rtype: LocalObsdataNode

        The time range will be removed in the future...
        """
        assert isinstance(key, str)
        assert isinstance(step_1, int)
        assert isinstance(step_2, int)
        node = self.addNode(key)
        node.addRange(step_1, step_2)
        return node

    def clear(self):
        self._clear()

    def addObsVector(self, obs_vector):
        self.addNode(obs_vector.getObservationKey())

    def name(self):
        return self._name()

    def getName(self):
        """@rtype: str"""
        return self.name()

    def getActiveList(self, key):
        """@rtype: ActiveList"""
        if key in self:
            return self._active_list(key)
        else:
            raise KeyError('Local key "%s" not recognized.' % key)

    def copy_active_list(self, key):
        """@rtype: ActiveList"""
        if key in self:
            return self._copy_active_list(key)
        else:
            raise KeyError('Local key "%s" not recognized.' % key)

    def free(self):
        self._free()

    def __repr__(self):
        return "LocalObsdata(len = %d, name = %s) at 0x%x" % (
            len(self),
            self.name(),
            self._address(),
        )
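
# Usage sketch for LocalObsdata: a minimal, hedged illustration (not part of
# the original module). Assumptions: ert is an EnKFMain instance as in
# Example #1 and "WOPR:OP1" is a hypothetical observation key present in the
# configuration.
obs = ert.getObservations()
local_obs = LocalObsdata("MY_OBSSET", obs)

node = local_obs.addNode("WOPR:OP1")
print(len(local_obs), local_obs.getName())
print("WOPR:OP1" in local_obs)
print(local_obs.getActiveList("WOPR:OP1"))
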
Example #9
class Workflow(BaseCClass):
    TYPE_NAME = "workflow"
    _alloc = ResPrototype("void* workflow_alloc(char*, workflow_joblist)", bind=False)
    _free = ResPrototype("void     workflow_free(workflow)")
    _count = ResPrototype("int      workflow_size(workflow)")
    _iget_job = ResPrototype("workflow_job_ref workflow_iget_job(workflow, int)")
    _iget_args = ResPrototype("stringlist_ref   workflow_iget_arguments(workflow, int)")

    _try_compile = ResPrototype("bool workflow_try_compile(workflow, subst_list)")
    _get_last_error = ResPrototype("config_error_ref workflow_get_last_error(workflow)")
    _get_src_file = ResPrototype("char* worflow_get_src_file(workflow)")

    def __init__(self, src_file, job_list):
        """
        @type src_file: str
        @type job_list: WorkflowJoblist
        """
        c_ptr = self._alloc(src_file, job_list)
        super(Workflow, self).__init__(c_ptr)

        self.__running = False
        self.__cancelled = False
        self.__current_job = None
        self.__status = {}

    def __len__(self):
        return self._count()

    def __getitem__(self, index):
        """
        @type index: int
        @rtype: tuple of (WorkflowJob, arguments)
        """
        job = self._iget_job(index)
        args = self._iget_args(index)
        return job, args

    def __iter__(self):
        for index in range(len(self)):
            yield self[index]

    @property
    def src_file(self):
        return self._get_src_file()

    @staticmethod
    def _log_workflow_job_usage(job_name):
        payload = {
            "subsystem": "ert_workflow",
            "ert_job": job_name,
            "cwd": os.getcwd(),
        }
        log_message(payload)

    def run(self, ert, verbose=False, context=None):
        """
        @type ert: res.enkf.enkf_main.EnKFMain
        @type verbose: bool
        @type context: SubstitutionList
        @rtype: bool
        """
        # Reset status
        self.__status = {}
        self.__running = True
        success = self._try_compile(context)
        if not success:
            msg = (
                "** Warning: The workflow file {} is not valid - "
                "make sure the workflow jobs are defined accordingly\n"
            )
            sys.stderr.write(msg.format(self.src_file))

            self.__running = False
            return False

        for job, args in self:
            self.__current_job = job
            if not self.__cancelled:
                self._log_workflow_job_usage(job.name())

                return_value = job.run(ert, args, verbose)
                self.__status[job.name()] = {
                    "stdout": job.stdoutdata(),
                    "stderr": job.stderrdata(),
                    "completed": not job.hasFailed(),
                    "return": return_value,
                }

        self.__current_job = None
        self.__running = False
        return success

    def free(self):
        self._free()

    def isRunning(self):
        return self.__running

    def cancel(self):
        if self.__current_job is not None:
            self.__current_job.cancel()

        self.__cancelled = True

    def isCancelled(self):
        return self.__cancelled

    def wait(self):
        while self.isRunning():
            time.sleep(1)

    def getLastError(self):
        """ @rtype: ConfigError """
        return self._get_last_error()

    def getJobsReport(self):
        """ @rtype: {dict} """
        return self.__status

    @classmethod
    def createCReference(cls, c_pointer, parent=None):
        workflow = super(Workflow, cls).createCReference(c_pointer, parent)
        workflow.__running = False
        workflow.__cancelled = False
        workflow.__current_job = None
        return workflow

    def __ne__(self, other):
        return not (self == other)

    def __eq__(self, other):
        return os.path.realpath(self.src_file) == os.path.realpath(other.src_file)
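
# Usage sketch for Workflow: a minimal, hedged illustration (not part of the
# original module). Assumptions: WorkflowJoblist is importable from
# res.job_queue, "MY_WORKFLOW" is a hypothetical workflow file whose jobs are
# already known to joblist, ert is an EnKFMain instance as in Example #1, and
# ert.getDataKW() is assumed to provide the SubstitutionList context.
from res.job_queue import WorkflowJoblist

joblist = WorkflowJoblist()
workflow = Workflow("MY_WORKFLOW", joblist)

if workflow.run(ert, verbose=True, context=ert.getDataKW()):
    print(workflow.getJobsReport())
else:
    print(workflow.getLastError())
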
Example #10
class EnkfConfigNode(BaseCClass):
    TYPE_NAME = "enkf_config_node"

    _alloc = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc(enkf_var_type_enum, ert_impl_type_enum, bool, char*, char* , char*, char*, void*)",
        bind=False,
    )
    _alloc_gen_data_everest = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_GEN_DATA_everest(char*, char* , int_vector)",
        bind=False,
    )
    _alloc_summary_node = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_summary(char*, load_fail_type)",
        bind=False,
    )
    _alloc_field_node = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_field(char*, ecl_grid, void*, bool)",
        bind=False,
    )
    _get_ref = ResPrototype("void* enkf_config_node_get_ref(enkf_config_node)"
                            )  # todo: fix return type
    _get_impl_type = ResPrototype(
        "ert_impl_type_enum enkf_config_node_get_impl_type(enkf_config_node)")
    _get_enkf_outfile = ResPrototype(
        "char* enkf_config_node_get_enkf_outfile(enkf_config_node)")
    _get_min_std_file = ResPrototype(
        "char* enkf_config_node_get_min_std_file(enkf_config_node)")
    _get_enkf_infile = ResPrototype(
        "char* enkf_config_node_get_enkf_infile(enkf_config_node)")
    _get_init_file = ResPrototype(
        "char* enkf_config_node_get_FIELD_fill_file(enkf_config_node, path_fmt)"
    )
    _get_init_file_fmt = ResPrototype(
        "char* enkf_config_node_get_init_file_fmt(enkf_config_node)")
    _get_var_type = ResPrototype(
        "enkf_var_type_enum enkf_config_node_get_var_type(enkf_config_node)"
    )  # todo: fix return type as enum
    _get_key = ResPrototype("char* enkf_config_node_get_key(enkf_config_node)")
    _get_obs_keys = ResPrototype(
        "stringlist_ref enkf_config_node_get_obs_keys(enkf_config_node)")
    _free = ResPrototype("void enkf_config_node_free(enkf_config_node)")
    _use_forward_init = ResPrototype(
        "bool enkf_config_node_use_forward_init(enkf_config_node)")

    # ensemble config aux
    _alloc_gen_param_full = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_GEN_PARAM_full(char*, bool, "
        "gen_data_file_format_type, gen_data_file_format_type, char*, char*, "
        "char*, char*, char*)",
        bind=False,
    )

    _alloc_gen_data_full = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_GEN_DATA_full(char*, char*, "
        "gen_data_file_format_type, int_vector, char*, char*, char*, char*)",
        bind=False,
    )

    _alloc_gen_kw_full = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_GEN_KW_full(char*, bool, "
        "char*, char*, char*, char*, char*, char*)",
        bind=False,
    )

    _alloc_surface_full = ResPrototype(
        "enkf_config_node_obj enkf_config_node_alloc_SURFACE_full(char*, bool, "
        "char*, char*, char*, char*)",
        bind=False,
    )

    _alloc_container = ResPrototype(
        "enkf_config_node_obj enkf_config_node_new_container(char*)",
        bind=False)
    _update_container = ResPrototype(
        "void enkf_config_node_update_container(enkf_config_node, enkf_config_node)"
    )
    _get_container_size = ResPrototype(
        "int enkf_config_node_container_size(enkf_config_node)")
    _iget_container_key = ResPrototype(
        "char* enkf_config_node_iget_container_key(enkf_config_node, int)")
    _update_parameter_field = ResPrototype(
        "void enkf_config_node_update_parameter_field(enkf_config_node, char*, "
        "char*, char*, enkf_truncation_type_enum, double, double, char*, char*)",
        bind=True,
    )
    _update_general_field = ResPrototype(
        "void enkf_config_node_update_general_field(enkf_config_node, char*, "
        "char*, char*, char*, enkf_truncation_type_enum, double, double, char*, "
        "char*, char*)",
        bind=True,
    )

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def get_container_size(self):
        return self._get_container_size()

    def get_container_key(self, index):
        return self._iget_container_key(index)

    def getImplementationType(self):
        """@rtype: ErtImplType"""
        return self._get_impl_type()

    def getVariableType(self):
        return self._get_var_type()

    def getPointerReference(self):
        return self._get_ref()

    def getUseForwardInit(self):
        return self._use_forward_init()

    def getInitFile(self, model_config):
        return self._get_init_file(model_config.getRunpathFormat())

    def get_min_std_file(self):
        return self._get_min_std_file()

    def get_enkf_outfile(self):
        return self._get_enkf_outfile()

    def getFieldModelConfig(self):
        """@rtype: FieldConfig"""
        return FieldConfig.createCReference(self._get_ref(), parent=self)

    def getDataModelConfig(self):
        """@rtype: GenDataConfig"""
        return GenDataConfig.createCReference(self._get_ref(), parent=self)

    def getKeywordModelConfig(self):
        """@rtype: GenKWConfig"""
        return GenKwConfig.createCReference(self._get_ref(), parent=self)

    def getSummaryModelConfig(self):
        """@rtype: SummaryConfig"""
        return SummaryConfig.createCReference(self._get_ref(), parent=self)

    def get_enkf_infile(self):
        return self._get_enkf_infile()

    def get_init_file_fmt(self):
        return self._get_init_file_fmt()

    def getObservationKeys(self):
        """@rtype:  StringList"""
        return self._get_obs_keys().setParent(self)

    @classmethod
    def createSummaryConfigNode(cls, key, load_fail_type):
        """
         @type key: str
         @type load_fail_type: LoadFailTypeEnum
        @rtype: EnkfConfigNode
        """

        assert isinstance(load_fail_type, LoadFailTypeEnum)
        return cls._alloc_summary_node(key, load_fail_type)

    @classmethod
    def createFieldConfigNode(cls,
                              key,
                              grid,
                              trans_table=None,
                              forward_init=False):
        """
        @type grid: EclGrid
        @rtype: EnkfConfigNode
        """
        return cls._alloc_field_node(key, grid, trans_table, forward_init)

    @classmethod
    def create_ext_param(cls, key, input_keys, output_file=None):
        config = ExtParamConfig(key, input_keys)
        output_file = output_file or key + ".json"
        node = cls._alloc(
            EnkfVarType.EXT_PARAMETER,
            ErtImplType.EXT_PARAM,
            False,
            key,
            None,
            output_file,
            None,
            ExtParamConfig.from_param(config),
        )
        config.convertToCReference(node)  # config gets freed when node dies
        return node

    # This method only exposes the details relevant for Everest usage.
    @classmethod
    def create_gen_data(cls, key, file_fmt, report_steps=(0, )):
        active_steps = IntVector()
        for step in report_steps:
            active_steps.append(step)

        config_node = cls._alloc_gen_data_everest(key, file_fmt, active_steps)
        if config_node is None:
            raise ValueError("Failed to create GEN_DATA node for:%s" % key)

        return config_node

    # GEN DATA FULL creation
    @classmethod
    def create_gen_data_full(
        cls,
        key,
        result_file,
        input_format,
        report_steps,
        ecl_file,
        init_file_fmt,
        template_file,
        data_key,
    ):
        active_steps = IntVector()
        for step in report_steps:
            active_steps.append(step)

        config_node = cls._alloc_gen_data_full(
            key,
            result_file,
            input_format,
            active_steps,
            ecl_file,
            init_file_fmt,
            template_file,
            data_key,
        )
        if config_node is None:
            raise ValueError(
                "Failed to create GEN_DATA with FULL specs node for:%s" % key)

        return config_node

    # GEN PARAM FULL creation
    @classmethod
    def create_gen_param(
        cls,
        key,
        forward_init,
        input_format,
        output_format,
        init_file_fmt,
        ecl_file,
        min_std_file,
        template_file,
        data_key,
    ):

        config_node = cls._alloc_gen_param_full(
            key,
            forward_init,
            input_format,
            output_format,
            init_file_fmt,
            ecl_file,
            min_std_file,
            template_file,
            data_key,
        )
        if config_node is None:
            raise ValueError("Failed to create GEN_PARAM node for:%s" % key)

        return config_node

    # GEN KW FULL creation
    @classmethod
    def create_gen_kw(
        cls,
        key,
        template_file,
        enkf_outfile,
        parameter_file,
        forward_init,
        min_std_file,
        init_file_fmt,
        gen_kw_format,
    ):

        config_node = cls._alloc_gen_kw_full(
            key,
            forward_init,
            gen_kw_format,
            template_file,
            enkf_outfile,
            parameter_file,
            min_std_file,
            init_file_fmt,
        )
        if config_node is None:
            raise ValueError("Failed to create GEN KW node for:%s" % key)

        return config_node

    # SURFACE FULL creation
    @classmethod
    def create_surface(
        cls,
        key,
        init_file_fmt,
        output_file,
        base_surface_file,
        min_std_file,
        forward_init,
    ):

        if base_surface_file is not None:
            base_surface_file = os.path.realpath(base_surface_file)
        config_node = cls._alloc_surface_full(
            key,
            forward_init,
            output_file,
            base_surface_file,
            min_std_file,
            init_file_fmt,
        )
        if config_node is None:
            raise ValueError("Failed to create SURFACE node for:%s" % key)

        return config_node

    # FIELD FULL creation
    @classmethod
    def create_field(
        cls,
        key,
        var_type_string,
        grid,
        field_trans_table,
        ecl_file,
        enkf_infile,
        forward_init,
        init_transform,
        output_transform,
        input_transform,
        min_std_file,
        min_key,
        max_key,
        init_file_fmt,
    ):

        truncation = EnkfTruncationType.TRUNCATE_NONE
        value_min = -1
        value_max = -1
        if min_key is not None:
            value_min = min_key
            truncation = truncation | EnkfTruncationType.TRUNCATE_MIN
        if max_key is not None:
            value_max = max_key
            truncation = truncation | EnkfTruncationType.TRUNCATE_MAX

        config_node = cls._alloc_field_node(key, grid, field_trans_table,
                                            forward_init)
        if config_node is None:
            raise ValueError("Failed to create FIELD node for:%s" % key)

        if var_type_string == ConfigKeys.PARAMETER_KEY:
            config_node._update_parameter_field(
                ecl_file,
                init_file_fmt,
                min_std_file,
                truncation,
                value_min,
                value_max,
                init_transform,
                output_transform,
            )

        elif var_type_string == ConfigKeys.GENERAL_KEY:
            config_node._update_general_field(
                ecl_file,
                enkf_infile,
                init_file_fmt,
                min_std_file,
                truncation,
                value_min,
                value_max,
                init_transform,
                input_transform,
                output_transform,
            )

        return config_node

    # CONTAINER creation
    @classmethod
    def create_container(cls, key):
        config_node = cls._alloc_container(key)

        if config_node is None:
            raise ValueError("Failed to create CONTAINER node for:%s" % key)

        return config_node

    def free(self):
        self._free()

    def __repr__(self):
        key = self.getKey()
        vt = self.getVariableType()
        imp = self.getImplementationType()
        content = "key = %s, var_type = %s, implementation = %s" % (key, vt,
                                                                    imp)
        return self._create_repr(content)

    def getModelConfig(self):
        implementation_type = self.getImplementationType()

        if implementation_type == ErtImplType.FIELD:
            return self.getFieldModelConfig()
        elif implementation_type == ErtImplType.GEN_DATA:
            return self.getDataModelConfig()
        elif implementation_type == ErtImplType.GEN_KW:
            return self.getKeywordModelConfig()
        elif implementation_type == ErtImplType.SUMMARY:
            return SummaryConfig.createCReference(self.getPointerReference(),
                                                  parent=self)
        elif implementation_type == ErtImplType.EXT_PARAM:
            return ExtParamConfig.createCReference(self.getPointerReference(),
                                                   parent=self)
        else:
            print(
                "[EnkfConfigNode::getModelConfig()] Unhandled implementation model type: %i"
                % implementation_type)
            # raise NotImplementedError("Unknown model type: %i" % type)

    def getKey(self):
        return self._get_key()

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        """@rtype: bool"""
        if self.getImplementationType() != other.getImplementationType():
            return False

        if self.getKey() != other.getKey():
            return False

        if self.getImplementationType() == ErtImplType.EXT_PARAM:
            if self.get_init_file_fmt() != other.get_init_file_fmt():
                return False
            if self.get_min_std_file() != other.get_min_std_file():
                return False
            if self.get_enkf_outfile() != other.get_enkf_outfile():
                return False
            if self.getUseForwardInit() != other.getUseForwardInit():
                return False
        elif self.getImplementationType() == ErtImplType.GEN_DATA:
            if self.getDataModelConfig() != other.getDataModelConfig():
                return False
            if self.get_init_file_fmt() != other.get_init_file_fmt():
                return False
            if self.get_enkf_outfile() != other.get_enkf_outfile():
                return False
            if self.get_enkf_infile() != other.get_enkf_infile():
                return False
            if self.getUseForwardInit() != other.getUseForwardInit():
                return False
        elif self.getImplementationType() == ErtImplType.GEN_KW:
            if self.getKeywordModelConfig() != other.getKeywordModelConfig():
                return False
            if self.get_init_file_fmt() != other.get_init_file_fmt():
                return False
            if self.get_min_std_file() != other.get_min_std_file():
                return False
            if self.get_enkf_outfile() != other.get_enkf_outfile():
                return False
            if self.getUseForwardInit() != other.getUseForwardInit():
                return False
        elif self.getImplementationType() == ErtImplType.CONTAINER:
            a = [
                self.get_container_key(i)
                for i in range(self.get_container_size())
            ]
            b = [
                other.get_container_key(i)
                for i in range(other.get_container_size())
            ]
            if a != b:
                return False
        elif self.getImplementationType() == ErtImplType.SUMMARY:
            if self.getSummaryModelConfig() != other.getSummaryModelConfig():
                return False
        elif self.getImplementationType() == ErtImplType.SURFACE:
            if self.get_init_file_fmt() != other.get_init_file_fmt():
                return False
            if self.getUseForwardInit() != other.getUseForwardInit():
                return False
            if self.get_enkf_outfile() != other.get_enkf_outfile():
                return False
            if self.get_min_std_file() != other.get_min_std_file():
                return False
        elif self.getImplementationType() == ErtImplType.FIELD:
            if self.getFieldModelConfig() != other.getFieldModelConfig():
                return False
            if self.getUseForwardInit() != other.getUseForwardInit():
                return False
            if self.get_init_file_fmt() != other.get_init_file_fmt():
                return False
            if self.get_min_std_file() != other.get_min_std_file():
                return False
            if self.get_enkf_outfile() != other.get_enkf_outfile():
                return False

        return True
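
A minimal usage sketch of the factory methods above, assuming the res bindings are importable and that LoadFailTypeEnum provides a LOAD_FAIL_SILENT member; the keys and file format are hypothetical:

# Hedged sketch: build a SUMMARY node and an Everest-style GEN_DATA node.
summary_node = EnkfConfigNode.createSummaryConfigNode(
    "WOPR:OP_1", LoadFailTypeEnum.LOAD_FAIL_SILENT
)
gen_data_node = EnkfConfigNode.create_gen_data(
    "WELL_RATE", "results_%d", report_steps=(0, 1, 2)
)
print(gen_data_node.getKey())                 # -> "WELL_RATE"
print(gen_data_node.getImplementationType())  # ErtImplType.GEN_DATA
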
Example #11
class RunpathList(BaseCClass):
    TYPE_NAME = "runpath_list"
    _alloc = ResPrototype("void* runpath_list_alloc(char*)", bind=False)
    _free = ResPrototype("void  runpath_list_free(runpath_list)")
    _add = ResPrototype(
        "void  runpath_list_add(runpath_list, int, int, char*, char*)")
    _clear = ResPrototype("void  runpath_list_clear(runpath_list)")
    _size = ResPrototype("int   runpath_list_size(runpath_list)")
    _iens = ResPrototype("int   runpath_list_iget_iens(runpath_list, int)")
    _iteration = ResPrototype(
        "int   runpath_list_iget_iter(runpath_list, int)")
    _runpath = ResPrototype(
        "char* runpath_list_iget_runpath(runpath_list, int)")
    _basename = ResPrototype(
        "char* runpath_list_iget_basename(runpath_list, int)")
    _export = ResPrototype("void  runpath_list_fprintf(runpath_list)")
    _load = ResPrototype("bool  runpath_list_load(runpath_list)")

    _get_export_file = ResPrototype(
        "char* runpath_list_get_export_file(runpath_list)")
    _set_export_file = ResPrototype(
        "void runpath_list_set_export_file(runpath_list, char*)")

    def __init__(self, export_file):
        c_ptr = self._alloc(export_file)
        if c_ptr:
            super().__init__(c_ptr)
        else:
            raise IOError(
                'Could not construct RunpathList with export_file "%s".' %
                export_file)

    def __len__(self):
        return self._size()

    def __getitem__(self, index):
        """@rtype: RunpathNode"""
        ls = len(self)
        if isinstance(index, int):
            idx = index
            if idx < 0:
                idx += ls
            if not 0 <= idx < ls:
                raise IndexError("Index not in range: 0 <= %d < %d" %
                                 (index, ls))
            realization = self._iens(idx)
            iteration = self._iteration(idx)
            runpath = self._runpath(idx)
            basename = self._basename(idx)
            return RunpathNode(realization, iteration, runpath, basename)
        elif isinstance(index, slice):
            return [self[i] for i in range(*index.indices(ls))]
        raise TypeError("List indices must be integers, not %s." %
                        str(type(index)))

    def __iter__(self):
        index = 0
        while index < len(self):
            yield self[index]
            index += 1

    def getExportFile(self):
        return self._get_export_file()

    def setExportFile(self, export_file):
        self._set_export_file(export_file)

    def add(self, realization_number, iteration_number, runpath, basename):
        """
        @type realization_number: int
        @type iteration_number: int
        @type runpath: str
        @type basename: str
        """
        self._add(realization_number, iteration_number, runpath, basename)

    def clear(self):
        self._clear()

    def free(self):
        self._free()

    def __repr__(self):
        return "RunpathList(size = %d) %s" % (len(self), self._ad_str())

    def export(self):
        self._export()

    def load(self):
        if not self._load():
            raise IOError("Could not load from:%s" % self._get_export_file())
Example #12
class ForwardModel(BaseCClass):
    TYPE_NAME = "forward_model"

    _alloc = ResPrototype("void* forward_model_alloc(ext_joblist)", bind=False)
    _free = ResPrototype("void forward_model_free( forward_model )")
    _clear = ResPrototype("void forward_model_clear(forward_model)")
    _add_job = ResPrototype(
        "ext_job_ref forward_model_add_job(forward_model, char*)")
    _alloc_joblist = ResPrototype(
        "stringlist_obj forward_model_alloc_joblist(forward_model)")
    _iget_job = ResPrototype(
        "ext_job_ref forward_model_iget_job( forward_model, int)")
    _get_length = ResPrototype("int forward_model_get_length(forward_model)")
    _formatted_fprintf = ResPrototype(
        "void forward_model_formatted_fprintf(forward_model, char*, char*, char*, subst_list, int, env_varlist)"
    )

    def __init__(self, ext_joblist):
        c_ptr = self._alloc(ext_joblist)
        if c_ptr:
            super(ForwardModel, self).__init__(c_ptr)
        else:
            raise ValueError(
                'Failed to construct forward model from provided ext_joblist %s'
                % ext_joblist)

    def __len__(self):
        return self._get_length()

    def joblist(self):
        """ @rtype: StringList """
        return self._alloc_joblist()

    def iget_job(self, index):
        """ @rtype: ExtJob """
        return self._iget_job(index).setParent(self)

    def add_job(self, name):
        """ @rtype: ExtJob """
        return self._add_job(name).setParent(self)

    def clear(self):
        self._clear()

    def free(self):
        self._free()

    def formatted_fprintf(self, run_id, path, data_root, global_args, umask,
                          env_varlist):
        self._formatted_fprintf(run_id, path, data_root, global_args, umask,
                                env_varlist)

    def __repr__(self):
        return self._create_repr('joblist=%s' % self.joblist())

    def get_size(self):
        return len(self)

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        for i in range(len(self)):
            if self.iget_job(i) != other.iget_job(i):
                return False
        return True
Example #13
class ObsBlock(BaseCClass):
    TYPE_NAME = "obs_block"

    _alloc = ResPrototype(
        "void*  obs_block_alloc(char*, int, matrix, bool, double)", bind=False
    )
    _free = ResPrototype("void   obs_block_free(obs_block)")
    _total_size = ResPrototype("int    obs_block_get_size( obs_block )")
    _active_size = ResPrototype("int    obs_block_get_active_size( obs_block )")
    _iset = ResPrototype("void   obs_block_iset( obs_block , int , double , double)")
    _iget_value = ResPrototype("double obs_block_iget_value( obs_block , int)")
    _iget_std = ResPrototype("double obs_block_iget_std( obs_block , int)")
    _get_obs_key = ResPrototype("char* obs_block_get_key( obs_block )")
    _iget_is_active = ResPrototype("bool obs_block_iget_is_active( obs_block , int)")

    def __init__(self, obs_key, obs_size, global_std_scaling=1.0):
        error_covar: Optional[Matrix] = None
        error_covar_owner = False
        c_pointer = self._alloc(
            obs_key, obs_size, error_covar, error_covar_owner, global_std_scaling
        )
        super().__init__(c_pointer)

    def totalSize(self):
        return self._total_size()

    def activeSize(self):
        return self.active()

    def active(self):
        return self._active_size()

    def __len__(self):
        """Returns the total size"""
        return self.totalSize()

    def is_active(self, index):
        return self._iget_is_active(index)

    def get_obs_key(self):
        return self._get_obs_key()

    def __setitem__(self, index, value):
        if len(value) != 2:
            raise TypeError(
                "The value argument must be a two element tuple: (value , std)"
            )
        d, std = value

        if isinstance(index, int):
            if index < 0:
                index += len(self)
            if 0 <= index < len(self):
                self._iset(index, d, std)
            else:
                raise IndexError(
                    "Invalid index: %d. Valid range: [0,%d)" % (index, len(self))
                )
        else:
            raise TypeError(
                "The index item must be integer, not %s." % str(type(index))
            )

    def __getitem__(self, index):
        if isinstance(index, int):
            if index < 0:
                index += len(self)
            if 0 <= index < len(self):
                value = self._iget_value(index)
                std = self._iget_std(index)
                return (value, std)
            else:
                raise IndexError(
                    "Invalid index:%d - valid range: [0,%d)" % (index, len(self))
                )
        else:
            raise TypeError(
                "The index item must be integer, not %s." % str(type(index))
            )

    def free(self):
        self._free()
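
A hedged sketch of ObsBlock indexing; the observation key and size are hypothetical. Item assignment takes a (value, std) tuple, and reading an index returns the same pair:

block = ObsBlock("WOPR:OP_1", 3)
block[0] = (250.0, 10.0)              # (observed value, standard deviation)
value, std = block[0]
print(len(block), block.get_obs_key(), block.is_active(0))
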
Example #14
class ConfigKeys:

    _config_directory_key = ResPrototype("char* config_keys_get_config_directory_key()", bind=False)
    _queue_system_key     = ResPrototype("char* config_keys_get_queue_system_key()", bind=False)
    _run_template_key     = ResPrototype("char* config_keys_get_run_template_key()", bind=False)
    _custom_kw_key        = ResPrototype("char* config_keys_get_custom_kw_key()", bind=False)
    _gen_kw_key           = ResPrototype("char* config_keys_get_gen_kw_key()", bind=False)
    _history_source_key   = ResPrototype("char* config_keys_get_history_source_key()", bind=False)
    _queue_option_key     = ResPrototype("char* config_keys_get_queue_option_key()", bind=False)
    _install_job_key      = ResPrototype("char* config_keys_get_install_job_key()", bind=False)
    _path_key             = ResPrototype("char* config_keys_get_path_key()", bind=False)
    _show_refcase_key     = ResPrototype("char* config_keys_get_show_refcase_key()", bind=False)
    _show_history_key     = ResPrototype("char* config_keys_get_show_history_key()", bind=False)
    _install_job_directory_key = ResPrototype("char* config_keys_get_install_job_directory_key()", bind=False)
    _plot_settings_key    = ResPrototype("char* config_keys_get_plot_setting_key()", bind=False)
    _forward_model_key    = ResPrototype("char* config_keys_get_forward_model_key()", bind=False)
    _simulation_job_key   = ResPrototype("char* config_keys_get_simulation_job_key()", bind=False)
    _log_file_key         = ResPrototype("char* config_keys_get_log_file_key()", bind=False)
    _log_level_key        = ResPrototype("char* config_keys_get_log_level_key()", bind=False)
    _lsf_resources_key    = ResPrototype("char* config_keys_get_lsf_resources_key()", bind=False)
    _lsf_server_key       = ResPrototype("char* config_keys_get_lsf_server_key()", bind=False)
    _lsf_queue_key        = ResPrototype("char* config_keys_get_lsf_queue_key()", bind=False)
    _update_log_path      = ResPrototype("char* config_keys_get_update_log_path_key()", bind=False)
    _store_seed           = ResPrototype("char* config_keys_get_store_seed_key()", bind=False)
    _load_seed            = ResPrototype("char* config_keys_get_load_seed_key()", bind=False)
    _summary              = ResPrototype("char* config_keys_get_summary_key()", bind=False)
    _jobname              = ResPrototype("char* config_keys_get_jobname_key()", bind=False)
    _max_runtime          = ResPrototype("char* config_keys_get_max_runtime_key()", bind=False)
    _min_realizations     = ResPrototype("char* config_keys_get_min_realizations_key()", bind=False)
    _max_submit           = ResPrototype("char* config_keys_get_max_submit_key()", bind=False)
    _umask                = ResPrototype("char* config_keys_get_umask_key()", bind=False)
    _data_file            = ResPrototype("char* config_keys_get_data_file_key()", bind=False)
    _runpath              = ResPrototype("char* config_keys_get_runpath_key()", bind=False)
    _runpath_file         = ResPrototype("char* config_keys_get_runpath_file_key()", bind=False)
    _eclbase              = ResPrototype("char* config_keys_get_eclbase_key()", bind=False)
    _num_realizations     = ResPrototype("char* config_keys_get_num_realizations_key()", bind=False)
    _enspath              = ResPrototype("char* config_keys_get_enspath_key()", bind=False)
    _grid                 = ResPrototype("char* config_keys_get_grid_key()", bind=False)
    _refcase              = ResPrototype("char* config_keys_get_refcase_key()", bind=False)
    _history_source       = ResPrototype("char* config_keys_get_history_source_key()", bind=False)
    _obs_config           = ResPrototype("char* config_keys_get_obs_config_key()", bind=False)
    _time_map             = ResPrototype("char* config_keys_get_time_map_key()", bind=False)
    _gen_data             = ResPrototype("char* config_keys_get_gen_data_key()", bind=False)
    _result_file          = ResPrototype("char* config_keys_get_result_file()", bind=False)
    _report_steps         = ResPrototype("char* config_keys_get_report_steps()", bind=False)
    _input_format         = ResPrototype("char* config_keys_get_input_format()", bind=False)
    _ecl_file             = ResPrototype("char* config_keys_get_ecl_file()", bind=False)
    _output_format        = ResPrototype("char* config_keys_get_output_format()", bind=False)
    _init_files           = ResPrototype("char* config_keys_get_init_files()", bind=False)
    _random_seed          = ResPrototype("char* config_keys_get_random_seed()", bind=False)
    _license_path_key     = ResPrototype("char* config_keys_get_license_path_key()", bind=False)
    _setenv_key           = ResPrototype("char* config_keys_get_setenv_key()", bind=False)

    ARGLIST          = "ARGLIST"
    CONFIG_DIRECTORY = _config_directory_key()
    DEFINES          = "DEFINES"
    INTERNALS        = "INTERNALS"
    SIMULATION       = "SIMULATION"
    LOGGING          = "LOGGING"
    SEED             = "SEED"
    QUEUE_SYSTEM     = _queue_system_key()
    RUN_TEMPLATE     = _run_template_key()
    TEMPLATE         = "TEMPLATE"
    EXPORT           = "EXPORT"
    CUSTOM_KW        = _custom_kw_key()
    GEN_KW           = _gen_kw_key()
    NAME             = "NAME"
    OUT_FILE         = "OUT_FILE"
    PARAMETER_FILE   = "PARAMETER_FILE"
    PATH             = "PATH"
    QUEUE_OPTION     = _queue_option_key()
    DRIVER_NAME      = "DRIVER_NAME"
    OPTION           = "OPTION"
    VALUE            = "VALUE"
    INSTALL_JOB      = _install_job_key()
    PATH_KEY         = _path_key()
    SHOW_REFCASE_KEY = _show_refcase_key()
    SHOW_HISTORY_KEY = _show_history_key()
    FORWARD_MODEL    = _forward_model_key()
    SIMULATION_JOB   = _simulation_job_key()
    LOG_FILE         = _log_file_key()
    LOG_LEVEL        = _log_level_key()
    LSF_RESOURCE_KEY = _lsf_resources_key()
    LSF_QUEUE_NAME_KEY = _lsf_queue_key()
    LSF_SERVER_KEY   = _lsf_server_key()
    LSF_KEY          = 'LSF'
    UPDATE_LOG_PATH  = _update_log_path()
    STORE_SEED       = _store_seed()
    LOAD_SEED        = _load_seed()
    RANDOM_SEED      = _random_seed()
    SUMMARY          = _summary()
    JOBNAME          = _jobname()
    MAX_RUNTIME      = _max_runtime()
    MIN_REALIZATIONS = _min_realizations()
    MAX_SUBMIT       = _max_submit()
    UMASK            = _umask()
    MAX_RUNNING      = "MAX_RUNNING"
    DATA_FILE        = _data_file()
    RUNPATH          = _runpath()
    RUNPATH_FILE     = _runpath_file()
    ECLBASE          = _eclbase()
    NUM_REALIZATIONS = _num_realizations()
    ENSPATH          = _enspath()
    GRID             = _grid()
    REFCASE          = _refcase()
    HISTORY_SOURCE   = _history_source()
    OBS_CONFIG       = _obs_config()
    TIME_MAP         = _time_map()
    GEN_DATA         = _gen_data()
    RESULT_FILE      = _result_file()
    REPORT_STEPS     = _report_steps()
    INPUT_FORMAT     = _input_format()
    ECL_FILE         = _ecl_file()
    OUTPUT_FORMAT    = _output_format()
    INIT_FILES       = _init_files()
    LICENSE_PATH     = _license_path_key()
    INSTALL_JOB_DIRECTORY = _install_job_directory_key()
    SETENV = _setenv_key()
Example #15
class JobQueueNode(BaseCClass):
    TYPE_NAME = "job_queue_node"

    _alloc = ResPrototype("void* job_queue_node_alloc_python(char*,"\
                                            "char*,"\
                                            "char*,"\
                                            "int, "\
                                            "stringlist,"\
                                            "int, "\
                                            "char*,"\
                                            "char*,"\
                                            "char*"\
                                            ")", bind=False)
    _free = ResPrototype("void job_queue_node_free(job_queue_node)")
    _submit = ResPrototype(
        "int job_queue_node_submit_simple(job_queue_node, driver)")
    _kill = ResPrototype(
        "bool job_queue_node_kill_simple(job_queue_node, driver)")

    _get_status = ResPrototype("int job_queue_node_get_status(job_queue_node)")
    _update_status = ResPrototype(
        "bool job_queue_node_update_status_simple(job_queue_node, driver)")
    _set_status = ResPrototype(
        "void job_queue_node_set_status(job_queue_node, int)")

    def __init__(self, job_script, job_name, run_path, num_cpu, status_file,
                 ok_file, exit_file, done_callback_function,
                 exit_callback_function, callback_arguments):
        self.done_callback_function = done_callback_function
        self.exit_callback_function = exit_callback_function
        self.callback_arguments = callback_arguments

        argc = 1
        argv = StringList()
        argv.append(run_path)
        self.started = False
        self.run_path = run_path
        c_ptr = self._alloc(job_name, run_path, job_script, argc, argv,
                            num_cpu, ok_file, status_file, exit_file, None,
                            None, None, None)

        if c_ptr is not None:
            super(JobQueueNode, self).__init__(c_ptr)
        else:
            raise ValueError("Unable to create job node object")

    def free(self):
        self._free()

    @property
    def status(self):
        return self._get_status()

    def submit(self, driver):
        self._submit(driver)

    def run_done_callback(self):
        return self.done_callback_function(self.callback_arguments)

    def run_exit_callback(self):
        return self.exit_callback_function(self.callback_arguments)

    def is_running(self):
        return (self.status == JobStatusType.JOB_QUEUE_PENDING
                or self.status == JobStatusType.JOB_QUEUE_SUBMITTED
                or self.status == JobStatusType.JOB_QUEUE_RUNNING
                or self.status == JobStatusType.JOB_QUEUE_UNKNOWN
                )  # don't stop monitoring if LSF commands are unavailable

    def job_monitor(self, driver):

        self._submit(driver)
        self.update_status(driver)

        while self.is_running():
            time.sleep(1)
            self.update_status(driver)

        if self.status == JobStatusType.JOB_QUEUE_DONE:
            self.run_done_callback()
        elif self.status == JobStatusType.JOB_QUEUE_EXIT:
            self._set_status(JobStatusType.JOB_QUEUE_FAILED)
            self.run_exit_callback()
        elif self.status == JobStatusType.JOB_QUEUE_WAITING:
            self.started = False

    def run(self, driver):
        self.started = True
        x = Thread(target=self.job_monitor, args=(driver, ))
        x.start()
        return x

    def stop(self, driver):
        self._kill(driver)

    def update_status(self, driver):
        if self.status != JobStatusType.JOB_QUEUE_WAITING:
            self._update_status(driver)
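
A hedged sketch of running a single job through JobQueueNode; all paths and names are hypothetical, and 'queue_config' is assumed to be an existing QueueConfig instance (its driver property appears in Example #23):

driver = queue_config.driver
node = JobQueueNode(
    job_script="/path/to/job_dispatch.py",
    job_name="real_0",
    run_path="/scratch/case/realization-0",
    num_cpu=1,
    status_file="STATUS",
    ok_file="OK",
    exit_file="ERROR",
    done_callback_function=lambda args: True,
    exit_callback_function=lambda args: False,
    callback_arguments=["realization-0"],
)
monitor_thread = node.run(driver)   # starts job_monitor() in a background Thread
monitor_thread.join()               # block until the job leaves the running states
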
Example #16
File: local_config.py Project: oysteoh/ert
class LocalConfig(BaseCClass):
    """The LocalConfig class is created as a reference to an existing underlying C
    structure by the method EnKFMain.local_config(). When the pointer to the C
    local_config_type object has been properly wrapped we 'decorate' the Python
    object with references to the ensemble_config , observations and grid.

    This implies that the Python object LocalConfig is richer than the
    underlying C object local_config_type; the extra attributes are only used
    for validation.

    """

    TYPE_NAME = "local_config"

    _free = ResPrototype("void   local_config_free(local_config)")
    _clear = ResPrototype("void   local_config_clear(local_config)")
    _clear_active = ResPrototype(
        "void   local_config_clear_active(local_config)")
    _create_ministep = ResPrototype(
        "local_ministep_ref local_config_alloc_ministep(local_config, char*, analysis_module)"
    )
    _attach_ministep = ResPrototype(
        "void   local_updatestep_add_ministep(local_updatestep, local_ministep)",
        bind=False,
    )
    _create_obsdata = ResPrototype(
        "void   local_config_alloc_obsdata(local_config, char*)")
    _has_obsdata = ResPrototype(
        "bool   local_config_has_obsdata(local_config, char*)")

    _get_updatestep = ResPrototype(
        "local_updatestep_ref local_config_get_updatestep(local_config)")
    _get_ministep = ResPrototype(
        "local_ministep_ref   local_config_get_ministep(local_config, char*)")
    _get_obsdata = ResPrototype(
        "local_obsdata_ref    local_config_get_obsdata(local_config, char*)")
    _copy_obsdata = ResPrototype(
        "local_obsdata_ref    local_config_alloc_obsdata_copy(local_config, char*, char*)"
    )

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def initAttributes(self, ensemble_config, obs, grid):
        self.ensemble_config = ensemble_config
        self.obs = obs
        self.grid = grid

    def __getObservations(self):
        return self.obs

    def __getEnsembleConfig(self):
        return self.ensemble_config

    def getGrid(self):
        # The grid can be None
        return self.grid

    def free(self):
        self._free()

    def clear(self):
        self._clear()

    def clear_active(self):
        self._clear_active()

    def createMinistep(self, mini_step_key, analysis_module=None):
        """@rtype: Ministep"""
        assert isinstance(mini_step_key, str)
        if analysis_module:
            assert isinstance(analysis_module, AnalysisModule)
        ministep = self._create_ministep(mini_step_key, analysis_module)
        if ministep is None:
            raise KeyError(
                "Ministep:  {} already exists".format(mini_step_key))
        ministep.set_ensemble_config(self.__getEnsembleConfig())
        return ministep

    def createObsdata(self, obsdata_key):
        """@rtype: Obsdata"""
        assert isinstance(obsdata_key, str)
        if self._has_obsdata(obsdata_key):
            raise ValueError("Tried to add existing observation key:%s " %
                             obsdata_key)

        self._create_obsdata(obsdata_key)
        obsdata = self.getObsdata(obsdata_key)
        obsdata.initObservations(self.__getObservations())
        return obsdata

    def copyObsdata(self, src_key, target_key):
        """@rtype: Obsdata"""
        assert isinstance(src_key, str)
        assert isinstance(target_key, str)
        if not self._has_obsdata(src_key):
            raise KeyError(f"The observation set {src_key} does not exist")

        obsdata = self._copy_obsdata(src_key, target_key)
        obsdata.initObservations(self.__getObservations())
        return obsdata

    def getUpdatestep(self):
        """@rtype: UpdateStep"""
        return self._get_updatestep()

    def getMinistep(self, mini_step_key):
        """@rtype: Ministep"""
        assert isinstance(mini_step_key, str)
        return self._get_ministep(mini_step_key)

    def getObsdata(self, obsdata_key):
        """@rtype: Obsdata"""
        assert isinstance(obsdata_key, str)
        return self._get_obsdata(obsdata_key)

    def attachMinistep(self, update_step, mini_step):
        assert isinstance(mini_step, LocalMinistep)
        assert isinstance(update_step, LocalUpdateStep)
        self._attach_ministep(update_step, mini_step)

    def __repr__(self):
        return self._create_repr()
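
A hedged sketch of the decoration pattern described in the docstring above; 'ert' is assumed to be an initialised EnKFMain instance, and ensemble_config, observations and grid are assumed to have been fetched from it beforehand:

local_config = ert.local_config()      # accessor named as in the docstring above
local_config.initAttributes(ensemble_config, observations, grid)
ministep = local_config.createMinistep("MINISTEP_1")
obsdata = local_config.createObsdata("OBS_WELLS")
local_config.attachMinistep(local_config.getUpdatestep(), ministep)
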
Example #17
class GenKw(BaseCClass):
    TYPE_NAME = "gen_kw"
    _alloc = ResPrototype("void*  gen_kw_alloc(gen_kw_config)", bind=False)
    _free = ResPrototype("void   gen_kw_free(gen_kw_config)")
    _export_parameters = ResPrototype(
        "void   gen_kw_write_export_file(gen_kw , char*)")
    _export_template = ResPrototype(
        "void   gen_kw_ecl_write_template(gen_kw , char* )")
    _data_iget = ResPrototype("double gen_kw_data_iget(gen_kw, int, bool)")
    _data_iset = ResPrototype("void   gen_kw_data_iset(gen_kw, int, double)")
    _set_values = ResPrototype(
        "void   gen_kw_data_set_vector(gen_kw, double_vector)")
    _data_get = ResPrototype("double gen_kw_data_get(gen_kw, char*, bool)")
    _data_set = ResPrototype("void   gen_kw_data_set(gen_kw, char*, double)")
    _size = ResPrototype("int    gen_kw_data_size(gen_kw)")
    _has_key = ResPrototype("bool   gen_kw_data_has_key(gen_kw, char*)")
    _ecl_write = ResPrototype(
        "void   gen_kw_ecl_write(gen_kw,    char* , char* , void*)")
    _iget_key = ResPrototype("char*  gen_kw_get_name(gen_kw, int)")

    def __init__(self, gen_kw_config):
        """
         @type gen_kw_config: GenKwConfig
        """
        c_ptr = self._alloc(gen_kw_config)

        if c_ptr:
            super(GenKw, self).__init__(c_ptr)
            self.__str__ = self.__repr__
        else:
            raise ValueError(
                'Cannot issue a GenKw from the given keyword config: %s.' %
                str(gen_kw_config))

    def exportParameters(self, file_name):
        """ @type: str """
        self._export_parameters(file_name)

    def exportTemplate(self, file_name):
        """ @type: str """
        self._export_template(file_name)

    def __getitem__(self, key):
        """
        @type key: int or str
        @rtype: float
        """
        do_transform = False
        if isinstance(key, str):
            if not key in self:
                raise KeyError("Key %s does not exist" % (key))
            return self._data_get(key, do_transform)
        elif isinstance(key, int):
            if not 0 <= key < len(self):
                raise IndexError("Index out of range 0 <= %d < %d" %
                                 (key, len(self)))
            return self._data_iget(key, do_transform)
        else:
            raise TypeError(
                "Illegal type for indexing, must be int or str, got: %s" %
                (key))

    def __setitem__(self, key, value):
        """
        @type key: int or str
        @type value: float
        """
        if isinstance(key, str):
            if not key in self:
                raise KeyError("Key %s does not exist" % (key))
            self._data_set(key, value)
        elif isinstance(key, int):
            if not 0 <= key < len(self):
                raise IndexError("Index out of range 0 <= %d < %d" %
                                 (key, len(self)))
            self._data_iset(key, value)
        else:
            raise TypeError(
                "Illegal type for indexing, must be int or str, got: %s" %
                (key))

    def items(self):
        do_transform = False
        v = []
        for index in range(len(self)):
            v.append(
                (self._iget_key(index), self._data_iget(index, do_transform)))
        return v

    def eclWrite(self, path, filename):
        if path is not None:
            if not os.path.isdir(path):
                raise IOError("The directory:%s does not exist" % path)

        self._ecl_write(path, filename, None)

    def setValues(self, values):
        if len(values) == len(self):
            if isinstance(values, DoubleVector):
                self._set_values(values)
            else:
                d = DoubleVector()
                for (index, v) in enumerate(values):
                    if isinstance(v, numbers.Number):
                        d[index] = v
                    else:
                        raise TypeError("Values must numeric: %s is invalid" %
                                        v)
                self._set_values(d)
        else:
            raise ValueError("Size mismatch between GenKW and values")

    def __len__(self):
        """ @rtype: int """
        return self._size()

    def __contains__(self, item):
        return self._has_key(item)

    def free(self):
        self._free()

    def __repr__(self):
        return 'GenKw(len = %d) at 0x%x' % (len(self), self._address())
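
A hedged sketch of GenKw item access; 'gen_kw_config' is assumed to be an existing GenKwConfig whose parameters are named "PERM" and "PORO" (hypothetical):

gen_kw = GenKw(gen_kw_config)
gen_kw["PERM"] = 1.25              # set by parameter name
gen_kw[1] = 0.30                   # or by index
gen_kw.setValues([1.25, 0.30])     # bulk assignment; length must equal len(gen_kw)
for name, value in gen_kw.items():
    print(name, value)
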
Example #18
class EnsemblePlotGenData(BaseCClass):
    TYPE_NAME = "ensemble_plot_gen_data"

    _alloc = ResPrototype("void* enkf_plot_gendata_alloc(enkf_config_node)", bind=False)
    _size = ResPrototype("int   enkf_plot_gendata_get_size(ensemble_plot_gen_data)")
    _load = ResPrototype(
        "void  enkf_plot_gendata_load(ensemble_plot_gen_data, enkf_fs, int, bool_vector)"
    )
    _get = ResPrototype(
        "ensemble_plot_gen_data_vector_ref enkf_plot_gendata_iget(ensemble_plot_gen_data, int)"
    )
    _min_values = ResPrototype(
        "double_vector_ref enkf_plot_gendata_get_min_values(ensemble_plot_gen_data)"
    )
    _max_values = ResPrototype(
        "double_vector_ref enkf_plot_gendata_get_max_values(ensemble_plot_gen_data)"
    )
    _free = ResPrototype("void  enkf_plot_gendata_free(ensemble_plot_gen_data)")

    def __init__(self, ensemble_config_node, file_system, report_step, input_mask=None):
        assert isinstance(ensemble_config_node, EnkfConfigNode)
        assert ensemble_config_node.getImplementationType() == ErtImplType.GEN_DATA

        c_ptr = self._alloc(ensemble_config_node)
        if c_ptr:
            super(EnsemblePlotGenData, self).__init__(c_ptr)
        else:
            raise ValueError(
                "Unable to construct EnsemplePlotGenData from given config node!"
            )

        self.__load(file_system, report_step, input_mask)

    def __load(self, file_system, report_step, input_mask=None):
        assert isinstance(file_system, EnkfFs)
        if input_mask is not None:
            assert isinstance(input_mask, BoolVector)

        self._load(file_system, report_step, input_mask)

    def __len__(self):
        """ @rtype: int """
        return self._size()

    def __getitem__(self, index):
        """ @rtype: EnsemblePlotGenDataVector """
        return self._get(index)

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield self[cur]
            cur += 1

    def getMaxValues(self):
        """ @rtype: DoubleVector """
        return self._max_values().setParent(self)

    def getMinValues(self):
        """ @rtype: DoubleVector """
        return self._min_values().setParent(self)

    def free(self):
        self._free()

    def __repr__(self):
        return "EnsemblePlotGenData(size = %d) %s" % (len(self), self._ad_str())
Example #19
class GenObservation(BaseCClass):
    TYPE_NAME = "gen_obs"

    _alloc = ResPrototype("void*  gen_obs_alloc__(gen_data_config , char*)", bind=False)
    _free = ResPrototype("void   gen_obs_free(gen_obs)")
    _load = ResPrototype("void   gen_obs_load_observation(gen_obs , char*)")
    _scalar_set = ResPrototype("void   gen_obs_set_scalar(gen_obs , double , double)")
    _get_std = ResPrototype("double gen_obs_iget_std(gen_obs, int)")
    _get_value = ResPrototype("double gen_obs_iget_value(gen_obs, int)")
    _get_std_scaling = ResPrototype("double gen_obs_iget_std_scaling(gen_obs, int)")
    _get_size = ResPrototype("int    gen_obs_get_size(gen_obs)")
    _get_data_index = ResPrototype("int    gen_obs_get_obs_index(gen_obs, int)")
    _load_data_index = ResPrototype("void   gen_obs_load_data_index(gen_obs , char*)")
    _add_data_index = ResPrototype(
        "void   gen_obs_attach_data_index(gen_obs , int_vector)"
    )
    _update_std_scaling = ResPrototype(
        "void   gen_obs_update_std_scale(gen_obs , double , active_list)"
    )
    _get_value_vector = ResPrototype(
        "void   gen_obs_load_values(gen_obs, int, double*)"
    )
    _get_std_vector = ResPrototype("void   gen_obs_load_std(gen_obs, int, double*)")

    def __init__(
        self,
        obs_key,
        data_config: GenDataConfig,
        scalar_value=None,
        obs_file=None,
        data_index=None,
    ):
        c_ptr = self._alloc(data_config, obs_key)
        if c_ptr:
            super().__init__(c_ptr)
        else:
            raise ValueError(
                "Unable to construct GenObservation with given obs_key and data_config!"
            )

        if scalar_value is None and obs_file is None:
            raise ValueError(
                "Exactly one the scalar_value and obs_file arguments must be present"
            )

        if scalar_value is not None and obs_file is not None:
            raise ValueError(
                "Exactly one the scalar_value and obs_file arguments must be present"
            )

        if obs_file is not None:
            if not os.path.isfile(obs_file):
                raise IOError(
                    "The file with observation data:%s does not exist" % obs_file
                )
            else:
                self._load(obs_file)
        else:
            obs_value, obs_std = scalar_value
            self._scalar_set(obs_value, obs_std)

        if data_index is not None:
            if os.path.isfile(data_index):
                self._load_data_index(data_index)
            else:
                index_list = IntVector.active_list(data_index)
                self._add_data_index(index_list)

    def __len__(self):
        return self._get_size()

    def __getitem__(self, obs_index):
        if obs_index < 0:
            obs_index += len(self)

        if 0 <= obs_index < len(self):
            return (self.getValue(obs_index), self.getStandardDeviation(obs_index))
        else:
            raise IndexError("Invalid index.  Valid range: [0,%d)" % len(self))

    def getValue(self, obs_index):
        """@rtype: float"""
        return self._get_value(obs_index)

    def getStandardDeviation(self, obs_index):
        """@rtype: float"""
        return self._get_std(obs_index)

    def getStdScaling(self, obs_index):
        """@rtype: float"""
        return self._get_std_scaling(obs_index)

    def updateStdScaling(self, factor, active_list: ActiveList):
        self._update_std_scaling(factor, active_list)

    def get_data_points(self):
        np_vector = np.zeros(len(self))
        self._get_value_vector(
            len(self), np_vector.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
        )
        return np_vector

    def get_std(self):
        np_vector = np.zeros(len(self))
        self._get_std_vector(
            len(self), np_vector.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
        )
        return np_vector

    def getSize(self):
        """@rtype: float"""
        return len(self)

    def getIndex(self, obs_index):
        """@rtype: int"""
        return self.getDataIndex(obs_index)

    def getDataIndex(self, obs_index):
        return self._get_data_index(obs_index)

    def free(self):
        self._free()

    def __repr__(self):
        si = len(self)
        ad = self._ad_str()
        return "GenObservation(size = %d) %s" % (si, ad)
Example #20
File: enkf_obs.py Project: oyvindeide/ert
class EnkfObs(BaseCClass):
    TYPE_NAME = "enkf_obs"

    _get_size = ResPrototype("int enkf_obs_get_size( enkf_obs )")
    _valid = ResPrototype("bool enkf_obs_is_valid(enkf_obs)")

    _clear = ResPrototype("void enkf_obs_clear( enkf_obs )")
    _alloc_typed_keylist = ResPrototype(
        "stringlist_obj enkf_obs_alloc_typed_keylist(enkf_obs, enkf_obs_impl_type)"
    )
    _alloc_matching_keylist = ResPrototype(
        "stringlist_obj enkf_obs_alloc_matching_keylist(enkf_obs, char*)")
    _has_key = ResPrototype("bool enkf_obs_has_key(enkf_obs, char*)")
    _obs_type = ResPrototype(
        "enkf_obs_impl_type enkf_obs_get_type(enkf_obs, char*)")
    _get_vector = ResPrototype(
        "obs_vector_ref enkf_obs_get_vector(enkf_obs, char*)")
    _iget_vector = ResPrototype(
        "obs_vector_ref enkf_obs_iget_vector(enkf_obs, int)")
    _iget_obs_time = ResPrototype(
        "time_t enkf_obs_iget_obs_time(enkf_obs, int)")
    _add_obs_vector = ResPrototype(
        "void enkf_obs_add_obs_vector(enkf_obs, obs_vector)")

    def __len__(self):
        return self._get_size()

    def __contains__(self, key):
        return self._has_key(key)

    def __iter__(self):
        """@rtype: ObsVector"""
        iobs = 0
        while iobs < len(self):
            vector = self[iobs]
            yield vector
            iobs += 1

    def __getitem__(self, key_or_index):
        """@rtype: ObsVector"""
        if isinstance(key_or_index, str):
            if self.hasKey(key_or_index):
                return self._get_vector(key_or_index).setParent(self)
            else:
                raise KeyError("Unknown key: %s" % key_or_index)
        elif isinstance(key_or_index, int):
            idx = key_or_index
            if idx < 0:
                idx += len(self)
            if 0 <= idx < len(self):
                return self._iget_vector(idx).setParent(self)
            else:
                raise IndexError(
                    "Invalid index: %d.  Valid range is [0, %d)." %
                    (key_or_index, len(self)))
        else:
            raise TypeError(
                "Key or index must be of type str or int, not %s." %
                str(type(key_or_index)))

    def getTypedKeylist(
        self,
        observation_implementation_type: EnkfObservationImplementationType
    ) -> StringList:
        """
        @type observation_implementation_type: EnkfObservationImplementationType
        @rtype: StringList
        """
        return self._alloc_typed_keylist(observation_implementation_type)

    def obsType(self, key):
        if key in self:
            return self._obs_type(key)
        else:
            raise KeyError("Unknown observation key:%s" % key)

    def getMatchingKeys(self, pattern, obs_type=None):
        """
        Will return a list of all the observation keys matching the input
        pattern. The matching is based on fnmatch().
        """
        key_list = self._alloc_matching_keylist(pattern)
        if obs_type:
            new_key_list = []
            for key in key_list:
                if self.obsType(key) == obs_type:
                    new_key_list.append(key)
            return new_key_list
        else:
            return key_list

    def hasKey(self, key):
        """@rtype: bool"""
        return key in self

    def getObservationTime(self, index):
        """@rtype: CTime"""
        return self._iget_obs_time(index)

    def addObservationVector(self, observation_vector):
        assert isinstance(observation_vector, ObsVector)

        observation_vector.convertToCReference(self)

        self._add_obs_vector(observation_vector)

    @property
    def valid(self):
        return self._valid()

    def clear(self):
        self._clear()

    def __repr__(self):
        validity = "valid" if self.valid else "invalid"
        return self._create_repr("%s, len=%d" % (validity, len(self)))
Example #21
class ExtParam(BaseCClass):
    TYPE_NAME = "ext_param"
    _alloc = ResPrototype("void*  ext_param_alloc( ext_param_config )",
                          bind=False)
    _free = ResPrototype("void   ext_param_free( ext_param )")
    _iset = ResPrototype("void   ext_param_iset( ext_param, int, double)")
    _iiset = ResPrototype(
        "void   ext_param_iiset( ext_param, int, int, double)")
    _key_set = ResPrototype(
        "void   ext_param_key_set( ext_param, char*, double)")
    _key_suffix_set = ResPrototype(
        "void   ext_param_key_suffix_set( ext_param, char*, char*, double)")
    _iget = ResPrototype("double ext_param_iget( ext_param, int)")
    _iiget = ResPrototype("double ext_param_iiget( ext_param, int, int)")
    _key_get = ResPrototype("double ext_param_key_get( ext_param, char*)")
    _key_suffix_get = ResPrototype(
        "double ext_param_key_suffix_get( ext_param, char*, char*)")
    _export = ResPrototype("void   ext_param_json_export( ext_param, char*)")
    _get_config = ResPrototype("void* ext_param_get_config(ext_param)")

    def __init__(self, config):
        c_ptr = self._alloc(config)
        super(ExtParam, self).__init__(c_ptr)

    def __contains__(self, key):
        return key in self.config

    def __len__(self):
        return len(self.config)

    def __getitem__(self, index):
        if isinstance(index, tuple):
            # if the index is key suffix, assume they are both strings
            key, suffix = index
            if not isinstance(key, string_types) or not isinstance(
                    suffix, string_types):
                raise TypeError(
                    "Expected a pair of strings, got {}".format(index))
            self._check_key_suffix(key, suffix)
            return self._key_suffix_get(key, suffix)

        # index is just the key, it can be either a string or an int
        if isinstance(index, string_types):
            self._check_key_suffix(index)
            return self._key_get(index)

        index = self._roll_key_index(index)
        self._check_index(index)
        return self._iget(index)

    def __setitem__(self, index, value):
        if isinstance(index, tuple):
            # if the index is key suffix, assume they are both strings
            key, suffix = index
            if not isinstance(key, string_types) or not isinstance(
                    suffix, string_types):
                raise TypeError(
                    "Expected a pair of strings, got {}".format(index))
            self._check_key_suffix(key, suffix)
            self._key_suffix_set(key, suffix, value)
            return

        # index is just the key, it can be either a string or an int
        if isinstance(index, string_types):
            self._check_key_suffix(index)
            self._key_set(index, value)
        else:
            index = self._roll_key_index(index)
            self._check_index(index)
            self._iset(index, value)

    def _roll_key_index(self, index):
        """Support indexing from the end of the list of keys"""
        return index if index >= 0 else index + len(self)

    def _check_index(self, kidx, sidx=None):
        """Raise if any of the following is true:
        - kidx is not a valid index for keys
        - the key referred to by kidx has no suffixes, but sidx is given
        - the key referred to by kidx has suffixes, but sidx is None
        - the key referred to by kidx has suffixes, and sidx is not a valid
          suffix index
        """
        if kidx < 0 or kidx >= len(self):
            raise IndexError(
                "Invalid key index {}. Valid range is [0, {})".format(
                    kidx, len(self)))
        key, suffixes = self.config[kidx]
        if not suffixes:
            if sidx is None:
                return  # we are good
            raise IndexError(
                "Key {} has no suffixes, but suffix {} requested".format(
                    key, sidx))
        assert len(suffixes) > 0
        if sidx is None:
            raise IndexError(
                "Key {} has suffixes, a suffix index must be specified".format(
                    key))
        if sidx < 0 or sidx >= len(suffixes):
            raise IndexError(
                ("Suffix index {} is out of range for key {}. Valid range is "
                 "[0, {})").format(sidx, key, len(suffixes)))

    def _check_key_suffix(self, key, suffix=None):
        """Raise if any of the following is true:
        - key is not present in config
        - key has no suffixes but a suffix is given
        - key has suffixes but suffix is None
        - key has suffixes but suffix is not among them
        """
        if not key in self:
            raise KeyError("No such key: {}".format(key))
        suffixes = self.config[key]
        if not suffixes:
            if suffix is None:
                return
            raise KeyError(
                "Key {} has no suffixes, but suffix {} requested".format(
                    key, suffix))
        assert len(suffixes) > 0
        if suffix is None:
            raise KeyError(
                "Key {} has suffixes, a suffix must be specified".format(key))
        if suffix not in suffixes:
            raise KeyError(
                "Key {} has suffixes {}. Can't find the requested suffix {}".
                format(key, suffixes, suffix))

    @property
    def config(self):
        return ExtParamConfig.createCReference(self._get_config(), self)

    # This could in the future be specialized to take a numpy vector,
    # which could be vector-assigned in C.
    def set_vector(self, values):
        if len(values) != len(self):
            raise ValueError("Size mismatch")

        for index, value in enumerate(values):
            self[index] = value

    def free(self):
        self._free()

    def export(self, fname):
        self._export(fname)
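
A hedged sketch of key and (key, suffix) indexing on ExtParam; 'ext_param_config' is assumed to be an ExtParamConfig where "WELL_RATE" was declared with suffixes ["OP_1", "OP_2"] and "THRESHOLD" without suffixes (all names hypothetical):

param = ExtParam(ext_param_config)
param["THRESHOLD"] = 0.5                   # plain key: one value
param["WELL_RATE", "OP_1"] = 100.0         # (key, suffix) tuple indexing
print(param["THRESHOLD"], param["WELL_RATE", "OP_1"])
param.export("ext_param.json")             # JSON export via the C layer
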
Example #22
class ErtWorkflowList(BaseCClass):
    TYPE_NAME = "ert_workflow_list"

    _alloc_namelist = ResPrototype(
        "stringlist_obj ert_workflow_list_alloc_namelist(ert_workflow_list)")
    _has_workflow = ResPrototype(
        "bool ert_workflow_list_has_workflow(ert_workflow_list, char*)")
    _get_workflow = ResPrototype(
        "workflow_ref ert_workflow_list_get_workflow(ert_workflow_list, char*)"
    )
    _get_context = ResPrototype(
        "subst_list_ref ert_workflow_list_get_context(ert_workflow_list)")
    _add_job = ResPrototype(
        "void ert_workflow_list_add_job(ert_workflow_list, char*, char*)")
    _has_job = ResPrototype(
        "bool ert_workflow_list_has_job(ert_workflow_list, char*)")
    _get_job = ResPrototype(
        "workflow_job_ref ert_workflow_list_get_job(ert_workflow_list, char*)")
    _get_job_names = ResPrototype(
        "stringlist_obj ert_workflow_list_get_job_names(ert_workflow_list)")
    _free = ResPrototype("void ert_workflow_list_free(ert_workflow_list)")

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def getWorkflowNames(self):
        """ @rtype: StringList """
        return self._alloc_namelist()

    def __contains__(self, workflow_name):
        assert isinstance(workflow_name, str)
        return self._has_workflow(workflow_name)

    def __getitem__(self, item):
        """ @rtype: Workflow """
        if not item in self:
            raise KeyError(
                "Item '%s' is not in the list of available workflows." % item)

        return self._get_workflow(item).setParent(self)

    def getContext(self):
        """ @rtype: SubstitutionList """
        return self._get_context()

    def free(self):
        self._free()

    def __str__(self):
        return 'ErtWorkflowList with jobs: %s' % str(self.getJobNames())

    def addJob(self, job_name, job_path):
        """
        @type job_name: str
        @type job_path: str
        """
        self._add_job(job_name, job_path)

    def hasJob(self, job_name):
        """
         @type job_name: str
         @rtype: bool
        """
        return self._has_job(job_name)

    def getJob(self, job_name):
        """ @rtype: WorkflowJob """
        return self._get_job(job_name)

    def getJobNames(self):
        """ @rtype: StringList """
        return self._get_job_names()

    def getPluginJobs(self):
        """ @rtype: list of WorkflowJob """
        plugins = []
        for job_name in self.getJobNames():
            job = self.getJob(job_name)
            if job.isPlugin():
                plugins.append(job)
        return plugins
Example #23
class QueueConfig(BaseCClass):

    TYPE_NAME = "queue_config"

    _free = ResPrototype("void queue_config_free( queue_config )")
    _alloc_job_queue = ResPrototype(
        "job_queue_obj queue_config_alloc_job_queue( queue_config )")
    _alloc = ResPrototype("void* queue_config_alloc_load(char*)", bind=False)
    _alloc_full = ResPrototype(
        "void* queue_config_alloc_full(char*, bool, int, int, queue_driver_enum)",
        bind=False,
    )
    _alloc_content = ResPrototype("void* queue_config_alloc(config_content)",
                                  bind=False)
    _alloc_local_copy = ResPrototype(
        "queue_config_obj queue_config_alloc_local_copy( queue_config )")
    _has_job_script = ResPrototype(
        "bool queue_config_has_job_script( queue_config )")
    _get_job_script = ResPrototype(
        "char* queue_config_get_job_script(queue_config)")
    _max_submit = ResPrototype("int queue_config_get_max_submit(queue_config)")
    _queue_system = ResPrototype(
        "char* queue_config_get_queue_system(queue_config)")
    _queue_driver = ResPrototype(
        "driver_ref queue_config_get_queue_driver(queue_config, char*)")
    _get_num_cpu = ResPrototype("int queue_config_get_num_cpu(queue_config)")

    _lsf_queue_opt = ResPrototype("char* queue_config_lsf_queue_name()",
                                  bind=False)
    _lsf_server_opt = ResPrototype("char* queue_config_lsf_server()",
                                   bind=False)
    _lsf_resource_opt = ResPrototype("char* queue_config_lsf_resource()",
                                     bind=False)
    _lsf_driver_opt = ResPrototype("char* queue_config_lsf_driver_name()",
                                   bind=False)

    def __init__(self,
                 user_config_file=None,
                 config_content=None,
                 config_dict=None):
        configs = sum([
            1 for x in [user_config_file, config_content, config_dict]
            if x is not None
        ])

        if configs > 1:
            raise ValueError(
                "Attempting to create QueueConfig object with multiple config objects"
            )

        if configs == 0:
            raise ValueError(
                "Attempting to create QueueConfig object with no config objects"
            )

        c_ptr = None
        if user_config_file is not None:
            c_ptr = self._alloc(user_config_file)

        if config_content is not None:
            c_ptr = self._alloc_content(config_content)

        if config_dict is not None:
            c_ptr = self._alloc_full(
                config_dict[ConfigKeys.JOB_SCRIPT],
                config_dict[ConfigKeys.USER_MODE],
                config_dict[ConfigKeys.MAX_SUBMIT],
                config_dict[ConfigKeys.NUM_CPU],
                config_dict[ConfigKeys.QUEUE_SYSTEM],
            )
        if not c_ptr:
            raise ValueError("Unable to create QueueConfig instance")

        super(QueueConfig, self).__init__(c_ptr)

        # Apply any queue options from the config dict to the selected driver
        if config_dict is not None:
            queue_options = config_dict.get(ConfigKeys.QUEUE_OPTION, [])
            for option in queue_options:
                self.driver.set_option(option[ConfigKeys.NAME],
                                       option[ConfigKeys.VALUE])

    def create_job_queue(self):
        queue = JobQueue(self.driver, max_submit=self.max_submit)
        return queue

    def create_local_copy(self):
        return self._alloc_local_copy()

    def has_job_script(self):
        return self._has_job_script()

    def free(self):
        self._free()

    @property
    def max_submit(self):
        return self._max_submit()

    @property
    def queue_name(self):
        return self.driver.get_option(QueueConfig.LSF_QUEUE_NAME_KEY)

    @property
    def queue_system(self):
        """The queue system in use, e.g. LSF or LOCAL"""
        return self._queue_system()

    @property
    def job_script(self):
        return self._get_job_script()

    @property
    def driver(self):
        return self._queue_driver(self.queue_system).setParent(self)

    def _assert_lsf(self, key="driver"):
        sys = self.queue_system
        if sys != QueueConfig.LSF_KEY:
            fmt = "Cannot fetch LSF {key}, current queue is {system}"
            raise ValueError(fmt.format(key=key, system=self.queue_system))

    @property
    def _lsf_driver(self):
        self._assert_lsf()
        driver = self._queue_driver(self.LSF_KEY)
        return driver.setParent(self)

    @property
    def lsf_resource(self):
        self._assert_lsf(key=QueueConfig.LSF_RESOURCE_KEY)
        return self._lsf_driver.get_option(self.LSF_RESOURCE_KEY)

    @property
    def lsf_server(self):
        self._assert_lsf(key=QueueConfig.LSF_SERVER_KEY)
        return self._lsf_driver.get_option(self.LSF_SERVER_KEY)

    @property
    def num_cpu(self):
        return self._get_num_cpu()

    def __eq__(self, other):

        if self.max_submit != other.max_submit:
            return False
        if self.queue_system != other.queue_system:
            return False
        if self.num_cpu != other.num_cpu:
            return False
        if self.job_script != other.job_script:
            return False

        if self.queue_system != "LOCAL":
            if self.queue_name != other.queue_name:
                return False
            if self.lsf_resource != other.lsf_resource:
                return False
            if self.lsf_server != other.lsf_server:
                return False

        return True

    LSF_KEY = _lsf_driver_opt()
    LSF_QUEUE_NAME_KEY = _lsf_queue_opt()
    LSF_RESOURCE_KEY = _lsf_resource_opt()
    LSF_SERVER_KEY = _lsf_server_opt()
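
A hedged usage sketch for the QueueConfig example above; "queue_config.ert" is a hypothetical configuration file path, and the class above is assumed to be in scope.

def summarize_queue_config(config_path="queue_config.ert"):
    # Build the configuration from a user config file (exactly one config source allowed).
    queue_config = QueueConfig(user_config_file=config_path)
    print("queue system:", queue_config.queue_system,
          "max_submit:", queue_config.max_submit,
          "num_cpu:", queue_config.num_cpu)
    # Create a JobQueue driven by the configured queue driver (see Example #24).
    return queue_config.create_job_queue()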
Example #24
class JobQueue(BaseCClass):
    # If the queue is created with size == 0 that means that it will
    # just grow as needed; for the queue layer to know when to exit
    # you must call the function submit_complete() when you have no
    # more jobs to submit.
    #
    # If the number of jobs is known in advance you can create the
    # queue with a finite value for size, in that case it is not
    # necessary to explicitly inform the queue layer when all jobs have
    # been submitted.
    TYPE_NAME = "job_queue"
    _alloc = ResPrototype(
        "void* job_queue_alloc( int , char* , char* , char* )", bind=False)
    _start_user_exit = ResPrototype(
        "bool job_queue_start_user_exit( job_queue )")
    _get_user_exit = ResPrototype("bool job_queue_get_user_exit( job_queue )")
    _free = ResPrototype("void job_queue_free( job_queue )")
    _set_max_job_duration = ResPrototype(
        "void job_queue_set_max_job_duration( job_queue , int)")
    _get_max_job_duration = ResPrototype(
        "int  job_queue_get_max_job_duration( job_queue )")
    _set_driver = ResPrototype(
        "void job_queue_set_driver( job_queue , void* )")
    _kill_job = ResPrototype("bool job_queue_kill_job( job_queue , int )")
    _run_jobs = ResPrototype(
        "void job_queue_run_jobs_threaded(job_queue , int , bool)")
    _iget_driver_data = ResPrototype(
        "void* job_queue_iget_driver_data( job_queue , int)")

    _num_running = ResPrototype("int  job_queue_get_num_running( job_queue )")
    _num_complete = ResPrototype(
        "int  job_queue_get_num_complete( job_queue )")
    _num_waiting = ResPrototype("int  job_queue_get_num_waiting( job_queue )")
    _num_pending = ResPrototype("int  job_queue_get_num_pending( job_queue )")

    _is_running = ResPrototype("bool job_queue_is_running( job_queue )")
    _submit_complete = ResPrototype(
        "void job_queue_submit_complete( job_queue )")
    _iget_sim_start = ResPrototype(
        "time_t job_queue_iget_sim_start( job_queue , int)")
    _get_active_size = ResPrototype(
        "int  job_queue_get_active_size( job_queue )")
    _set_pause_on = ResPrototype("void job_queue_set_pause_on(job_queue)")
    _set_pause_off = ResPrototype("void job_queue_set_pause_off(job_queue)")
    _get_max_submit = ResPrototype("int job_queue_get_max_submit(job_queue)")

    _get_job_status = ResPrototype(
        "job_status_type_enum job_queue_iget_job_status(job_queue, int)")

    _get_ok_file = ResPrototype("char* job_queue_get_ok_file(job_queue)")
    _get_exit_file = ResPrototype("char* job_queue_get_exit_file(job_queue)")
    _get_status_file = ResPrototype(
        "char* job_queue_get_status_file(job_queue)")
    _add_job = ResPrototype(
        "int job_queue_add_job_node(job_queue, job_queue_node)")

    def __repr__(self):
        nrun, ncom, nwait, npend = (
            self._num_running(),
            self._num_complete(),
            self._num_waiting(),
            self._num_pending(),
        )
        isrun = "running" if self.isRunning else "not running"
        cnt = ("%s, num_running=%d, num_complete=%d, "
               "num_waiting=%d, num_pending=%d, active=%d")
        return self._create_repr(cnt %
                                 (isrun, nrun, ncom, nwait, npend, len(self)))

    def __init__(self, driver, max_submit=2, size=0):
        """
        Short doc...
        The @max_submit argument says how many times the job be submitted
        (including a failure)
              max_submit = 2: means that we can submit job once more
        The @size argument is used to say how many jobs the queue will
        run, in total.
              size = 0: That means that you do not tell the queue in
                advance how many jobs you have. The queue will just run
                all the jobs you add, but you have to inform the queue in
                some way that all jobs have been submitted. To achieve
                this you should call the submit_complete() method when all
                jobs have been submitted.#

              size > 0: The queue will know exactly how many jobs to run,
                and will continue until this number of jobs have completed
                - it is not necessary to call the submit_complete() method
                in this case.
        """

        OK_file = "OK"
        status_file = "STATUS"
        exit_file = "ERROR"
        self.job_list = []
        self._stopped = False
        c_ptr = self._alloc(max_submit, OK_file, status_file, exit_file)
        super().__init__(c_ptr)
        self.size = size

        self.driver = driver
        self._set_driver(driver.from_param(driver))
        self._differ = QueueDiffer()

    def kill_job(self, queue_index):
        """
        Will kill job nr @index.
        """
        self._kill_job(queue_index)

    def start(self, blocking=False):
        verbose = False
        self._run_jobs(self.size, verbose)

    def clear(self):
        pass

    def block_waiting(self):
        """
        Will block as long as there are waiting jobs.
        """
        while self.num_waiting > 0:
            time.sleep(1)

    def block(self):
        """
        Will block as long as there are running jobs.
        """
        while self.isRunning:
            time.sleep(1)

    def submit_complete(self):
        """
        Method to inform the queue that all jobs have been submitted.

        If the queue has been created with size == 0 the queue has no
        way of knowing when all jobs have completed; hence in that
        case you must call the submit_complete() method when all jobs
        have been submitted.

        If you know in advance exactly how many jobs you will run that
        should be specified with the size argument when creating the
        queue, in that case it is not necessary to call the
        submit_complete() method.
        """
        self._submit_complete()

    @property
    def isRunning(self):
        return self._is_running()

    @property
    def num_running(self):
        return self._num_running()

    @property
    def num_pending(self):
        return self._num_pending()

    @property
    def num_waiting(self):
        return self._num_waiting()

    @property
    def num_complete(self):
        return self._num_complete()

    def __getitem__(self, index):
        idx = index
        ls = len(self)
        if idx < 0:
            idx += ls
        if 0 <= idx < ls:
            return self._iget_driver_data(idx)
        raise IndexError("index out of range, was: %d should be in [0, %d)" %
                         (index, ls))

    def exists(self, index):
        return self[index]

    def get_max_running(self):
        return self.driver.get_max_running()

    def set_max_running(self, max_running):
        self.driver.set_max_running(max_running)

    def get_max_job_duration(self):
        return self._get_max_job_duration()

    def set_max_job_duration(self, max_duration):
        self._set_max_job_duration(max_duration)

    @property
    def max_submit(self):
        return self._get_max_submit()

    def killAllJobs(self):
        # The queue will not set the user_exit flag before the
        # queue is in a running state. If the queue does not
        # change to running state within a timeout the C function
        # will return False, and that False value is just passed
        # along.
        user_exit = self._start_user_exit()
        if user_exit:
            while self.isRunning:
                time.sleep(0.1)
            return True
        else:
            return False

    def igetSimStart(self, job_index):
        return self._iget_sim_start(job_index)

    def getUserExit(self) -> bool:
        # Will check if a user_exit has been initiated on the job. The
        # queue can be queried about this status until a
        # job_queue_reset() call is invoked, and that should not be
        # done before the queue is recycled to run another batch of
        # simulations.
        return self._get_user_exit()

    def set_pause_on(self):
        self._set_pause_on()

    def set_pause_off(self):
        self._set_pause_off()

    def free(self):
        self._free()

    def __len__(self):
        return self._get_active_size()

    def getJobStatus(self, job_number):
        """@rtype: JobStatusType"""
        return self._get_job_status(job_number)

    def is_active(self):
        for job in self.job_list:
            if (job.thread_status == ThreadStatus.READY
                    or job.thread_status == ThreadStatus.RUNNING
                    or job.thread_status == ThreadStatus.STOPPING):
                return True
        return False

    def fetch_next_waiting(self):
        for job in self.job_list:
            if job.thread_status == ThreadStatus.READY:
                return job
        return None

    def count_status(self, status):
        return len([job for job in self.job_list if job.status == status])

    @property
    def stopped(self):
        return self._stopped

    def kill_all_jobs(self) -> None:
        self._stopped = True

    @property
    def queue_size(self):
        return len(self.job_list)

    @property
    def ok_file(self):
        return self._get_ok_file()

    @property
    def exit_file(self):
        return self._get_exit_file()

    @property
    def status_file(self):
        return self._get_status_file()

    def add_job(self, job, iens):
        job.convertToCReference(None)
        queue_index = self._add_job(job)
        self.job_list.append(job)
        self._differ.add_state(queue_index, iens, job.status.value)
        return queue_index

    def count_running(self):
        return sum(job.thread_status == ThreadStatus.RUNNING
                   for job in self.job_list)

    def max_running(self):
        if self.get_max_running() == 0:
            return len(self.job_list)
        else:
            return self.get_max_running()

    def available_capacity(self):
        return not self.stopped and self.count_running() < self.max_running()

    def stop_jobs(self):
        for job in self.job_list:
            job.stop()
        while self.is_active():
            time.sleep(1)

    async def stop_jobs_async(self):
        for job in self.job_list:
            job.stop()
        while self.is_active():
            await asyncio.sleep(1)

    def assert_complete(self):
        for job in self.job_list:
            if job.thread_status != ThreadStatus.DONE:
                msg = ("Unexpected job status type after "
                       "running job: {} with thread status: {}")
                raise AssertionError(msg.format(job.status, job.thread_status))

    def launch_jobs(self, pool_sema):
        # Start waiting jobs
        while self.available_capacity():
            job = self.fetch_next_waiting()
            if job is None:
                break
            job.run(
                driver=self.driver,
                pool_sema=pool_sema,
                max_submit=self.max_submit,
            )

    def execute_queue(self, pool_sema, evaluators):
        while self.is_active() and not self.stopped:
            self.launch_jobs(pool_sema)

            time.sleep(1)

            if evaluators is not None:
                for func in evaluators:
                    func()

        if self.stopped:
            self.stop_jobs()

        self.assert_complete()

    @staticmethod
    def _translate_change_to_cloudevent(ee_id: str, real_id: str,
                                        status: JobStatusType) -> CloudEvent:
        return CloudEvent(
            {
                "type": _queue_state_event_type(status),
                "source": f"/ert/ee/{ee_id}/real/{real_id}/step/{0}",
                "datacontenttype": "application/json",
            },
            {
                "queue_event_type": status,
            },
        )

    @staticmethod
    async def _publish_changes(
        ee_id: str,
        changes,
        ws_uri: str,
        ssl_context: ssl.SSLContext,
        headers: Mapping[str, str],
    ):
        events = deque([
            JobQueue._translate_change_to_cloudevent(ee_id, real_id, status)
            for real_id, status in changes.items()
        ])
        retries = 0
        while True:
            try:
                async with connect(ws_uri,
                                   ssl=ssl_context,
                                   extra_headers=headers) as websocket:
                    while events:
                        await asyncio.wait_for(
                            websocket.send(to_json(events[0])), 60)
                        events.popleft()
                    return
            except (ConnectionClosedError, asyncio.TimeoutError) as e:
                if retries >= 10:
                    logger.exception(
                        "Connection to websocket %s failed, unable to publish changes",
                        ws_uri,
                    )
                    raise

                # websockets for python > 3.6 comes with a built-in backoff;
                # implement a crude one here
                retries += 1
                backoff = max(3, min(60, 2**retries))
                logger.info(
                    "Connection to websocket %s was closed, retry in %d seconds.",
                    ws_uri,
                    backoff,
                    exc_info=e,
                )

                await asyncio.sleep(backoff)

    async def execute_queue_async(  # pylint: disable=too-many-arguments
        self,
        ws_uri: str,
        ee_id: str,
        pool_sema: threading.BoundedSemaphore,
        evaluators: Callable[..., Any],
        cert: Optional[Union[str, bytes]] = None,
        token: Optional[str] = None,
    ) -> None:
        if evaluators is None:
            evaluators = []
        if cert is not None:
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
            ssl_context.load_verify_locations(cadata=cert)
        else:
            ssl_context = True if ws_uri.startswith("wss") else None
        headers = Headers()
        if token is not None:
            headers["token"] = token

        try:
            await JobQueue._publish_changes(ee_id, self._differ.snapshot(),
                                            ws_uri, ssl_context, headers)
            while True:
                self.launch_jobs(pool_sema)

                await asyncio.sleep(1)

                for func in evaluators:
                    func()

                await JobQueue._publish_changes(
                    ee_id, self.changes_after_transition(), ws_uri,
                    ssl_context, headers)

                if self.stopped:
                    raise asyncio.CancelledError

                if not self.is_active():
                    break

        except asyncio.CancelledError:
            logger.debug("queue cancelled, stopping jobs...")
            await self.stop_jobs_async()
            logger.debug("jobs stopped, re-raising CancelledError")
            raise

        except Exception:
            logger.exception(
                "unexpected exception in queue",
                exc_info=True,
            )
            await self.stop_jobs_async()
            logger.debug("jobs stopped, re-raising exception")
            raise

        self.assert_complete()
        self._differ.transition(self.job_list)
        await JobQueue._publish_changes(ee_id, self._differ.snapshot(), ws_uri,
                                        ssl_context, headers)

    # pylint: disable=too-many-arguments
    def add_job_from_run_arg(
        self,
        run_arg: "RunArg",
        res_config: "ResConfig",
        max_runtime: Optional[int],
        ok_cb: Callable[..., Any],
        exit_cb: Callable[..., Any],
    ) -> None:
        job_name = run_arg.job_name
        run_path = run_arg.runpath
        job_script = res_config.queue_config.job_script
        num_cpu = res_config.queue_config.num_cpu
        if num_cpu == 0:
            num_cpu = res_config.ecl_config.num_cpu

        job = JobQueueNode(
            job_script=job_script,
            job_name=job_name,
            run_path=run_path,
            num_cpu=num_cpu,
            status_file=self.status_file,
            ok_file=self.ok_file,
            exit_file=self.exit_file,
            done_callback_function=ok_cb,
            exit_callback_function=exit_cb,
            callback_arguments=[run_arg, res_config],
            max_runtime=max_runtime,
        )

        if job is None:
            return
        run_arg._set_queue_index(self.add_job(job, run_arg.iens))

    def add_ee_stage(self, stage, callback_timeout=None):
        job = JobQueueNode(
            job_script=stage.job_script,
            job_name=stage.job_name,
            run_path=stage.run_path,
            num_cpu=stage.num_cpu,
            status_file=self.status_file,
            ok_file=self.ok_file,
            exit_file=self.exit_file,
            done_callback_function=stage.done_callback,
            exit_callback_function=stage.exit_callback,
            callback_arguments=stage.callback_arguments,
            max_runtime=stage.max_runtime,
            callback_timeout=callback_timeout,
        )
        if job is None:
            raise ValueError("JobQueueNode constructor created None job")

        iens = stage.run_arg.iens
        stage.run_arg._set_queue_index(self.add_job(job, iens))

    def stop_long_running_jobs(self,
                               minimum_required_realizations: int) -> None:
        finished_realizations = self.count_status(JobStatusType.JOB_QUEUE_DONE)
        if finished_realizations < minimum_required_realizations:
            return

        completed_jobs = [
            job for job in self.job_list
            if job.status == JobStatusType.JOB_QUEUE_DONE
        ]
        average_runtime = sum([job.runtime for job in completed_jobs]) / float(
            len(completed_jobs))

        for job in self.job_list:
            if job.runtime > LONG_RUNNING_FACTOR * average_runtime:
                job.stop()

    def snapshot(self) -> Optional[Dict[int, str]]:
        """Return the whole state, or None if there was no snapshot."""
        return self._differ.snapshot()

    def changes_after_transition(self) -> Dict[int, str]:
        old_state, new_state = self._differ.transition(self.job_list)
        return self._differ.diff_states(old_state, new_state)

    def add_ensemble_evaluator_information_to_jobs_file(
        self,
        ee_id: str,
        dispatch_url: str,
        cert: Optional[Union[str, bytes]],
        token: Optional[str],
    ) -> None:
        for q_index, q_node in enumerate(self.job_list):
            if cert is not None:
                cert_path = f"{q_node.run_path}/{CERT_FILE}"
                with open(cert_path, "w") as cert_file:
                    cert_file.write(cert)
            with open(f"{q_node.run_path}/{JOBS_FILE}", "r+") as jobs_file:
                data = json.load(jobs_file)

                data["ee_id"] = ee_id
                data["real_id"] = self._differ.qindex_to_iens(q_index)
                data["step_id"] = 0
                data["dispatch_url"] = dispatch_url
                data["ee_token"] = token
                data["ee_cert_path"] = cert_path if cert is not None else None

                jobs_file.seek(0)
                jobs_file.truncate()
                json.dump(data, jobs_file, indent=4)
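
A hedged sketch tying the JobQueue example above to the size == 0 / submit_complete() contract described in its docstring. `driver`, `run_args`, `res_config`, `ok_cb` and `exit_cb` are assumed to come from an existing ERT configuration; the semaphore limit of 10 is an arbitrary choice for the sketch, and JobStatusType is assumed importable as used in the class above.

import threading

def run_with_unknown_job_count(driver, run_args, res_config, ok_cb, exit_cb):
    # size == 0: the job count is not known up front, so submit_complete() is required.
    queue = JobQueue(driver, max_submit=2, size=0)
    for run_arg in run_args:
        queue.add_job_from_run_arg(run_arg, res_config,
                                   max_runtime=None, ok_cb=ok_cb, exit_cb=exit_cb)
    queue.submit_complete()

    # Run the Python-level queue loop; evaluators=None means no periodic callbacks.
    pool_sema = threading.BoundedSemaphore(value=10)
    queue.execute_queue(pool_sema, evaluators=None)
    return queue.count_status(JobStatusType.JOB_QUEUE_DONE)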
Example #25
class BlockObservation(BaseCClass):
    TYPE_NAME = "block_obs"

    _alloc              = ResPrototype("void*  block_obs_alloc( char* , block_data_config , ecl_grid )", bind = False)
    _free               = ResPrototype("void   block_obs_free( block_obs )")
    _iget_i             = ResPrototype("int    block_obs_iget_i(block_obs, int)")
    _iget_j             = ResPrototype("int    block_obs_iget_j( block_obs, int)")
    _iget_k             = ResPrototype("int    block_obs_iget_k( block_obs , int)")
    _get_size           = ResPrototype("int    block_obs_get_size( block_obs )")
    _get_std            = ResPrototype("double block_obs_iget_std( block_obs, int )")
    _get_std_scaling    = ResPrototype("double block_obs_iget_std_scaling( block_obs, int )")
    _update_std_scaling = ResPrototype("void   block_obs_update_std_scale(block_obs , double , active_list)")
    _get_value          = ResPrototype("double block_obs_iget_value( block_obs, int)")
    _get_depth          = ResPrototype("double block_obs_iget_depth( block_obs, int)")
    _add_field_point    = ResPrototype("void   block_obs_append_field_obs( block_obs, int,int,int,double,double)")
    _add_summary_point  = ResPrototype("void   block_obs_append_summary_obs( block_obs, int, int, int, double, double)")
    _iget_data          = ResPrototype("double block_obs_iget_data(block_obs, void*, int, node_id)")



    def __init__(self , obs_key , data_config , grid):
        c_ptr = self._alloc( obs_key , data_config , grid )
        super(BlockObservation, self).__init__(c_ptr)


    def getCoordinate(self, index):
        """ @rtype: tuple of (int, int, int) """
        i = self._iget_i(index)
        j = self._iget_j(index)
        k = self._iget_k(index)
        return i, j, k

    def __len__(self):
        """ @rtype: int """
        return self._get_size()

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield cur
            cur += 1

    def addPoint(self , i,j,k , value , std , sum_key = None):
        if sum_key is None:
            self._add_field_point(i,j,k,value,std)
        else:
            self._add_summary_point(i,j,k,sum_key,value,std)


    def getValue(self, index):
        """ @rtype: float """
        return self._get_value(index)

    def getStd(self, index):
        """ @rtype: float """
        return self._get_std(index)

    def getStdScaling(self , index):
        """ @rtype: float """
        return self._get_std_scaling(index)

    def updateStdScaling(self , factor , active_list):
        self._update_std_scaling(factor , active_list)


    def getDepth(self, index):
        """ @rtype: float """
        return self._get_depth(index)

    def getData(self, state, obs_index, node_id):
        """
        @type state: c_void_p
        @type obs_index: int
        @type node_id: NodeId
        @rtype: float """

        return self._iget_data(state, obs_index, node_id)


    def free(self):
        self._free()

    def __repr__(self):
        return 'BlockObservation(size = %d) at 0x%x' % (len(self), self._address())
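
A small usage sketch for the BlockObservation example above; `block_obs` is assumed to be an instance built from an existing block data configuration and grid (see the constructor).

def print_block_observation(block_obs):
    # __iter__ yields the indices 0 .. len(block_obs) - 1
    for index in block_obs:
        i, j, k = block_obs.getCoordinate(index)
        print("(%d, %d, %d) value=%g std=%g depth=%g"
              % (i, j, k,
                 block_obs.getValue(index),
                 block_obs.getStd(index),
                 block_obs.getDepth(index)))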
Example #26
class AnalysisConfig(BaseCClass):
    TYPE_NAME = "analysis_config"

    _alloc = ResPrototype("void* analysis_config_alloc(config_content)",
                          bind=False)
    _alloc_load = ResPrototype("void* analysis_config_alloc_load(char*)",
                               bind=False)
    _alloc_full = ResPrototype(
        "void* analysis_config_alloc_full(double, bool, "
        "bool, int, char*, double, bool, bool, "
        "bool, double, int, int)",
        bind=False)

    _add_module_copy = ResPrototype(
        "void analysis_config_add_module_copy( analysis_config, "
        "char* , char* )")
    _load_external_module = ResPrototype(
        "bool analysis_config_load_external_module( analysis_config, "
        "char* , char* )")

    _free = ResPrototype("void analysis_config_free( analysis_config )")
    _get_rerun = ResPrototype(
        "int analysis_config_get_rerun( analysis_config )")
    _set_rerun = ResPrototype(
        "void analysis_config_set_rerun( analysis_config, bool)")
    _get_rerun_start = ResPrototype(
        "int analysis_config_get_rerun_start( analysis_config )")
    _set_rerun_start = ResPrototype(
        "void analysis_config_set_rerun_start( analysis_config, int)")
    _get_log_path = ResPrototype(
        "char* analysis_config_get_log_path( analysis_config)")
    _set_log_path = ResPrototype(
        "void analysis_config_set_log_path( analysis_config, char*)")
    _get_merge_observations = ResPrototype(
        "bool analysis_config_get_merge_observations(analysis_config)")
    _set_merge_observations = ResPrototype(
        "void analysis_config_set_merge_observations(analysis_config, bool)")
    _get_iter_config = ResPrototype(
        "analysis_iter_config_ref analysis_config_get_iter_config(analysis_config)"
    )
    _have_enough_realisations = ResPrototype(
        "bool analysis_config_have_enough_realisations(analysis_config, int, int)"
    )
    _get_max_runtime = ResPrototype(
        "int analysis_config_get_max_runtime(analysis_config)")
    _set_max_runtime = ResPrototype(
        "void analysis_config_set_max_runtime(analysis_config, int)")
    _get_stop_long_running = ResPrototype(
        "bool analysis_config_get_stop_long_running(analysis_config)")
    _set_stop_long_running = ResPrototype(
        "void analysis_config_set_stop_long_running(analysis_config, bool)")
    _get_active_module_name = ResPrototype(
        "char* analysis_config_get_active_module_name(analysis_config)")
    _get_module_list = ResPrototype(
        "stringlist_obj analysis_config_alloc_module_names(analysis_config)")
    _get_module = ResPrototype(
        "analysis_module_ref analysis_config_get_module(analysis_config, char*)"
    )
    _select_module = ResPrototype(
        "bool analysis_config_select_module(analysis_config, char*)")
    _has_module = ResPrototype(
        "bool analysis_config_has_module(analysis_config, char*)")
    _get_alpha = ResPrototype(
        "double analysis_config_get_alpha(analysis_config)")
    _set_alpha = ResPrototype(
        "void analysis_config_set_alpha(analysis_config, double)")
    _get_std_cutoff = ResPrototype(
        "double analysis_config_get_std_cutoff(analysis_config)")
    _set_std_cutoff = ResPrototype(
        "void analysis_config_set_std_cutoff(analysis_config, double)")
    _set_global_std_scaling = ResPrototype(
        "void analysis_config_set_global_std_scaling(analysis_config, double)")
    _get_global_std_scaling = ResPrototype(
        "double analysis_config_get_global_std_scaling(analysis_config)")

    def __init__(self,
                 user_config_file=None,
                 config_content=None,
                 config_dict=None):
        configs = sum([
            1 for x in [user_config_file, config_content, config_dict]
            if x is not None
        ])

        if configs > 1:
            raise ValueError(
                "Attempting to create AnalysisConfig object with multiple config objects"
            )

        if configs == 0:
            raise ValueError(
                "Error trying to create AnalysisConfig without any configuration"
            )

        c_ptr = None

        if user_config_file is not None:
            if not isfile(user_config_file):
                raise IOError('No such configuration file "%s".' %
                              user_config_file)

            c_ptr = self._alloc_load(user_config_file)
            if c_ptr:
                super(AnalysisConfig, self).__init__(c_ptr)
            else:
                raise ValueError(
                    'Failed to construct AnalysisConfig instance from config file %s.'
                    % user_config_file)

        if config_content is not None:
            c_ptr = self._alloc(config_content)
            if c_ptr:
                super(AnalysisConfig, self).__init__(c_ptr)
            else:
                raise ValueError(
                    'Failed to construct AnalysisConfig instance.')

        if config_dict is not None:
            c_ptr = self._alloc_full(
                config_dict.get(ConfigKeys.ALPHA_KEY, 3.0),
                config_dict.get(ConfigKeys.MERGE_OBSERVATIONS, False),
                config_dict.get(ConfigKeys.RERUN_KEY, False),
                config_dict.get(ConfigKeys.RERUN_START_KEY, 0),
                realpath(
                    config_dict.get(ConfigKeys.UPDATE_LOG_PATH, 'update_log')),
                config_dict.get(ConfigKeys.STD_CUTOFF_KEY, 1e-6),
                config_dict.get(ConfigKeys.STOP_LONG_RUNNING, False),
                config_dict.get(ConfigKeys.SINGLE_NODE_UPDATE, False),
                config_dict.get(ConfigKeys.STD_CORRELATED_OBS, False),
                config_dict.get(ConfigKeys.GLOBAL_STD_SCALING, 1.0),
                config_dict.get(ConfigKeys.MAX_RUNTIME, 0),
                config_dict.get(ConfigKeys.MIN_REALIZATIONS, 0))
            if c_ptr:
                super(AnalysisConfig, self).__init__(c_ptr)

                #external modules
                ext_modules_list = config_dict.get(ConfigKeys.ANALYSIS_LOAD,
                                                   [])
                for ext_module in ext_modules_list:
                    self._load_external_module(
                        ext_module[ConfigKeys.LIB_NAME],
                        ext_module[ConfigKeys.USER_NAME])

                #copy modules
                analysis_copy_list = config_dict.get(ConfigKeys.ANALYSIS_COPY,
                                                     [])
                for analysis_copy in analysis_copy_list:
                    self._add_module_copy(analysis_copy[ConfigKeys.SRC_NAME],
                                          analysis_copy[ConfigKeys.DST_NAME])

                #set var list
                set_var_list = config_dict.get(ConfigKeys.ANALYSIS_SET_VAR, [])
                for set_var in set_var_list:
                    module = self._get_module(set_var[ConfigKeys.MODULE_NAME])
                    module._set_var(set_var[ConfigKeys.VAR_NAME],
                                    str(set_var[ConfigKeys.VALUE]))

                if ConfigKeys.ANALYSIS_SELECT in config_dict:
                    self._select_module(
                        config_dict[ConfigKeys.ANALYSIS_SELECT])

            else:
                raise ValueError(
                    'Failed to construct AnalysisConfig from dict.')

    def get_rerun(self):
        return self._get_rerun()

    def set_rerun(self, rerun):
        self._set_rerun(rerun)

    def get_rerun_start(self):
        return self._get_rerun_start()

    def set_rerun_start(self, index):
        self._set_rerun_start(index)

    def get_log_path(self):
        return self._get_log_path()

    def set_log_path(self, path):
        self._set_log_path(path)

    def getEnkfAlpha(self):
        """ :rtype: float """
        return self._get_alpha()

    def setEnkfAlpha(self, alpha):
        self._set_alpha(alpha)

    def getStdCutoff(self):
        """ :rtype: float """
        return self._get_std_cutoff()

    def setStdCutoff(self, std_cutoff):
        self._set_std_cutoff(std_cutoff)

    def get_merge_observations(self):
        return self._get_merge_observations()

    def set_merge_observations(self, merge_observations):
        return self._set_merge_observations(merge_observations)

    def getAnalysisIterConfig(self):
        """ @rtype: AnalysisIterConfig """
        return self._get_iter_config().setParent(self)

    def get_stop_long_running(self):
        """ @rtype: bool """
        return self._get_stop_long_running()

    def set_stop_long_running(self, stop_long_running):
        self._set_stop_long_running(stop_long_running)

    def get_max_runtime(self):
        """ @rtype: int """
        return self._get_max_runtime()

    def set_max_runtime(self, max_runtime):
        self._set_max_runtime(max_runtime)

    def free(self):
        self._free()

    def activeModuleName(self):
        """ :rtype: str """
        return self._get_active_module_name()

    def getModuleList(self):
        """ :rtype: StringList """
        return self._get_module_list()

    def getModule(self, module_name):
        """ @rtype: AnalysisModule """
        return self._get_module(module_name)

    def hasModule(self, module_name):
        """ @rtype: bool """
        return self._has_module(module_name)

    def selectModule(self, module_name):
        """ @rtype: bool """
        return self._select_module(module_name)

    def getActiveModule(self):
        """ :rtype: AnalysisModule """
        return self.getModule(self.activeModuleName())

    def setGlobalStdScaling(self, std_scaling):
        self._set_global_std_scaling(std_scaling)

    def getGlobalStdScaling(self):
        return self._get_global_std_scaling()

    def haveEnoughRealisations(self, realizations, ensemble_size):
        return self._have_enough_realisations(realizations, ensemble_size)

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        if realpath(self.get_log_path()) != realpath(other.get_log_path()):
            return False

        if self.get_max_runtime() != other.get_max_runtime():
            return False

        if self.getGlobalStdScaling() != other.getGlobalStdScaling():
            return False

        if self.get_stop_long_running() != other.get_stop_long_running():
            return False

        if self.getStdCutoff() != other.getStdCutoff():
            return False

        if self.getEnkfAlpha() != other.getEnkfAlpha():
            return False

        if self.get_merge_observations() != other.get_merge_observations():
            return False

        if self.get_rerun() != other.get_rerun():
            return False

        if self.get_rerun_start() != other.get_rerun_start():
            return False

        if list(self.getModuleList()) != list(other.getModuleList()):
            return False

        if self.activeModuleName() != other.activeModuleName():
            return False

        if self.getAnalysisIterConfig() != other.getAnalysisIterConfig():
            return False

        #compare each module
        for a in list(self.getModuleList()):
            if self.getModule(a) != other.getModule(a):
                return False

        return True
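
A hedged usage sketch for the AnalysisConfig example above; "config.ert" is a hypothetical ERT configuration file.

def summarize_analysis_config(config_path="config.ert"):
    analysis_config = AnalysisConfig(user_config_file=config_path)
    print("active module:", analysis_config.activeModuleName())
    print("available modules:", list(analysis_config.getModuleList()))
    print("alpha:", analysis_config.getEnkfAlpha(),
          "std cutoff:", analysis_config.getStdCutoff())
    return analysis_config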
Example #27
class EnkfFs(BaseCClass):
    TYPE_NAME = "enkf_fs"

    _mount = ResPrototype("void* enkf_fs_mount(char* )", bind=False)
    _exists = ResPrototype("bool  enkf_fs_exists(char*)", bind=False)
    _disk_version = ResPrototype("int   enkf_fs_disk_version(char*)", bind=False)
    _update_disk_version = ResPrototype(
        "bool  enkf_fs_update_disk_version(char*, int, int)", bind=False
    )
    _decref = ResPrototype("int   enkf_fs_decref(enkf_fs)")
    _incref = ResPrototype("int   enkf_fs_incref(enkf_fs)")
    _get_refcount = ResPrototype("int   enkf_fs_get_refcount(enkf_fs)")
    _has_node = ResPrototype(
        "bool  enkf_fs_has_node(enkf_fs,     char*,  int,   int, int, int)"
    )
    _has_vector = ResPrototype(
        "bool  enkf_fs_has_vector(enkf_fs,   char*,  int,   int, int)"
    )
    _get_case_name = ResPrototype("char* enkf_fs_get_case_name(enkf_fs)")
    _is_read_only = ResPrototype("bool  enkf_fs_is_read_only(enkf_fs)")
    _is_running = ResPrototype("bool  enkf_fs_is_running(enkf_fs)")
    _fsync = ResPrototype("void  enkf_fs_fsync(enkf_fs)")
    _create = ResPrototype(
        "enkf_fs_obj   enkf_fs_create_fs(char* , enkf_fs_type_enum , void* , bool)",
        bind=False,
    )
    _get_time_map = ResPrototype("time_map_ref  enkf_fs_get_time_map(enkf_fs)")
    _get_state_map = ResPrototype("state_map_ref enkf_fs_get_state_map(enkf_fs)")
    _summary_key_set = ResPrototype(
        "summary_key_set_ref enkf_fs_get_summary_key_set(enkf_fs)"
    )

    def __init__(self, mount_point):
        c_ptr = self._mount(mount_point)
        super(EnkfFs, self).__init__(c_ptr)

    def copy(self):
        fs = self.createPythonObject(self._address())
        self._incref()
        return fs

    # This method will return a new Python object which shares the underlying
    # enkf_fs instance with self. The name weakref is used because the Python
    # object returned from this method does *not* manipulate the reference
    # count of the underlying enkf_fs instance, and specifically it does not
    # inhibit destruction of that object.
    def weakref(self):
        fs = self.createCReference(self._address())
        return fs

    def getTimeMap(self):
        """ @rtype: TimeMap """
        return self._get_time_map().setParent(self)

    def getStateMap(self):
        """ @rtype: StateMap """
        return self._get_state_map().setParent(self)

    def getCaseName(self):
        """ @rtype: str """
        return self._get_case_name()

    def isReadOnly(self):
        """ @rtype: bool """
        return self._is_read_only()

    def refCount(self):
        return self._get_refcount()

    def is_running(self):
        return self._is_running()

    @classmethod
    def exists(cls, path):
        return cls._exists(path)

    @classmethod
    def diskVersion(cls, path):
        disk_version = cls._disk_version(path)
        if disk_version < 0:
            raise IOError("No such filesystem: %s" % path)
        return disk_version

    @classmethod
    def updateVersion(cls, path, src_version, target_version):
        return cls._update_disk_version(path, src_version, target_version)

    @classmethod
    def createFileSystem(cls, path, mount=False):
        assert isinstance(path, str)
        fs_type = EnKFFSType.BLOCK_FS_DRIVER_ID
        arg = None
        fs = cls._create(path, fs_type, arg, mount)
        return fs

    # The umount() method should not normally be called explicitly by
    # downstream code, but in situations where file descriptors are at a
    # premium it might be beneficial to call it explicitly. In that case it is
    # solely the responsibility of the calling scope to ensure that it is not
    # called repeatedly - that will lead to hard failure!
    def umount(self):
        if self.isReference():
            raise AssertionError(
                "Calling umount() on a reference is an application error"
            )

        if self:
            self._decref()
            self._invalidateCPointer()
        else:
            raise AssertionError("Tried to umount for second time - application error")

    def free(self):
        if self:
            self.umount()

    def __repr__(self):
        cn = self.getCaseName()
        ad = self._ad_str()
        return "EnkfFs(case_name = %s) %s" % (cn, ad)

    def fsync(self):
        self._fsync()

    def getSummaryKeySet(self):
        """ @rtype: SummaryKeySet """
        return self._summary_key_set().setParent(self)

    def realizationList(self, state):
        """
        Will return list of realizations with state == the specified state.
        @type state: res.enkf.enums.RealizationStateEnum
        @rtype: ecl.util.IntVector
        """
        state_map = self.getStateMap()
        return state_map.realizationList(state)
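
A short sketch for the EnkfFs example above; "storage/default" is a hypothetical mount point on disk.

def open_case(mount_point="storage/default"):
    # Create the on-disk filesystem on first use, then mount it.
    if not EnkfFs.exists(mount_point):
        EnkfFs.createFileSystem(mount_point)
    fs = EnkfFs(mount_point)
    print("case:", fs.getCaseName(), "read only:", fs.isReadOnly())
    fs.fsync()
    return fs  # the owning wrapper calls free()/umount() when it is destroyed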
Example #28
class ErtRunContext(BaseCClass):
    TYPE_NAME = "ert_run_context"
    _alloc = ResPrototype(
        "void* ert_run_context_alloc( enkf_run_mode_enum , enkf_init_mode_enum, enkf_fs , enkf_fs, bool_vector, path_fmt ,char*, subst_list, int)",
        bind=False)
    _alloc_ensemble_experiment = ResPrototype(
        "ert_run_context_obj ert_run_context_alloc_ENSEMBLE_EXPERIMENT( enkf_fs, bool_vector, path_fmt ,char*, subst_list, int)",
        bind=False)
    _alloc_ensemble_smoother = ResPrototype(
        "ert_run_context_obj ert_run_context_alloc_SMOOTHER_RUN( enkf_fs , enkf_fs, bool_vector, path_fmt ,char*, subst_list, int)",
        bind=False)
    _alloc_ensemble_smoother_update = ResPrototype(
        "ert_run_context_obj ert_run_context_alloc_SMOOTHER_UPDATE(enkf_fs , enkf_fs )",
        bind=False)
    _alloc_case_init = ResPrototype(
        "ert_run_context_obj ert_run_context_alloc_CASE_INIT(enkf_fs, bool_vector)",
        bind=False)
    _alloc_runpath_list = ResPrototype(
        "stringlist_obj ert_run_context_alloc_runpath_list(bool_vector, path_fmt, subst_list, int)",
        bind=False)
    _alloc_runpath = ResPrototype(
        "char* ert_run_context_alloc_runpath(int, path_fmt, subst_list, int)",
        bind=False)
    _get_size = ResPrototype("int ert_run_context_get_size( ert_run_context )")
    _free = ResPrototype("void ert_run_context_free( ert_run_context )")
    _iactive = ResPrototype(
        "bool ert_run_context_iactive( ert_run_context , int)")
    _iget = ResPrototype(
        "run_arg_ref ert_run_context_iget_arg( ert_run_context , int)")
    _get_id = ResPrototype("char* ert_run_context_get_id( ert_run_context )")
    _get_mask = ResPrototype(
        "bool_vector_obj ert_run_context_alloc_iactive( ert_run_context )")
    _get_iter = ResPrototype("int ert_run_context_get_iter( ert_run_context )")
    _get_target_fs = ResPrototype(
        "enkf_fs_ref ert_run_context_get_update_target_fs( ert_run_context )")
    _get_sim_fs = ResPrototype(
        "enkf_fs_ref ert_run_context_get_sim_fs( ert_run_context )")
    _get_init_mode = ResPrototype(
        "enkf_init_mode_enum ert_run_context_get_init_mode( ert_run_context )")

    def __init__(self,
                 run_type,
                 sim_fs,
                 target_fs,
                 mask,
                 path_fmt,
                 jobname_fmt,
                 subst_list,
                 itr,
                 init_mode=EnkfInitModeEnum.INIT_CONDITIONAL):
        c_ptr = self._alloc(run_type, init_mode, sim_fs, target_fs, mask,
                            path_fmt, jobname_fmt, subst_list, itr)
        super(ErtRunContext, self).__init__(c_ptr)

        # The C object ert_run_context uses a shared object for the
        # path_fmt and subst_list objects. We therefore hold on
        # to a reference here - to inhibit Python GC of these objects.
        self._path_fmt = path_fmt
        self._subst_list = subst_list

    @classmethod
    def case_init(cls, sim_fs, mask):
        return cls._alloc_case_init(sim_fs, mask)

    @classmethod
    def ensemble_experiment(cls, sim_fs, mask, path_fmt, jobname_fmt,
                            subst_list, itr):
        run_context = cls._alloc_ensemble_experiment(sim_fs, mask, path_fmt,
                                                     jobname_fmt, subst_list,
                                                     itr)

        # The C object ert_run_context uses a shared object for the
        # path_fmt and subst_list objects. We therefore hold on
        # to a reference here - to inhibit Python GC of these objects.
        run_context._path_fmt = path_fmt
        run_context._subst_list = subst_list

        return run_context

    @classmethod
    def ensemble_smoother(cls, sim_fs, target_fs, mask, path_fmt, jobname_fmt,
                          subst_list, itr):
        run_context = cls._alloc_ensemble_smoother(sim_fs, target_fs, mask,
                                                   path_fmt, jobname_fmt,
                                                   subst_list, itr)

        # The C object ert_run_context uses a shared object for the
        # path_fmt and subst_list objects. We therefore hold on
        # to a reference here - to inhibit Python GC of these objects.
        run_context._path_fmt = path_fmt
        run_context._subst_list = subst_list

        return run_context

    @classmethod
    def ensemble_smoother_update(cls, sim_fs, target_fs):
        return cls._alloc_ensemble_smoother_update(sim_fs, target_fs)

    def is_active(self, index):
        if 0 <= index < len(self):
            return self._iactive(index)
        else:
            raise IndexError("Index:%d invalid. Legal range: [0,%d)" %
                             (index, len(self)))

    def __len__(self):
        return self._get_size()

    def __getitem__(self, index):
        if isinstance(index, int):
            if not self.is_active(index):
                return None

            if 0 <= index < len(self):
                run_arg = self._iget(index)
                run_arg.setParent(self)
                return run_arg
            else:
                raise IndexError("Index:%d invalid. Legal range: [0,%d)" %
                                 (index, len(self)))
        else:
            raise TypeError("Invalid type - expected integer")

    def free(self):
        self._free()

    def __repr__(self):
        return 'ErtRunContext(size = %d) %s' % (len(self), self._ad_str())

    @classmethod
    def createRunpathList(cls, mask, runpath_fmt, subst_list, iter=0):
        """ @rtype: ecl.util.stringlist.StringList """
        return cls._alloc_runpath_list(mask, runpath_fmt, subst_list, iter)

    @classmethod
    def createRunpath(cls, iens, runpath_fmt, subst_list, iter=0):
        """ @rtype: str """
        return cls._alloc_runpath(iens, runpath_fmt, subst_list, iter)

    def get_id(self):
        return self._get_id()

    def get_mask(self):
        return self._get_mask()

    def get_iter(self):
        return self._get_iter()

    def get_target_fs(self):
        return self._get_target_fs()

    def get_sim_fs(self):
        return self._get_sim_fs()

    def get_init_mode(self):
        return self._get_init_mode()
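
A hedged sketch for the ErtRunContext example above; the arguments are assumed to be already-constructed EnkfFs, BoolVector, PathFormat and SubstitutionList objects from an ERT session.

def inspect_run_context(sim_fs, mask, path_fmt, jobname_fmt, subst_list, itr=0):
    context = ErtRunContext.ensemble_experiment(
        sim_fs, mask, path_fmt, jobname_fmt, subst_list, itr)
    for iens in range(len(context)):
        if context.is_active(iens):
            run_arg = context[iens]  # RunArg for this realisation
            print(iens, run_arg)
    return context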
Example #29
class EnkfSimulationRunner(BaseCClass):
    TYPE_NAME = "enkf_simulation_runner"

    _create_run_path = ResPrototype(
        "bool enkf_main_create_run_path(enkf_simulation_runner, ert_run_context)"
    )

    def __init__(self, enkf_main):
        assert isinstance(enkf_main, BaseCClass)
        # enkf_main should be an EnKFMain, get the _RealEnKFMain object
        real_enkf_main = enkf_main.parent()
        super(EnkfSimulationRunner, self).__init__(
            real_enkf_main.from_param(real_enkf_main).value,
            parent=real_enkf_main,
            is_reference=True,
        )

    def _enkf_main(self):
        return self.parent()

    def runSimpleStep(self, job_queue, run_context):
        """ @rtype: int """
        #### run simplestep ####
        self._enkf_main().initRun(run_context)

        if run_context.get_step():
            ecl_config = self._enkf_main().ecl_config.assert_restart()

        #### deselect load and parent failure #####
        iactive = run_context.get_mask()

        run_context.get_sim_fs().getStateMap().deselectMatching(
            iactive,
            RealizationStateEnum.STATE_LOAD_FAILURE
            | RealizationStateEnum.STATE_PARENT_FAILURE,
        )

        #### start queue ####
        self.start_queue(run_context, job_queue)

        #### deactivate failed realizations ####
        totalOk = 0
        totalFailed = 0
        for i in range(len(run_context)):
            if run_context.is_active(i):
                run_arg = run_context[i]
                if (run_arg.run_status == RunStatusType.JOB_LOAD_FAILURE or
                        run_arg.run_status == RunStatusType.JOB_RUN_FAILURE):
                    run_context.deactivate_realization(i)
                    totalFailed += 1
                else:
                    totalOk += 1

        run_context.get_sim_fs().fsync()

        ## Should be converted to a logger
        if totalFailed == 0:
            print(
                "All {} active jobs complete and data loaded.".format(totalOk))
        else:
            print("{} active job(s) failed.".format(totalFailed))

        return totalOk

    def createRunPath(self, run_context):
        """ @rtype: bool """
        return self._create_run_path(run_context)

    def runEnsembleExperiment(self, job_queue, run_context):
        """ @rtype: int """
        return self.runSimpleStep(job_queue, run_context)

    @staticmethod
    def runWorkflows(runtime, ert):
        """:type res.enkf.enum.HookRuntimeEnum"""
        hook_manager = ert.getHookManager()
        hook_manager.runWorkflows(runtime, ert)

    def start_queue(self, run_context, job_queue):
        max_runtime = self._enkf_main().analysisConfig().get_max_runtime()
        if max_runtime == 0:
            max_runtime = None

        done_callback_function = EnKFState.forward_model_ok_callback
        exit_callback_function = EnKFState.forward_model_exit_callback

        # submit jobs
        for i in range(len(run_context)):
            if not run_context.is_active(i):
                continue
            run_arg = run_context[i]
            job_queue.add_job_from_run_arg(
                run_arg,
                self._enkf_main().resConfig(),
                max_runtime,
                done_callback_function,
                exit_callback_function,
            )

        job_queue.submit_complete()
        queue_evaluators = None
        if (self._enkf_main().analysisConfig().get_stop_long_running()
                and self._enkf_main().analysisConfig(
                ).minimum_required_realizations > 0):
            queue_evaluators = [
                partial(
                    job_queue.stop_long_running_jobs,
                    self._enkf_main().analysisConfig().
                    minimum_required_realizations,
                )
            ]

        jqm = JobQueueManager(job_queue, queue_evaluators)
        jqm.execute_queue()
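
A hedged sketch for the EnkfSimulationRunner example above; `ert` is assumed to be an EnKFMain instance, with `job_queue` and `run_context` prepared as in the JobQueue and ErtRunContext examples.

def run_experiment(ert, job_queue, run_context):
    runner = EnkfSimulationRunner(ert)
    if not runner.createRunPath(run_context):
        raise RuntimeError("Failed to create run path")
    # Returns the number of realisations that completed and loaded successfully.
    return runner.runEnsembleExperiment(job_queue, run_context)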
Example #30
class EnkfNode(BaseCClass):
    TYPE_NAME = "enkf_node"
    _alloc         = ResPrototype("void* enkf_node_alloc(enkf_config_node)", bind = False)
    _alloc_private = ResPrototype("void* enkf_node_alloc_private_container(enkf_config_node)", bind = False)
    _free          = ResPrototype("void  enkf_node_free(enkf_node)")
    _get_name      = ResPrototype("char* enkf_node_get_key(enkf_node)")
    _value_ptr     = ResPrototype("void* enkf_node_value_ptr(enkf_node)")
    _try_load      = ResPrototype("bool  enkf_node_try_load(enkf_node, enkf_fs, node_id)")
    _store         = ResPrototype("bool  enkf_node_store(enkf_node, enkf_fs, bool, node_id)")
    _get_impl_type = ResPrototype("ert_impl_type_enum enkf_node_get_impl_type(enkf_node)")
    _ecl_write     = ResPrototype("void enkf_node_ecl_write(enkf_node, char*, void*, int)")

    def __init__(self, config_node, private=False):
        self._private = private
        if private:
            c_pointer = self._alloc_private(config_node)
        else:
            c_pointer = self._alloc(config_node)

        if c_pointer:
            super(EnkfNode, self).__init__(c_pointer, config_node, True)
        else:
            p_err = 'private ' if private else ''
            raise ValueError('Unable to create %sEnkfNode from given config node.' % p_err)

    @classmethod
    def exportMany(cls , config_node , file_format , fs , iens_list  , report_step = 0 , file_type = None , arg = None):
        node = EnkfNode( config_node )
        for iens in iens_list:
            filename = file_format % iens
            node_id = NodeId( report_step , iens )
            if node.tryLoad(fs , node_id):
                if node.export( filename , file_type = file_type , arg = arg):
                    print("%s[%03d] -> %s" % (config_node.getKey() , iens , filename))
            else:
                sys.stderr.write("** ERROR: Could not load realisation:%d - export failed\n" % iens)


    def export(self , filename , file_type = None , arg = None):
        impl_type = self.getImplType()
        if impl_type == ErtImplType.FIELD:
            field_node = self.asField( )
            return field_node.export( filename , file_type = file_type , init_file = arg)
        else:
            raise NotImplementedError("The export method is only implemented for field")



    def valuePointer(self):
        return self._value_ptr( )


    def getImplType(self):
        """ @rtype: res.enkf.enums.ert_impl_type_enum.ErtImplType """
        return self._get_impl_type( )


    def asGenData(self):
        """ @rtype: GenData """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.GEN_DATA

        return GenData.createCReference(self.valuePointer(), self)

    def asGenKw(self):
        """ @rtype: GenKw """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.GEN_KW

        return GenKw.createCReference(self.valuePointer(), self)

    def asCustomKW(self):
        """ @rtype: CustomKW """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.CUSTOM_KW

        return CustomKW.createCReference(self.valuePointer(), self)

    def asField(self):
        """ @rtype: CustomKW """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.FIELD

        return Field.createCReference(self.valuePointer(), self)

    def as_summary(self):
        """ @rtype: CustomKW """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.SUMMARY

        return Summary.createCReference(self.valuePointer(), self)

    def as_ext_param(self):
        """ @rtype: CustomKW """
        impl_type = self.getImplType( )
        assert impl_type == ErtImplType.EXT_PARAM

        return ExtParam.createCReference(self.valuePointer(), self)


    def tryLoad(self, fs, node_id):
        """
        @type fs: EnkfFS
        @type node_id: NodeId
        @rtype: bool
        """
        if not isinstance(fs, EnkfFs):
            raise TypeError('fs must be an EnkfFs, not %s' % type(fs))
        if not isinstance(node_id, NodeId):
            raise TypeError('node_id must be a NodeId, not %s' % type(node_id))

        return self._try_load(fs, node_id)

    def name(self):
        """ @rtype: str """
        return self._get_name( )

    def load(self, fs, node_id):
        if not self.tryLoad(fs, node_id):
            raise Exception("Could not load node: %s iens: %d report: %d" % (self.name(), node_id.iens, node_id.report_step))

    def save(self, fs, node_id):
        assert isinstance(fs, EnkfFs)
        assert isinstance(node_id, NodeId)

        return self._store(fs, False, node_id)

    def free(self):
        self._free( )

    def __repr__(self):
        pp = ', private' if self._private else ''
        return 'EnkfNode(name = "%s"%s) %s' % (self.name(), pp, self._ad_str())


    def ecl_write(self, path):
        filestream_ptr = None
        report_step = 0
        self._ecl_write( path, filestream_ptr, report_step )
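
A closing usage sketch for the EnkfNode example above; `config_node` and `fs` are assumed to be an existing EnkfConfigNode and EnkfFs, with NodeId and ErtImplType available as in the class above.

def load_and_inspect(config_node, fs, iens, report_step=0):
    node = EnkfNode(config_node)
    node_id = NodeId(report_step, iens)
    if not node.tryLoad(fs, node_id):
        return None
    # Narrow to the concrete type when it is a GEN_KW parameter node.
    if node.getImplType() == ErtImplType.GEN_KW:
        return node.asGenKw()
    return node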