def set_gravity_interpolator(self, density_block=None, pos_density=None, inplace=True,
                             compile_theano: bool = True, theano_optimizer=None, verbose: list = None):
    """
    Create the graph and compile the theano code to compute forward gravity.

    Args:
        density_block (Optional[np.array]): density assigned to each voxel of the grid. If it is not passed,
            the density block is computed at run time from the surface values indicated by ``pos_density``.
        pos_density (Optional[int]): location on the Surfaces().df where density is stored. Only necessary
            when ``density_block`` is not passed.
        inplace (bool): whether to compile the theano function in place.
        compile_theano (bool): if True, compile the theano function.
        theano_optimizer (str {'fast_run', 'fast_compile'}): theano optimizer to use.
        verbose (list): list of theano variables to print during computation.

    Returns:
        :class:`Options`
    """
    assert self.grid.gravity_grid is not None, 'First you need to set up a gravity grid to compile the graph'
    assert density_block is not None or pos_density is not None, 'If you do not pass the density block you need' \
                                                                 ' to pass the position of surface values where' \
                                                                 ' density is assigned'

    # TODO Possibly this is only necessary when computing gravity
    self.grid.active_grids = np.zeros(4, dtype=bool)
    self.grid.set_active('gravity')
    self.interpolator.set_initial_results_matrices()

    # TODO output is dep
    if theano_optimizer is not None:
        self.additional_data.options.df.at['values', 'theano_optimizer'] = theano_optimizer
    if verbose is not None:
        self.additional_data.options.df.at['values', 'verbosity'] = verbose

    # TODO add kwargs
    self.rescaling.rescale_data()
    self.update_structure()

    # These two should be unnecessary now
    self.surface_points.sort_table()
    self.orientations.sort_table()

    self.interpolator_gravity = InterpolatorGravity(
        self.surface_points, self.orientations, self.grid, self.surfaces,
        self.series, self.faults, self.additional_data)

    # geo_model.interpolator.set_theano_graph(geo_model.interpolator.create_theano_graph())
    self.interpolator_gravity.create_theano_graph(self.additional_data, inplace=True)

    # set shared variables
    self.interpolator_gravity.set_theano_shared_tz_kernel()
    self.interpolator_gravity.set_all_shared_parameters(reset=True)

    if compile_theano is True:
        self.interpolator_gravity.compile_th_fn(density_block, pos_density, inplace=inplace)

    return self.additional_data.options
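# Illustrative usage sketch (not part of the original module): how the method above might be called.
# It assumes `model` is a Model instance whose gravity grid has already been set up, as required by
# the assert at the top of the method; the grid-setup call itself is not shown in this file.
def _example_set_gravity_interpolator(model):
    # Passing only `pos_density` means the density block is built at run time from
    # the surface values stored at that position of the Surfaces dataframe.
    options = model.set_gravity_interpolator(pos_density=1, compile_theano=True,
                                             theano_optimizer='fast_compile')
    return options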
class Model(DataMutation):
    """Container class of all objects that constitute a GemPy model.

    In addition, the class provides methods that act on more than one of these objects. Model is a child
    class of :class:`DataMutation` and holds a :class:`MetaData` instance in ``self.meta``.
    """

    def __init__(self, project_name='default_project'):
        self.meta = MetaData(project_name=project_name)
        super().__init__()
        self.interpolator_gravity = None

    def __repr__(self):
        return self.meta.project_name + ' ' + self.meta.date

    def new_model(self, name_project='default_project'):
        """Reset the model object."""
        self.__init__(name_project)

    def save_model_pickle(self, path=False):
        """
        Short-term model storage: serialize the object to a python pickle. Be aware that if the versions of
        the dependencies used to export and import the pickle differ, it may cause problems.

        Args:
            path (str): path where to save the pickle

        Returns:
            True
        """
        # Delete the qi attribute, otherwise the object cannot be pickled
        if hasattr(self, 'qi'):
            self.__delattr__('qi')

        sys.setrecursionlimit(10000)

        if not path:
            path = './' + self.meta.project_name
        import pickle

        with open(path + '.pickle', 'wb') as f:
            # Pickle the 'data' dictionary using the highest protocol available.
            pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)
        return True

    @staticmethod
    def load_model_pickle(path):
        """
        Read a :class:`Model` object from a python pickle.

        Args:
            path (str): path to the saved pickle

        Returns:
            :class:`gempy.core.model.Model`
        """
        import pickle
        with open(path, 'rb') as f:
            # The protocol version used is detected automatically, so we do not
            # have to specify it.
            model = pickle.load(f)
        return model

    def save_model(self, name=None, path=None):
        """
        Save the model in a new folder. Input data is saved as csv files; extent and resolution are saved
        as npy.

        Args:
            name (str): name of the newly created folder and prefix of the saved files
            path (str): path where to save the model folder.

        Returns:
            True
        """
        if name is None:
            name = self.meta.project_name

        if not path:
            path = './'
        path = f'{path}/{name}'

        if os.path.isdir(path):
            print("Directory already exists, files will be overwritten")
        else:
            os.mkdir(f'{path}')

        # save dataframes as csv
        self.surface_points.df.to_csv(f'{path}/{name}_surface_points.csv')
        self.surfaces.df.to_csv(f'{path}/{name}_surfaces.csv')
        self.orientations.df.to_csv(f'{path}/{name}_orientations.csv')
        self.series.df.to_csv(f'{path}/{name}_series.csv')
        self.faults.df.to_csv(f'{path}/{name}_faults.csv')
        self.faults.faults_relations_df.to_csv(f'{path}/{name}_faults_relations.csv')
        self.additional_data.kriging_data.df.to_csv(f'{path}/{name}_kriging_data.csv')
        self.additional_data.rescaling_data.df.to_csv(f'{path}/{name}_rescaling_data.csv')
        self.additional_data.options.df.to_csv(f'{path}/{name}_options.csv')

        # save resolution and extent as npy
        np.save(f'{path}/{name}_extent.npy', self.grid.regular_grid.extent)
        np.save(f'{path}/{name}_resolution.npy', self.grid.regular_grid.resolution)

        # save solutions as npy
        # np.save(f'{path}/{name}_lith_block.npy', self.solutions.lith_block)
        # np.save(f'{path}/{name}_scalar_field_lith.npy', self.solutions.scalar_field_matrix)
        # np.save(f'{path}/{name}_gradient.npy', self.solutions.gradient)
        # np.save(f'{path}/{name}_values_block.npy', self.solutions.matr)

        return True

    @setdoc([SurfacePoints.read_surface_points.__doc__, Orientations.read_orientations.__doc__])
    def read_data(self, path_i=None, path_o=None, add_basement=True, **kwargs):
        """
        Read data from csv files.

        Args:
            path_i: Path to the csv file with the surface_points data. Default os.getcwd()
            path_o: Path to the csv file with the orientations data.
                Default os.getcwd()
            add_basement (bool): if True, add a basement surface. This won't be interpolated; it just gives
                the values for the volume below the last surface.
            **kwargs:
                update_surfaces (bool): True

        Returns:
            True
        """
        if 'update_surfaces' not in kwargs:
            kwargs['update_surfaces'] = True

        if path_i:
            self.surface_points.read_surface_points(path_i, inplace=True, **kwargs)
        if path_o:
            self.orientations.read_orientations(path_o, inplace=True, **kwargs)
        if add_basement is True:
            self.surfaces.add_surface(['basement'])
            self.map_series_to_surfaces({'Basement': 'basement'}, set_series=True)
        self.rescaling.rescale_data()

        self.additional_data.update_structure()
        self.additional_data.update_default_kriging()

        return True

    def get_data(self, itype='data', numeric=False):
        """
        Method that returns the surface_points and orientations pandas DataFrames. Can return both at the
        same time or only one of the two.

        Args:
            itype: input data type, either 'orientations', 'surface_points' or 'data' for both.
            numeric (bool): Return only the numerical values of the dataframe. This is a much lighter
                dataframe, e.g. for storing traces.
            verbosity (int): Number of properties shown

        Returns:
            pandas.core.frame.DataFrame: Data frame with the raw data
        """
        # TODO adapt this
        show_par_f = self.orientations.df.columns
        show_par_i = self.surface_points.df.columns

        if numeric:
            show_par_f = self.orientations._columns_o_num
            show_par_i = self.surface_points._columns_i_num
            dtype = 'float'

        if itype == 'orientations':
            raw_data = self.orientations.df[show_par_f]  # .astype(dtype)
            # Be sure that the columns are in order when used for operations
            if numeric:
                raw_data = raw_data[['X', 'Y', 'Z', 'G_x', 'G_y', 'G_z', 'dip', 'azimuth', 'polarity']]
        elif itype == 'surface_points':
            raw_data = self.surface_points.df[show_par_i]  # .astype(dtype)
            # Be sure that the columns are in order when used for operations
            if numeric:
                raw_data = raw_data[['X', 'Y', 'Z', 'G_x', 'G_y', 'G_z', 'dip', 'azimuth', 'polarity']]
        elif itype == 'data':
            raw_data = pn.concat([self.surface_points.df[show_par_i],  # .astype(dtype),
                                  self.orientations.df[show_par_f]],  # .astype(dtype)],
                                 keys=['surface_points', 'orientations'],
                                 sort=False)
            # Be sure that the columns are in order when used for operations
            if numeric:
                raw_data = raw_data[['X', 'Y', 'Z', 'G_x', 'G_y', 'G_z', 'dip', 'azimuth', 'polarity']]
        elif itype == 'surfaces':
            raw_data = self.surfaces
        elif itype == 'series':
            raw_data = self.series
        elif itype == 'faults':
            raw_data = self.faults
        elif itype == 'faults_relations_df' or itype == 'faults_relations':
            raw_data = self.faults.faults_relations_df
        elif itype == 'additional data' or itype == 'additional_data':
            raw_data = self.additional_data
        elif itype == 'kriging':
            raw_data = self.additional_data.kriging_data
        else:
            raise AttributeError('itype has to be \'data\', \'additional data\', \'surface_points\', \'orientations\','
                                 ' \'surfaces\', \'series\', \'faults\' or \'faults_relations_df\'')

        return raw_data

    def get_additional_data(self):
        return self.additional_data.get_additional_data()

    @setdoc_pro([ds.compile_theano, ds.theano_optimizer])
    def set_gravity_interpolator(self, density_block=None, pos_density=None, tz=None, compile_theano: bool = True,
                                 theano_optimizer=None, verbose: list = None):
        """
        Method to create a graph and compile the theano code to compute forward gravity.

        Args:
            density_block (Optional[np.array]): numpy array of the size of grid.values with the corresponding
                density of each voxel.
                If it is not passed, the density block will also be computed at run time, but you will need
                to specify which value of the Surfaces object is the density.
            pos_density (Optional[int]): Only necessary when density_block is not passed. Location on the
                Surfaces().df where density is located (starting with id 0).
                TODO: allow the user to pass the name of the column.
            tz (Optional[np.array]): numpy array of the size of grid.values with the z component of the
                device-voxel vector. If None is passed it will be automatically computed on the
                self.grid.centered_grid.
            compile_theano (bool): [s0]
            theano_optimizer (str {'fast_run', 'fast_compile'}): [s1]
            verbose (list):

        Returns:
            :class:`Options`
        """
        assert self.grid.centered_grid is not None, 'First you need to set up a gravity grid to compile the graph'
        assert density_block is not None or pos_density is not None, 'If you do not pass the density block you need' \
                                                                     ' to pass the position of surface values where' \
                                                                     ' density is assigned'

        # TODO output is dep
        if theano_optimizer is not None:
            self.additional_data.options.df.at['values', 'theano_optimizer'] = theano_optimizer
        if verbose is not None:
            self.additional_data.options.df.at['values', 'verbosity'] = verbose

        self.interpolator_gravity = InterpolatorGravity(
            self.surface_points, self.orientations, self.grid, self.surfaces,
            self.series, self.faults, self.additional_data)

        # geo_model.interpolator.set_theano_graph(geo_model.interpolator.create_theano_graph())
        self.interpolator_gravity.create_theano_graph(self.additional_data, inplace=True)

        # set shared variables
        self.interpolator_gravity.set_theano_shared_tz_kernel(tz)
        self.interpolator_gravity.set_all_shared_parameters(reset_ctrl=True)

        if compile_theano is True:
            self.interpolator_gravity.compile_th_fn(density_block, pos_density, inplace=True)

        return self.additional_data.options
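# Illustrative usage sketch (not part of the original module): a typical round trip through the Model
# methods defined above. The csv paths and the density position are placeholders, and a centered
# (gravity) grid is assumed to be set on model.grid before the last call, as its assert requires.
def _example_model_workflow():
    model = Model(project_name='example_project')

    # Read surface points and orientations from csv files and append a basement surface.
    model.read_data(path_i='surface_points.csv', path_o='orientations.csv', add_basement=True)

    # Inspect the combined input data as a single dataframe.
    df = model.get_data(itype='data')

    # Short-term storage: serialize the whole object to ./example_project.pickle and read it back.
    model.save_model_pickle()
    model = Model.load_model_pickle('./example_project.pickle')

    # Compile the forward-gravity graph using the density stored at position 1 of the Surfaces object.
    options = model.set_gravity_interpolator(pos_density=1, compile_theano=True)
    return model, df, options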
def set_interpolator(geo_model: Model, type='geo', compile_theano: bool = True, theano_optimizer=None,
                     verbose: list = None, grid='shared', **kwargs):
    """
    Method to create a graph and compile the theano code to compute the interpolation.

    Args:
        geo_model (:class:`Model`): [s0]
        type (str:{geo, grav}): type of interpolation.
        compile_theano (bool): [s1]
        theano_optimizer (str {'fast_run', 'fast_compile'}): [s2]
        verbose:
        grid: grid to compile against. Default 'shared'.
        kwargs:
            - pos_density (Optional[int]): Only necessary when type='grav'. Location on the Surfaces().df
              where density is located (starting with id 0).

    Returns:
        :class:`InterpolatorModel` or :class:`InterpolatorGravity`: the interpolator attached to ``geo_model``.
    """
    if theano_optimizer is not None:
        geo_model.additional_data.options.df.at['values', 'theano_optimizer'] = theano_optimizer
    if verbose is not None:
        geo_model.additional_data.options.df.at['values', 'verbosity'] = verbose

    # TODO add kwargs
    geo_model.rescaling.rescale_data()
    update_additional_data(geo_model)
    geo_model.surface_points.sort_table()
    geo_model.orientations.sort_table()

    # The graph object contains all theano methods. Therefore it is independent of which side of the
    # graph we compile:
    geo_model.interpolator.create_theano_graph(geo_model.additional_data, inplace=True, **kwargs)

    if type == 'geo':
        if compile_theano is True:
            geo_model.interpolator.set_all_shared_parameters(reset_ctrl=True)
            geo_model.interpolator.compile_th_fn_geo(inplace=True, grid=grid)
        else:
            if grid == 'shared':
                geo_model.interpolator.set_theano_shared_grid(grid)

    elif type == 'grav':
        pos_density = kwargs.get('pos_density', 1)
        tz = kwargs.get('tz', 'auto')

        # First we need to upgrade the interpolator object:
        print('Interpolator object upgraded from InterpolatorModel to InterpolatorGravity.')
        geo_model.interpolator = InterpolatorGravity(
            geo_model.surface_points, geo_model.orientations, geo_model.grid, geo_model.surfaces,
            geo_model.series, geo_model.faults, geo_model.additional_data, **kwargs)

        if tz == 'auto' and geo_model.grid.centered_grid is not None:
            print('Calculating the tz components for the centered grid...')
            tz = geo_model.interpolator.calculate_tz()
            print('Done')

        # Set the shared parameters for this branch of the graph
        geo_model.interpolator.set_theano_shared_tz_kernel(tz)
        geo_model.interpolator.set_all_shared_parameters(reset_ctrl=True)

        if compile_theano is True:
            geo_model.interpolator.compile_th_fn_grav(density=None, pos_density=pos_density, inplace=True)

    return geo_model.interpolator
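# Illustrative usage sketch (not part of the original module): the two ways set_interpolator above can be
# used. `geo_model` is assumed to be a fully populated Model; for the gravity branch a centered grid must
# already be set on geo_model.grid so that the tz components can be computed automatically.
def _example_set_interpolator(geo_model):
    # Standard geological interpolation, compiled against the shared grid.
    interp_geo = set_interpolator(geo_model, type='geo', compile_theano=True,
                                  theano_optimizer='fast_run', grid='shared')

    # Forward gravity: upgrades geo_model.interpolator to InterpolatorGravity and uses the density
    # values stored at position 1 of the Surfaces object.
    interp_grav = set_interpolator(geo_model, type='grav', compile_theano=True, pos_density=1)
    return interp_geo, interp_grav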