class _BigDumbParams(param.Parameterized):
    action = param.Action(default_action, allow_None=True)
    array = param.Array(np.array([1.0, 2.0]))
    boolean = param.Boolean(True, allow_None=True)
    callable = param.Callable(default_action, allow_None=True)
    class_selector = param.ClassSelector(int, is_instance=False, allow_None=True)
    color = param.Color("#FFFFFF", allow_None=True)
    composite = param.Composite(["action", "array"], allow_None=True)
    try:
        data_frame = param.DataFrame(
            pd.DataFrame({"A": 1.0, "B": np.arange(5)}), allow_None=True
        )
    except TypeError:
        data_frame = param.DataFrame(pd.DataFrame({"A": 1.0, "B": np.arange(5)}))
    date = param.Date(datetime.now(), allow_None=True)
    date_range = param.DateRange((datetime.min, datetime.max), allow_None=True)
    dict_ = param.Dict({"foo": "bar"}, allow_None=True, doc="dict means dictionary")
    dynamic = param.Dynamic(default=default_action, allow_None=True)
    file_selector = param.FileSelector(
        os.path.join(FILE_DIR_DIR, "LICENSE"),
        path=os.path.join(FILE_DIR_DIR, "*"),
        allow_None=True,
    )
    filename = param.Filename(
        os.path.join(FILE_DIR_DIR, "LICENSE"), allow_None=True
    )
    foldername = param.Foldername(os.path.join(FILE_DIR_DIR), allow_None=True)
    hook_list = param.HookList(
        [CallableObject(), CallableObject()], class_=CallableObject, allow_None=True
    )
    integer = param.Integer(10, allow_None=True)
    list_ = param.List([1, 2, 3], allow_None=True, class_=int)
    list_selector = param.ListSelector([2, 2], objects=[1, 2, 3], allow_None=True)
    magnitude = param.Magnitude(0.5, allow_None=True)
    multi_file_selector = param.MultiFileSelector(
        [],
        path=os.path.join(FILE_DIR_DIR, "*"),
        allow_None=True,
        check_on_set=True,
    )
    number = param.Number(-10.0, allow_None=True, doc="here is a number")
    numeric_tuple = param.NumericTuple((5.0, 10.0), allow_None=True)
    object_selector = param.ObjectSelector(
        False, objects={"False": False, "True": 1}, allow_None=True
    )
    path = param.Path(os.path.join(FILE_DIR_DIR, "LICENSE"), allow_None=True)
    range_ = param.Range((-1.0, 2.0), allow_None=True)
    series = param.Series(pd.Series(range(5)), allow_None=True)
    string = param.String("foo", allow_None=True, doc="this is a string")
    tuple_ = param.Tuple((3, 4, "fi"), allow_None=True)
    x_y_coordinates = param.XYCoordinates((1.0, 2.0), allow_None=True)
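
# Usage sketch (not part of the original class): because nearly every parameter
# above is declared with allow_None=True, instances accept None in addition to
# the declared type, while other values are still validated. Assumes the
# module-level helpers referenced by the class (default_action, CallableObject,
# FILE_DIR_DIR) are defined as in the surrounding module.
example = _BigDumbParams(name="example")
example.integer = None      # accepted: allow_None=True
example.number = -3.5       # accepted: validated as a Number
# example.integer = "ten"   # would raise ValueError: neither an integer nor None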
class CreateModel(param.Parameterized):
    """Abstract base class for creating models."""

    __abstract = True

    project_base_directory = param.Foldername(default=os.getcwd(), doc="""
        Base directory to which name will be appended to write project files to.""",
        precedence=0)

    project_name = param.String(default='vicksburg_south', doc="""
        Name of project. Required for new model.""")

    def _map_kw(self, p):
        kw = {}
        kw['project_directory'] = os.path.abspath(
            os.path.join(p.project_base_directory, p.project_name))
        # Currently allows overwriting existing files
        os.makedirs(kw['project_directory'], exist_ok=True)
        kw['project_name'] = p.project_name
        return kw

    def __call__(self, **params):
        raise NotImplementedError
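
# Hypothetical concrete subclass (illustrative only, not from the original
# source): CreateModel is abstract, so a subclass is expected to supply
# __call__ and can reuse _map_kw to resolve and create the project directory.
class CreateExampleModel(CreateModel):
    """Minimal sketch: create the project directory and return its path."""

    def __call__(self, **params):
        p = param.ParamOverrides(self, params)
        kw = self._map_kw(p)               # makes <base>/<project_name> on disk
        return kw['project_directory']

# e.g. CreateExampleModel()(project_name='demo_project')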
class MyParameterized(param.Parameterized):
    enable = param.Boolean(True, doc="A sample Boolean parameter", allow_None=True)
    what_proportion = param.Magnitude(default=0.9)
    age = param.Number(49, bounds=(0, 100), doc="Any Number between 0 and 100")
    how_many = param.Integer()
    favorite_quote = param.String(default="Hello, world!")

    choose_file_or_folder = param.Path(search_paths='./')
    choose_folder = param.Foldername(search_paths="./")
    choose_file = param.Filename(search_paths="./")
    select_a_file = param.FileSelector(path='./*')
    select_multiple_files = param.MultiFileSelector(path='./*')

    favorite_color = param.ObjectSelector(
        default="green", objects=["red", "yellow", "green"])
    favorite_fruit = param.Selector(default="Apple",
                                    objects=["Orange", "Apple", "Mango"])
    select_multiple = param.ListSelector(default=[3, 5], objects=[1, 2, 3, 4, 5])

    birthday = param.CalendarDate(dt.date(2017, 1, 1),
                                  bounds=(dt.date(2017, 1, 1), dt.date(2017, 2, 1)))
    appointment = param.Date(dt.datetime(2017, 1, 1),
                             bounds=(dt.datetime(2017, 1, 1), dt.datetime(2017, 2, 1)))
    least_favorite_color = param.Color(default='#FF0000')
    dataset = param.DataFrame(pd.util.testing.makeDataFrame().iloc[:3])

    this_strange_thing = param.Tuple(default=(False,), allow_None=True)
    some_numbers = param.NumericTuple(default=(1, 2, 3.0, 4.0))
    home_city = param.XYCoordinates(default=(-111.65, 40.23))
    bounds = param.Range(default=(-10, 10))
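
# Usage sketch (assumes the relative search_paths/globs above resolve against
# the current working directory, and that the installed pandas still provides
# pd.util.testing.makeDataFrame): parameter values are validated on assignment.
person = MyParameterized(age=30, favorite_fruit="Mango")
print(person.age, person.favorite_color)     # 30 green

try:
    person.age = 120                          # outside bounds=(0, 100)
except ValueError as err:
    print("rejected:", err)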
class AudioFolder(AudioFile):
    """
    Returns a rolling spectrogram, i.e. the spectral density over time of a
    rolling window of the input audio signal, for all files in the specified
    folder.
    """

    filename = param.Filename(precedence=(-1))

    folderpath = param.Foldername(
        default='sounds/sine_waves/normalized',
        doc="""Folder path (can be relative to Param's base path) to a
        folder containing audio files. The audio can be in any format
        accepted by audiolab, i.e. WAV, AIFF, or FLAC.""")

    gap_between_sounds = param.Number(
        default=0.0, bounds=(0.0, None),
        doc="""The gap in seconds to insert between consecutive soundfiles.""")

    def __init__(self, **params):
        super(AudioFolder, self).__init__(**params)
        self._load_audio_folder()

    def _load_audio_folder(self):
        folder_contents = os.listdir(self.folderpath)
        self.sound_files = []

        for file in folder_contents:
            if (file[-4:] == ".wav" or file[-3:] == ".wv"
                    or file[-5:] == ".aiff" or file[-4:] == ".aif"
                    or file[-5:] == ".flac"):
                self.sound_files.append(self.folderpath + "/" + file)

        self.filename = self.sound_files[0]
        self._load_audio_file()
        self.next_file = 1

    def extract_specific_interval(self, interval_start, interval_end):
        """
        Overload if special behaviour is required when a series ends.
        """
        interval_start = int(interval_start)
        interval_end = int(interval_end)

        if interval_start >= interval_end:
            raise ValueError(
                "Requested interval's start point is past the requested end point.")

        elif interval_start > self.time_series.size:
            if self.repeat:
                interval_end = interval_end - interval_start
                interval_start = 0
            else:
                raise ValueError(
                    "Requested interval's start point is past the end of the time series.")

        if interval_end < self.time_series.size:
            interval = self.time_series[interval_start:interval_end]
        else:
            requested_interval_size = interval_end - interval_start
            remaining_signal = self.time_series[interval_start:self.time_series.size]

            if self.next_file == len(self.sound_files) and self.repeat:
                self.next_file = 0

            if self.next_file < len(self.sound_files):
                next_source = audiolab.Sndfile(self.sound_files[self.next_file], 'r')
                self.next_file += 1

                if next_source.samplerate != self.sample_rate:
                    raise ValueError("All sound files must be of the same sample rate")

                if self.gap_between_sounds > 0:
                    remaining_signal = hstack(
                        (remaining_signal,
                         zeros(int(self.gap_between_sounds * self.sample_rate),
                               dtype=self.precision)))

                self.time_series = hstack(
                    (remaining_signal,
                     next_source.read_frames(next_source.nframes, dtype=self.precision)))

                interval = self.time_series[0:requested_interval_size]
                self._next_interval_start = requested_interval_size

            else:
                self.warning("Returning last interval of the time series.")
                self._next_interval_start = self.time_series.size + 1

                samples_per_interval = self.interval_length * self.sample_rate
                interval = hstack(
                    (remaining_signal,
                     zeros(samples_per_interval - remaining_signal.size)))

        return interval
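
# Hedged usage sketch: assumes the AudioFile base class (not shown here)
# provides sample_rate, interval_length, precision, repeat and
# _load_audio_file(), and that the folder contains at least one supported file.
sounds = AudioFolder(folderpath='sounds/sine_waves/normalized',
                     gap_between_sounds=0.5)
# Request samples [0, 1024) of the rolling signal; crossing a file boundary
# splices in the next file in the folder automatically.
chunk = sounds.extract_specific_interval(0, 1024)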
class AdhModel(Model):
    """
    Class object to hold all data related to an AdH simulation.

    The AdhModel object primarily stores, modifies, and outputs data. It
    contains pass-through methods for reading and writing the mesh, hotstart,
    boundary condition, and result files. Included are methods to read
    individual AdH files or read a suite of model files. There is also a
    validate method to ensure that the data within the model is valid and
    consistent across internal objects.

    An AdhModel object contains a suite of model parameters and one AdhMesh
    object. The AdhMesh object contains the mesh itself and an AdhSimulation
    object. The AdhSimulation object stores the boundary condition
    information, the hotstart, and the results - all the data for an
    individual model run.
    """

    model_name = param.String(
        default="adh_model",
        doc="File name prefix for AdH input files.",
    )
    model_path = param.Foldername(
        default=os.getcwd(),
        doc="Path on disk where AdH files are located.",
    )
    version = param.Number(default=5.0,
                           bounds=(4.5, None),
                           softbounds=(4.5, 5.0),
                           doc="Version of the AdH executable")
    project_name = param.String(default='default', doc='Global project name')
    units = param.ObjectSelector(default='meters', objects=['meters', 'feet'])
    path_type = param.ClassSelector(default=GeoPath, class_=Path, is_instance=False,
                                    doc="""The element type to draw into.""")
    mesh = param.ClassSelector(class_=AdhMesh)

    def __init__(self, **params):
        super(AdhModel, self).__init__(**params)
        proj = Projection()
        if 'crs' in params:
            proj.set_crs(params['crs'])
        else:
            proj.set_crs(ccrs.GOOGLE_MERCATOR)
        self.mesh = AdhMesh(projection=proj)

    @property
    def projection(self):
        return self.mesh.projection

    @property
    def simulation(self):
        return self.mesh.current_sim

    def read(self, path, project_name='*', crs=None, fmt='nc'):
        """Read in AdH model files as an xarray.Dataset object.

        Args:
            path (str, required): path to the AdH project files.
            project_name (str, optional, default='*'): the root name of the
                AdH project. If not specified then it will be derived from
                the first mesh file found in `path`.
            crs (cartopy.CRS, optional, default=None): the projection of the
                mesh file.
            fmt (str, optional, default='nc'): the format of the file being
                passed in. Valid options are ['nc', '2dm', '3dm'].

        Returns:
            xarray.Dataset with variables for the nodes, mesh elements,
            output datasets, and hotstart file datasets.
        """
        # TODO look at filename for default format?
        fmts = {
            'nc': self.from_netcdf,
            'ascii': self.from_ascii,
            '2dm': self.from_ascii,
            '3dm': self.from_ascii
        }
        return fmts[fmt](path=path, project_name=project_name, crs=crs)

    def write(self, path, fmt='nc'):
        if fmt != 'nc':
            raise IOError('The only option currently available is nc (netcdf)')
        else:
            # write mesh
            self.write_mesh(file_name=path, fmt=fmt)
            # write hotstart
            self.write_hotstart(file_name=path)
            # write boundary conditions
            self.write_bc(file_name=path, validate=True, fmt='bc')
            # write results
            self.write_results(file_name=path, fmt='nc')

    def from_netcdf(self, *args, **kwargs):
        """Read a suite of model files from a netCDF file and store the data
        in this model object.

        NOTE: Boundary conditions are not stored in netCDF, so they must be
        read from a *.bc file.

        Args:
            *args: variable length argument list.
            **kwargs: arbitrary keyword arguments.

        Returns:
            None
        """
        nc_file = os.path.join(f'{kwargs["path"]}', f'{kwargs["project_name"]}.nc')
        # read mesh
        self.read_mesh(*args, **kwargs)
        # read hotstart
        self.read_hotstart(path=nc_file, fmt='nc')
        # read boundary conditions (must be read as ascii)
        # todo add warning?
        bc_file = os.path.join(f'{kwargs["path"]}', f'{kwargs["project_name"]}.bc')
        self.read_bc(bc_file, fmt='bc')
        # read results
        self.read_results(path=nc_file, fmt='nc')

    def from_ascii(self, *args, **kwargs):
        """Read a suite of model files from ASCII files and store the data in
        this model object.

        Args:
            *args: variable length argument list.
            **kwargs: arbitrary keyword arguments.

        Returns:
            None
        """
        # set the mesh file name
        mesh_file = os.path.join(f'{kwargs["path"]}', f'{kwargs["project_name"]}.3dm')
        # read mesh
        self.read_mesh(mesh_file,
                       project_name=kwargs['project_name'],
                       crs=kwargs['crs'],
                       fmt='3dm')
        # set hotstart file name
        hot_file = os.path.join(f'{kwargs["path"]}', f'{kwargs["project_name"]}.hot')
        # read hotstart
        self.read_hotstart(path=hot_file, fmt='ascii')
        # read boundary conditions
        bc_file = os.path.join(f'{kwargs["path"]}', f'{kwargs["project_name"]}.bc')
        self.read_bc(bc_file, fmt='bc')
        # read results
        self.read_results(kwargs['path'],
                          project_name=kwargs['project_name'],
                          fmt='ascii')

    def read_mesh(self, *args, **kwargs):
        return self.mesh.read(*args, **kwargs)

    def write_mesh(self, *args, **kwargs):
        return self.mesh.write(*args, **kwargs)

    def read_bc(self, *args, **kwargs):
        return self.simulation.read_bc(*args, **kwargs)

    def write_bc(self, *args, **kwargs):
        return self.simulation.write_bc(*args, **kwargs)

    def read_hotstart(self, *args, **kwargs):
        return self.simulation.read_hotstart(*args, **kwargs)

    def write_hotstart(self, *args, **kwargs):
        return self.simulation.write_hotstart(*args, **kwargs)

    def read_results(self, *args, **kwargs):
        return self.simulation.read_results(*args, **kwargs)

    def write_results(self, *args, **kwargs):
        return self.simulation.write_results(*args, **kwargs)

    def read_result(self, *args, **kwargs):
        return self.simulation.read_result(*args, **kwargs)

    def write_result(self, *args, **kwargs):
        return self.simulation.write_result(*args, **kwargs)

    def validate(self):
        # ensure mesh units and model units match
        if self.units != self.mesh.units:
            log.warning('Model units do not match mesh units')
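
# Hedged usage sketch: the path and project name are illustrative, and the AdH
# project files ('<project_name>.nc', or the ASCII *.3dm/*.hot/*.bc suite) are
# assumed to exist under model_path.
model = AdhModel(model_path='/data/adh/vicksburg', project_name='vicksburg_south')
model.read(path=model.model_path, project_name=model.project_name, fmt='nc')
model.validate()     # logs a warning if mesh units and model units disagree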