def multidimensional_meshgrid(*arrays):
    """
    Utility function to create a multidimensional grid based on a list
    of arrays. Each array defines a range in one dimension.

    Returns a tuple of arrays (or quantities, when the inputs carry
    units), one per input, each expanded to the full grid shape.
    """
    reversed_quantities = tuple(reversed(arrays))
    # BUGFIX: under Python 3, map() returns a one-shot iterator which the
    # original code both iterated and indexed; materialize it as a list.
    lengths = [len(quantity) for quantity in reversed_quantities]
    dim = len(reversed_quantities)
    result = []
    for i, quantity in enumerate(reversed_quantities):
        # shape like (1, ..., len_i, ..., 1); plain `int` replaces the
        # removed numpy.int alias
        shape = numpy.ones(dim, dtype=int)
        shape[i] = lengths[i]
        if quantities.is_quantity(quantity):
            array = quantity.value_in(quantity.unit)
        else:
            array = quantity
        array = numpy.asanyarray(array).reshape(shape)
        # replicate along every other axis to fill the grid
        for j, length in enumerate(lengths):
            if j != i:
                array = array.repeat(length, axis=j)
        if quantities.is_quantity(quantity):
            result.append(quantity.unit.new_quantity(array))
        else:
            result.append(array)
    # restore the caller's original dimension order
    return tuple(result[::-1])
def set_values_in_store(self, indices, attributes, list_of_values_to_set):
    """
    Store the given values for the given attributes at *indices*,
    allocating in-memory storage the first time an attribute is set.
    """
    for attribute, values_to_set in zip(attributes, list_of_values_to_set):
        if attribute in self.mapping_from_attribute_to_quantities:
            storage = self.mapping_from_attribute_to_quantities[attribute]
        else:
            # first use of this attribute: allocate storage sized to the
            # full particle set
            storage = InMemoryAttribute.new_attribute(attribute, len(self.particle_keys), values_to_set)
            self.mapping_from_attribute_to_quantities[attribute] = storage
        try:
            storage.set_values(indices, values_to_set)
        except ValueError as ex:
            # hack to set values between
            # quantities with units.none
            # and values stored without units;
            # to be removed when units.none is completely gone
            if is_quantity(values_to_set) and not storage.has_units():
                # quantity given, unitless storage: only a dimensionless
                # quantity (empty unit base) may be unwrapped
                if not values_to_set.unit.base:
                    storage.set_values(indices, values_to_set.value_in(units.none))
                else:
                    raise AttributeError("exception in setting attribute '{0}', error was '{1}'".format(attribute, ex))
            elif not is_quantity(values_to_set) and storage.has_units():
                # plain values given, unitful storage: only allowed when
                # the stored unit is dimensionless
                if not storage.quantity.unit.base:
                    storage.set_values(indices, units.none.new_quantity(values_to_set))
                else:
                    raise AttributeError("exception in setting attribute '{0}', error was '{1}'".format(attribute, ex))
            else:
                raise AttributeError("exception in setting attribute '{0}', error was '{1}'".format(attribute, ex))
def multidimensional_meshgrid(*arrays):
    """
    Utility function to create a multidimensional grid based on a list
    of arrays. Each array defines a range in one dimension.

    Returns a tuple of arrays (or quantities, when the inputs carry
    units), one per input, each expanded to the full grid shape.
    """
    reversed_quantities = tuple(reversed(arrays))
    # BUGFIX: map() is a one-shot iterator in Python 3 but was iterated
    # and indexed repeatedly; use a list instead.
    lengths = [len(quantity) for quantity in reversed_quantities]
    dim = len(reversed_quantities)
    result = []
    for i, quantity in enumerate(reversed_quantities):
        # BUGFIX: numpy.ones(dim) yields a float array, which reshape
        # rejects as a shape specification; force an integer dtype.
        shape = numpy.ones(dim, dtype=int)
        shape[i] = lengths[i]
        if quantities.is_quantity(quantity):
            array = quantity.value_in(quantity.unit)
        else:
            array = quantity
        array = numpy.asanyarray(array).reshape(shape)
        # replicate along every other axis to fill the grid
        for j, length in enumerate(lengths):
            if j != i:
                array = array.repeat(length, axis=j)
        if quantities.is_quantity(quantity):
            result.append(quantity.unit.new_quantity(array))
        else:
            result.append(array)
    # restore the caller's original dimension order
    return tuple(result[::-1])
def contour(*args, **kwargs):
    """
    Unit-aware wrapper around native_plot.contour: strips units from
    the positional arguments and converts quantity contour levels to
    the z-axis unit before delegating.
    """
    if len(args)%2 == 0:
        # an even count means the last positional argument is the
        # contour levels (the x,y,z data come in unit-carrying pairs)
        stripped_args = UnitlessArgs.strip(*args[:-1])
        levels = args[-1]
        z_unit = UnitlessArgs.arg_units[-1]
        if quantities.is_quantity(levels):
            stripped_args.append(levels.value_in(z_unit))
        # NOTE(review): non-quantity levels are silently dropped in this
        # branch — looks unintentional; confirm whether an else-append
        # is missing
    else:
        stripped_args = UnitlessArgs.strip(*args)
        if 'levels' in kwargs:
            levels = kwargs['levels']
            z_unit = UnitlessArgs.arg_units[-1]
            if quantities.is_quantity(levels):
                kwargs['levels'] = levels.value_in(z_unit)
    result = native_plot.contour(*stripped_args, **kwargs)
    native_plot.xlabel(UnitlessArgs.x_label())
    native_plot.ylabel(UnitlessArgs.y_label())
    return result
def _convert_to_numeric(self, first, second, in_units):
    """Return (first, second) as plain numbers expressed in a common unit."""
    if in_units:
        # an explicit unit was requested: express both operands in it
        return first.value_in(in_units), second.value_in(in_units)
    if is_quantity(first) or is_quantity(second):
        # no unit given: use the second operand's unit as the reference
        reference_unit = to_quantity(second).unit
        return (to_quantity(first).value_in(reference_unit),
                to_quantity(second).value_in(reference_unit))
    # both already plain numbers
    return first, second
def as_three_vector(self, array):
    """Replicate each element of *array* into a 3-component vector, keeping units."""
    has_unit = quantities.is_quantity(array)
    number = array.number if has_unit else array
    # duplicate the numeric values into three identical components
    three_vector = numpy.transpose([number] * 3)
    if has_unit:
        three_vector = three_vector | array.unit
    return three_vector
def _convert_to_vectors(self, first, second):
    """
    Broadcast *first* and *second* against each other and return both
    as flattened, equal-length vectors (quantities stay quantities).

    Raises TypeError when the shapes are not broadcast-compatible.
    """
    x = first if is_quantity(first) else numpy.array(first)
    y = second if is_quantity(second) else numpy.array(second)
    try:
        # adding zero times the other operand broadcasts both to the
        # common shape without changing any values
        return (x+(y*0)).flatten(), (y+(x*0)).flatten()
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; Exception keeps the intended
        # error translation without hiding interpreter exits
        raise TypeError("Arguments do not have compatible shapes for broadcasting")
def _check_comparable(self, first, second):
    """Raise TypeError when a dimensioned quantity is compared with a plain value."""
    first_is_quantity = is_quantity(first)
    second_is_quantity = is_quantity(second)
    if first_is_quantity:
        # a quantity may only be compared with a plain value when it
        # carries the dimensionless `none` unit
        if not second_is_quantity and not first.unit.base == none.base:
            raise TypeError("Cannot compare quantity: {0} with non-quantity: {1}.".format(first, second))
    elif second_is_quantity:
        # first is known to be a plain value here, so only the second
        # operand's unit needs checking
        if not second.unit.base == none.base:
            raise TypeError("Cannot compare non-quantity: {0} with quantity: {1}.".format(first, second))
def _check_comparable(self, first, second):
    """Raise TypeError when a dimensioned quantity is compared with a plain value."""
    first_is_quantity = is_quantity(first)
    second_is_quantity = is_quantity(second)
    if first_is_quantity:
        # a quantity may only be compared with a plain value when its
        # unit is the dimensionless `none` unit
        if not second_is_quantity and not first.unit.is_none():
            raise TypeError("Cannot compare quantity: {0} with non-quantity: {1}.".format(first, second))
    elif second_is_quantity:
        # first is known to be a plain value here, so only the second
        # operand's unit needs checking
        if not second.unit.is_none():
            raise TypeError("Cannot compare non-quantity: {0} with quantity: {1}.".format(first, second))
def _check_comparable(self, first, second):
    """Disallow comparing a quantity with a non-quantity (none-unit excepted)."""
    if is_quantity(first) is not is_quantity(second):
        # One exception: quantity with none_unit CAN be compared with non-quantity:
        if not to_quantity(first).unit == to_quantity(second).unit:
            if isinstance(first, Quantity):
                quantity_arg, plain_arg = first, second
            else:
                quantity_arg, plain_arg = second, first
            raise TypeError(
                "Cannot compare quantity: {0} with non-quantity: {1}.".format(
                    quantity_arg, plain_arg))
def set_value(self, parameter, object, quantity):
    """
    Cache *quantity* as the parameter's value, wrapping a bare number
    in the default value's unit when that unit is non-numeric or
    dimensionless.
    """
    if is_quantity(self.default_value):
        unit = self.default_value.unit
        if unit.is_non_numeric() or len(unit.base) == 0:
            if not is_quantity(quantity):
                # bare value given for a unitless/non-numeric parameter:
                # attach the default's unit so downstream code sees a
                # quantity
                quantity = quantity | unit
    parameter.cached_value = quantity
    parameter.is_set = True
def converted_keyword_and_list_arguments(self, arguments_list, keyword_arguments):
    """
    Map positional and keyword call arguments onto the specification's
    input parameters: fill the per-dtype value arrays, record the unit
    of each quantity argument, and fall back to declared defaults.

    Returns (dtype_to_values, units); raises CodeException when a
    required parameter was not supplied.
    """
    from amuse.units import quantities
    dtype_to_values = self.specification.new_dtype_to_values()
    # one slot per input parameter; stays None for unitless arguments
    units = [None] * len(self.specification.input_parameters)
    input_parameters_seen = set(
        map(lambda x: x.name, self.specification.input_parameters))
    names_in_argument_list = set([])
    # 1) positional arguments, in declaration order
    for index, argument in enumerate(arguments_list):
        parameter = self.specification.input_parameters[index]
        names_in_argument_list.add(parameter.name)
        if quantities.is_quantity(argument):
            # strip the unit but remember it for converting outputs
            units[parameter.index_in_input] = argument.unit
            argument = argument.number
        values = dtype_to_values[parameter.datatype]
        values[parameter.input_index] = argument
        input_parameters_seen.remove(parameter.name)
    # 2) keyword arguments for parameters not filled positionally
    for index, parameter in enumerate(self.specification.input_parameters):
        if parameter.name in keyword_arguments:
            argument = keyword_arguments[parameter.name]
            if quantities.is_quantity(argument):
                units[parameter.index_in_input] = argument.unit
                argument = argument.number
            values = dtype_to_values[parameter.datatype]
            values[parameter.input_index] = argument
            input_parameters_seen.remove(parameter.name)
    # 3) declared defaults for anything still missing
    for parameter in self.specification.input_parameters:
        if (parameter.name in input_parameters_seen
                ) and parameter.has_default_value():
            argument = parameter.default
            if quantities.is_quantity(argument):
                units[parameter.index_in_input] = argument.unit
                argument = argument.number
            values = dtype_to_values[parameter.datatype]
            values[parameter.input_index] = argument
            input_parameters_seen.remove(parameter.name)
    if input_parameters_seen:
        raise exceptions.CodeException(
            "Not enough parameters in call, missing " +
            str(sorted(input_parameters_seen)))
    return dtype_to_values, units
def fill_output_message(self, output_message, index, result, keyword_arguments, specification, units):
    """
    Copy the function result and every OUT/INOUT parameter value into
    *output_message*, stripping units (recorded per output slot in
    *units*) when the specification carries units.
    """
    from amuse.units import quantities
    if not specification.result_type is None:
        attribute = self.dtype_to_message_attribute[specification.result_type]
        if specification.must_handle_array:
            # array-capable functions fill the whole result slot at once
            getattr(output_message, attribute)[0] = result
        else:
            getattr(output_message, attribute)[0][index] = result
    for parameter in specification.parameters:
        attribute = self.dtype_to_message_attribute[parameter.datatype]
        if (parameter.direction == LegacyFunctionSpecification.OUT
                or parameter.direction == LegacyFunctionSpecification.INOUT):
            argument_value = keyword_arguments[parameter.name]
            output = argument_value.value
            if specification.has_units:
                unit = output.unit if quantities.is_quantity(output) else None
                if specification.must_handle_array or index == 0:
                    # the first call fixes the unit for this output slot;
                    # later calls convert into it
                    units[parameter.index_in_output] = unit
                else:
                    unit = units[parameter.index_in_output]
                if not unit is None:
                    output = output.value_in(unit)
            if specification.must_handle_array:
                getattr(output_message, attribute)[parameter.output_index] = output
            else:
                getattr(output_message, attribute)[parameter.output_index][index] = output
def set_values_in_store_async(self, indices, attributes, quantities):
    """Asynchronously write attribute values; returns the chained async request."""
    array_of_indices = self._to_arrays_of_indices(indices)
    flat_indices = [ix.reshape(-1) for ix in array_of_indices]
    if len(flat_indices) == 0:
        # nothing to index against: pass the values through unchanged
        flat_values = [q for q in quantities]
    else:
        # setters expect 1-D arrays, so flatten every value
        flat_values = [
            q.reshape(-1) if is_quantity(q) else numpy.asanyarray(q).reshape(-1)
            for q in quantities
        ]
    chosen_setters = list(self.select_setters_for(attributes))

    def next_request(index, setters):
        # issue one setter per step; None signals the sequence is done
        if index >= len(setters):
            return None
        return setters[index].set_attribute_values_async(
            self, attributes, flat_values, *flat_indices)

    return ASyncRequestSequence(next_request, args=(chosen_setters,))
def read_inifile_parameters(self, configfile):
    """
    Read an INI file and update self._inifile_parameters: known keys
    are interpreted with their declared dtype (and the default's unit
    when the default is a quantity); unknown keys are recorded as
    plain "ini" parameters.
    """
    self._configfile = configfile
    parser = ConfigParser()
    parser.optionxform = self._optionxform
    parser.read(configfile)
    for section in parser.sections():
        group = section
        for option in parser.options(section):
            key = (option, group)
            if key in self._inifile_parameters:
                dtype = self._inifile_parameters[key]["dtype"]
                value = self.interpret_value(parser.get(group, option), dtype=dtype)
                if is_quantity(self._inifile_parameters[key]["default"]):
                    # BUGFIX: the original referenced an undefined name
                    # `val` here, raising NameError for quantity defaults
                    value = new_quantity(
                        value,
                        to_quantity(self._inifile_parameters[key]["default"]).unit)
                self._inifile_parameters[key]["value"] = value
            else:
                # unknown key: keep it as an untyped "ini" parameter
                value = self.interpret_value(parser.get(group, option))
                self._inifile_parameters[key] = dict(
                    group_name=group,
                    name=option,
                    set_name=group,
                    default=value,
                    value=value,
                    short=option,
                    ptype="ini",
                    dtype="unknown",
                    description="unknown parameter read from %s" % configfile)
def new_attribute(cls, name, shape, input, group):
    """
    Create HDF5-backed storage for a new attribute, dispatching on the
    type of *input*: quantity, linked set, unicode strings, or plain
    values.
    """
    if is_quantity(input):
        if not hasattr(shape, '__iter__'):
            shape = shape,
        dataset = group.create_dataset(name, shape=shape, dtype=input.number.dtype)
        # record the unit so the loader can reattach it
        dataset.attrs["units"] = input.unit.to_simple_form(
        ).reference_string().encode("ascii")
        return HDF5VectorQuantityAttribute(name, dataset, input.unit)
    elif hasattr(input, 'as_set'):
        subgroup = group.create_group(name)
        group.create_dataset('keys', shape=shape, dtype=input.key.dtype)
        # BUGFIX: numpy.bool was removed from numpy; builtin bool maps
        # to the same dtype
        group.create_dataset('masked', shape=shape, dtype=bool)
        return HDF5LinkedAttribute(name, subgroup)
    else:
        dtype = numpy.asanyarray(input).dtype
        if dtype.kind == 'U':
            # BUGFIX: 'S' + int raised TypeError (itemsize must be str);
            # also create the dataset with the byte dtype — HDF5 cannot
            # store numpy 'U' data, the strings are written UTF-32 encoded
            new_dtype = numpy.dtype('S' + str(dtype.itemsize * 4))
            dataset = group.create_dataset(name, shape=shape, dtype=new_dtype)
            dataset.attrs["units"] = "UNICODE".encode('ascii')
            return HDF5UnicodeAttribute(name, dataset)
        else:
            if not hasattr(shape, '__iter__'):
                shape = shape,
            dataset = group.create_dataset(name, shape=shape, dtype=dtype)
            dataset.attrs["units"] = "none".encode("ascii")
            return HDF5UnitlessAttribute(name, dataset)
def store_values(self, container, group, links=[]):
    """
    Write every attribute of *container* into an HDF5 "attributes"
    group: quantities with a unit string, LinkedArrays via
    store_linked_array, unicode data as UTF-32BE bytes, plain values
    as-is.
    """
    # NOTE(review): mutable default `links=[]` is shared across calls —
    # confirm callers always pass their own list
    attributes_group = group.create_group("attributes")
    all_values = container.get_values_in_store(
        Ellipsis, container.get_attribute_names_defined_in_store())
    for attribute, quantity in zip(
            container.get_attribute_names_defined_in_store(), all_values):
        if is_quantity(quantity):
            value = quantity.value_in(quantity.unit)
            dataset = attributes_group.create_dataset(attribute, data=value)
            # record the unit so the loader can reattach it
            dataset.attrs["units"] = quantity.unit.to_simple_form(
            ).reference_string().encode("ascii")
        elif isinstance(quantity, LinkedArray):
            self.store_linked_array(attribute, attributes_group, quantity,
                                    group, links)
        else:
            dtype = numpy.asanyarray(quantity).dtype
            if dtype.kind == 'U':
                # HDF5 cannot hold numpy unicode directly; store bytes
                dataset = attributes_group.create_dataset(
                    attribute, data=numpy.char.encode(quantity, 'UTF-32BE'))
                dataset.attrs["units"] = "UNICODE".encode('ascii')
            else:
                dataset = attributes_group.create_dataset(attribute,
                                                          data=quantity)
                dataset.attrs["units"] = "none".encode('ascii')
def store_collection_attributes(self, container, group, extra_attributes, links):
    """
    Store collection-level attributes plus *extra_attributes* as HDF5
    group attrs, writing a companion "<name>_unit" attr that tells the
    loader how to restore each value (unit string, particle, gridpoint,
    set, or none). Python 2 code (dict.iteritems).
    """
    collection_attributes = container.collection_attributes.__getstate__()
    arguments_and_attributes = {}
    arguments_and_attributes.update(collection_attributes)
    arguments_and_attributes.update(extra_attributes)
    # kept for the commented-out reference-based storage below
    ref_dtype = h5py.special_dtype(ref=h5py.Reference)
    for name, quantity in arguments_and_attributes.iteritems():
        if quantity is None:
            continue
        if is_quantity(quantity):
            group.attrs[name] = quantity.value_in(quantity.unit)
            group.attrs[name + "_unit"] = quantity.unit.reference_string()
        elif isinstance(quantity, Particle):
            # group.attrs[name] = ref_dtype(None)
            # a single particle is stored by key and resolved later
            group.attrs[name + "_key"] = quantity.key
            group.attrs[name + "_unit"] = "particle"
            links.append(UneresolvedAttributeLink(group, name, quantity.get_containing_set()))
        elif isinstance(quantity, GridPoint):
            # group.attrs[name] = ref_dtype(None)
            group.attrs[name + "_index"] = quantity.index
            group.attrs[name + "_unit"] = "gridpoint"
            links.append(UneresolvedAttributeLink(group, name, quantity.get_containing_set()))
        elif isinstance(quantity, AbstractSet):
            # group.attrs[name] = ref_dtype(None)
            group.attrs[name + "_unit"] = "set"
            links.append(UneresolvedAttributeLink(group, name, quantity._original_set()))
        else:
            group.attrs[name] = quantity
            group.attrs[name + "_unit"] = "none"
def store_values(self, container, group, links=[]):
    """
    Write every attribute of *container* into an HDF5 "attributes"
    group: quantities with a unit string, particle references as a
    keys/masked subgroup (resolved later via *links*), unicode as
    UTF-32BE bytes, plain values as-is.
    """
    # NOTE(review): mutable default `links=[]` is shared across calls —
    # confirm callers always pass their own list
    attributes_group = group.create_group("attributes")
    all_values = container.get_values_in_store(
        None, container.get_attribute_names_defined_in_store())
    for attribute, quantity in zip(
            container.get_attribute_names_defined_in_store(), all_values):
        if is_quantity(quantity):
            value = quantity.value_in(quantity.unit)
            dataset = attributes_group.create_dataset(attribute, data=value)
            # record the unit so the loader can reattach it
            dataset.attrs["units"] = quantity.unit.to_simple_form(
            ).reference_string().encode("ascii")
        elif hasattr(quantity, 'as_set'):
            # linked particles: store keys + validity mask, defer the
            # actual set resolution via the links list
            quantity = quantity.as_set()
            subgroup = attributes_group.create_group(attribute)
            keys = quantity.get_all_keys_in_store()
            masked = ~quantity.get_valid_particles_mask()
            links.append([subgroup, quantity.as_set()._original_set()])
            subgroup.create_dataset('keys', data=keys)
            subgroup.create_dataset('masked', data=masked)
            subgroup.attrs["units"] = "object".encode("ascii")
        else:
            dtype = numpy.asanyarray(quantity).dtype
            if dtype.kind == 'U':
                # HDF5 cannot hold numpy unicode directly; store bytes
                dataset = attributes_group.create_dataset(
                    attribute, data=numpy.char.encode(quantity, 'UTF-32BE'))
                dataset.attrs["units"] = "UNICODE".encode("ascii")
            else:
                dataset = attributes_group.create_dataset(attribute,
                                                          data=quantity)
                dataset.attrs["units"] = "none".encode("ascii")
def set_values_in_store(self, indices, attributes, quantities):
    """Write the given attribute values at the given grid indices."""
    array_of_indices = self._to_arrays_of_indices(indices)
    # setters expect flat 1-D arrays for both values and indices
    flat_values = []
    for q in quantities:
        flat_values.append(
            q.reshape(-1) if is_quantity(q) else numpy.asanyarray(q).reshape(-1))
    flat_indices = [ix.reshape(-1) for ix in array_of_indices]
    for setter in self.select_setters_for(attributes):
        setter.set_attribute_values(self, attributes, flat_values, *flat_indices)
def new_attribute(cls, name, shape, input, group):
    """
    Create HDF5-backed storage for a new attribute: a quantity dataset,
    unicode strings (stored as bytes), or plain unitless values.
    Linked attributes are rejected.
    """
    if is_quantity(input):
        if not hasattr(shape, '__iter__'):
            shape = shape,
        dataset = group.create_dataset(name, shape=shape, dtype=input.number.dtype)
        dataset.attrs["units"] = input.unit.to_simple_form().reference_string()
        return HDF5VectorQuantityAttribute(name, dataset, input.unit)
    elif hasattr(input, 'as_set'):
        # appending linked attributes to an existing HDF file is not
        # supported (unreachable code after this raise was removed)
        raise Exception("adding a linked attribute to a set stored in a HDF file is not supported, alternative is to copy the set and save it")
    else:
        dtype = numpy.asanyarray(input).dtype
        if dtype.kind == 'U':
            # BUGFIX: 'S' + int raised TypeError (itemsize must be str);
            # also create the dataset with the byte dtype — HDF5 cannot
            # store numpy 'U' data, strings are written UTF-32 encoded
            new_dtype = numpy.dtype('S' + str(dtype.itemsize * 4))
            dataset = group.create_dataset(name, shape=shape, dtype=new_dtype)
            dataset.attrs["units"] = "UNICODE"
            return HDF5UnicodeAttribute(name, dataset)
        else:
            dataset = group.create_dataset(name, shape=shape, dtype=dtype)
            dataset.attrs["units"] = "none"
            return HDF5UnitlessAttribute(name, dataset)
def plot_result(self, rtime=0.0):
    """
    Plot all current eddy tracks at model time *rtime* (days, or an
    AMUSE time quantity which is converted to days).
    """
    if is_quantity(rtime):
        rtime = rtime.value_in(units.day)
    # Get timing
    ymd_str = self.rtime_to_ymdstr(rtime)
    # Set coordinates for figures
    grd = self.grd
    Mx, My = grd.M(grd.lon(), grd.lat())
    MMx, MMy = Mx, My
    anim_figure(self.A_eddy, self.C_eddy, Mx, My, MMx, MMy,
                plt.cm.RdBu_r, rtime, self.DIAGNOSTIC_TYPE, self.SAVE_DIR,
                'ALL ' + ymd_str, self.animax, self.animax_cbar,
                track_length=7 / self.days_between,
                plot_all=True)  # plot all tracks of at least 28 days
def write_namelist_parameters(self, outputfile, do_patch=False, nml_file=None):
    """
    Write (or, with do_patch=True, patch) the Fortran namelist file
    from the current parameter-set values.
    """
    patch = defaultdict(dict)
    for descriptor in self._namelist_parameters.values():
        name = descriptor["name"]
        group_name = descriptor["group_name"]
        group = patch[group_name]
        short = descriptor["short"]
        set_name = descriptor.get("set_name", "parameters_" + group_name)
        parameter_set = getattr(self, set_name)
        current = getattr(parameter_set, name)
        if current is None:
            # omit if value is None
            continue
        if is_quantity(descriptor["default"]):
            # express the value in the unit of the declared default
            value = to_quantity(current).value_in(descriptor["default"].unit)
        else:
            value = current
        if isinstance(value, numpy.ndarray):
            # necessary until f90nml supports numpy arrays
            value = list(value)
        group[short] = value
    if do_patch:
        f90nml.patch(nml_file or self._nml_file, patch, outputfile)
    else:
        f90nml.write(patch, outputfile, force=True)
def new_attribute(cls, name, shape, input, group):
    """
    Create HDF5-backed storage for a new attribute: a quantity dataset,
    unicode strings (stored as bytes), or plain unitless values.
    Linked attributes are rejected.
    """
    if is_quantity(input):
        if not hasattr(shape, '__iter__'):
            shape = shape,
        dtype = numpy.asanyarray(input.number).dtype
        dataset = group.create_dataset(name, shape=shape, dtype=dtype)
        # record the unit so the loader can reattach it
        dataset.attrs["units"] = input.unit.to_simple_form(
        ).reference_string().encode('ascii')
        return HDF5VectorQuantityAttribute(name, dataset, input.unit)
    elif hasattr(input, 'as_set'):
        # appending linked attributes to an existing HDF file is not
        # supported (unreachable code after this raise was removed)
        raise Exception(
            "adding a linked attribute to a set stored in a HDF file is not supported, alternative is to copy the set and save it"
        )
    else:
        dtype = numpy.asanyarray(input).dtype
        if dtype.kind == 'U':
            # BUGFIX: 'S' + int raised TypeError (itemsize must be str);
            # also create the dataset with the byte dtype — HDF5 cannot
            # store numpy 'U' data, strings are written UTF-32 encoded
            new_dtype = numpy.dtype('S' + str(dtype.itemsize * 4))
            dataset = group.create_dataset(name, shape=shape, dtype=new_dtype)
            dataset.attrs["units"] = "UNICODE".encode('ascii')
            return HDF5UnicodeAttribute(name, dataset)
        else:
            if not hasattr(shape, '__iter__'):
                shape = shape,
            dataset = group.create_dataset(name, shape=shape, dtype=dtype)
            dataset.attrs["units"] = "none".encode('ascii')
            return HDF5UnitlessAttribute(name, dataset)
def read_namelist_parameters(self, inputfile):
    """
    Read a Fortran namelist file and copy known values into the
    matching parameter sets, converting to the default's unit when the
    default is a quantity. Python 2 code (iteritems, print statement).
    """
    self._nml_file = inputfile
    self._nml_params = f90nml.read(inputfile)
    for group, d in self._nml_params.iteritems():
        for short, val in d.iteritems():
            # lookup keys use the upper-cased group name
            key = (short, group.upper())
            if key in self._namelist_parameters:
                group_name = self._namelist_parameters[key]["group_name"]
                name = self._namelist_parameters[key]["name"]
                parameter_set_name = self._namelist_parameters[key].get(
                    "set_name", "parameters_" + group_name)
                parameter_set = getattr(self, parameter_set_name)
                if is_quantity(self._namelist_parameters[key]["default"]):
                    # attach the default's unit to the raw namelist value
                    setattr(
                        parameter_set, name,
                        new_quantity(
                            val,
                            to_quantity(self._namelist_parameters[key]
                                        ["default"]).unit))
                else:
                    setattr(parameter_set, name, val)
            else:
                print "'%s' of group '%s' not in the namelist_parameters" % (
                    short, group)
def store_values(self, container, group, links=[]):
    """
    Write every attribute of *container* into an HDF5 "attributes"
    group: quantities with a unit string, particle references as a
    keys/masked subgroup (resolved later via *links*), unicode as
    UTF-32BE bytes, plain values as-is.
    """
    # NOTE(review): mutable default `links=[]` is shared across calls —
    # confirm callers always pass their own list
    attributes_group = group.create_group("attributes")
    all_values = container.get_values_in_store(None, container.get_attribute_names_defined_in_store())
    for attribute, quantity in zip(container.get_attribute_names_defined_in_store(), all_values):
        if is_quantity(quantity):
            value = quantity.value_in(quantity.unit)
            dataset = attributes_group.create_dataset(attribute, data=value)
            # record the unit so the loader can reattach it
            dataset.attrs["units"] = quantity.unit.to_simple_form().reference_string()
        elif hasattr(quantity, "as_set"):
            # linked particles: store keys + validity mask, defer the
            # actual set resolution via the links list
            quantity = quantity.as_set()
            subgroup = attributes_group.create_group(attribute)
            keys = quantity.get_all_keys_in_store()
            masked = ~quantity.get_valid_particles_mask()
            links.append([subgroup, quantity.as_set()._original_set()])
            subgroup.create_dataset("keys", data=keys)
            subgroup.create_dataset("masked", data=masked)
            subgroup.attrs["units"] = "object"
        else:
            dtype = numpy.asanyarray(quantity).dtype
            if dtype.kind == "U":
                # HDF5 cannot hold numpy unicode directly; store bytes
                dataset = attributes_group.create_dataset(attribute, data=numpy.char.encode(quantity, "UTF-32BE"))
                dataset.attrs["units"] = "UNICODE"
            else:
                dataset = attributes_group.create_dataset(attribute, data=quantity)
                dataset.attrs["units"] = "none"
def read_parameters(self, inputfile, add_missing_parameters=False):
    """
    Read a parameter file: known keys are interpreted with their
    declared dtype (and the default's unit when the default is a
    quantity); unknown keys are either reported or, when
    add_missing_parameters is True, added with the file comment as
    description.
    """
    self._file=inputfile
    # NOTE(review): this local is never used — looks like leftover code
    _nml_params = f90nml.read(inputfile)
    rawvals, comments = self._read_file(inputfile)
    for key, rawval in rawvals.items():
        if key in self._parameters:
            group_name=self._parameters[key]["group_name"]
            name=self._parameters[key]["name"]
            dtype=self._parameters[key]["dtype"]
            val=self.interpret_value( rawval, dtype=dtype)
            if is_quantity(self._parameters[key]["default"]):
                # re-attach the default's unit to the parsed value
                self._parameters[key]["value"]=new_quantity(val, to_quantity(self._parameters[key]["default"]).unit)
            else:
                self._parameters[key]["value"]=val
        else:
            if not add_missing_parameters:
                print("'{0}' of group '{1}' not in the parameters list".format(*key))
            else:
                # record the raw value as a new parameter, using the
                # file comment (if any) as its description
                value=rawval
                description=comments.get(key, "unknown parameter read from {0}".format(inputfile))
                self._parameters[key]=dict(
                    group_name=key[1],
                    name=key[0],
                    short_name=key[0],
                    default=value,
                    value=value,
                    short=key[0],
                    ptype=self._ptypes[0],
                    dtype=dtype_str[type(value)],
                    description=description
                )
def set_values(self, indices, values):
    """Store *values* (a quantity) into the dataset, converted to this attribute's unit."""
    try:
        self.dataset[indices] = values.value_in(self.unit)
    except AttributeError:
        # a plain value has no .value_in, so an AttributeError here
        # usually means the caller passed a non-quantity
        if is_quantity(values):
            raise
        raise ValueError("Tried to put a non quantity value in a quantity")
def set_values(self, indices, values):
    """Assign *values* into the stored quantity at *indices*."""
    try:
        self.quantity[indices] = values
    except AttributeError:
        # the unitful storage rejects plain values with AttributeError;
        # translate that into a clearer error for the caller
        if is_quantity(values):
            raise
        raise ValueError("Tried to set a non quantity value for an attribute ({0}) with a unit".format(self.name))
def _get_attribute_types(self):
    """
    Return per-attribute units (in simple form) or None for unitless
    attributes; returns a list of Nones when no set is attached.
    """
    quantities = self.quantities
    if self.quantities:
        # NOTE(review): under Python 3 map() returns a lazy iterator,
        # not a list — confirm callers can handle that
        return map(
            lambda x: x.unit.to_simple_form() if is_quantity(x) else None,
            quantities)
    elif self.set is None:
        return [None] * len(self.attribute_names)
    # implicitly returns None when a set is attached but there are no
    # quantities — presumably handled by the caller
def check_arguments(self, arguments):
    """Validate that every argument is a scalar quantity (bare units are allowed)."""
    for position, argument in enumerate(arguments):
        if is_unit(argument):
            # a bare unit is acceptable as-is
            continue
        if not is_quantity(argument):
            raise NotAQuantityException(position, argument)
        if not argument.is_scalar():
            raise NotAScalarException(position, argument)
def test1(self):
    """Load a simple whitespace-separated table and check that values
    come back as plain (unitless) numbers."""
    contents = "#header\n1 2 3\n4 5 6\n \n7 8 9\n "
    data_file = StringIO(contents)
    instance = text.TableFormattedText("test.txt", data_file)
    instance.attribute_names = ['a', 'b', 'c']
    particles = instance.load()
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual
    self.assertEqual(len(particles), 3)
    self.assertEqual(particles[0].a, 1)
    self.assertFalse(quantities.is_quantity(particles[0].a))
def strip(self, *args, **kwargs):
    """
    Strip units from plot arguments, remembering each argument's unit
    and an axis-label string; state is reset, and quantities are
    re-expressed in previously used units when plotting on the same
    axes.
    """
    if self.current_plot is native_plot.gca():
        # same axes as before: convert new quantities into the units
        # already used on this plot so the axes stay consistent
        args = [arg.as_quantity_in(unit) if quantities.is_quantity(arg) else arg
            for arg, unit in map(lambda *x : tuple(x), args, self.arg_units)]
    self.clear()
    self.current_plot = native_plot.gca()
    for arg in args:
        if quantities.is_quantity(arg):
            # let the printing strategy pick a convenient display unit
            arg = console.current_printing_strategy.convert_quantity(arg)
            self.stripped_args.append(arg.value_in(arg.unit))
            self.arg_units.append(arg.unit)
            self.unitnames_of_args.append("["+str(arg.unit)+"]")
        else:
            self.stripped_args.append(arg)
            self.arg_units.append(None)
            self.unitnames_of_args.append("")
    return self.stripped_args
def stop(self, rtime=0.0):
    """Finalize tracking: kill all eddy tracks and flush final netCDF output."""
    if is_quantity(rtime):
        # accept an AMUSE time quantity; internally we work in days
        rtime = rtime.value_in(units.day)
    # terminate all tracks first, then write both final outputs
    for eddy in (self.A_eddy, self.C_eddy):
        eddy.kill_all_tracks()
    for eddy in (self.A_eddy, self.C_eddy):
        eddy.write2netcdf(rtime, stopper=1)
    print('Outputs saved to', self.SAVE_DIR)
def set_values(self, indices, values):
    """Assign *values* into the stored quantity at *indices* (None means all)."""
    try:
        target = slice(None) if indices is None else indices
        self.quantity[target] = values
    except AttributeError:
        # the unitful storage rejects plain values with AttributeError;
        # translate that into a clearer error for the caller
        if is_quantity(values):
            raise
        raise ValueError(
            "Tried to set a non quantity value for an attribute ({0}) with a unit"
            .format(self.name))
def new_attribute(cls, name, shape, input, group):
    """
    Create HDF5-backed storage for a new attribute: a quantity dataset,
    a linked-set subgroup (keys + mask), or a plain unitless dataset.
    """
    if is_quantity(input):
        dataset = group.create_dataset(name, shape=shape, dtype=input.number.dtype)
        return HDF5VectorQuantityAttribute(name, dataset, input.unit)
    elif hasattr(input, 'as_set'):
        subgroup = group.create_group(name)
        group.create_dataset('keys', shape=shape, dtype=input.key.dtype)
        # BUGFIX: numpy.bool was removed from numpy; the builtin bool
        # maps to the same dtype
        group.create_dataset('masked', shape=shape, dtype=bool)
        return HDF5LinkedAttribute(name, subgroup)
    else:
        dtype = numpy.asanyarray(input).dtype
        dataset = group.create_dataset(name, shape=shape, dtype=dtype)
        return HDF5UnitlessAttribute(name, dataset)
def set_values_in_store(self, indices, attributes, list_of_values_to_set):
    """
    Store the given values for the given attributes at *indices*,
    allocating in-memory storage the first time an attribute is set.
    """
    for attribute, values_to_set in zip(attributes, list_of_values_to_set):
        if attribute in self.mapping_from_attribute_to_quantities:
            storage = self.mapping_from_attribute_to_quantities[attribute]
        else:
            # first use of this attribute: allocate storage sized to the
            # full particle set
            storage = InMemoryAttribute.new_attribute(
                attribute, len(self.particle_keys), values_to_set)
            self.mapping_from_attribute_to_quantities[attribute] = storage
        try:
            storage.set_values(indices, values_to_set)
        except ValueError as ex:
            # hack to set values between
            # quantities with units.none
            # and values stored without units;
            # to be removed when units.none is completely gone
            if is_quantity(values_to_set) and not storage.has_units():
                # quantity given, unitless storage: only a dimensionless
                # quantity (empty unit base) may be unwrapped
                if not values_to_set.unit.base:
                    storage.set_values(indices,
                                       values_to_set.value_in(units.none))
                else:
                    raise AttributeError(
                        "exception in setting attribute '{0}', error was '{1}'"
                        .format(attribute, ex))
            elif not is_quantity(values_to_set) and storage.has_units():
                # plain values given, unitful storage: only allowed when
                # the stored unit is dimensionless
                if not storage.quantity.unit.base:
                    storage.set_values(
                        indices, units.none.new_quantity(values_to_set))
                else:
                    raise AttributeError(
                        "exception in setting attribute '{0}', error was '{1}'"
                        .format(attribute, ex))
            else:
                raise AttributeError(
                    "exception in setting attribute '{0}', error was '{1}'"
                    .format(attribute, ex))
def store_collection_attributes(self, container, group, extra_attributes):
    """
    Store collection-level attributes plus *extra_attributes* as HDF5
    group attrs, with a companion "<name>_unit" attr carrying the unit
    reference string (or "none"). Python 2 code (dict.iteritems).
    """
    collection_attributes = container.collection_attributes.__getstate__()
    arguments_and_attributes = {}
    arguments_and_attributes.update(collection_attributes)
    arguments_and_attributes.update(extra_attributes)
    for name, quantity in arguments_and_attributes.iteritems():
        if quantity is None:
            continue
        if is_quantity(quantity):
            # store the raw number and the unit string side by side
            group.attrs[name] = quantity.value_in(quantity.unit)
            group.attrs[name + "_unit"] = quantity.unit.reference_string()
        else:
            group.attrs[name] = quantity
            group.attrs[name + "_unit"] = "none"
def get_default_values(self):
    """
    Return an optparse Values of processed defaults: string defaults
    are run through the option's checker, and bare numeric defaults of
    options with a declared unit are promoted to quantities.
    Python 2 code (basestring).
    """
    if not self.process_default_values:
        # Old, pre-Optik 1.5 behaviour.
        return Values(self.defaults)
    defaults = self.defaults.copy()
    for option in self._get_all_options():
        default = defaults.get(option.dest)
        if isinstance(default, basestring):
            # string defaults go through the option's own checker so
            # they end up with the declared type
            opt_str = option.get_opt_string()
            defaults[option.dest] = option.check_value(opt_str, default)
        elif not option.unit is None and not quantities.is_quantity(default):
            # attach the declared unit to bare numeric defaults
            defaults[option.dest] = quantities.new_quantity(default, option.unit)
    return optparse.Values(defaults)
def set_values_in_store_async(self, indices, attributes, quantities):
    """Asynchronously write attribute values; returns the chained async request."""
    array_of_indices = self._to_arrays_of_indices(indices)
    # setters expect flat 1-D arrays for both values and indices
    flat_values = [
        q.reshape(-1) if is_quantity(q) else numpy.asanyarray(q).reshape(-1)
        for q in quantities
    ]
    flat_indices = [ix.reshape(-1) for ix in array_of_indices]
    chosen_setters = list(self.select_setters_for(attributes))

    def next_request(index, setters):
        # issue one setter per step; None signals the sequence is done
        if index >= len(setters):
            return None
        return setters[index].set_attribute_values_async(
            self, attributes, flat_values, *flat_indices)

    return ASyncRequestSequence(next_request, args=(chosen_setters,))
def new_attribute(cls, name, shape, values_to_set):
    """
    Create the in-memory storage matching the type of *values_to_set*:
    vector quantity, linked (object) attribute, string attribute, or
    plain unitless attribute.
    """
    if is_quantity(values_to_set):
        if values_to_set.is_vector():
            shape = cls._determine_shape(shape, values_to_set)
        return InMemoryVectorQuantityAttribute(name, shape, values_to_set.unit)
    elif values_to_set is None:
        # no values yet: assume a linked attribute
        return InMemoryLinkedAttribute(name, shape)
    else:
        array = numpy.asanyarray(values_to_set)
        dtype = array.dtype
        shape = cls._determine_shape(shape, array)
        if dtype.kind == 'S' or dtype.kind == 'U':
            return InMemoryStringAttribute(name, shape, dtype)
        elif dtype == object:
            # BUGFIX: the numpy.object alias was removed from numpy;
            # comparing against builtin object is the supported spelling
            return InMemoryLinkedAttribute(name, shape)
        else:
            return InMemoryUnitlessAttribute(name, shape, dtype)
def store_values(self, container, group, links = []):
    """
    Write every attribute of *container* into an HDF5 "attributes"
    group: quantities with a unit string, LinkedArrays via
    store_linked_array, unicode data as UTF-32BE bytes, plain values
    as-is.
    """
    # NOTE(review): mutable default `links=[]` is shared across calls —
    # confirm callers always pass their own list
    attributes_group = group.create_group("attributes")
    all_values = container.get_values_in_store(Ellipsis, container.get_attribute_names_defined_in_store())
    for attribute, quantity in zip(container.get_attribute_names_defined_in_store(), all_values):
        if is_quantity(quantity):
            value = quantity.value_in(quantity.unit)
            dataset = attributes_group.create_dataset(attribute, data=value)
            # record the unit so the loader can reattach it
            dataset.attrs["units"] = quantity.unit.to_simple_form().reference_string()
        elif isinstance(quantity, LinkedArray):
            self.store_linked_array(attribute, attributes_group, quantity, group, links)
        else:
            dtype = numpy.asanyarray(quantity).dtype
            if dtype.kind == 'U':
                # HDF5 cannot hold numpy unicode directly; store bytes
                dataset = attributes_group.create_dataset(attribute, data=numpy.char.encode(quantity, 'UTF-32BE'))
                dataset.attrs["units"] = "UNICODE"
            else:
                dataset = attributes_group.create_dataset(attribute, data=quantity)
                dataset.attrs["units"] = "none"
def new_attribute(cls, name, shape, input, group):
    """
    Create HDF5-backed storage for a new attribute: a quantity dataset,
    a linked-set subgroup (keys + mask), unicode strings (stored as
    bytes), or plain unitless values.
    """
    if is_quantity(input):
        dataset = group.create_dataset(name, shape=shape, dtype=input.number.dtype)
        return HDF5VectorQuantityAttribute(name, dataset, input.unit)
    elif hasattr(input, "as_set"):
        subgroup = group.create_group(name)
        group.create_dataset("keys", shape=shape, dtype=input.key.dtype)
        # BUGFIX: numpy.bool was removed from numpy; builtin bool maps
        # to the same dtype
        group.create_dataset("masked", shape=shape, dtype=bool)
        return HDF5LinkedAttribute(name, subgroup)
    else:
        dtype = numpy.asanyarray(input).dtype
        if dtype.kind == "U":
            # BUGFIX: "S" + int raised TypeError (itemsize must be str);
            # also create the dataset with the byte dtype — HDF5 cannot
            # store numpy 'U' data, strings are written UTF-32 encoded
            new_dtype = numpy.dtype("S" + str(dtype.itemsize * 4))
            dataset = group.create_dataset(name, shape=shape, dtype=new_dtype)
            return HDF5UnicodeAttribute(name, dataset)
        else:
            dataset = group.create_dataset(name, shape=shape, dtype=dtype)
            return HDF5UnitlessAttribute(name, dataset)
def get_value(self, parameter, object):
    """
    Collect the values of the named sub-parameters into one vector,
    returned as a quantity when any element carries a unit.
    """
    all_parameters = parameter.parameter_set
    values = []
    common_unit = None
    for sub_name in self.names_of_parameters:
        sub_parameter = all_parameters.get_parameter(sub_name)
        element = sub_parameter.get_value()
        # lock onto the first unit encountered; subsequent elements
        # (including this one) are converted into it
        if common_unit is None and is_quantity(element):
            common_unit = element.unit
        if common_unit is None:
            values.append(element)
        else:
            values.append(element.value_in(common_unit))
    if common_unit is None:
        return numpy.asarray(values)
    return common_unit.new_quantity(values)
def store_values(self, container, group, links = []):
    """
    Write every attribute of *container* into an HDF5 "attributes"
    group: quantities with a unit string, particle references as a
    keys/masked subgroup (resolved later via *links*), plain values
    as-is.
    """
    # NOTE(review): mutable default `links=[]` is shared across calls —
    # confirm callers always pass their own list
    attributes_group = group.create_group("attributes")
    all_values = container.get_values_in_store(None, container.get_attribute_names_defined_in_store())
    for attribute, quantity in zip(container.get_attribute_names_defined_in_store(), all_values):
        if is_quantity(quantity):
            value = quantity.value_in(quantity.unit)
            dataset = attributes_group.create_dataset(attribute, data=value)
            # record the unit so the loader can reattach it
            dataset.attrs["units"] = quantity.unit.to_simple_form().reference_string()
        elif hasattr(quantity, 'as_set'):
            # linked particles: store keys + validity mask, defer the
            # actual set resolution via the links list
            quantity = quantity.as_set()
            subgroup = attributes_group.create_group(attribute)
            keys = quantity.get_all_keys_in_store()
            masked = ~quantity.get_valid_particles_mask()
            links.append([subgroup, quantity.as_set()._original_set()])
            subgroup.create_dataset('keys', data=keys)
            subgroup.create_dataset('masked', data=masked)
            subgroup.attrs["units"] = "object"
        else:
            dataset = attributes_group.create_dataset(attribute, data=quantity)
            dataset.attrs["units"] = "none"
def _get_attribute_types(self):
    """
    Return per-attribute units (in simple form) or None for unitless
    attributes; returns a list of Nones when no set is attached.
    """
    quantities = self.quantities
    if self.quantities:
        # NOTE(review): under Python 3 map() returns a lazy iterator,
        # not a list — confirm callers can handle that
        return map(lambda x : x.unit.to_simple_form() if is_quantity(x) else None, quantities)
    elif self.set is None:
        return [None] * len(self.attribute_names)
    # implicitly returns None when a set is attached but there are no
    # quantities — presumably handled by the caller
def _check_comparable(self, first, second):
    """Disallow comparing a quantity with a non-quantity (none-unit excepted)."""
    if is_quantity(first) is not is_quantity(second):
        # One exception: quantity with none_unit CAN be compared with non-quantity:
        if not to_quantity(first).unit == to_quantity(second).unit:
            if isinstance(first, Quantity):
                quantity_arg, plain_arg = first, second
            else:
                quantity_arg, plain_arg = second, first
            raise TypeError("Cannot compare quantity: {0} with non-quantity: {1}.".format(quantity_arg, plain_arg))