Example #1
    def assertUnitsEqual(self, first, second, msg=None):
        r"""Assertion for equality in case of objects with units."""
        if units.has_units(first) and units.has_units(second):
            first = units.convert_to(first, units.get_units(second))
        self.assertEqual(units.get_data(first),
                         units.get_data(second),
                         msg=msg)
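A hypothetical use of this assertion, assuming the method is mixed into a unittest.TestCase and that yggdrasil's units module is available: the first value is converted to the second value's units before the unitless data are compared.

    # Inside a test method of the class defining assertUnitsEqual:
    x = units.add_units(1.0, 'km')
    y = units.add_units(1000.0, 'm')
    self.assertUnitsEqual(x, y)  # passes: 1 km -> 1000 m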
Example #2
# Imports assumed from the surrounding model script; the YggInterface
# path matches the one imported explicitly in Example #16.
from yggdrasil import units
from yggdrasil.languages.Python.YggInterface import YggInput, YggOutput


def run(tmin, tmax, tstep):
    mass = 2000.0
    mesh_in = YggInput('mesh')
    mesh_out = YggOutput('mesh')
    mass = units.add_units(mass, 'g')
    tmin = units.add_units(tmin, 'hrs')
    tmax = units.add_units(tmax, 'hrs')
    tstep = units.add_units(tstep, 'hrs')
    t = tmin

    while t <= tmax:

        # Receive mesh as input
        flag, mesh = mesh_in.recv()
        if not flag:
            raise Exception("Error receiving mesh from input")
        mesh = mesh.as_trimesh()

        # Grow mesh
        # (pretend this is a biologically complex calculation)
        scale = units.get_data(mass * t / units.add_units(1.5e6, 'g*hrs'))
        mesh.vertices[:, 2] += mesh.vertices[:, 2] * scale

        # Send mesh to output for this timestep
        flag = mesh_out.send(mesh)
        if not flag:
            raise Exception("Error sending mesh to output")

        # Advance time step
        t += tstep

    return mesh
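A minimal driver sketch for the model above, assuming it is saved as a standalone script and that the 'mesh' input/output channels are declared in the model's YAML (times in hours):

if __name__ == '__main__':
    import sys
    # Hypothetical command-line wiring, e.g.: python plant.py 0 48 1
    run(*[float(x) for x in sys.argv[1:4]])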
Example #3
    def __call__(self, x):
        r"""Call filter on the provided message.

        Args:
            x (object): Message object to filter.

        Returns:
            bool: True if the message will pass through the filter, False otherwise.

        """
        try:
            out = self.evaluate_filter(x)
        except ValueError:
            if backwards.PY2 and units.has_units(x):
                out = self.evaluate_filter(units.get_data(x))
            else:
                raise  # pragma: debug
        if isinstance(out, np.ndarray):
            assert (out.dtype == bool)
            out = bool(out.all())
        elif isinstance(out, np.bool_):
            out = bool(out)
        try:
            assert (isinstance(out, bool))
        except AssertionError:  # pragma: debug
            print(out, type(out))
            raise
        return out
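The numpy branch collapses an elementwise filter result into a single verdict for the whole message. The reduction in isolation:

import numpy as np

out = np.array([True, True, False])  # elementwise result from evaluate_filter
assert out.dtype == bool
out = bool(out.all())                # one verdict per message: False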
Example #4
    @classmethod
    def validate(cls, obj, raise_errors=False):
        r"""Validate an object to check if it could be of this type.

        Args:
            obj (object): Object to validate.
            raise_errors (bool, optional): If True, errors will be raised when
                the object fails to be validated. Defaults to False.

        Returns:
            bool: True if the object could be of this type, False otherwise.

        """
        if isinstance(obj, np.ndarray) and (obj.ndim == 0):
            obj = obj.reshape((1, ))[0]
        if super(ScalarMetaschemaType,
                 cls).validate(units.get_data(obj), raise_errors=raise_errors):
            dtype = ScalarMetaschemaProperties.data2dtype(obj)
            if cls.is_fixed and ('subtype' in cls.fixed_properties):
                type_list = [
                    ScalarMetaschemaProperties._valid_types[
                        cls.fixed_properties['subtype']]
                ]
            else:
                type_list = ScalarMetaschemaProperties._valid_numpy_types
            if dtype.name.startswith(tuple(type_list)):
                return True
            else:
                if raise_errors:
                    raise ValueError(
                        ("dtype %s dosn't corresponding with any " +
                         "of the accepted types: %s") %
                        (str(dtype), str(type_list)))
        return False
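The final check accepts an object when its numpy dtype name starts with one of the accepted type-name prefixes. A standalone sketch with an assumed prefix list:

import numpy as np

type_list = ['float', 'int', 'uint']   # assumed subset of the valid types
dtype = np.array([1.0]).dtype          # float64
assert dtype.name.startswith(tuple(type_list))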
Example #5
def format_message(args, fmt_str):
    r"""Format a message from a list of arguments and a format string.

    Args:
        args (list, obj): List of arguments or single argument that should be
            formatted using the format string.
        fmt_str (str, bytes): Format string that should be used to format the
            arguments.

    Returns:
        str, bytes: Formatted message. The type will match the type of the
            fmt_str.

    Raises:
        RuntimeError: If the number of arguments does not match the number of
            format fields.

    """
    if not isinstance(args, (tuple, list)):
        args = (args, )
    nfmt = len(extract_formats(fmt_str))
    args_ = []
    if len(args) < nfmt:
        raise RuntimeError(("Number of arguments (%d) does not match "
                            "number of format fields (%d).") % (len(args), nfmt))
    for a0 in args:
        a = units.get_data(a0)
        if np.iscomplexobj(a):
            args_ += [a.real, a.imag]
        else:
            args_.append(a)
    out = backwards.format_bytes(fmt_str, tuple(args_))
    return out
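A hypothetical call, assuming extract_formats finds three fields in the format string; because fmt_str is bytes, the result is bytes as well. A complex argument would be expanded into its real and imaginary parts before formatting.

msg = format_message([b'one', 1, 1.0], b'%s\t%d\t%f\n')
# -> b'one\t1\t1.000000\n'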
Example #6
# Imports assumed from the surrounding model script; _dir is assumed to
# be the directory containing the script.
import os

from yggdrasil import units
from yggdrasil.languages.Python.YggInterface import (
    YggRpcClient, YggOutput, YggTimesync)

_dir = os.path.dirname(os.path.abspath(__file__))


def run(mesh, tmin, tmax, tstep):
    mass = 2000.0
    light_rpc = YggRpcClient('light_plant')
    light_out = YggOutput('light')
    plant2root = YggTimesync('plant2root')
    mass = units.add_units(mass, 'g')
    tmin = units.add_units(tmin, 'hrs')
    tmax = units.add_units(tmax, 'hrs')
    tstep = units.add_units(tstep, 'hrs')
    t = tmin
    i = 0

    while t <= tmax:

        # Perform send portion of call to synchronize data for time step
        # with the root model
        root_state = {'mass': mass}
        flag = plant2root.send(t, root_state)
        if not flag:
            raise Exception("Error performing time-step synchronization "
                            "with root model.")

        # Get light data by calling light model
        flag, light = light_rpc.call(mesh.vertices[:, 2], t)
        if not flag:
            raise Exception("Error calling light model")

        # Perform receive portion of call to synchronize data for time step
        # with the root model
        flag, root_state = plant2root.recv()
        if not flag:
            raise Exception("Error performing recv for time-step "
                            "synchronization with root model.")
        mass = root_state['mass']

        # Grow mesh
        # (pretend this is a biologically complex calculation)
        scale = units.get_data(mass * light / units.add_units(1.0, 'kg'))
        mesh.vertices[:, 2] += mesh.vertices[:, 2] * scale

        # Save mesh for this timestep
        filename_mesh = os.path.join(_dir, f'../output/mesh_{i:03d}.obj')
        with open(filename_mesh, 'w') as fd:
            mesh.export(fd, 'obj')

        # Send light to output
        flag = light_out.send(light)
        if not flag:
            raise Exception("Error sending light to output")

        # Advance time step
        t += tstep
        i += 1

    return mesh
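The send and recv halves of the timesync exchange are split so that the light-model RPC call can run while the server merges states. A minimal sketch of that pattern, using the channel objects from the function above:

def sync_step(plant2root, light_rpc, t, mass, heights):
    # 1. Publish this model's state for time t.
    if not plant2root.send(t, {'mass': mass}):
        raise Exception("Error sending state")
    # 2. Do independent work while the server merges states.
    flag, light = light_rpc.call(heights, t)
    if not flag:
        raise Exception("Error calling light model")
    # 3. Collect the merged state for the same time step.
    flag, state = plant2root.recv()
    if not flag:
        raise Exception("Error receiving state")
    return state['mass'], light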
Example #7
    @classmethod
    def get_testing_options(cls,
                            not_as_frames=False,
                            no_names=False,
                            **kwargs):
        r"""Method to return a dictionary of testing options for this class.

        Args:
            not_as_frames (bool, optional): If True, the returned example
                includes data that is not in a pandas data frame. Defaults to
                False.
            no_names (bool, optional): If True, an example is returned where the
                names are not provided to the deserializer. Defaults to False.

        Returns:
            dict: Dictionary of variables to use for testing.

        """
        field_names = None
        out = super(PandasSerialize,
                    cls).get_testing_options(array_columns=True, **kwargs)
        for k in ['as_array']:  # , 'format_str']:
            if k in out['kwargs']:
                del out['kwargs'][k]
        out['extra_kwargs'] = {}
        out['empty'] = pandas.DataFrame()
        if no_names:
            for x in [out['kwargs'], out]:
                if 'field_names' in x:
                    del x['field_names']
            header_line = b'f0\tf1\tf2\n'
        else:
            if 'field_names' in out['kwargs']:
                field_names = [
                    backwards.as_str(x) for x in out['kwargs']['field_names']
                ]
            header_line = b'name\tcount\tsize\n'
        out['contents'] = (header_line + b'one\t1\t1.0\n' + b'two\t2\t2.0\n' +
                           b'three\t3\t3.0\n' + b'one\t1\t1.0\n' +
                           b'two\t2\t2.0\n' + b'three\t3\t3.0\n')
        out['concatenate'] = [([], [])]
        if not_as_frames:
            # Strip units since pandas data frames are not serialized with units
            out['objects'] = [[units.get_data(ix) for ix in x]
                              for x in out['objects']]
        else:
            out['objects'] = [
                serialize.list2pandas(x, names=field_names)
                for x in out['objects']
            ]
        out['kwargs'].update(out['typedef'])
        return out
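The frame-building step relies on serialize.list2pandas to turn column lists into a data frame. A rough, illustrative stand-in for that conversion:

import pandas as pd

# Approximates serialize.list2pandas(x, names=['name', 'count', 'size']).
columns = [[b'one', b'two', b'three'], [1, 2, 3], [1.0, 2.0, 3.0]]
frame = pd.DataFrame(dict(zip(['name', 'count', 'size'], columns)))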
Example #8
    @classmethod
    def to_array(cls, obj):
        r"""Get np.array representation of the data.

        Args:
            obj (object): Object to get array for.

        Returns:
            np.ndarray: Array representation of object.

        """
        obj_nounits = units.get_data(obj)
        if isinstance(obj_nounits, np.ndarray):
            arr = obj_nounits
        else:
            dtype = ScalarMetaschemaProperties.data2dtype(obj_nounits)
            arr = np.array([obj_nounits], dtype=dtype)
        return arr
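Hypothetical calls, assuming ScalarMetaschemaType is imported from yggdrasil's metaschema datatypes: arrays pass through once units are stripped, while scalars are wrapped in a one-element array with an inferred dtype.

import numpy as np
from yggdrasil import units

ScalarMetaschemaType.to_array(units.add_units(np.arange(3.0), 'cm'))
# -> array([0., 1., 2.])
ScalarMetaschemaType.to_array(units.add_units(5.0, 'cm'))
# -> array([5.])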
Example #9
def data2dtype(data):
    r"""Get numpy data type for an object.

    Args:
        data (object): Python object.

    Returns:
        np.dtype: Numpy data type.

    """
    data_nounits = units.get_data(data)
    if isinstance(data_nounits, np.ndarray):
        dtype = data_nounits.dtype
    elif isinstance(data_nounits, (list, dict, tuple)):
        raise MetaschemaTypeError
    elif isinstance(data_nounits, np.dtype(_valid_types['bytes']).type):
        dtype = np.array(data_nounits).dtype
    else:
        dtype = np.array([data_nounits]).dtype
    return dtype
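A few illustrative resolutions, assuming yggdrasil's units module is available:

import numpy as np
from yggdrasil import units

data2dtype(np.zeros(3, 'float32'))      # -> dtype('float32')
data2dtype(units.add_units(1.5, 'cm'))  # -> float dtype of the unitless scalar
# data2dtype([1, 2, 3])                 # would raise MetaschemaTypeError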
Example #10
    @classmethod
    def validate(cls, obj, raise_errors=False):
        r"""Validate an object to check if it could be of this type.

        Args:
            obj (object): Object to validate.
            raise_errors (bool, optional): If True, errors will be raised when
                the object fails to be validated. Defaults to False.

        Returns:
            bool: True if the object could be of this type, False otherwise.

        """
        if not super(OneDArrayMetaschemaType, cls).validate(
                obj, raise_errors=raise_errors):
            return False
        if units.get_data(obj).ndim != 1:
            if raise_errors:
                raise ValueError("The array has more than one dimension.")
            return False
        return True
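The dimensionality test is the only check added on top of the parent validator. In isolation:

import numpy as np

assert np.arange(4).ndim == 1      # accepted: one-dimensional
assert np.ones((2, 2)).ndim != 1   # rejected: two-dimensional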
Example #11
# Imports assumed from the surrounding model script; _dir is assumed to
# be the directory containing the script.
import os

from yggdrasil import units
from yggdrasil.languages.Python.YggInterface import YggRpcClient, YggOutput

_dir = os.path.dirname(os.path.abspath(__file__))


def run(mesh, tmin, tmax, tstep):
    mass = 2000.0
    light_rpc = YggRpcClient('light_plant')
    light_out = YggOutput('light')
    mass = units.add_units(mass, 'g')
    tmin = units.add_units(tmin, 'hrs')
    tmax = units.add_units(tmax, 'hrs')
    tstep = units.add_units(tstep, 'hrs')
    t = tmin
    i = 0

    while t <= tmax:

        # Get light data by calling light model
        flag, light = light_rpc.call(mesh.vertices[:, 2], t)
        if not flag:
            raise Exception("Error calling light model")

        # Grow mesh
        # (pretend this is a biologically complex calculation)
        scale = units.get_data(mass * light / units.add_units(1.0, 'kg'))
        mesh.vertices[:, 2] += mesh.vertices[:, 2] * scale

        # Save mesh for this timestep
        filename_mesh = os.path.join(_dir, f'../output/mesh_{i:03d}.obj')
        with open(filename_mesh, 'w') as fd:
            mesh.export(fd, 'obj')

        # Send light to output
        flag = light_out.send(light)
        if not flag:
            raise Exception("Error sending light to output")

        # Advance time step
        t += tstep
        i += 1

    return mesh
Example #12
# Assumed context: numpy, pandas, and yggdrasil's units module; `ut` is
# assumed to be a unittest.TestCase instance used only for its assertions.
import unittest

import numpy as np
import pandas as pd

from yggdrasil import units

ut = unittest.TestCase('__init__')


def assert_equal(x, y):
    r"""Assert that two messages are equivalent.

    Args:
        x (object): Python object to compare against y.
        y (object): Python object to compare against x.

    Raises:
        AssertionError: If the two messages are not equivalent.

    """
    if isinstance(y, (list, tuple)):
        assert(isinstance(x, (list, tuple)))
        ut.assertEqual(len(x), len(y))
        for ix, iy in zip(x, y):
            assert_equal(ix, iy)
    elif isinstance(y, dict):
        assert(issubclass(y.__class__, dict))
        # ut.assertEqual(type(x), type(y))
        ut.assertEqual(len(x), len(y))
        for k, iy in y.items():
            ix = x[k]
            assert_equal(ix, iy)
    elif isinstance(y, (np.ndarray, pd.DataFrame)):
        if units.has_units(y) and (not units.has_units(x)):  # pragma: debug
            y = units.get_data(y)
        elif (not units.has_units(y)) and units.has_units(x):
            x = units.get_data(x)
        np.testing.assert_array_equal(x, y)
    else:
        if units.has_units(y) and units.has_units(x):
            x = units.convert_to(x, units.get_units(y))
            assert_equal(units.get_data(x), units.get_data(y))
        else:
            if units.has_units(y) and (not units.has_units(x)):  # pragma: debug
                y = units.get_data(y)
            elif (not units.has_units(y)) and units.has_units(x):
                x = units.get_data(x)
            ut.assertEqual(x, y)
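A hypothetical use of the scalar branch: x is converted to y's units before the unitless data are compared, so equivalent quantities in different units pass.

from yggdrasil import units

assert_equal(units.add_units(1.0, 'm'), units.add_units(100.0, 'cm'))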
Example #13
    def test_get_data(self):
        r"""Test get_data."""
        for v in self._vars_nounits:
            self.assert_equal(units.get_data(v), v)
        for vno, v in zip(self._vars_nounits, self._vars_units):
            self.assert_equal(units.get_data(v), np.array(vno))
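The test pins down the contract of get_data: values without units pass through unchanged, and values with units come back as the equivalent unitless numpy data. A sketch with an assumed value:

import numpy as np
from yggdrasil import units

v = units.add_units(2.5, 'kg')
assert units.has_units(v)
assert units.get_data(v) == np.array(2.5)  # units stripped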
Example #14
    @classmethod
    def get_testing_options(cls,
                            not_as_frames=False,
                            no_names=False,
                            no_header=False,
                            **kwargs):
        r"""Method to return a dictionary of testing options for this class.

        Args:
            not_as_frames (bool, optional): If True, the returned example
                includes data that is not in a pandas data frame. Defaults to
                False.
            no_names (bool, optional): If True, an example is returned where the
                names are not provided to the deserializer. Defaults to False.
            no_header (bool, optional): If True, an example is returned
                where a header is not included. Defaults to False.

        Returns:
            dict: Dictionary of variables to use for testing.

        """
        kwargs.setdefault('table_string_type', 'string')
        field_names = None
        out = super(PandasSerialize,
                    cls).get_testing_options(array_columns=True, **kwargs)
        if kwargs['table_string_type'] == 'bytes':
            out['kwargs']['str_as_bytes'] = True
        for k in ['as_array']:  # , 'format_str']:
            if k in out['kwargs']:
                del out['kwargs'][k]
        out['extra_kwargs'] = {}
        if no_names:
            for x in [out['kwargs'], out]:
                if 'field_names' in x:
                    del x['field_names']
            header_line = b'f0\tf1\tf2\n'
        elif no_header:
            for x in [out['kwargs'], out]:
                if 'field_names' in x:
                    del x['field_names']
            header_line = b''
            out['kwargs']['no_header'] = True
            for x in out['typedef']['items']:
                x.pop('title', None)
        else:
            if 'field_names' in out['kwargs']:
                field_names = out['kwargs']['field_names']
            header_line = b'name\tcount\tsize\n'
        out['contents'] = (header_line + b'one\t1\t1.0\n' + b'two\t2\t2.0\n' +
                           b'three\t3\t3.0\n' + b'one\t1\t1.0\n' +
                           b'two\t2\t2.0\n' + b'three\t3\t3.0\n')
        out['concatenate'] = [([], [])]
        if not_as_frames:
            # Strip units since pandas data frames are not serialized with units
            out['objects'] = [[units.get_data(ix) for ix in x]
                              for x in out['objects']]
        elif no_header:
            out['objects'] = [serialize.list2pandas(x) for x in out['objects']]
            out['dtype'] = np.dtype(','.join(
                [x[1] for x in out['dtype'].descr]))
        else:
            if field_names is None:
                field_names = ['f0', 'f1', 'f2']
            out['objects'] = [
                serialize.list2pandas(x, names=field_names)
                for x in out['objects']
            ]
        out['kwargs']['datatype'] = copy.deepcopy(out['typedef'])
        if no_names:
            for x in out['kwargs']['datatype']['items']:
                x.pop('title', None)
        out['empty'] = pandas.DataFrame(np.zeros(0, out['dtype']))
        return out
Example #15
def fcond(x):
    return units.get_data(x) != 3
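Used as a message filter on a connection, this drops any message whose unitless value equals 3, whether or not units are attached. Hypothetical calls:

from yggdrasil import units

assert fcond(4)                              # passes the filter
assert not fcond(units.add_units(3, 'cm'))   # filtered out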
Example #16
 @classmethod
 def model_wrapper(cls,
                   name,
                   synonyms,
                   interpolation,
                   aggregation,
                   additional_variables,
                   env=None):
     r"""Model wrapper."""
     from yggdrasil.languages.Python.YggInterface import YggTimesyncServer
     if env is not None:
         os.environ.update(env)
     rpc = YggTimesyncServer(name)
     threads = {}
     times = []
     tables = {}
     table_units = {'base': {}}
     table_lock = multitasking.RLock()
     default_agg = _default_agg
     if not isinstance(aggregation, dict):
         default_agg = aggregation
         aggregation = {}
     while True:
         # Check for errors on response threads
         for v in threads.values():
             if v.check_flag_attr('error_flag'):  # pragma: debug
                 for v in threads.values():
                     if v.is_alive():
                         v.terminate()
                 raise Exception("Error on response thread.")
         # Receive values from client models
         flag, values, request_id = rpc.recv_from(timeout=1.0)
         if not flag:
             print("timesync server: End of input.")
             break
         if len(values) == 0:
             rpc.sleep()
             continue
         t, state = values[:]
         t_pd = units.convert_to_pandas_timedelta(t)
         client_model = rpc.ocomm[request_id].client_model
         # Remove variables marked as external so they are not merged
         external_variables = additional_variables.get(client_model, [])
         for k in external_variables:
             state.pop(k, None)
         internal_variables = list(state.keys())
         # Update record
         with table_lock:
             if client_model not in tables:
                 tables[client_model] = pd.DataFrame({'time': times})
             # Update units & aggregation methods
             if client_model not in table_units:
                 # NOTE: this assumes that units will not change
                 # between timesteps for a single model. Is there a
                 # case where this might not be true?
                 table_units[client_model] = {
                     k: units.get_units(v)
                     for k, v in state.items()
                 }
                 table_units[client_model]['time'] = units.get_units(t)
                 alt_vars = []
                 for k, v in synonyms.get(client_model, {}).items():
                     alt_vars += v['alt']
                     if v['alt2base'] is not None:
                         table_units[client_model][k] = units.get_units(
                             v['alt2base'](*[state[a] for a in v['alt']]))
                     else:
                         table_units[client_model][k] = table_units[
                             client_model][v['alt'][0]]
                 for k, v in table_units[client_model].items():
                     table_units['base'].setdefault(k, v)
                 for k in list(set(state.keys()) - set(alt_vars)):
                     aggregation.setdefault(k, default_agg)
             # Update the state
             if t_pd not in times:
                 times.append(t_pd)
             for model, table in tables.items():
                 new_data = {'time': [t_pd]}
                 if model == client_model:
                     new_data.update(
                         {k: [units.get_data(v)]
                          for k, v in state.items()})
                 new_data = pd.DataFrame(new_data)
                 idx = table['time'].isin([t_pd])
                  if not idx.any():
                      # DataFrame.append was removed in pandas 2.0;
                      # pd.concat is the equivalent operation.
                      table = pd.concat([table, new_data], sort=False)
                  elif model == client_model:
                      table = table.drop(table.index[idx])
                      table = pd.concat([table, new_data], sort=False)
                 tables[model] = table.sort_values('time')
         # Assign thread to handle checking when data is filled in
         threads[request_id] = multitasking.YggTaskLoop(
             target=cls.response_loop,
             args=(client_model, request_id, rpc, t_pd, internal_variables,
                   external_variables, tables, table_units, table_lock,
                   synonyms, interpolation, aggregation))
         threads[request_id].start()
     # Cleanup threads (only called if there is an error since the
     # loop will only be broken when all of the clients have signed
     # off, implying that all requests have been responded to).
     for v in threads.values():
         if v.is_alive():  # pragma: debug
             v.wait(0.5)
     for v in threads.values():
         if v.is_alive():  # pragma: debug
             v.terminate()
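For orientation, the bookkeeping the loop maintains looks roughly like this (model name and values hypothetical): one units map per client model plus a shared 'base' fallback, and one time-indexed state table per model.

import pandas as pd

table_units = {
    'base':  {'time': 'hrs', 'mass': 'g'},
    'plant': {'time': 'hrs', 'mass': 'g'},
}
tables = {
    'plant': pd.DataFrame({'time': pd.to_timedelta([0, 1], unit='h'),
                           'mass': [2000.0, 2010.0]}),
}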
Example #17
    def test_get_data(self, vars_units, vars_nounits, nested_approx):
        r"""Test get_data."""
        for v in vars_nounits:
            assert units.get_data(v) == nested_approx(v)
        for vno, v in zip(vars_nounits, vars_units):
            assert units.get_data(v) == nested_approx(vno)