Example #1
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the HDF5 file using the iteration
        coordinate for the Group name.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        group = self.out.require_group(format_iteration_coordinate(coord))
        group.attrs['timestamp'] = metadata['timestamp']

        # (option flag, HDF5 group label, vector, filter code) per vector kind.
        specs = (('record_params', 'Parameters', params, 'p'),
                 ('record_unknowns', 'Unknowns', unknowns, 'u'),
                 ('record_resids', 'Residuals', resids, 'r'))

        # Create all sub-groups and filter all vectors first, then write the
        # datasets, mirroring the original two-phase order of operations.
        pairings = [(group.create_group(label),
                     self._filter_vector(vec, code, coord))
                    for opt, label, vec, code in specs
                    if self.options[opt]]

        for grp, data in pairings:
            for key, val in iteritems(data):
                if not isinstance(val, (np.ndarray, Number)):
                    # TODO: Handling non-numeric data
                    msg = "HDF5 Recorder does not support data of type '{0}'".format(
                        type(val))
                    raise NotImplementedError(msg)
                # TODO: Compression/Checksum?
                grp.create_dataset(key, data=val)
Example #2
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Verify each expected iteration was written to the dump file.

        Walks ``self.filename`` line by line, checking the timestamp window,
        the formatted iteration coordinate, and each recorded variable group
        against *expected*.
        """
        # Use a context manager so the file is closed even when an assertion
        # fails part-way through (the original leaked the handle in that case).
        with open(self.filename) as sout:
            for coord, (t0, t1), params, unknowns, resids in expected:
                icoord = format_iteration_coordinate(coord)

                line = sout.readline()
                self.assertTrue('Timestamp: ' in line)
                # Slice off the "Timestamp: " prefix and trailing newline.
                timestamp = float(line[11:-1])
                self.assertTrue(t0 <= timestamp and timestamp <= t1)

                line = sout.readline()
                self.assertEqual("Iteration Coordinate: {0}\n".format(icoord), line)

                # Only groups that were actually recorded appear in the file.
                groupings = []

                if params is not None:
                    groupings.append(("Params:\n", params))

                if unknowns is not None:
                    groupings.append(("Unknowns:\n", unknowns))

                if resids is not None:
                    groupings.append(("Resids:\n", resids))

                for header, exp in groupings:
                    line = sout.readline()
                    self.assertEqual(header, line)
                    for key, val in exp:
                        line = sout.readline()
                        self.assertEqual("  {0}: {1}\n".format(key, str(val)), line)
Example #3
0
    def assertDatasetEquals(self, expected, tolerance):
        """Validate the recorder's in-memory text output against *expected*.

        Each *expected* entry is ``(coord, expect)`` where ``expect`` holds
        the params, unknowns and resids tuples in that order.
        """
        # The recorder writes to an in-memory stream; rewind before reading.
        sout = self.recorder.out
        sout.seek(0)

        for coord, expect in expected:
            icoord = format_iteration_coordinate(coord)

            # First record line: "Timestamp: <float>"; slice off the prefix
            # and the trailing newline before converting.
            line = sout.readline()
            self.assertTrue('Timestamp: ' in line)
            timestamp = float(line[11:-1])
            self.assertTrue(self.t0 <= timestamp and timestamp <= self.t1)

            line = sout.readline()
            self.assertEqual("Iteration Coordinate: {0}\n".format(icoord), line)

            # All three sections are always present in this format, in
            # this fixed order.
            groupings = (
                ("Params:\n", expect[0]),
                ("Unknowns:\n", expect[1]),
                ("Resids:\n", expect[2])
            )

            # Each section is a header line followed by one "  key: value"
            # line per variable, consumed strictly in order.
            for header, exp in groupings:
                line = sout.readline()
                self.assertEqual(header, line)
                for key, val in exp:
                    line = sout.readline()
                    self.assertEqual("  {0}: {1}\n".format(key, str(val)), line)
Example #4
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Verify expected iteration data appears in the CSV output.

        Reads ``self.io`` back with ``csv.DictReader`` into per-column lists
        (each list prefixed with its header name), then compares every
        expected param/unknown tuple with the corresponding saved column.
        """

        saved_results = {}

        self.io.seek(0)
        csv_reader = csv.DictReader(self.io)
        for row in csv_reader:
            for header_name in row:
                if header_name not in saved_results:
                    saved_results[header_name] = [header_name]

                try:
                    value = float(row[header_name])
                except (TypeError, ValueError):
                    # TypeError covers missing (None) cells; ValueError covers
                    # non-numeric text, which the original failed to catch and
                    # would have crashed on.  Keep the raw value either way.
                    value = row[header_name]

                saved_results[header_name].append(value)

        for coord, (t0, t1), params, unknowns, resids in expected:
            icoord = format_iteration_coordinate(coord)

            if params is not None:
                for param in params:
                    self.assertTrue(param[0] in saved_results)
                    self.assertTrue(tuple(saved_results[param[0]]) == param, tuple(saved_results[param[0]]) + param)

            if unknowns is not None:
                for unknown in unknowns:
                    self.assertTrue(unknown[0] in saved_results)
                    self.assertEqual(tuple(saved_results[unknown[0]]), unknown)
                    self.assertTrue(tuple(saved_results[unknown[0]]) == unknown, tuple(saved_results[unknown[0]]) + unknown)
Example #5
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Verify expected iteration data appears in the CSV output.

        Same as the plain CSV check, but skips the 'Derivatives' column,
        which is recorded separately.
        """

        saved_results = {}

        self.io.seek(0)
        csv_reader = csv.DictReader(self.io)
        for row in csv_reader:
            for header_name in row:

                # Derivatives are checked elsewhere; ignore that column here.
                if header_name == 'Derivatives':
                    continue

                if header_name not in saved_results:
                    saved_results[header_name] = [header_name]

                try:
                    value = float(row[header_name])
                except (TypeError, ValueError):
                    # TypeError covers missing (None) cells; ValueError covers
                    # non-numeric text, which the original failed to catch and
                    # would have crashed on.  Keep the raw value either way.
                    value = row[header_name]

                saved_results[header_name].append(value)

        for coord, (t0, t1), params, unknowns, resids in expected:
            icoord = format_iteration_coordinate(coord)

            if params is not None:
                for param in params:
                    self.assertTrue(param[0] in saved_results)
                    self.assertTrue(tuple(saved_results[param[0]]) == param, tuple(saved_results[param[0]]) + param)

            if unknowns is not None:
                for unknown in unknowns:
                    self.assertTrue(unknown[0] in saved_results)
                    self.assertEqual(tuple(saved_results[unknown[0]]), unknown)
                    self.assertTrue(tuple(saved_results[unknown[0]]) == unknown, tuple(saved_results[unknown[0]]) + unknown)
    def record(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the shelve file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']
        stamp = metadata['timestamp']
        params, unknowns, resids = self._filter_vectors(params, unknowns,
                                                        resids, coord)

        # Ordered so the shelve entry lists vectors before the timestamp.
        record = OrderedDict()
        record['Parameters'] = params
        record['Unknowns'] = unknowns
        record['Residuals'] = resids
        record['timestamp'] = stamp

        self.out[format_iteration_coordinate(coord)] = record
Example #7
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata["coord"]
        # Derivatives live under a "/derivs" sub-key of the iteration group.
        key = "%s/derivs" % format_iteration_coordinate(coord)

        self.out[key] = OrderedDict([("timestamp", metadata["timestamp"]),
                                     ("success", metadata["success"]),
                                     ("msg", metadata["msg"]),
                                     ("Derivatives", derivs)])
Example #8
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']
        record['success'] = metadata['success']
        record['msg'] = metadata['msg']
        record['Derivatives'] = derivs

        # Derivatives live under a "/derivs" sub-key of the iteration group.
        group_name = '{0}/derivs'.format(
            format_iteration_coordinate(metadata['coord']))

        self.out[group_name] = record
    def record(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the shelve file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        group_name = format_iteration_coordinate(metadata['coord'])

        # Track insertion order so readers can replay records in sequence.
        self.order.append(group_name)

        self.out[group_name] = OrderedDict((('Parameters', params),
                                            ('Unknowns', unknowns),
                                            ('Residuals', resids)))
        self.out['order'] = self.order
Example #10
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """Record the given run data in memory.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata["coord"]
        params, unknowns, resids = self._filter_vectors(params, unknowns,
                                                        resids, coord)

        data = {"timestamp": metadata["timestamp"],
                "iter": format_iteration_coordinate(coord)}

        # Snapshot each enabled vector as a plain dict.
        if self.options["record_params"]:
            data["params"] = dict(iteritems(params))

        if self.options["record_unknowns"]:
            data["unknowns"] = dict(iteritems(unknowns))

        if self.options["record_resids"]:
            data["resids"] = dict(iteritems(resids))

        self.iters.append(data)
Example #11
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        out = self.out

        out.write("Timestamp: {0!r}\n".format(metadata['timestamp']))
        out.write("Iteration Coordinate: {0:s}/Derivs\n".format(
            format_iteration_coordinate(metadata['coord'])))

        self._write_success_info(metadata)

        out.write("Derivatives:\n")
        if isinstance(derivs, dict):
            # Nested dict of {output: {input: value}}; sort both levels so
            # the dump is deterministic.
            for okey, sub in sorted(iteritems(derivs)):
                for ikey, deriv in sorted(iteritems(sub)):
                    out.write("  {0} wrt {1}: {2}\n".format(
                        okey, ikey, str(deriv)))
        else:
            out.write("  {0} \n".format(str(derivs)))

        # Flush once per iteration to allow external scripts to process the data.
        out.flush()
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        iteration_coordinate = metadata['coord']
        timestamp = metadata['timestamp']

        # Bind the stream's write method locally; all output for this
        # iteration goes through it.
        write = self.out.write
        fmat = "Timestamp: {0!r}\n"
        write(fmat.format(timestamp))

        # Derivatives get their own "/Derivs" suffix on the coordinate line.
        fmat = "Iteration Coordinate: {0:s}/Derivs\n"
        write(fmat.format(format_iteration_coordinate(iteration_coordinate)))

        write("Derivatives:\n")
        if isinstance(derivs, dict):
            # Nested dict of {output: {input: value}}; sort both levels so
            # the dump is deterministic.
            for okey, sub in sorted(iteritems(derivs)):
                for ikey, deriv in sorted(iteritems(sub)):
                    write("  {0} wrt {1}: {2}\n".format(okey, ikey, str(deriv)))
        else:
            # Non-dict derivatives (e.g. a flat array) are dumped verbatim.
            write("  {0} \n".format(str(derivs)))

        # Flush once per iteration to allow external scripts to process the data.
        self.out.flush()
Example #13
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Verify each expected iteration was written to the dump file.

        Walks ``self.filename`` line by line, checking the timestamp window,
        the formatted iteration coordinate, and each recorded variable group
        against *expected*.
        """
        # Use a context manager so the file is closed even when an assertion
        # fails part-way through (the original leaked the handle in that case).
        with open(self.filename) as sout:
            for coord, (t0, t1), params, unknowns, resids in expected:
                icoord = format_iteration_coordinate(coord)

                line = sout.readline()
                self.assertTrue('Timestamp: ' in line)
                # Slice off the "Timestamp: " prefix and trailing newline.
                timestamp = float(line[11:-1])
                self.assertTrue(t0 <= timestamp and timestamp <= t1)

                line = sout.readline()
                self.assertEqual("Iteration Coordinate: {0}\n".format(icoord), line)

                # Only groups that were actually recorded appear in the file.
                groupings = []

                if params is not None:
                    groupings.append(("Params:\n", params))

                if unknowns is not None:
                    groupings.append(("Unknowns:\n", unknowns))

                if resids is not None:
                    groupings.append(("Resids:\n", resids))

                for header, exp in groupings:
                    line = sout.readline()
                    self.assertEqual(header, line)
                    for key, val in exp:
                        line = sout.readline()
                        self.assertEqual("  {0}: {1}\n".format(key, str(val)), line)
Example #14
0
    def record(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the shelve file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        key = format_iteration_coordinate(metadata['coord'])

        # Track insertion order so readers can replay records in sequence.
        self.order.append(key)

        snapshot = OrderedDict()
        snapshot['Parameters'] = params
        snapshot['Unknowns'] = unknowns
        snapshot['Residuals'] = resids

        self.out[key] = snapshot
        self.out['order'] = self.order
Example #15
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """Dump the given run data in a "pretty" form.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        if not self.out:  # if self.out is None, just do nothing
            return

        coord = metadata['coord']

        write = self.out.write
        write("Timestamp: {0!r}\n".format(metadata['timestamp']))
        write("Iteration Coordinate: {0:s}\n".format(
            format_iteration_coordinate(coord)))

        self._write_success_info(metadata)

        # (option flag, section header, vector, filter code) per vector kind.
        sections = (('record_params', "Params:\n", params, 'p'),
                    ('record_unknowns', "Unknowns:\n", unknowns, 'u'),
                    ('record_resids', "Resids:\n", resids, 'r'))

        for opt, header, vec, code in sections:
            if not self.options[opt]:
                continue
            write(header)
            # Sort by variable name so the dump is deterministic.
            for name, val in sorted(iteritems(
                    self._filter_vector(vec, code, coord))):
                write("  {0}: {1}\n".format(name, str(val)))

        # Flush once per iteration to allow external scripts to process the data.
        self.out.flush()
Example #16
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the HDF5 file using the iteration
        coordinate for the Group name.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        group = self.out.require_group(format_iteration_coordinate(coord))
        # Record run metadata as HDF5 attributes on the iteration group.
        for attr in ('timestamp', 'success', 'msg'):
            group.attrs[attr] = metadata[attr]

        # (option flag, HDF5 group label, vector, filter code) per vector kind.
        specs = (('record_params', 'Parameters', params, 'p'),
                 ('record_unknowns', 'Unknowns', unknowns, 'u'),
                 ('record_resids', 'Residuals', resids, 'r'))

        # Create all sub-groups and filter all vectors first, then write the
        # datasets, mirroring the original two-phase order of operations.
        pairings = [(group.create_group(label),
                     self._filter_vector(vec, code, coord))
                    for opt, label, vec, code in specs
                    if self.options[opt]]

        for grp, data in pairings:
            for key, val in iteritems(data):
                if isinstance(val, (np.ndarray, Number)):
                    # TODO: Compression/Checksum?
                    grp.create_dataset(key, data=val)
                else:
                    # TODO: Handling non-numeric data
                    msg = "HDF5 Recorder does not support data of type '{0}'".format(type(val))
                    raise NotImplementedError(msg)
    def test_format_coord(self):
        """A local-meta coordinate formats as '/rank/name/i-j-k'."""
        meta = create_local_meta(self.meta, 'Sub')
        update_local_meta(meta, (1, 2, 3))

        self.assertEqual(format_iteration_coordinate(meta['coord']),
                         '/0/Sub/1-2-3')
Example #18
0
def _assertIterationDataRecorded(test, db, expected, tolerance):
    """Assert that *db* holds exactly the iterations described by *expected*.

    For each expected record, checks the timestamp window, success/msg
    metadata, and every recorded variable group key-by-key within
    *tolerance*.
    """
    sentinel = object()
    keys = list(iterkeys(db))

    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    test.assertEqual(len(keys), len(expected))

    for coord, (t0, t1), params, unknowns, resids in expected:
        iter_coord = format_iteration_coordinate(coord)
        actual_group = db[iter_coord]
        groupings = {
            "timestamp":  None,
            "Parameters": params,
            "Unknowns":   unknowns,
            "Residuals":  resids,
            'success': 1,
            'msg': '',
        }

        # Vector groups the caller did not record must be absent.
        if params is None:
            test.assertIsNone(actual_group.get('Parameters', None))
            del groupings['Parameters']

        if unknowns is None:
            test.assertIsNone(actual_group.get('Unknowns', None))
            del groupings['Unknowns']

        if resids is None:
            test.assertIsNone(actual_group.get('Residuals', None))
            del groupings['Residuals']

        test.assertEqual(set(actual_group.keys()), set(groupings.keys()))

        timestamp = actual_group['timestamp']
        test.assertTrue(t0 <= timestamp and timestamp <= t1)
        del groupings["timestamp"]

        for label, values in iteritems(groupings):
            actual = actual_group.get(label, None)
            if isinstance(values, int):
                # 'success' is stored as a plain int flag.
                test.assertEqual(actual, values)
            else:
                # If len(actual) == len(expected) and actual <= expected, then
                # actual == expected.
                test.assertEqual(len(actual), len(values))

                for key, val in values:
                    found_val = actual.get(key, sentinel)

                    if found_val is sentinel:
                        test.fail("Did not find key '{0}'".format(key))

                    # assert_rel_error raises TypeError on non-numeric data;
                    # fall back to exact equality in that case.
                    try:
                        assert_rel_error(test, found_val, val, tolerance)
                    except TypeError:
                        test.assertEqual(val, found_val)
Example #19
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """Dump the given run data in a "pretty" form.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        out = self.out
        if not out:  # if self.out is None, just do nothing
            return

        coord = metadata['coord']

        out.write("Timestamp: {0!r}\n".format(metadata['timestamp']))
        out.write("Iteration Coordinate: {0:s}\n".format(
            format_iteration_coordinate(coord)))

        self._write_success_info(metadata)

        # One section per enabled vector: header line, then sorted
        # "  name: value" lines.
        for opt, header, vec, code in (
                ('record_params', "Params:\n", params, 'p'),
                ('record_unknowns', "Unknowns:\n", unknowns, 'u'),
                ('record_resids', "Resids:\n", resids, 'r')):
            if self.options[opt]:
                out.write(header)
                filtered = self._filter_vector(vec, code, coord)
                for name, val in sorted(iteritems(filtered)):
                    out.write("  {0}: {1}\n".format(name, str(val)))

        # Flush once per iteration to allow external scripts to process the data.
        out.flush()
Example #20
0
    def assertDatasetEquals(self, expected, tolerance):
        """Check each recorded group in the SQLite file against *expected*.

        Deletes every record after verifying it, then asserts the table is
        empty — proving nothing unexpected was written.
        """
        # Close the file to ensure it is written to disk.
        self.recorder.close()

        sentinel = object()

        db = SqliteDict(self.filename, self.tablename)

        for coord, expect in expected:
            iter_coord = format_iteration_coordinate(coord)

            groupings = (
                ("Parameters", expect[0]),
                ("Unknowns", expect[1]),
                ("Residuals", expect[2])
            )

            # Fetch the record stored under 'iter_coord'.
            actual_group = db[iter_coord]
            timestamp = actual_group['timestamp']

            self.assertTrue(self.t0 <= timestamp and timestamp <= self.t1)

            for label, values in groupings:
                actual = actual_group[label]
                # If len(actual) == len(expected) and actual <= expected, then
                # actual == expected.
                self.assertEqual(len(actual), len(values))
                for key, val in values:
                    found_val = actual.get(key, sentinel)
                    if found_val is sentinel:
                        self.fail("Did not find key '{0}'".format(key))

                    # Unwrap pass-by-object values before comparing.
                    if isinstance(found_val, _ByObjWrapper):
                        found_val = found_val.val

                    # assert_rel_error raises TypeError on non-numeric data;
                    # fall back to exact equality.  (The exception is no
                    # longer bound to an unused name.)
                    try:
                        assert_rel_error(self, found_val, val, tolerance)
                    except TypeError:
                        self.assertEqual(found_val, val)

            # Delete the record with the key 'iter_coord'.
            del db[iter_coord]

        # Having deleted all found values, the file should now be empty.
        self.assertEqual(len(db), 0)

        db.close()
Example #21
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Check each expected iteration against the HDF5 file contents."""
        sentinel = object()
        hdf = h5py.File(self.filename, 'r')

        for coord, (t0, t1), params, unknowns, resids in expected:
            actual_group = hdf[format_iteration_coordinate(coord)]

            self.assertEqual(actual_group.attrs['success'], 1)
            self.assertEqual(actual_group.attrs['msg'], '')

            # Keep only the groups that were recorded; unrecorded ones must
            # be absent from the file.
            groupings = {}
            for label, values in (("Parameters", params),
                                  ("Unknowns", unknowns),
                                  ("Residuals", resids)):
                if values is None:
                    self.assertIsNone(actual_group.get(label, None))
                else:
                    groupings[label] = values

            timestamp = actual_group.attrs['timestamp']
            self.assertTrue(t0 <= timestamp and timestamp <= t1)

            for label, values in iteritems(groupings):
                actual = actual_group[label]

                # Equal lengths plus every expected key present implies the
                # two groups are equal.
                self.assertEqual(len(actual), len(values))

                for key, val in values:
                    found_val = actual.get(key, sentinel)

                    if found_val is sentinel:
                        self.fail("Did not find key '{0}'.".format(key))

                    assert_rel_error(self, found_val.value, val, tolerance)

        hdf.close()
Example #22
0
    def assertDatasetEquals(self, expected, tolerance):
        """Check recorded groups against *expected* and verify write order."""
        # Close the file to ensure it is written to disk.
        self.recorder.close()

        sentinel = object()
        db = SqliteDict(self.filename, self.tablename)

        # 'order' lists the group names in the order they were written.
        order = db['order']
        del db['order']

        for coord, expect in expected:
            iter_coord = format_iteration_coordinate(coord)

            self.assertEqual(order.pop(0), iter_coord)

            # Fetch the record stored under 'iter_coord'.
            actual_group = db[iter_coord]

            for label, values in (("Parameters", expect[0]),
                                  ("Unknowns", expect[1]),
                                  ("Residuals", expect[2])):
                actual = actual_group[label]
                # Equal lengths plus every expected key present implies the
                # two groups are equal.
                self.assertEqual(len(actual), len(values))
                for key, val in values:
                    found_val = actual.get(key, sentinel)
                    if found_val is sentinel:
                        self.fail("Did not find key '{0}'".format(key))
                    assert_rel_error(self, found_val, val, tolerance)

            # Remove the record so the final emptiness checks are meaningful.
            del db[iter_coord]

        # Having deleted all found values, the file should now be empty.
        self.assertEqual(len(db), 0)

        # As should the ordering.
        self.assertEqual(len(order), 0)

        db.close()
Example #23
0
    def assertIterationDataRecorded(self, expected, tolerance):
        """Check each expected iteration against the HDF5 file contents."""
        sentinel = object()
        hdf = h5py.File(self.filename, 'r')

        for coord, (t0, t1), params, unknowns, resids in expected:
            icoord = format_iteration_coordinate(coord)
            actual_group = hdf[icoord]

            groupings = {
                "Parameters": params,
                "Unknowns": unknowns,
                "Residuals": resids,
            }

            self.assertEqual(actual_group.attrs['success'], 1)
            self.assertEqual(actual_group.attrs['msg'], '')

            # Vector groups the caller did not record must be absent from
            # the file; drop them from the comparison set.
            if params is None:
                self.assertIsNone(actual_group.get('Parameters', None))
                del groupings['Parameters']

            if unknowns is None:
                self.assertIsNone(actual_group.get('Unknowns', None))
                del groupings['Unknowns']

            if resids is None:
                self.assertIsNone(actual_group.get('Residuals', None))
                del groupings['Residuals']

            timestamp = actual_group.attrs['timestamp']
            self.assertTrue(t0 <= timestamp and timestamp <= t1)

            for label, values in iteritems(groupings):
                actual = actual_group[label]

                # If len(actual) == len(expected) and actual <= expected, then
                # actual == expected.
                self.assertEqual(len(actual), len(values))

                for key, val in values:
                    found_val = actual.get(key, sentinel)

                    if found_val is sentinel:
                        self.fail("Did not find key '{0}'.".format(key))

                    # Datasets are compared via their stored array (.value).
                    assert_rel_error(self, found_val.value, val, tolerance)

        hdf.close()
    def record_iteration(self, params, unknowns, resids, metadata):
        """Record the given run data in memory.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        entry = {
            'timestamp': metadata['timestamp'],
            'iter': format_iteration_coordinate(coord),
            'success': metadata['success'],
            'msg': metadata['msg'],
        }

        # Copy each requested, filtered vector into a plain dict so later
        # iterations cannot mutate what was stored here.
        for opt, key, vec, prefix in (('record_params', 'params', params, 'p'),
                                      ('record_unknowns', 'unknowns', unknowns, 'u'),
                                      ('record_resids', 'resids', resids, 'r')):
            if self.options[opt]:
                entry[key] = dict(
                    iteritems(self._filter_vector(vec, prefix, coord)))

        self.iters.append(entry)
Exemple #25
0
    def assertDatasetEquals(self, expected, tolerance):
        """Check the shelve file against ``expected``, consuming every record."""
        # Close the file to ensure it is written to disk.
        self.recorder.out.close()
        self.recorder.out = None

        missing = object()

        db = shelve.open(self.filename)

        order = db['order']
        del db['order']

        for coord, expect in expected:
            iter_coord = format_iteration_coordinate(coord)

            # Records must appear in the order they were written.
            self.assertEqual(order.pop(0), iter_coord)

            group = db[iter_coord]
            stamp = group['timestamp']
            self.assertTrue(self.t0 <= stamp and stamp <= self.t1)

            for label, values in zip(("Parameters", "Unknowns", "Residuals"),
                                     expect):
                actual = group[label]
                # Same length plus per-key agreement implies equality.
                self.assertEqual(len(actual), len(values))
                for key, val in values:
                    stored = actual.get(key, missing)
                    if stored is missing:
                        self.fail("Did not find key '{0}'".format(key))
                    assert_rel_error(self, stored, val, tolerance)
            del db[iter_coord]

        # Having deleted all found values, the file should now be empty.
        self.assertEqual(len(db), 0)

        # As should the ordering.
        self.assertEqual(len(order), 0)

        db.close()
Exemple #26
0
    def record(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the HDF5 file using the iteration
        coordinate for the Group name.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        Raises
        ------
        NotImplementedError
            If any value is not an ndarray or a Number.
        """

        group_name = format_iteration_coordinate(metadata['coord'])
        group = self.out.require_group(group_name)

        # Create all three sub-groups up front, then populate them.
        sections = tuple((group.create_group(label), vec)
                         for label, vec in (("Parameters", params),
                                            ("Unknowns", unknowns),
                                            ("Residuals", resids)))

        for grp, vec in sections:
            for key, val in vec.items():
                if isinstance(val, (np.ndarray, Number)):
                    grp.create_dataset(key, data=val)
                    # TODO: Compression/Checksum?
                else:
                    # TODO: Handling non-numeric data
                    msg = "HDF5 Recorder does not support data of type '{0}'".format(type(val))
                    raise NotImplementedError(msg)
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the sqlite file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']
        record['success'] = metadata['success']
        record['msg'] = metadata['msg']

        # Store only the vectors the user asked to record, after filtering.
        for opt, label, vec, prefix in (
                ('record_params', 'Parameters', params, 'p'),
                ('record_unknowns', 'Unknowns', unknowns, 'u'),
                ('record_resids', 'Residuals', resids, 'r')):
            if self.options[opt]:
                record[label] = self._filter_vector(vec, prefix, coord)

        self.out[format_iteration_coordinate(coord)] = record
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the sqlite file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        Raises
        ------
        RuntimeError
            If called on any MPI rank other than 0.
        """

        # Only the root process may write; every other rank must not touch
        # the file.
        if MPI and MPI.COMM_WORLD.rank > 0:
            raise RuntimeError("not rank 0")

        coord = metadata['coord']

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']
        record['success'] = metadata['success']
        record['msg'] = metadata['msg']

        if self.options['record_params']:
            record['Parameters'] = self._filter_vector(params, 'p', coord)

        if self.options['record_unknowns']:
            record['Unknowns'] = self._filter_vector(unknowns, 'u', coord)

        if self.options['record_resids']:
            record['Residuals'] = self._filter_vector(resids, 'r', coord)

        self.out_iterations[format_iteration_coordinate(coord)] = record
Exemple #29
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """Record the given run data in memory.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        snapshot = {
            'timestamp': metadata['timestamp'],
            'iter': format_iteration_coordinate(coord),
            'success': metadata['success'],
            'msg': metadata['msg'],
        }

        # Materialize each requested filtered vector as a plain dict copy.
        if self.options['record_params']:
            snapshot['params'] = dict(
                iteritems(self._filter_vector(params, 'p', coord)))

        if self.options['record_unknowns']:
            snapshot['unknowns'] = dict(
                iteritems(self._filter_vector(unknowns, 'u', coord)))

        if self.options['record_resids']:
            snapshot['resids'] = dict(
                iteritems(self._filter_vector(resids, 'r', coord)))

        self.iters.append(snapshot)
Exemple #30
0
    def assertDatasetEquals(self, expected, tolerance):
        """Compare the still-open HDF5 recorder output against ``expected``."""
        missing = object()

        for coord, expect in expected:
            group = self.recorder.out[format_iteration_coordinate(coord)]

            recorded = (group['Parameters'],
                        group['Unknowns'],
                        group['Residuals'])

            # If len(actual) == len(expected) and actual <= expected, then
            # actual == expected.
            for actual, exp in zip(recorded, expect):
                self.assertEqual(len(actual), len(exp))
                for key, val in exp:
                    stored = actual.get(key, missing)
                    if stored is missing:
                        self.fail("Did not find key '{0}'.".format(key))
                    assert_rel_error(self, stored[()], val, tolerance)
Exemple #31
0
    def assertDatasetEquals(self, expected, tolerance):
        """Validate the shelve file contents, deleting records as they match."""
        # Close the file to ensure it is written to disk.
        self.recorder.out.close()
        self.recorder.out = None

        not_found = object()

        store = shelve.open(self.filename)

        order = store['order']
        del store['order']

        for coord, expect in expected:
            iter_coord = format_iteration_coordinate(coord)

            self.assertEqual(order.pop(0), iter_coord)

            group = store[iter_coord]

            for idx, label in enumerate(("Parameters", "Unknowns",
                                         "Residuals")):
                actual = group[label]
                values = expect[idx]
                # If len(actual) == len(expected) and actual <= expected, then
                # actual == expected.
                self.assertEqual(len(actual), len(values))
                for key, val in values:
                    stored = actual.get(key, not_found)
                    if stored is not_found:
                        self.fail("Did not find key '{0}'".format(key))
                    assert_rel_error(self, stored, val, tolerance)
            del store[iter_coord]

        # Having deleted all found values, the file should now be empty.
        self.assertEqual(len(store), 0)

        # As should the ordering.
        self.assertEqual(len(order), 0)

        store.close()
Exemple #32
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : ndarray or OrderedDict
            Derivatives: either a flat array or a nested mapping of
            {output: {input: value}}, depending on the optimizer.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        Raises
        ------
        ValueError
            If ``derivs`` is neither an ndarray nor an OrderedDict.
        """

        iteration_coordinate = metadata['coord']
        group_name = format_iteration_coordinate(iteration_coordinate)

        # get the group for the iteration
        iteration_group = self.out[group_name]

        # Create a group under that called 'Derivs'
        deriv_group = iteration_group.require_group('Derivs')

        # Then add timestamp, success, msg as attributes
        deriv_group.attrs['timestamp'] = metadata['timestamp']
        deriv_group.attrs['success'] = metadata['success']
        deriv_group.attrs['msg'] = metadata['msg']

        #  And actual deriv data. derivs could either be a dict or an ndarray
        #    depending on the optimizer
        if isinstance(derivs, np.ndarray):
            deriv_group.create_dataset('Derivatives', data=derivs)
        elif isinstance(derivs, OrderedDict):
            deriv_data_group = deriv_group.require_group('Derivatives')
            # One sub-group per output key, one dataset per input key.
            # (Removed a dead `k = derivs.keys()` assignment whose value was
            # immediately shadowed by the loop variable below.)
            for k, v in derivs.items():
                g = deriv_data_group.require_group(k)
                for k2, v2 in v.items():
                    g.create_dataset(k2, data=v2)
        else:
            raise ValueError(
                "Currently can only record derivatives that are ndarrays or OrderedDicts"
            )
Exemple #33
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the sqlite file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata["coord"]

        record = OrderedDict()
        record["timestamp"] = metadata["timestamp"]
        record["success"] = metadata["success"]
        record["msg"] = metadata["msg"]

        # Only the vectors the user asked to record are stored.
        if self.options["record_params"]:
            record["Parameters"] = self._filter_vector(params, "p", coord)

        if self.options["record_unknowns"]:
            record["Unknowns"] = self._filter_vector(unknowns, "u", coord)

        if self.options["record_resids"]:
            record["Residuals"] = self._filter_vector(resids, "r", coord)

        self.out[format_iteration_coordinate(coord)] = record
Exemple #34
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : ndarray or OrderedDict
            Derivatives: either a flat array or a nested mapping of
            {output: {input: value}}, depending on the optimizer.

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).

        Raises
        ------
        ValueError
            If ``derivs`` is neither an ndarray nor an OrderedDict.
        """

        iteration_coordinate = metadata['coord']
        group_name = format_iteration_coordinate(iteration_coordinate)

        # get the group for the iteration
        iteration_group = self.out[group_name]

        # Create a group under that called 'deriv'
        deriv_group = iteration_group.require_group('deriv')

        # Then add timestamp, success, msg as attributes
        deriv_group.attrs['timestamp'] = metadata['timestamp']
        deriv_group.attrs['success'] = metadata['success']
        deriv_group.attrs['msg'] = metadata['msg']

        #  And actual deriv data. derivs could either be a dict or an ndarray
        #    depending on the optimizer
        if isinstance(derivs, np.ndarray):
            deriv_group.create_dataset('Derivatives', data=derivs)
        elif isinstance(derivs, OrderedDict):
            deriv_data_group = deriv_group.require_group('Derivatives')
            # One sub-group per output key, one dataset per input key.
            # (Removed a dead `k = derivs.keys()` assignment whose value was
            # immediately shadowed by the loop variable below.)
            for k, v in derivs.items():
                g = deriv_data_group.require_group(k)
                for k2, v2 in v.items():
                    g.create_dataset(k2, data=v2)
        else:
            raise ValueError("Currently can only record derivatives that are ndarrays or OrderedDicts")
Exemple #35
0
    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the sqlite file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)

        unknowns : dict
            Dictionary containing outputs and states. (u)

        resids : dict
            Dictionary containing residuals. (r)

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']
        # Apply include/exclude filtering before anything is stored.
        params, unknowns, resids = self._filter_vectors(params, unknowns,
                                                        resids, coord)

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']

        if self.options['record_params']:
            record['Parameters'] = params

        if self.options['record_unknowns']:
            record['Unknowns'] = unknowns

        if self.options['record_resids']:
            record['Residuals'] = resids

        self.out[format_iteration_coordinate(coord)] = record
Exemple #36
0
    def assertDatasetEquals(self, expected, tolerance):
        """Replay the recorder's text stream and compare it line by line."""
        stream = self.recorder.out
        stream.seek(0)

        for coord, expect in expected:
            icoord = format_iteration_coordinate(coord)

            self.assertEqual("Iteration Coordinate: {0}\n".format(icoord),
                             stream.readline())

            for header, section in zip(("Params:\n", "Unknowns:\n",
                                        "Resids:\n"), expect):
                self.assertEqual(header, stream.readline())
                for key, val in section:
                    self.assertEqual("  {0}: {1}\n".format(key, str(val)),
                                     stream.readline())
Exemple #37
0
    def record(self, params, unknowns, resids, metadata):
        """Dump the given run data in a "pretty" form.

        Args
        ----
        params : `VecWrapper`
            `VecWrapper` containing parameters. (p)

        unknowns : `VecWrapper`
            `VecWrapper` containing outputs and states. (u)

        resids : `VecWrapper`
            `VecWrapper` containing residuals. (r)

        metadata : dict
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        if not self.out:  # if self.out is None, just do nothing
            return

        out = self.out
        out.write("Iteration Coordinate: {0:s}\n".format(
            format_iteration_coordinate(metadata['coord'])))

        # Each vector is printed under its header, sorted by variable name.
        for header, vec in (("Params:\n", params),
                            ("Unknowns:\n", unknowns),
                            ("Resids:\n", resids)):
            out.write(header)
            for name, val in sorted(iteritems(vec)):
                out.write("  {0}: {1}\n".format(name, str(val)))

        # Flush once per iteration to allow external scripts to process the data.
        out.flush()
Exemple #38
0
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        # Derivatives are stored under a 'derivs' sub-key of the iteration.
        key = '%s/derivs' % format_iteration_coordinate(coord)

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']
        record['Derivatives'] = derivs

        self.out[key] = record
    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict or ndarray depending on the optimizer
            Dictionary containing derivatives

        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """

        coord = metadata['coord']

        record = OrderedDict()
        record['timestamp'] = metadata['timestamp']
        record['success'] = metadata['success']
        record['msg'] = metadata['msg']
        record['Derivatives'] = derivs

        self.out_derivs[format_iteration_coordinate(coord)] = record