Example #1
 def __call__(self):
     if PY2:
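         # Python 2's pickle.load() has no 'encoding' argument, so load directly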
         with open(self.filename, 'rb') as f:
             return load(f)
     else:
         try:
             with open(self.filename, 'rb') as f:
                 return load(f)
         except UnicodeDecodeError:
             # this happens when loading numpy arrays which have been
             # pickled with Python 2
             with open(self.filename, 'rb') as f:
                 return load(f, encoding='latin-1')
Example #2
 def _load(self, i):
     if i in self._cache:
         return self._cache[i]
     else:
         with open(os.path.join(self.dir, str(i)), 'rb') as f:
             v = load(f)
         self._cache[i] = v
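         # self._cache is presumably an OrderedDict: popitem(last=False)
         # evicts the oldest entry (FIFO) once the cache exceeds cache_size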
         if len(self._cache) > self.cache_size:
             self._cache.popitem(last=False)
         return v
Example #3
    def d(self):
        if self._deleted:
            raise ValueError('Dataset has been deleted')
        if hasattr(self, '_d'):
            return self._d

        if not self.finished:
            if self.failed:
                logger.warning('Loading data of failed dataset {}.'.format(
                    self.name))
            else:
                if os.path.exists(os.path.join(self.path, 'DATA')):
                    logger.warning(
                        'Loading data of unfinished dataset {}.'.format(
                            self.name))
                else:
                    raise ValueError(
                        'No data has been written to unfinished dataset {}.'.
                        format(self.name))
        if PY2:
            with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                self._data = load(f)
        else:
            try:
                with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                    self._data = load(f)
            except UnicodeDecodeError:
                # this happens when loading numpy arrays which have been
                # pickled with Python 2
                with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                    self._data = load(f, encoding='latin-1')
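        # turn the relative DataLoader filenames into absolute paths
        # inside the dataset directory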
        for v in self._data.values():
            if isinstance(v, DataLoader):
                v.filename = os.path.join(self.path, v.filename)
            elif isinstance(v, list):
                for vv in v:
                    if isinstance(vv, DataLoader):
                        vv.filename = os.path.join(self.path, vv.filename)

        self._d = self.DataDict()
        self._d.__dict__ = self._data
        return self._d
Example #4
 def get(self, key):
     key = base64.b64encode(key)
     response = self.server.get(self.secret, key)
     assert len(response) == 2 and isinstance(response[0], bool) and isinstance(response[1], str)
     if response[0]:
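         # response[1] is the path of a file holding the pickled value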
         file_path = response[1]
         with open(file_path, 'rb') as f:
             value = load(f)
         return True, value
     else:
         return False, None
Example #5
File: cache.py  Project: renemilk/pyMor
 def get(self, key):
     c = self.conn.cursor()
     t = (key,)
     c.execute('SELECT filename FROM entries WHERE key=?', t)
     result = c.fetchall()
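     # the key column should match at most one row; several rows mean the
     # cache database is corrupt (see the RuntimeError below)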
     if len(result) == 0:
         return False, None
     elif len(result) == 1:
         file_path = os.path.join(self.path, result[0][0])
         with open(file_path, 'rb') as f:
             value = load(f)
         return True, value
     else:
         raise RuntimeError('Cache is corrupt!')
Example #6
 def __init__(self, argument_dict):
     """If argument_dict contains a value for max_keys this maximum amount of cache values kept in the
     internal cache file, otherwise its set to sys.maxlen.
     If necessary values are deleted from the cache in FIFO order.
     """
     argument_dict['filename'] = argument_dict.get('filename', os.path.join(gettempdir(), 'pymor'))
     super(LimitedFileBackend, self).__init__(argument_dict)
     self.logger.debug('LimitedFileBackend args {}'.format(pformat(argument_dict)))
     self._max_keys = argument_dict.get('max_keys', sys.maxsize)
     self._keylist_fn = self.filename + '.keys'
     self._max_size = argument_dict.get('max_size', None)
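     # restore the persisted key list and total cache size; start with an
     # empty cache if the keylist file is missing or unreadable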
     try:
         with open(self._keylist_fn, 'rb') as f:
             self._keylist, self._size = load(f)
     except Exception:
         self._keylist = deque()
         self._size = 0
     self._enforce_limits(None)
     self.print_limit()
Example #7
def analyze_pickle_histogram(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced discretization ...')
    rb_discretization = load(open(args['REDUCED_DATA'], 'rb'))

    mus = rb_discretization.parameter_space.sample_randomly(args['SAMPLES'])
    us = []
    for mu in mus:
        print('Solving reduced for {} ... '.format(mu), end='')
        sys.stdout.flush()
        us.append(rb_discretization.solve(mu))
        print('done')

    print()

    if hasattr(rb_discretization, 'estimate'):
        ests = []
        for u, mu in zip(us, mus):
            print('Estimating error for {} ... '.format(mu), end='')
            sys.stdout.flush()
            ests.append(rb_discretization.estimate(u, mu=mu))
            print('done')

    if args['--detailed']:
        print('Loading high-dimensional data ...')
        discretization, reconstructor = load(open(args['--detailed'], 'rb'))

        errs = []
        for u, mu in zip(us, mus):
            print('Calculating error for {} ... '.format(mu))
            sys.stdout.flush()
            err = discretization.solve(mu) - reconstructor.reconstruct(u)
            if args['--error-norm']:
                errs.append(np.max(getattr(discretization, args['--error-norm'] + '_norm')(err)))
            else:
                errs.append(np.max(err.l2_norm()))
            print('done')

        print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    if hasattr(rb_discretization, 'estimate') and args['--detailed']:

        # setup axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left+width+0.02
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        plt.figure(1, figsize=(8, 8))
        axScatter = plt.axes(rect_scatter)
        axHistx = plt.axes(rect_histx)
        axHisty = plt.axes(rect_histy)

        # scatter plot
        total_min = min(min(ests), min(errs)) * 0.9
        total_max = max(max(ests), max(errs)) * 1.1
        axScatter.set_xscale('log')
        axScatter.set_yscale('log')
        axScatter.set_xlim([total_min, total_max])
        axScatter.set_ylim([total_min, total_max])
        axScatter.set_xlabel('errors')
        axScatter.set_ylabel('estimates')
        axScatter.plot([total_min, total_max], [total_min, total_max], 'r')
        axScatter.scatter(errs, ests)

        # plot histograms
        x_hist, x_bin_edges = np.histogram(errs, bins=np.logspace(np.log10(total_min), np.log10(total_max), 100))
        axHistx.bar(x_bin_edges[1:], x_hist, width=x_bin_edges[:-1] - x_bin_edges[1:], color='blue')
        y_hist, y_bin_edges = np.histogram(ests, bins=np.logspace(np.log10(total_min), np.log10(total_max), 100))
        axHisty.barh(y_bin_edges[1:], y_hist, height=y_bin_edges[:-1] - y_bin_edges[1:], color='blue')
        axHistx.set_xscale('log')
        axHisty.set_yscale('log')
        axHistx.set_xticklabels([])
        axHisty.set_yticklabels([])
        axHistx.set_xlim(axScatter.get_xlim())
        axHisty.set_ylim(axScatter.get_ylim())
        axHistx.set_ylim([0, max(max(x_hist), max(y_hist))])
        axHisty.set_xlim([0, max(max(x_hist), max(y_hist))])

        plt.show()

    elif hasattr(rb_discretization, 'estimate'):

        total_min = min(ests) * 0.9
        total_max = max(ests) * 1.1

        hist, bin_edges = np.histogram(ests, bins=np.logspace(np.log10(total_min), np.log10(total_max), 100))
        plt.bar(bin_edges[1:], hist, width=bin_edges[:-1] - bin_edges[1:], color='blue')
        plt.xlim([total_min, total_max])
        plt.xscale('log')
        plt.xlabel('estimated error')

        plt.show()

    elif args['--detailed']:

        total_min = min(errs) * 0.9
        total_max = max(errs) * 1.1

        hist, bin_edges = np.histogram(errs, bins=np.logspace(np.log10(total_min), np.log10(total_max), 100))
        plt.bar(bin_edges[1:], hist, width=bin_edges[:-1] - bin_edges[1:], color='blue')
        plt.xlim([total_min, total_max])
        plt.xscale('log')
        plt.xlabel('error')

        plt.show()

    else:
        raise ValueError('Nothing to plot!')
Example #8
def analyze_pickle_convergence(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced discretization ...')
    rb_discretization = load(open(args['REDUCED_DATA'], 'rb'))

    if args['--detailed']:
        print('Loading high-dimensional data ...')
        discretization, reconstructor = load(open(args['--detailed'], 'rb'))

    if not hasattr(rb_discretization, 'estimate') and not args['--detailed']:
        raise ValueError('Nothing to do! (Neither estimates nor true error can be computed.)')

    dim = rb_discretization.solution_space.dim
    if args['--ndim']:
        dims = np.linspace(0, dim, int(args['--ndim']), dtype=int)
    else:
        dims = np.arange(dim + 1)

    mus = rb_discretization.parameter_space.sample_randomly(args['SAMPLES'])

    ESTS = []
    ERRS = []
    T_SOLVES = []
    T_ESTS = []
    for N in dims:
        rd, rc, _ = reduce_to_subbasis(rb_discretization, N)
        print('N = {:3} '.format(N), end='')
        us = []
        print('solve ', end='')
        sys.stdout.flush()
        start = time.time()
        for mu in mus:
            us.append(rd.solve(mu))
        T_SOLVES.append((time.time() - start) * 1000. / len(mus))

        print('estimate ', end='')
        sys.stdout.flush()
        if hasattr(rb_discretization, 'estimate'):
            ests = []
            start = time.time()
            for u, mu in zip(us, mus):
                # print('e', end='')
                # sys.stdout.flush()
                ests.append(rd.estimate(u, mu=mu))
            ESTS.append(max(ests))
            T_ESTS.append((time.time() - start) * 1000. / len(mus))

        if args['--detailed']:
            print('errors', end='')
            sys.stdout.flush()
            errs = []
            for u, mu in zip(us, mus):
                err = discretization.solve(mu) - reconstructor.reconstruct(rc.reconstruct(u))
                if args['--error-norm']:
                    errs.append(np.max(getattr(discretization, args['--error-norm'] + '_norm')(err)))
                else:
                    errs.append(np.max(err.l2_norm()))
            ERRS.append(max(errs))

        print()

    print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    plt.subplot(1, 2, 1)
    if hasattr(rb_discretization, 'estimate'):
        plt.semilogy(dims, ESTS, label='max. estimate')
    if args['--detailed']:
        plt.semilogy(dims, ERRS, label='max. error')
    plt.xlabel('dimension')
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(dims, T_SOLVES, label='avg. solve time')
    if hasattr(rb_discretization, 'estimate'):
        plt.plot(dims, T_ESTS, label='avg. estimate time')
    plt.xlabel('dimension')
    plt.ylabel('milliseconds')
    plt.legend()

    plt.show()
Example #9
def analyze_pickle_histogram(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced model ...')
    rom, parameter_space = load(open(args['REDUCED_DATA'], 'rb'))

    mus = parameter_space.sample_randomly(args['SAMPLES'])
    us = []
    for mu in mus:
        print(f'Solving reduced for {mu} ... ', end='')
        sys.stdout.flush()
        us.append(rom.solve(mu))
        print('done')

    print()

    if hasattr(rom, 'estimate'):
        ests = []
        for u, mu in zip(us, mus):
            print(f'Estimating error for {mu} ... ', end='')
            sys.stdout.flush()
            ests.append(rom.estimate(u, mu=mu))
            print('done')

    if args['--detailed']:
        print('Loading high-dimensional data ...')
        fom, reductor = load(open(args['--detailed'], 'rb'))

        errs = []
        for u, mu in zip(us, mus):
            print(f'Calculating error for {mu} ... ')
            sys.stdout.flush()
            err = fom.solve(mu) - reductor.reconstruct(u)
            if args['--error-norm']:
                errs.append(
                    np.max(getattr(fom, args['--error-norm'] + '_norm')(err)))
            else:
                errs.append(np.max(err.l2_norm()))
            print('done')

        print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    if hasattr(rom, 'estimate') and args['--detailed']:

        # setup axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left + width + 0.02
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        plt.figure(1, figsize=(8, 8))
        axScatter = plt.axes(rect_scatter)
        axHistx = plt.axes(rect_histx)
        axHisty = plt.axes(rect_histy)

        # scatter plot
        total_min = min(np.min(ests), np.min(errs)) * 0.9
        total_max = max(np.max(ests), np.max(errs)) * 1.1
        axScatter.set_xscale('log')
        axScatter.set_yscale('log')
        axScatter.set_xlim([total_min, total_max])
        axScatter.set_ylim([total_min, total_max])
        axScatter.set_xlabel('errors')
        axScatter.set_ylabel('estimates')
        axScatter.plot([total_min, total_max], [total_min, total_max], 'r')
        axScatter.scatter(errs, ests)

        # plot histograms
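        # _bins is a module-level helper not shown in this snippet; presumably
        # it returns log-spaced bin edges, cf. the explicit np.logspace call
        # in Example #7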
        x_hist, x_bin_edges = np.histogram(errs,
                                           bins=_bins(total_min, total_max))
        axHistx.bar(x_bin_edges[1:],
                    x_hist,
                    width=x_bin_edges[:-1] - x_bin_edges[1:],
                    color='blue')
        y_hist, y_bin_edges = np.histogram(ests,
                                           bins=_bins(total_min, total_max))
        axHisty.barh(y_bin_edges[1:],
                     y_hist,
                     height=y_bin_edges[:-1] - y_bin_edges[1:],
                     color='blue')
        axHistx.set_xscale('log')
        axHisty.set_yscale('log')
        axHistx.set_xticklabels([])
        axHisty.set_yticklabels([])
        axHistx.set_xlim(axScatter.get_xlim())
        axHisty.set_ylim(axScatter.get_ylim())
        axHistx.set_ylim([0, max(np.max(x_hist), np.max(y_hist))])
        axHisty.set_xlim([0, max(np.max(x_hist), np.max(y_hist))])

        plt.show()

    elif hasattr(rom, 'estimate'):

        total_min = np.min(ests) * 0.9
        total_max = np.max(ests) * 1.1

        hist, bin_edges = np.histogram(ests, bins=_bins(total_min, total_max))
        plt.bar(bin_edges[1:],
                hist,
                width=bin_edges[:-1] - bin_edges[1:],
                color='blue')
        plt.xlim([total_min, total_max])
        plt.xscale('log')
        plt.xlabel('estimated error')

        plt.show()

    elif args['--detailed']:

        total_min = np.min(errs) * 0.9
        total_max = np.max(errs) * 1.1

        hist, bin_edges = np.histogram(errs, bins=_bins(total_min, total_max))
        plt.bar(bin_edges[1:],
                hist,
                width=bin_edges[:-1] - bin_edges[1:],
                color='blue')
        plt.xlim([total_min, total_max])
        plt.xscale('log')
        plt.xlabel('error')

        plt.show()

    else:
        raise ValueError('Nothing to plot!')
Example #10
def analyze_pickle_convergence(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced model ...')
    rom, parameter_space = load(open(args['REDUCED_DATA'], 'rb'))

    if not args['--detailed']:
        raise ValueError('High-dimensional data file must be specified.')
    print('Loading high-dimensional data ...')
    fom, reductor = load(open(args['--detailed'], 'rb'))
    fom.enable_caching('disk')

    dim = rom.solution_space.dim
    if args['--ndim']:
        dims = np.linspace(0, dim, int(args['--ndim']), dtype=int)
    else:
        dims = np.arange(dim + 1)

    mus = parameter_space.sample_randomly(args['SAMPLES'])

    ESTS = []
    ERRS = []
    T_SOLVES = []
    T_ESTS = []
    for N in dims:
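        # rebuild the ROM from the first N reduced basis vectors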
        rom = reductor.reduce(N)
        print(f'N = {N:3} ', end='')
        us = []
        print('solve ', end='')
        sys.stdout.flush()
        start = time.time()
        for mu in mus:
            us.append(rom.solve(mu))
        T_SOLVES.append((time.time() - start) * 1000. / len(mus))

        print('estimate ', end='')
        sys.stdout.flush()
        if hasattr(rom, 'estimate'):
            ests = []
            start = time.time()
            for u, mu in zip(us, mus):
                # print('e', end='')
                # sys.stdout.flush()
                ests.append(rom.estimate(u, mu=mu))
            ESTS.append(max(ests))
            T_ESTS.append((time.time() - start) * 1000. / len(mus))

        print('errors', end='')
        sys.stdout.flush()
        errs = []
        for u, mu in zip(us, mus):
            err = fom.solve(mu) - reductor.reconstruct(u)
            if args['--error-norm']:
                errs.append(
                    np.max(getattr(fom, args['--error-norm'] + '_norm')(err)))
            else:
                errs.append(np.max(err.l2_norm()))
        ERRS.append(max(errs))

        print()

    print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    plt.subplot(1, 2, 1)
    if hasattr(rom, 'estimate'):
        plt.semilogy(dims, ESTS, label='max. estimate')
    plt.semilogy(dims, ERRS, label='max. error')
    plt.xlabel('dimension')
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(dims, T_SOLVES, label='avg. solve time')
    if hasattr(rom, 'estimate'):
        plt.plot(dims, T_ESTS, label='avg. estimate time')
    plt.xlabel('dimension')
    plt.ylabel('milliseconds')
    plt.legend()

    plt.show()
Example #11
def analyze_pickle_convergence(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced discretization ...')
    rb_discretization = load(open(args['REDUCED_DATA'], 'rb'))

    if args['--detailed']:
        print('Loading high-dimensional data ...')
        discretization, reconstructor = load(open(args['--detailed'], 'rb'))

    if not hasattr(rb_discretization, 'estimate') and not args['--detailed']:
        raise ValueError('Nothing to do! (Neither estimates nor true error can be computed.)')

    dim = rb_discretization.solution_space.dim
    if args['--ndim']:
        dims = np.linspace(0, dim, int(args['--ndim']), dtype=int)
    else:
        dims = np.arange(dim + 1)

    mus = list(rb_discretization.parameter_space.sample_randomly(args['SAMPLES']))

    ESTS = []
    ERRS = []
    T_SOLVES = []
    T_ESTS = []
    for N in dims:
        rd, rc, _ = reduce_to_subbasis(rb_discretization, N)
        print('N = {:3} '.format(N), end='')
        us = []
        print('solve ', end='')
        sys.stdout.flush()
        start = time.time()
        for mu in mus:
            us.append(rd.solve(mu))
        T_SOLVES.append((time.time() - start) * 1000. / len(mus))

        print('estimate ', end='')
        sys.stdout.flush()
        if hasattr(rb_discretization, 'estimate'):
            ests = []
            start = time.time()
            for u, mu in zip(us, mus):
                # print('e', end='')
                # sys.stdout.flush()
                ests.append(rd.estimate(u, mu=mu))
            ESTS.append(max(ests))
            T_ESTS.append((time.time() - start) * 1000. / len(mus))

        if args['--detailed']:
            print('errors', end='')
            sys.stdout.flush()
            errs = []
            for u, mu in zip(us, mus):
                err = discretization.solve(mu) - reconstructor.reconstruct(rc.reconstruct(u))
                if args['--error-norm']:
                    errs.append(np.max(getattr(discretization, args['--error-norm'] + '_norm')(err)))
                else:
                    errs.append(np.max(err.l2_norm()))
            ERRS.append(max(errs))

        print()

    print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    plt.subplot(1, 2, 1)
    if hasattr(rb_discretization, 'estimate'):
        plt.semilogy(dims, ESTS, label='max. estimate')
    if args['--detailed']:
        plt.semilogy(dims, ERRS, label='max. error')
    plt.xlabel('dimension')
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(dims, T_SOLVES, label='avg. solve time')
    if hasattr(rb_discretization, 'estimate'):
        plt.plot(dims, T_ESTS, label='avg. estimate time')
    plt.xlabel('dimension')
    plt.ylabel('milliseconds')
    plt.legend()

    plt.show()
Example #12
def analyze_pickle_convergence(args):
    args['SAMPLES'] = int(args['SAMPLES'])

    print('Loading reduced model ...')
    rom = load(open(args['REDUCED_DATA'], 'rb'))

    if not args['--detailed']:
        raise ValueError('High-dimensional data file must be specified.')
    print('Loading high-dimensional data ...')
    fom, reductor = load(open(args['--detailed'], 'rb'))
    fom.enable_caching('disk')

    dim = rom.solution_space.dim
    if args['--ndim']:
        dims = np.linspace(0, dim, int(args['--ndim']), dtype=int)
    else:
        dims = np.arange(dim + 1)

    mus = rom.parameter_space.sample_randomly(args['SAMPLES'])

    ESTS = []
    ERRS = []
    T_SOLVES = []
    T_ESTS = []
    for N in dims:
        rom = reductor.reduce(N)
        print(f'N = {N:3} ', end='')
        us = []
        print('solve ', end='')
        sys.stdout.flush()
        start = time.time()
        for mu in mus:
            us.append(rom.solve(mu))
        T_SOLVES.append((time.time() - start) * 1000. / len(mus))

        print('estimate ', end='')
        sys.stdout.flush()
        if hasattr(rom, 'estimate'):
            ests = []
            start = time.time()
            for u, mu in zip(us, mus):
                # print('e', end='')
                # sys.stdout.flush()
                ests.append(rom.estimate(u, mu=mu))
            ESTS.append(max(ests))
            T_ESTS.append((time.time() - start) * 1000. / len(mus))

        print('errors', end='')
        sys.stdout.flush()
        errs = []
        for u, mu in zip(us, mus):
            err = fom.solve(mu) - reductor.reconstruct(u)
            if args['--error-norm']:
                errs.append(np.max(getattr(fom, args['--error-norm'] + '_norm')(err)))
            else:
                errs.append(np.max(err.l2_norm()))
        ERRS.append(max(errs))

        print()

    print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    plt.subplot(1, 2, 1)
    if hasattr(rom, 'estimate'):
        plt.semilogy(dims, ESTS, label='max. estimate')
    plt.semilogy(dims, ERRS, label='max. error')
    plt.xlabel('dimension')
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(dims, T_SOLVES, label='avg. solve time')
    if hasattr(rom, 'estimate'):
        plt.plot(dims, T_ESTS, label='avg. estimate time')
    plt.xlabel('dimension')
    plt.ylabel('milliseconds')
    plt.legend()

    plt.show()
Example #13
def convergence(
    reduced_data: str = REDUCED_DATA,
    detailed_data: str = Argument(..., help='File containing the high-dimensional model and the reductor.'),
    samples: int = SAMPLES,

    error_norm: str = ERROR_NORM,
    ndim: int = Option(None, help='Number of reduced basis dimensions for which to estimate the error.')
):
    print('Loading reduced model ...')
    rom, parameter_space = load(open(reduced_data, 'rb'))

    print('Loading high-dimensional data ...')
    fom, reductor = load(open(detailed_data, 'rb'))
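    # cache full-order solutions on disk so each parameter needs only one
    # FOM solve across the whole N-loop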
    fom.enable_caching('disk')

    dim = rom.solution_space.dim
    if ndim:
        dims = np.linspace(0, dim, ndim, dtype=int)
    else:
        dims = np.arange(dim + 1)

    mus = parameter_space.sample_randomly(samples)

    ESTS = []
    ERRS = []
    T_SOLVES = []
    T_ESTS = []
    for N in dims:
        rom = reductor.reduce(N)
        print(f'N = {N:3} ', end='')
        us = []
        print('solve ', end='')
        sys.stdout.flush()
        start = time.perf_counter()
        for mu in mus:
            us.append(rom.solve(mu))
        T_SOLVES.append((time.perf_counter() - start) * 1000. / len(mus))

        print('estimate ', end='')
        sys.stdout.flush()
        if hasattr(rom, 'estimate_error'):
            ests = []
            start = time.perf_counter()
            for mu in mus:
                # print('e', end='')
                # sys.stdout.flush()
                ests.append(rom.estimate_error(mu))
            ESTS.append(max(ests))
            T_ESTS.append((time.perf_counter() - start) * 1000. / len(mus))

        print('errors', end='')
        sys.stdout.flush()
        errs = []
        for u, mu in zip(us, mus):
            err = fom.solve(mu) - reductor.reconstruct(u)
            if error_norm:
                errs.append(np.max(getattr(fom, error_norm + '_norm')(err)))
            else:
                errs.append(np.max(err.norm()))
        ERRS.append(max(errs))

        print()

    print()

    try:
        plt.style.use('ggplot')
    except AttributeError:
        pass  # plt.style is only available in newer matplotlib versions

    plt.subplot(1, 2, 1)
    if hasattr(rom, 'estimate_error'):
        plt.semilogy(dims, ESTS, label='max. estimate')
    plt.semilogy(dims, ERRS, label='max. error')
    plt.xlabel('dimension')
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(dims, T_SOLVES, label='avg. solve time')
    if hasattr(rom, 'estimate_error'):
        plt.plot(dims, T_ESTS, label='avg. estimate time')
    plt.xlabel('dimension')
    plt.ylabel('milliseconds')
    plt.legend()

    plt.show()