def test_db_to_dict(self):
    """Record ten complete cases to a DB file, then verify that both
    case_db_to_dict and caseiter_to_dict return one value per case for
    every queried variable.
    """
    workdir = tempfile.mkdtemp()
    db_path = os.path.join(workdir, 'junk.db')
    recorder = DBCaseRecorder(db_path)
    out_names = ['comp1.z', 'comp2.z']
    in_names = ['comp1.x', 'comp1.y', 'comp1.y2']
    recorder.register(self, in_names, out_names)
    # record 10 cases; every case carries all three input variables
    for i in range(10):
        recorder.record(self, [i, i * 2, i * 3],
                        [i * i, float('NaN')], None, '', '')
    queried = ['comp1.x', 'comp1.y', 'comp1.y2']
    varinfo = case_db_to_dict(db_path, queried)
    self.assertEqual(len(varinfo), 3)
    # each queried variable should have one entry per recorded case
    for values in varinfo.values():
        self.assertEqual(len(values), 10)
    # the same data should come back through the case iterator
    varinfo = caseiter_to_dict(recorder.get_iterator(), queried)
    for values in varinfo.values():
        self.assertEqual(len(values), 10)
    try:
        shutil.rmtree(workdir, onerror=onerror)
    except OSError:
        logging.error("problem removing directory %s", workdir)
def test_db_to_dict(self):
    """Write ten cases into a SQLite case DB and check that the DB-file
    query and the case-iterator query each report 10 values per variable.
    """
    tdir = tempfile.mkdtemp()
    dbfile = os.path.join(tdir, 'junk.db')
    rec = DBCaseRecorder(dbfile)
    rec.register(self,
                 ['comp1.x', 'comp1.y', 'comp1.y2'],
                 ['comp1.z', 'comp2.z'])
    for case_num in range(10):
        ins = [case_num, case_num * 2, case_num * 3]
        outs = [case_num * case_num, float('NaN')]
        rec.record(self, ins, outs, None, '', '')
    names = ['comp1.x', 'comp1.y', 'comp1.y2']
    data = case_db_to_dict(dbfile, names)
    self.assertEqual(len(data), 3)
    # all three variables were recorded in every case
    for column in data.values():
        self.assertEqual(len(column), 10)
    # now pull the same data back through caseiter_to_dict
    data = caseiter_to_dict(rec.get_iterator(), names)
    for column in data.values():
        self.assertEqual(len(column), 10)
    try:
        shutil.rmtree(tdir, onerror=onerror)
    except OSError:
        logging.error("problem removing directory %s", tdir)
def test_db_to_dict(self):
    """Record cases where some carry an error and some omit a variable,
    then verify that the dict converters only return complete,
    error-free cases.

    Cases 0-1 have an error message and cases 5-9 omit 'comp1.y2', so
    only cases 2-4 (3 cases) should survive the query.
    """
    tmpdir = tempfile.mkdtemp()
    dfile = os.path.join(tmpdir, 'junk.db')
    recorder = DBCaseRecorder(dfile)
    outputs = ['comp1.z', 'comp2.z']
    # create some Cases where some are missing a variable
    for i in range(10):
        # first two cases are flagged as errors
        msg = '' if i > 1 else 'an error occurred'
        if i < 5:
            inputs = [('comp1.x', i), ('comp1.y', i * 2),
                      ('comp1.y2', i * 3)]
        else:
            # later cases omit 'comp1.y2'
            inputs = [('comp1.x', i), ('comp1.y', i * 2)]
        recorder.record(Case(inputs=inputs, outputs=outputs, msg=msg))
    varnames = ['comp1.x', 'comp1.y', 'comp1.y2']
    varinfo = case_db_to_dict(dfile, varnames)
    self.assertEqual(len(varinfo), 3)
    # each var list should have 3 data values in it (5 with the required
    # variables minus 2 with errors)
    for lst in varinfo.values():
        self.assertEqual(len(lst), 3)
    # now use caseiter_to_dict to grab the same data
    varinfo = caseiter_to_dict(recorder.get_iterator(), varnames)
    for lst in varinfo.values():
        self.assertEqual(len(lst), 3)
    try:
        shutil.rmtree(tmpdir)
    except OSError:
        # lazy %-args: message is only formatted if the record is emitted
        logging.error("problem removing directory %s", tmpdir)
def test_db_to_dict(self):
    """Record a mix of good, erroneous, and incomplete cases and verify
    that case_db_to_dict and caseiter_to_dict drop the bad ones.

    Cases 0-1 carry an error message, cases 5-9 are missing 'comp1.y2';
    only the 3 remaining cases should be returned.
    """
    tmpdir = tempfile.mkdtemp()
    dfile = os.path.join(tmpdir, 'junk.db')
    recorder = DBCaseRecorder(dfile)
    outputs = ['comp1.z', 'comp2.z']
    # create some Cases where some are missing a variable
    for i in range(10):
        # cases 0 and 1 are marked as failed
        msg = '' if i > 1 else 'an error occurred'
        if i < 5:
            inputs = [('comp1.x', i), ('comp1.y', i * 2),
                      ('comp1.y2', i * 3)]
        else:
            # these cases leave out 'comp1.y2'
            inputs = [('comp1.x', i), ('comp1.y', i * 2)]
        recorder.record(Case(inputs=inputs, outputs=outputs, msg=msg))
    varnames = ['comp1.x', 'comp1.y', 'comp1.y2']
    varinfo = case_db_to_dict(dfile, varnames)
    self.assertEqual(len(varinfo), 3)
    # each var list should have 3 data values in it (5 with the required
    # variables minus 2 with errors)
    for lst in varinfo.values():
        self.assertEqual(len(lst), 3)
    # now use caseiter_to_dict to grab the same data
    varinfo = caseiter_to_dict(recorder.get_iterator(), varnames)
    for lst in varinfo.values():
        self.assertEqual(len(lst), 3)
    try:
        shutil.rmtree(tmpdir)
    except OSError:
        # lazy %-args instead of eager string interpolation
        logging.error("problem removing directory %s", tmpdir)
# Evaluate the true Branin function on a regular grid for the contour plot.
X_range = arange(-5, 10.2, .25)
Y_range = arange(0, 15.2, .25)
X, Y = meshgrid(X_range, Y_range)
Z = branin(X, Y)

iterator = analysis.branin_meta_model.recorder.get_iterator()

plt.contour(X, Y, Z, arange(1, 200, 2), zorder=1)
cb = plt.colorbar(shrink=.45)

# plot the initial training data
meta_vars = ['branin_meta_model.y',
             'branin_meta_model.x',
             'branin_meta_model.f_xy']
data_train = case_db_to_dict(os.path.join(analysis._tdir, 'trainer.db'),
                             meta_vars)
plt.scatter(data_train['branin_meta_model.x'],
            data_train['branin_meta_model.y'],
            s=30, c='#572E07', zorder=10)

# points added adaptively during the EI-driven retraining
data_EI = case_db_to_dict(os.path.join(analysis._tdir, 'retrain.db'),
                          meta_vars)
count = len(data_EI['branin_meta_model.x'])
colors = arange(0, count) / float(count)
color_map = get_cmap('spring')
# Grid over the Branin domain, used to draw the reference contours.
X_range = arange(-5, 10.2, .25)
Y_range = arange(0, 15.2, .25)
X, Y = meshgrid(X_range, Y_range)
Z = branin(X, Y)

iterator = analysis.branin_meta_model.recorder.get_iterator()

plt.contour(X, Y, Z, arange(1, 200, 2), zorder=1)
cb = plt.colorbar(shrink=.45)

# plot the initial training data
trainer_db = os.path.join(analysis._tdir, 'trainer.db')
data_train = case_db_to_dict(trainer_db,
                             ['branin_meta_model.y',
                              'branin_meta_model.x',
                              'branin_meta_model.f_xy'])
plt.scatter(data_train['branin_meta_model.x'],
            data_train['branin_meta_model.y'],
            s=30, c='#572E07', zorder=10)

# samples added by the adaptive retraining step
retrain_db = os.path.join(analysis._tdir, 'retrain.db')
data_EI = case_db_to_dict(retrain_db,
                          ['branin_meta_model.y',
                           'branin_meta_model.x',
                           'branin_meta_model.f_xy'])
count = len(data_EI['branin_meta_model.x'])
colors = arange(0, count) / float(count)
# NOTE(review): this chunk was collapsed onto one physical line and begins
# with a bare `return` — the tail of a function (presumably `def branin(x, y):`,
# judging by the parallel variant elsewhere in this file) whose header lies
# outside this view. Left byte-identical; confirm the original layout before
# reformatting.
return (y - (5.1 / (4. * pi ** 2.)) * x ** 2. + 5. * x / pi - 6.) ** 2. + \ 10. * (1. - 1. / (8. * pi)) * cos(x) + 10. X_range = arange(-5, 10.2, .25) Y_range = arange(0, 15.2, .25) X, Y = meshgrid(X_range, Y_range) Z = branin(X, Y) plt.contour(X, Y, Z, arange(1, 200, 2), zorder=1) cb = plt.colorbar(shrink=.45) # plot the initial training data data_train = case_db_to_dict( os.path.join(analysis.architecture._tdir, 'trainer.db'), [ 'branin.y', 'branin.x', ]) plt.scatter(data_train['branin.x'], data_train['branin.y'], s=30, c='#572E07', zorder=10) data_EI = {} data_EI['branin.x'] = data_train['branin.x'][analysis.architecture. initial_DOE_size:] data_EI['branin.y'] = data_train['branin.y'][analysis.architecture. initial_DOE_size:]
def branin(x,y): return (y-(5.1/(4.*pi**2.))*x**2.+5.*x/pi-6.)**2.+\ 10.*(1.-1./(8.*pi))*cos(x)+10. X_range = arange(-5,10.2,.25) Y_range = arange(0,15.2,.25) X,Y = meshgrid(X_range,Y_range) Z = branin(X,Y) plt.contour(X,Y,Z,arange(1,200,2),zorder=1) cb = plt.colorbar(shrink=.45) #plot the initial training data data_train = case_db_to_dict(os.path.join(analysis.architecture._tdir,'trainer.db'), ['branin.y', 'branin.x',]) plt.scatter(data_train['branin.x'], data_train['branin.y'],s=30,c='#572E07',zorder=10) data_EI = {} data_EI['branin.x'] = data_train['branin.x'][analysis.architecture.initial_DOE_size:] data_EI['branin.y'] = data_train['branin.y'][analysis.architecture.initial_DOE_size:] count = len(data_EI['branin.x']) print "%d adaptively sampled points"%count colors = arange(0,count)/float(count) color_map = get_cmap('spring')
# NOTE(review): this chunk was collapsed onto one physical line, and
# `x_row`/`y_row` plus the `Z1_pred.append(row1)` pattern suggest it is the
# interior of an outer row-by-row prediction loop whose header (and dedent
# structure) lies outside this view — left byte-identical; confirm the
# original indentation before reformatting.
row1 = [] row2 = [] for x, y in zip(x_row, y_row): analysis.spiral_meta_model.x = x analysis.spiral_meta_model.y = y analysis.spiral_meta_model.execute() row1.append(analysis.spiral_meta_model.f1_xy.mu) row2.append(analysis.spiral_meta_model.f2_xy.mu) Z1_pred.append(row1) Z2_pred.append(row2) Z1_pred = array(Z1_pred) Z2_pred = array(Z2_pred) #plot the initial training data data_train = case_db_to_dict(os.path.join(analysis._tdir, 'trainer.db'), [ 'spiral_meta_model.x', 'spiral_meta_model.y', 'spiral_meta_model.f1_xy', 'spiral_meta_model.f2_xy' ]) plt.scatter(data_train['spiral_meta_model.x'], data_train['spiral_meta_model.y'], s=30, c='#572E07', zorder=10) data_EI = case_db_to_dict(os.path.join(analysis._tdir, 'retrain.db'), [ 'spiral_meta_model.y', 'spiral_meta_model.x', 'spiral_meta_model.f1_xy', 'spiral_meta_model.f2_xy' ]) count = len(data_EI['spiral_meta_model.x']) colors = arange(0, count) / float(count)
# NOTE(review): collapsed single-line chunk that begins mid-structure —
# `row1` is appended to but never initialized here, so the chunk starts inside
# an enclosing loop body whose header is outside this view; the lost dedent
# information makes a safe reformat impossible from here. Left byte-identical;
# verify the original layout before restructuring.
row2 = [] for x,y in zip(x_row,y_row): analysis.spiral_meta_model.x = x analysis.spiral_meta_model.y = y analysis.spiral_meta_model.execute() row1.append(analysis.spiral_meta_model.f1_xy.mu) row2.append(analysis.spiral_meta_model.f2_xy.mu) Z1_pred.append(row1) Z2_pred.append(row2) Z1_pred = array(Z1_pred) Z2_pred = array(Z2_pred) #plot the initial training data data_train = case_db_to_dict(os.path.join(analysis._tdir,'trainer.db'), ['spiral_meta_model.x', 'spiral_meta_model.y', 'spiral_meta_model.f1_xy', 'spiral_meta_model.f2_xy']) plt.scatter(data_train['spiral_meta_model.x'], data_train['spiral_meta_model.y'],s=30,c='#572E07',zorder=10) data_EI = case_db_to_dict(os.path.join(analysis._tdir,'retrain.db'), ['spiral_meta_model.y', 'spiral_meta_model.x', 'spiral_meta_model.f1_xy', 'spiral_meta_model.f2_xy']) count = len(data_EI['spiral_meta_model.x']) colors = arange(0,count)/float(count) color_map = get_cmap('spring')