def test_lwr_linear(self):
    """Test LWLR on random linear models of dimensions from 1 to 20.

    It should return exact results, give or take floating-point
    imprecisions.
    """
    for _ in range(20):
        n_in = random.randint(1, 20)
        n_out = random.randint(1, 5)
        model = random_linear(n_in, n_out)
        cfg = {
            'm_channels': [learners.Channel('x_{}'.format(i), (0.0, 1.0))
                           for i in range(n_in)],
            's_channels': [learners.Channel('y_{}'.format(i), (0.0, 1.0))
                           for i in range(n_out)],
            'm_uniformize': True,
            'options.maxiter': 500,
        }
        learner = learners.OptimizeLearner(cfg)
        # Train on 4*n_in random observations of the linear model.
        for _ in range(4 * n_in):
            x = np.random.rand(n_in)
            learner.update(tools.to_signal(x, cfg['m_channels']),
                           tools.to_signal(model(x), cfg['s_channels']))
        # Inverse predictions should land close to the requested goal.
        for _ in range(10):
            goal = model(np.random.rand(n_in).ravel())
            x_pred = learner.infer(tools.to_signal(goal, cfg['s_channels']))
            x_pred = np.array(tools.to_vector(x_pred, cfg['m_channels']))
            self.assertTrue(np.allclose(model(x_pred), goal,
                                        rtol=1e-1, atol=1e-1))
def test_lwr1D_linear(self):
    """Simplest test possible (well, not quite, but close)."""
    def double(v):
        return 2.0 * v

    cfg = {'m_channels': [learners.Channel('x', (0.0, 1.0))],
           's_channels': [learners.Channel('y', (0.0, 1.0))],
           'm_uniformize': True,
           'sigma': 0.1}
    for learner in (learners.LWLRLearner(cfg), learners.ESLWLRLearner(cfg)):
        # Train on 10 random samples of the 1D linear map.
        for _ in range(10):
            x = np.random.rand(1)
            learner.update(tools.to_signal(x, cfg['m_channels']),
                           tools.to_signal(double(x), cfg['s_channels']))
        # Forward predictions should be numerically exact.
        for _ in range(10):
            x = np.random.rand(1).ravel()
            y_pred = learner.predict(tools.to_signal(x, cfg['m_channels']))
            y_pred = tools.to_vector(y_pred, cfg['s_channels'])
            self.assertTrue(np.allclose(double(x), y_pred,
                                        rtol=1e-5, atol=1e-5))
def test_lwr_linear(self):
    """Test LWLR on random linear models of dimensions from 1 to 20.

    It should return exact results, give or take floating-point
    imprecisions. Runs once with the fast (C++) backend and once without.
    """
    for use_cpp in (True, False):
        # Toggle the fast backend; fail loudly if it cannot be loaded.
        if use_cpp:
            learners.enable_fastlearners(silent_fail=False)
        else:
            learners.disable_fastlearners()
        for _ in range(20):
            n_in = random.randint(1, 20)
            n_out = random.randint(1, 5)
            model = random_linear(n_in, n_out)
            cfg = {
                'm_channels': [learners.Channel('x_{}'.format(i), (0.0, 1.0))
                               for i in range(n_in)],
                's_channels': [learners.Channel('y_{}'.format(i), (0.0, 1.0))
                               for i in range(n_out)],
                'm_uniformize': True,
                'sigma': 1.0,
            }
            for learner in (learners.LWLRLearner(cfg),
                            learners.ESLWLRLearner(cfg)):
                # Train on 2*n_in random observations.
                for _ in range(2 * n_in):
                    x = np.random.rand(n_in)
                    learner.update(
                        tools.to_signal(x, cfg['m_channels']),
                        tools.to_signal(model(x), cfg['s_channels']))
                # Forward predictions should be numerically exact.
                for _ in range(10):
                    x = np.random.rand(n_in).ravel()
                    y_pred = learner.predict(
                        tools.to_signal(x, cfg['m_channels']))
                    y_pred = tools.to_vector(y_pred, cfg['s_channels'])
                    self.assertTrue(np.allclose(model(x), y_pred,
                                                rtol=1e-5, atol=1e-5))
def cov_test(cfg):
    """Record sensory coverage over time.

    Coverage at a tick is the area of the union of discs of radius
    ``cfg.test.buffer_size`` centered on every sensory point observed so
    far; it is stored as the single-element ``errors`` entry of `history`.
    """
    ticks = set(cfg.test.ticks)
    history = chrono.ChronoHistory(cfg.hardware.datafile,
                                   core_keys=['errors'],
                                   meta={'jobcfg': cfg},
                                   extralog=False)
    sensory_data = chrono.ChronoHistory(cfg.hardware.exploration.sensoryfile,
                                        extralog=False, verbose=True)
    data_cfg = sensory_data.core.meta['jobcfg'].exploration

    points = []
    for tick, entry in enumerate(sensory_data):
        if tick in ticks:
            # Only compute coverage when not already present in history.
            if len(history) <= tick or history.core.entries[tick] is None:
                area = shapely.ops.unary_union(points).area
                history.add_entry(tick, {'errors': [area]},
                                  delete_posterior=False)
        s_vector = tools.to_vector(entry['data']['s_signal'],
                                   data_cfg.explorer.s_channels)
        if cfg.test.depolarize:
            # Convert polar (r, theta) sensory vectors to cartesian ones.
            cs = data_cfg.explorer.s_channels
            if len(cs) == 2 and cs[0].name == 'r' and cs[1].name == 'theta':
                r, theta = s_vector
                s_vector = (r * math.cos(theta), r * math.sin(theta))
        disc = shapely.geometry.Point(s_vector).buffer(cfg.test.buffer_size)
        points.append(disc)

    # Final tick, after all sensory points have been accumulated.
    if len(sensory_data) in ticks:
        area = shapely.ops.unary_union(points).area
        history.add_entry(len(sensory_data), {'errors': [area]},
                          delete_posterior=False)
    history.save(done=True, verbose=True)
def nn_test(cfg):
    """Evaluate nearest-neighbor errors against the testset goals.

    At each requested tick, the error for a goal is the distance between
    the goal and the closest sensory vector observed so far.
    """
    # Load the exploration data.
    data_history = chrono.ChronoHistory(cfg.hardware.exploration.datafile,
                                        extralog=False, verbose=True)
    data_cfg = data_history.core.meta['jobcfg'].exploration

    # Load the testset of sensory goals.
    testset_chr = chrono.ChronoHistory(cfg.hardware.testset.datafile,
                                       extralog=False, verbose=True)
    testset = {'s_channels': testset_chr.core.meta['s_channels'],
               's_goals': testset_chr.core.meta['testset']}

    history = chrono.ChronoHistory(cfg.hardware.datafile,
                                   core_keys=['errors'],
                                   meta={'jobcfg': cfg, 'testset': testset},
                                   extralog=False)

    ticks = set(cfg.test.ticks)
    nnset = learners.NNSet()
    for tick, entry in enumerate(data_history):
        if tick in ticks and len(nnset) > 0:
            # Skip ticks whose errors were already computed and saved.
            if len(history) <= tick or history.core.entries[tick] is None:
                measure_perf(tick, testset, nnset, data_cfg, history)
        exploration = entry['data']['exploration']
        feedback = entry['data']['feedback']
        m_vector = tools.to_vector(exploration['m_signal'],
                                   data_cfg.explorer.m_channels)
        s_vector = tools.to_vector(feedback['s_signal'],
                                   data_cfg.explorer.s_channels)
        nnset.add(m_vector, s_vector)

    # Final tick, after every observation has been incorporated.
    if len(data_history) in ticks:
        measure_perf(len(data_history), testset, nnset, data_cfg, history)
    history.save(done=True, verbose=True)
def measure_perf(tick, testset, nnset, data_cfg, history, light=False):
    """Record, at `tick`, the distance from each testset goal to its
    nearest observed sensory vector.

    NOTE(review): `data_cfg` and `light` are accepted but unused here —
    presumably kept for interface compatibility with callers; confirm.
    """
    errors = []
    for s_goal in testset['s_goals']:
        goal_vector = tools.to_vector(s_goal, testset['s_channels'])
        # k=1 nearest neighbor of the goal among observed sensory vectors.
        dists, idx = nnset.nn_y(goal_vector, k=1)
        nearest = nnset.ys[idx[0]]
        errors.append(tools.dist(goal_vector, nearest))
    print('log: test(t={}) done'.format(tick))
    history.add_entry(tick, {'errors': errors}, delete_posterior=False)
def nn_test(cfg):
    """Compute nearest-neighbor reaching errors at the configured ticks."""
    ## Loading exploration data ##
    data_history = chrono.ChronoHistory(cfg.hardware.exploration.datafile,
                                        extralog=False, verbose=True)
    data_cfg = data_history.core.meta['jobcfg'].exploration
    testset_chr = chrono.ChronoHistory(cfg.hardware.testset.datafile,
                                       extralog=False, verbose=True)
    testset = {'s_channels': testset_chr.core.meta['s_channels'],
               's_goals': testset_chr.core.meta['testset']}
    history = chrono.ChronoHistory(cfg.hardware.datafile,
                                   core_keys=['errors'],
                                   meta={'jobcfg': cfg, 'testset': testset},
                                   extralog=False)

    ticks = set(cfg.test.ticks)
    nnset = learners.NNSet()
    for tick, entry in enumerate(data_history):
        # Evaluate before incorporating this tick's observation, unless
        # the errors for this tick were already saved in a previous run.
        if tick in ticks and len(nnset) > 0:
            if len(history) <= tick or history.core.entries[tick] is None:
                measure_perf(tick, testset, nnset, data_cfg, history)
        m_vector = tools.to_vector(entry['data']['exploration']['m_signal'],
                                   data_cfg.explorer.m_channels)
        s_vector = tools.to_vector(entry['data']['feedback']['s_signal'],
                                   data_cfg.explorer.s_channels)
        nnset.add(m_vector, s_vector)

    if len(data_history) in ticks:
        measure_perf(len(data_history), testset, nnset, data_cfg, history)
    history.save(done=True, verbose=True)
def test_lwr_linear(self):
    """Test LWLR on random linear models of dimensions from 1 to 20.

    It should return exact results, give or take floating-point
    imprecisions. Exercised with and without the fast (C++) backend.
    """
    for cpp_enabled in (True, False):
        if cpp_enabled:
            # silent_fail=False: a missing C++ backend is a test failure.
            learners.enable_fastlearners(silent_fail=False)
        else:
            learners.disable_fastlearners()
        for _ in range(20):
            dim_m = random.randint(1, 20)
            dim_s = random.randint(1, 5)
            f = random_linear(dim_m, dim_s)
            m_channels = [learners.Channel('x_{}'.format(i), (0.0, 1.0))
                          for i in range(dim_m)]
            s_channels = [learners.Channel('y_{}'.format(i), (0.0, 1.0))
                          for i in range(dim_s)]
            cfg = {'m_channels': m_channels,
                   's_channels': s_channels,
                   'm_uniformize': True,
                   'sigma': 1.0}
            for learner in (learners.LWLRLearner(cfg),
                            learners.ESLWLRLearner(cfg)):
                # Training phase: 2*dim_m random samples.
                for _ in range(2 * dim_m):
                    x = np.random.rand(dim_m)
                    y = f(x)
                    learner.update(tools.to_signal(x, m_channels),
                                   tools.to_signal(y, s_channels))
                # Prediction phase: expect near-exact results.
                for _ in range(10):
                    x = np.random.rand(dim_m).ravel()
                    y = f(x)
                    yp = learner.predict(tools.to_signal(x, m_channels))
                    yp = tools.to_vector(yp, s_channels)
                    self.assertTrue(np.allclose(y, yp, rtol=1e-5, atol=1e-5))
def test_lwr1D_linear(self):
    """Simplest test possible (well, not quite, but close)."""
    def linear(v):
        return 2.0 * v

    cfg = {'m_channels': [learners.Channel('x', (0.0, 1.0))],
           's_channels': [learners.Channel('y', (0.0, 1.0))],
           'm_uniformize': True}
    learner = learners.OptimizeLearner(cfg)
    # Train on 10 random observations of the 1D linear map.
    for _ in range(10):
        x = np.random.rand(1)
        learner.update(tools.to_signal(x, cfg['m_channels']),
                       tools.to_signal(linear(x), cfg['s_channels']))
    # Inferred motor commands should reproduce the requested goals.
    for _ in range(10):
        goal = np.random.rand(1).ravel()
        x_pred = learner.infer(tools.to_signal(goal, cfg['s_channels']))
        x_pred = np.array(tools.to_vector(x_pred, cfg['m_channels']))
        self.assertTrue(np.allclose(linear(x_pred), goal,
                                    rtol=1e-5, atol=1e-5))
def cov_test(cfg):
    """Record sensory coverage at the requested ticks.

    Coverage is measured as the area of the union of discs of radius
    ``cfg.test.buffer_size`` around every sensory point seen so far.
    """
    ticks = set(cfg.test.ticks)
    history = chrono.ChronoHistory(cfg.hardware.datafile,
                                   core_keys=['errors'],
                                   meta={'jobcfg': cfg},
                                   extralog=False)
    sensory_data = chrono.ChronoHistory(cfg.hardware.exploration.sensoryfile,
                                        extralog=False, verbose=True)
    data_cfg = sensory_data.core.meta['jobcfg'].exploration
    s_channels = data_cfg.explorer.s_channels

    discs = []
    for tick, entry in enumerate(sensory_data):
        already_done = (len(history) > tick
                        and history.core.entries[tick] is not None)
        if tick in ticks and not already_done:
            history.add_entry(
                tick, {'errors': [shapely.ops.unary_union(discs).area]},
                delete_posterior=False)
        s_vector = tools.to_vector(entry['data']['s_signal'], s_channels)
        if cfg.test.depolarize:
            # Polar (r, theta) channels are mapped to cartesian coordinates.
            if (len(s_channels) == 2 and s_channels[0].name == 'r'
                    and s_channels[1].name == 'theta'):
                r, theta = s_vector
                s_vector = (r * math.cos(theta), r * math.sin(theta))
        discs.append(
            shapely.geometry.Point(s_vector).buffer(cfg.test.buffer_size))

    if len(sensory_data) in ticks:
        history.add_entry(
            len(sensory_data),
            {'errors': [shapely.ops.unary_union(discs).area]},
            delete_posterior=False)
    history.save(done=True, verbose=True)
def test_lwr1D_linear(self):
    """Simplest test possible (well, not quite, but close)."""
    scale_by_two = lambda v: 2.0 * v
    m_channels = [learners.Channel('x', (0.0, 1.0))]
    s_channels = [learners.Channel('y', (0.0, 1.0))]
    cfg = {'m_channels': m_channels,
           's_channels': s_channels,
           'm_uniformize': True}
    learner = learners.OptimizeLearner(cfg)
    # Training phase.
    for _ in range(10):
        x = np.random.rand(1)
        y = scale_by_two(x)
        learner.update(tools.to_signal(x, m_channels),
                       tools.to_signal(y, s_channels))
    # Inference phase: inferred commands should reach the goal exactly.
    for _ in range(10):
        y = np.random.rand(1).ravel()
        xp = learner.infer(tools.to_signal(y, s_channels))
        xp = np.array(tools.to_vector(xp, m_channels))
        self.assertTrue(np.allclose(scale_by_two(xp), y,
                                    rtol=1e-5, atol=1e-5))
def test_lwr1D_linear(self):
    """Simplest test possible (well, not quite, but close)."""
    f = lambda v: 2.0 * v
    m_channels = [learners.Channel('x', (0.0, 1.0))]
    s_channels = [learners.Channel('y', (0.0, 1.0))]
    cfg = {'m_channels': m_channels,
           's_channels': s_channels,
           'm_uniformize': True,
           'sigma': 0.1}
    for learner in [learners.LWLRLearner(cfg), learners.ESLWLRLearner(cfg)]:
        # Training phase: 10 random samples of the 1D linear map.
        for _ in range(10):
            x = np.random.rand(1)
            y = f(x)
            learner.update(tools.to_signal(x, m_channels),
                           tools.to_signal(y, s_channels))
        # Prediction phase: forward predictions should be exact.
        for _ in range(10):
            x = np.random.rand(1).ravel()
            expected = f(x)
            predicted = learner.predict(tools.to_signal(x, m_channels))
            predicted = tools.to_vector(predicted, s_channels)
            self.assertTrue(np.allclose(expected, predicted,
                                        rtol=1e-5, atol=1e-5))
def test_lwr_linear(self):
    """Test LWLR on random linear models of dimensions from 1 to 20.

    It should return exact results, give or take floating-point
    imprecisions.
    """
    for _ in range(20):
        dim_m = random.randint(1, 20)
        dim_s = random.randint(1, 5)
        f = random_linear(dim_m, dim_s)
        m_channels = [learners.Channel('x_{}'.format(i), (0.0, 1.0))
                      for i in range(dim_m)]
        s_channels = [learners.Channel('y_{}'.format(i), (0.0, 1.0))
                      for i in range(dim_s)]
        cfg = {'m_channels': m_channels,
               's_channels': s_channels,
               'm_uniformize': True,
               'options.maxiter': 500}
        learner = learners.OptimizeLearner(cfg)
        # Training phase: 4*dim_m random samples of the model.
        for _ in range(4 * dim_m):
            x = np.random.rand(dim_m)
            y = f(x)
            learner.update(tools.to_signal(x, m_channels),
                           tools.to_signal(y, s_channels))
        # Inference phase: inferred commands should reach the goals.
        for _ in range(10):
            x = np.random.rand(dim_m).ravel()
            y = f(x)
            xp = learner.infer(tools.to_signal(y, s_channels))
            xp = np.array(tools.to_vector(xp, m_channels))
            self.assertTrue(np.allclose(f(xp), y, rtol=1e-1, atol=1e-1))
def _execute(self, m_signal, meta=None):
    """Map a motor signal to the sensory signal (a + b, a * b), where
    a and b are the first two motor channel values."""
    m_vector = tools.to_vector(m_signal, self.m_channels)
    a, b = m_vector[0], m_vector[1]
    s_vector = (a + b, a * b)
    return tools.to_signal(s_vector, self.s_channels)
def _execute(self, m_signal, meta=None):
    """Return the sensory signal (sum, product) of the first two motor
    channel values of `m_signal`."""
    values = tools.to_vector(m_signal, self.m_channels)
    result = (values[0] + values[1], values[0] * values[1])
    return tools.to_signal(result, self.s_channels)