def hdf_read(filename, id_stream, boot_spec=None, read_extra=False, only_episodes=None):
    """
    Reads back observations from one stream of an HDF log file.

    Yields one scalar structured numpy array per logged observation,
    in file order.

    :param filename: path of the HDF file to read.
    :param id_stream: name of the stream group under ``/boot_olympics/streams``.
    :param boot_spec: spec used to build the observations dtype; if None,
        it is recovered from the stream group itself.
    :param read_extra: if True, also load the 'extra' entry for each
        observation; otherwise 'extra' is set to ``{}``.
    :param only_episodes: if given and non-empty, only observations whose
        episode id is contained in it are yielded.
    """
    f = tables.openFile(filename)
    # logger.info("opening file %r" % filename)
    try:
        # TODO: check that the table exists before accessing it
        stream_group = f.root.boot_olympics.streams._v_children[id_stream]
        table = stream_group.boot_stream
        extra = stream_group.extra

        if boot_spec is None:
            boot_spec = spec_from_group(stream_group)

        n = len(table)
        n_extra = len(extra)
        if n != n_extra:
            # The two tables should be parallel; if they are not, warn and
            # read only the rows for which both entries exist.
            msg = ('In stream %s:%s I see %d observations, but only %d extra.'
                   % (filename, id_stream, n, n_extra))
            logger.warn(msg)
            n = min(n, n_extra)

        dtype = get_observations_dtype(boot_spec)
        for i in range(n):
            row = table[i]
            id_episode = row['id_episode'].item()
            if only_episodes and id_episode not in only_episodes:
                continue
            observations = np.zeros((), dtype)
            for field in dtype.names:
                if field == 'extra':
                    continue
                # FIXME Strange strange
                observations[field].flat = row[field].flat
            observations['extra'] = load_extra(extra, i) if read_extra else {}
            yield observations
    finally:
        # Always close the file, even if the consumer abandons the generator.
        f.close()
def simulate_agent_robot(
    data_central,
    id_agent,
    id_robot,
    max_episode_len,
    num_episodes,
    cumulative,
    id_episodes=None,  # if None, just use the ID given by the world
    stateful=False,
    interval_print=None,
    write_extra=True,
):
    """
    Simulates episodes of the given agent interacting with the given
    robot, logging the observations to a newly created stream.

    If not cumulative, returns the list of the episodes IDs simulated,
    otherwise it returns all episodes.
    """
    # Reseed the generator (otherwise multiprocessing will use the same)
    np.random.seed()

    if id_episodes is not None:
        if len(id_episodes) != num_episodes:
            raise ValueError("Expected correct number of IDs.")
        # Work on a copy: the list is consumed below (pop) and we must
        # not mutate the caller's argument.
        id_episodes = list(id_episodes)

    # Instance agent object
    config = data_central.get_bo_config()
    agent = config.agents.instance(id_agent)  # @UndefinedVariable
    # Instance robot object
    robot = config.robots.instance(id_robot)  # @UndefinedVariable

    boot_spec = robot.get_spec()

    # If --stateful is passed, we try to load a previous state; only if a
    # state is actually found do we skip the fresh initialization.
    must_init = True
    if stateful:
        db = data_central.get_agent_state_db()
        if db.has_state(id_agent=id_agent, id_robot=id_robot):
            logger.info("Using previous state.")
            db.reload_state_for_agent(id_agent=id_agent, id_robot=id_robot,
                                      agent=agent)
            must_init = False
        else:
            logger.info("No previous state found.")
    if must_init:
        agent.init(boot_spec)

    # Build a unique stream id and the file it will be written to.
    ds = data_central.get_dir_structure()
    timestamp = unique_timestamp_string()
    timestamp = timestamp.replace("_", "")
    id_stream = "%s-%s-%s" % (id_robot, id_agent, timestamp)
    filename = ds.get_simlog_filename(id_robot=id_robot,
                                      id_agent=id_agent,
                                      id_stream=id_stream)
    logger.info("Creating stream %r\n in file %r" % (id_stream, filename))

    logs_format = LogsFormat.get_reader_for(filename)

    bk = Bookkeeping(
        data_central=data_central,
        id_robot=id_robot,
        num_episodes=num_episodes,
        cumulative=cumulative,
        interval_print=interval_print,
    )

    if bk.another_episode_todo():
        with logs_format.write_stream(filename=filename,
                                      id_stream=id_stream,
                                      boot_spec=boot_spec) as writer:
            while bk.another_episode_todo():
                if id_episodes is not None:
                    id_episode = id_episodes.pop(0)
                    logger.info("Simulating episode %s" % id_episode)
                else:
                    id_episode = None
                # NOTE(review): 100000 appears to be a cap on the number of
                # observations per episode — confirm against run_simulation.
                for observations in run_simulation(
                    id_robot, robot, id_agent, agent,
                    100000, max_episode_len,
                    id_episode=id_episode,
                ):
                    bk.observations(observations)
                    if write_extra:
                        extra = dict(robot_state=robot.get_state())
                    else:
                        extra = {}
                    writer.push_observations(observations=observations,
                                             extra=extra)
                bk.episode_done()
        logger.info("Peacefully done all episodes")
    else:
        logger.warn("No episodes to do?")

    logger.info("done")
    if cumulative:
        return bk.get_all_episodes()
    else:
        return bk.get_id_episodes()
''' Fast routines to yaml reading and writing. '''
from contracts import contract
from bootstrapping_olympics import logger
from contracts import describe_type
from pprint import pprint
from types import NoneType
from yaml import load, dump

try:
    # Prefer the C-accelerated YAML loader/dumper when PyYAML was built
    # with libyaml; fall back to the pure-Python one otherwise.
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError as e:
    logger.warn('Could not load C YAML reader. '
                'I can continue but everything will be slow. (%s)' % e)
    from yaml import Loader, Dumper


@contract(yaml_string='str')
def yaml_load(yaml_string):
    """ Deserializes a YAML document from the given string.

        On failure (other than KeyboardInterrupt), the offending string is
        saved to disk for post-mortem inspection and the exception is
        re-raised.
    """
    try:
        return load(yaml_string, Loader=Loader)
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except:
        logger.error('Could not deserialize YAML')
        # Save the string that failed to parse, then propagate the error.
        dump_emergency_string(yaml_string)
        raise


def dump_emergency_string(s):
    # Writes the string to a hard-coded path for debugging.
    emergency = '/home/andrea/yaml_load.yaml'  # XXX FIXME
    with open(emergency, 'w') as f: