def nuislog_stream(data_central, stream, id_equiv, equiv, with_extra):
    """Convert one log stream through a nuisance-equivalent robot and re-log it.

    Reads observations from *stream*, converts them with *equiv*, stamps
    them with *id_equiv*, and writes them to a new stream for that robot.

    :param data_central: provides the output directory structure.
    :param stream: source stream exposing get_id_agents() and read().
    :param id_equiv: ID of the equivalent (nuisance-wrapped) robot.
    :param equiv: converter exposing get_spec() and
        convert_observations_array().
    :param with_extra: if True, propagate each observation's 'extra'
        payload; if False, write an empty extra dict.
    """
    ds = data_central.get_dir_structure()
    # Assumes the source stream has exactly one agent — TODO confirm callers.
    id_agent = list(stream.get_id_agents())[0]
    id_stream, filename = ds.get_simlog_filename_stream(id_robot=id_equiv,
                                                        id_agent=id_agent)
    logs_format = LogsFormat.get_reader_for(filename)
    with logs_format.write_stream(filename=filename,
                                  id_stream=id_stream,
                                  boot_spec=equiv.get_spec()) as writer:
        for obs1 in stream.read(read_extra=with_extra):
            obs2 = equiv.convert_observations_array(obs1)
            obs2["id_robot"] = id_equiv
            # BUG FIX: the 'extra' field was read unconditionally before the
            # with_extra check; with read_extra=False the field may be absent
            # and the read fails. Only touch it when we asked for it.
            extra = obs1["extra"].item() if with_extra else {}
            writer.push_observations(observations=obs2, extra=extra)
def nuislog_episodes(
    data_central, id_robot_original, id_episodes, id_equiv, obs_nuisances, cmd_nuisances, with_extras=True
):
    """Re-log selected episodes through observation/command nuisances.

    Wraps the original robot in an EquivRobot applying the given nuisances,
    reads each requested episode from the log index, converts the
    observations, and writes them to a new 'derived' stream for *id_equiv*.

    :param id_episodes: iterable of episode IDs to convert.
    :param obs_nuisances: nuisances applied to observations.
    :param cmd_nuisances: nuisances applied to commands.
    :param with_extras: if True, propagate each observation's 'extra'
        payload; if False, write empty extras.
    """
    equiv = EquivRobot(robot=id_robot_original,
                       obs_nuisance=list(obs_nuisances),
                       cmd_nuisance=list(cmd_nuisances))
    log_index = data_central.get_log_index()
    ds = data_central.get_dir_structure()
    id_stream, filename = ds.get_simlog_filename_stream(id_robot=id_equiv,
                                                        id_agent="derived")
    logs_format = LogsFormat.get_reader_for(filename)
    with logs_format.write_stream(filename=filename,
                                  id_stream=id_stream,
                                  boot_spec=equiv.get_spec()) as writer:
        for id_episode in id_episodes:
            for obs1 in log_index.read_robot_episode(id_robot_original,
                                                     id_episode,
                                                     read_extra=with_extras):
                obs2 = equiv.convert_observations_array(obs1)
                obs2["id_robot"] = id_equiv
                # BUG FIX: 'extra' was read unconditionally; when
                # read_extra=False the field may be absent and the read would
                # fail (mirrors the guard used by nuislog_stream).
                extra = obs1["extra"].item() if with_extras else {}
                writer.push_observations(observations=obs2, extra=extra)
def write_robot_observations(id_stream, filename, id_robot, robot, id_episode, id_environment):
    """Run the robot and record its observations to a log stream.

    Iterates observations from the robot (no sleeping between steps),
    normalizes them through ObsKeeper, and writes each to the log,
    attaching the robot pose (when available) as the 'extra' payload.

    :raises Exception: if not a single observation could be written.
    """
    logs_format = LogsFormat.get_reader_for(filename)
    # FIX: boot_spec was computed twice in the original; once is enough.
    boot_spec = robot.get_spec()
    keeper = ObsKeeper(boot_spec=boot_spec, id_robot=id_robot,
                       check_valid_values=False)
    nvalid = 0
    with logs_format.write_stream(filename=filename,
                                  id_stream=id_stream,
                                  boot_spec=boot_spec) as writer:
        for obs in iterate_robot_observations(robot, sleep=0):
            boot_observations = keeper.push(timestamp=obs.timestamp,
                                            observations=obs.observations,
                                            commands=obs.commands,
                                            commands_source=obs.commands_source,
                                            id_episode=id_episode,
                                            id_world=id_environment)
            extra = {}
            # Kept inside the loop to preserve original behavior (the
            # warnings registry dedupes repeats anyway).
            warnings.warn('Make this more general')
            if obs.robot_pose is not None:
                extra['robot_pose'] = obs.robot_pose.tolist()
                extra['odom'] = obs.robot_pose.tolist()
            writer.push_observations(observations=boot_observations,
                                     extra=extra)
            nvalid += 1
    if nvalid == 0:
        msg = 'No observations could be found in %s' % filename
        raise Exception(msg)
def simulate_agent_robot(
    data_central,
    id_agent,
    id_robot,
    max_episode_len,
    num_episodes,
    cumulative,
    id_episodes=None,  # if None, just use the ID given by the world
    stateful=False,
    interval_print=None,
    write_extra=True,
):
    """
    Simulate an agent driving a robot and record the episodes to a log.

    If not cumulative, returns the list of the episodes IDs simulated,
    otherwise it returns all episodes.

    :param stateful: if True, try to reload a previously-saved agent state
        instead of initializing the agent from scratch.
    :param write_extra: if True, attach the robot state to each observation
        as the 'extra' payload.
    :raises ValueError: if id_episodes is given but its length does not
        match num_episodes.
    """
    # Reseed the generator (otherwise multiprocessing will use the same)
    np.random.seed()

    if id_episodes is not None:
        if len(id_episodes) != num_episodes:
            raise ValueError("Expected correct number of IDs.")

    # Instance agent object
    config = data_central.get_bo_config()
    agent = config.agents.instance(id_agent)  # @UndefinedVariable
    # Instance robot object
    robot = config.robots.instance(id_robot)  # @UndefinedVariable

    boot_spec = robot.get_spec()

    # If --stateful is passed, we try to load a previous state.
    if stateful:
        db = data_central.get_agent_state_db()
        if db.has_state(id_agent=id_agent, id_robot=id_robot):
            logger.info("Using previous state.")
            db.reload_state_for_agent(id_agent=id_agent, id_robot=id_robot,
                                      agent=agent)
        else:
            logger.info("No previous state found.")
            agent.init(boot_spec)
    else:
        agent.init(boot_spec)

    ds = data_central.get_dir_structure()
    timestamp = unique_timestamp_string()
    timestamp = timestamp.replace("_", "")
    id_stream = "%s-%s-%s" % (id_robot, id_agent, timestamp)
    filename = ds.get_simlog_filename(id_robot=id_robot, id_agent=id_agent,
                                      id_stream=id_stream)
    logger.info("Creating stream %r\n in file %r" % (id_stream, filename))
    logs_format = LogsFormat.get_reader_for(filename)

    bk = Bookkeeping(
        data_central=data_central,
        id_robot=id_robot,
        num_episodes=num_episodes,
        cumulative=cumulative,
        interval_print=interval_print,
    )

    if bk.another_episode_todo():
        with logs_format.write_stream(filename=filename, id_stream=id_stream,
                                      boot_spec=boot_spec) as writer:
            while bk.another_episode_todo():
                if id_episodes is not None:
                    id_episode = id_episodes.pop(0)
                    logger.info("Simulating episode %s" % id_episode)
                else:
                    id_episode = None
                for observations in run_simulation(
                    id_robot, robot, id_agent, agent, 100000, max_episode_len,
                    id_episode=id_episode
                ):
                    bk.observations(observations)
                    if write_extra:
                        extra = dict(robot_state=robot.get_state())
                    else:
                        extra = {}
                    writer.push_observations(observations=observations,
                                             extra=extra)
                bk.episode_done()

        logger.info("Peacefully done all episodes")
    else:
        # FIX: logger.warn() is a deprecated alias of logger.warning().
        logger.warning("No episodes to do?")

    logger.info("done")
    if cumulative:
        return bk.get_all_episodes()
    else:
        return bk.get_id_episodes()
def check_logs_writing(id_agent, agent, id_robot, robot):
    """Round-trip test: write a short simulation log and read it back.

    Simulates a few observations for (agent, robot), writes them (plus an
    'extra' dict each) to a temporary log directory, indexes the directory
    twice (the second pass exercising the cached index), and asserts that
    the stream read back matches what was written, field by field.

    Returns silently (skips) if the agent does not support the robot's spec.
    """
    try:
        agent.init(robot.get_spec())
    except UnsupportedSpec:
        return

    root = tempfile.mkdtemp()
    ds = DirectoryStructure(root)
    id_stream = unique_timestamp_string()
    filename = ds.get_simlog_filename(id_agent, id_robot, id_stream)

    written = []
    written_extra = []

    logs_format = LogsFormat.get_reader_for(filename)
    # FIX: removed stray Python-2 debug statement "print run_simulation"
    # (a syntax error under Python 3 and a no-op leftover).
    with logs_format.write_stream(filename=filename, id_stream=id_stream,
                                  boot_spec=robot.get_spec()) as writer:
        for observations in run_simulation(id_robot=id_robot, robot=robot,
                                           id_agent=id_agent, agent=agent,
                                           max_observations=3,
                                           max_time=1000,
                                           check_valid_values=True):
            extra = {'random_number': np.random.randint(1)}
            writer.push_observations(observations, extra)
            written_extra.append(extra)
            written.append(observations)

    logdirs = ds.get_log_directories()
    index = LogIndex()
    for logdir in logdirs:
        index.index(logdir)

    # now use the cached version
    index = LogIndex()
    for logdir in logdirs:
        index.index(logdir)

    assert index.has_streams_for_robot(id_robot)
    streams = index.get_streams_for_robot(id_robot)
    assert len(streams) == 1
    stream = streams[0]
    assert isinstance(stream, BootStream)
    assert stream.get_spec() == robot.get_spec()

    read_back = []
    read_back_extra = []
    for observations2 in stream.read(read_extra=True):
        read_back_extra.append(observations2['extra'])
        read_back.append(observations2)

    if len(read_back) != len(written):
        raise Exception('Written %d, read back %d.' % (len(written),
                                                       len(read_back)))

    for i in range(len(read_back)):
        a = written[i]
        b = read_back[i]
        # BUG FIX: "or" returned only the first non-empty operand; the
        # intent is the union of field names from both records.
        fields = set(a.dtype.names) | set(b.dtype.names)
        fields.remove('extra')
        for field in fields:
            assert_equal(a[field], b[field])

    for i in range(len(read_back)):
        assert_equal(written_extra[i], read_back_extra[i])

    shutil.rmtree(root)
def task_servonav(
    data_central,
    id_agent,
    id_robot,
    max_episode_len,
    num_episodes,
    fail_if_not_working,
    id_episodes=None,  # if None, just use the ID given by the world
    cumulative=False,
    interval_print=None,
    interval_write=10,  # write every 10 frames
    num_episodes_with_robot_state=0,
    resolution=1,
):
    """ Returns the list of the episodes IDs simulated. """
    # Reseed the generator (otherwise multiprocessing will use the same)
    np.random.seed()

    if id_episodes is not None and len(id_episodes) != num_episodes:
        raise ValueError("Expected correct number of IDs.")

    # Instance robot object
    robot = data_central.get_bo_config().robots.instance(id_robot)
    # TODO: check that this is a Vehicles simulation
    boot_spec = robot.get_spec()

    # Load the trained agent and get its servo interface.
    agent, _ = load_agent_state(data_central, id_agent, id_robot,
                                reset_state=False, raise_if_no_state=True)
    # TODO: check servo
    servo_agent = agent.get_servo()
    id_agent_servo = "%s_servo" % id_agent

    ds = data_central.get_dir_structure()
    id_stream = "%s_%s_%s_servonav" % (id_robot, id_agent,
                                       unique_timestamp_string())
    filename = ds.get_simlog_filename(id_robot=id_robot, id_agent=id_agent,
                                      id_stream=id_stream)
    logger.info("Creating stream %r\n in file %r" % (id_stream, filename))
    logs_format = LogsFormat.get_reader_for(filename)

    bk = BookkeepingServo(data_central=data_central,
                          id_robot=id_robot,
                          id_agent=id_agent_servo,
                          num_episodes=num_episodes,
                          cumulative=cumulative,
                          interval_print=interval_print)

    if not bk.another_episode_todo():
        return

    with logs_format.write_stream(filename=filename, id_stream=id_stream,
                                  boot_spec=boot_spec) as writer:
        episodes_done = 0
        while bk.another_episode_todo():
            episode = robot.new_episode()
            # Either consume the caller-supplied IDs in order, or use the
            # ID assigned by the world.
            id_episode = (id_episodes.pop(0) if id_episodes is not None
                          else episode.id_episode)
            servonav_episode(
                id_robot=id_robot,
                robot=robot,
                id_servo_agent=id_agent_servo,
                servo_agent=servo_agent,
                writer=writer,
                id_episode=id_episode,
                resolution=resolution,
                max_episode_len=max_episode_len,
                # Only the first few episodes carry the full robot state.
                save_robot_state=(episodes_done < num_episodes_with_robot_state),
                interval_write=interval_write,
                fail_if_not_working=fail_if_not_working,
                max_tries=10000,
            )
            bk.episode_done()
            episodes_done += 1
def task_servo(data_central, id_agent, id_robot, max_episode_len, num_episodes,
               displacement,
               id_episodes=None,  # if None, just use the ID given by the world
               cumulative=False,
               interval_print=None,
               num_episodes_with_robot_state=0):
    ''' Returns the list of the episodes IDs simulated. '''
    # Reseed the generator (otherwise multiprocessing will use the same)
    np.random.seed()

    if id_episodes is not None and len(id_episodes) != num_episodes:
        raise ValueError('Expected correct number of IDs.')

    # Instance robot object
    robot = data_central.get_bo_config().robots.instance(id_robot)
    # TODO: check that this is a Vehicles simulation
    boot_spec = robot.get_spec()

    # Load the trained agent and initialize its servo interface.
    agent, _ = load_agent_state(data_central, id_agent, id_robot,
                                reset_state=False, raise_if_no_state=True)
    servo_agent = agent.get_servo()
    servo_agent.init(boot_spec)
    id_agent_servo = '%s_servo' % id_agent

    ds = data_central.get_dir_structure()
    id_stream = '%s_%s_%s_servo' % (id_robot, id_agent,
                                    unique_timestamp_string())
    filename = ds.get_simlog_filename(id_robot=id_robot, id_agent=id_agent,
                                      id_stream=id_stream)
    logger.info('Creating stream %r\n in file %r' % (id_stream, filename))
    logs_format = LogsFormat.get_reader_for(filename)

    bk = BookkeepingServo(data_central=data_central,
                          id_robot=id_robot,
                          id_agent=id_agent_servo,
                          num_episodes=num_episodes,
                          cumulative=cumulative,
                          interval_print=interval_print)

    if not bk.another_episode_todo():
        return

    with logs_format.write_stream(filename=filename, id_stream=id_stream,
                                  boot_spec=boot_spec) as writer:
        episodes_done = 0
        while bk.another_episode_todo():
            episode = robot.new_episode()
            # Consume caller-supplied IDs in order, falling back to the
            # ID assigned by the world.
            id_episode = (id_episodes.pop(0) if id_episodes is not None
                          else episode.id_episode)
            servoing_episode(id_robot=id_robot, robot=robot,
                             id_servo_agent=id_agent_servo,
                             servo_agent=servo_agent,
                             writer=writer,
                             id_episode=id_episode,
                             displacement=displacement,
                             max_episode_len=max_episode_len,
                             # Only the first few episodes carry robot state.
                             save_robot_state=(episodes_done < num_episodes_with_robot_state),
                             max_tries=10000)
            bk.episode_done()
            episodes_done += 1