def check_logs_formats(id_agent, agent, id_robot, robot):  # @UnusedVariable
    """Round-trip test for every registered log format.

    Simulates one short episode for (id_robot, id_agent), then for each
    format in ``LogsFormat.formats`` writes the recorded observations to
    a new stream and reads them back, checking that counters, timestamps,
    observations and commands survive the round trip unchanged.

    :param id_agent: agent id used for simulation and stream lookup.
    :param agent:    unused (kept for test-harness signature compatibility).
    :param id_robot: robot id used for simulation and stream lookup.
    :param robot:    robot instance; only ``robot.get_spec()`` is used.
    :raises Exception: on any mismatch between written and read data.
    """
    with create_tmp_dir() as root:
        os.mkdir(os.path.join(root, 'config'))
        data_central = DataCentral(root)

        # Simulate two episodes
        # NO! there is a bug in bag reading; the messages are read
        # in timestamp order; and for now different episodes can
        # have overlapping timestamps
        try:
            simulate(data_central, id_agent=id_agent, id_robot=id_robot,
                     max_episode_len=2,
                     num_episodes=1,  # changed from 2 (see above)
                     cumulative=False,
                     id_episodes=None,
                     stateful=False,
                     interval_print=None,
                     write_extra=True)
        except UnsupportedSpec:
            # This robot/agent combination cannot be simulated; nothing
            # to check for this pair.
            return

        log_index = data_central.get_log_index()
        log_index.reindex()

        streams = log_index.get_streams_for(id_robot, id_agent)
        if len(streams) != 1:
            msg = 'Expected to find 1 stream, not %d' % len(streams)
            raise Exception(msg)
        stream_orig = streams[0]

        for logs_format, interface in LogsFormat.formats.items():
            try:
                dirname = os.path.join(root, logs_format)
                safe_makedirs(dirname)
                filename = os.path.join(dirname, 'example.%s' % logs_format)
                written = []
                id_stream = 'example'

                # Copy every observation from the original stream into a
                # fresh stream in this format, remembering what we wrote.
                with interface.write_stream(filename, id_stream,
                                            robot.get_spec()) as writer:
                    for observations in stream_orig.read():
                        logger.info('Writing %s:%s (%s)' %
                                    (observations['id_episode'],
                                     observations['counter'],
                                     observations['timestamp']))
                        writer.push_observations(observations)
                        written.append(observations)

                # Read the stream back and compare entry-by-entry.
                count = 0
                for obs_read in interface.read_from_stream(filename,
                                                           id_stream):
                    logger.info('Reading %s:%s (%s)' %
                                (obs_read['id_episode'],
                                 obs_read['counter'],
                                 obs_read['timestamp']))
                    original = written[count]
                    try:
                        if obs_read['counter'] != original['counter']:
                            msg = ('Not even the counter is the same!'
                                   ' %s vs %s' % (obs_read['counter'],
                                                  original['counter']))
                            raise Exception(msg)
                        assert_allclose(obs_read['timestamp'],
                                        original['timestamp'])
                        assert_allclose(obs_read['observations'],
                                        original['observations'])
                        assert_allclose(obs_read['commands'],
                                        original['commands'])
                    except BaseException:
                        # Log context for the failing entry, then re-raise
                        # (was a bare except; behavior preserved).
                        logger.error('Error at count = %d' % count)
                        logger.error(' original: %s' % original)
                        logger.error(' obs_read: %s' % obs_read)
                        raise
                    count += 1

                if count != len(written):
                    msg = ('I wrote %d entries, but obtained %d.'
                           % (len(written), count))
                    raise Exception(msg)
            except BaseException:
                # Identify which format failed, then re-raise
                # (was a bare except; behavior preserved).
                logger.error('Could not pass tests for format %r.'
                             % logs_format)
                raise
def batch_process_manager(data_central, which_sets, command=None):
    """Run the batch sets selected by *which_sets* under compmake.

    Loads the batch configuration from the data-central config
    directories, expands *which_sets* against the available set names,
    creates a dedicated root (with symlinked config/logs) for the
    combination, and schedules each set's jobs.

    :param data_central: DataCentral instance giving root and config dirs.
    :param which_sets:   set name(s)/pattern(s) to run (expandable).
    :param command:      if given, run this compmake batch command and
                         return its result; otherwise open the
                         interactive compmake console and return 0.
    :raises UserError: if no requested set matches the configuration.
    """
    try:
        import compmake  # @UnusedImport
    except ImportError:  # was a bare except; only import failure is expected
        logger.error('Compmake not installed; multiprocessor '
                     'processes not available.')
        raise

    from compmake import (comp_prefix, use_filesystem,
                          compmake_console, batch_command)

    batch_config = BatchConfigMaster()
    configs = data_central.get_dir_structure().get_config_directories()
    for config in configs:
        batch_config.load(config)

    sets_available = batch_config.sets.keys()
    # logger.info('Available: %r' % sets_available)
    # logger.info('Sets: %r' % which_sets)
    which_sets_int = expand_string(which_sets, options=sets_available)

    if not which_sets_int:
        msg = 'Specified sets %r not found.' % which_sets
        msg += ' Available: %s' % sets_available
        raise UserError(msg)
    # logger.info('Expanded: %r' % which_sets)

    for x in which_sets_int:
        if x not in sets_available:
            msg = 'Set %r not available.' % x
            raise UserError(msg)

    # BUGFIX: build the combination id from the *expanded* set names
    # (the original indexed/joined the raw `which_sets` argument, which
    # would pick single characters if a string pattern was passed).
    if len(which_sets_int) == 1:
        combid = which_sets_int[0]
    else:
        combid = '-'.join(which_sets_int)

    # Create the new root
    root = data_central.root
    root_set = os.path.join(data_central.root, 'sets', combid)
    safe_makedirs(root_set)
    data_central_set = DataCentral(root_set)

    # add symbolic links to logs and config
    main_config = os.path.realpath(os.path.join(root, 'config'))
    set_config = os.path.join(root_set, 'config')
    safe_symlink(main_config, set_config)

    safe_makedirs(os.path.join(root_set, 'logs'))
    safe_symlink(os.path.join(root, 'logs'),
                 os.path.join(root_set, 'logs', 'original'))

    storage = data_central_set.get_dir_structure().get_storage_dir()
    compmake_storage = os.path.join(storage, 'compmake')
    logger.debug('Using storage directory %r.'
                 % friendly_path(compmake_storage))
    use_filesystem(compmake_storage)

    # BUGFIX: iterate the expanded names, and look up the spec by the
    # current `id_set` (the original read `batch_config.sets[x]`, where
    # `x` was the stale variable left over from the validation loop).
    for id_set in which_sets_int:
        if len(which_sets_int) > 1:
            comp_prefix(id_set)

        try:
            spec = batch_config.sets[id_set]
            batch_set(data_central_set, id_set, spec)
        except ConfToolsException:
            msg = ('Bad configuration for the set %r with spec\n %s'
                   % (id_set, pformat(spec)))
            logger.error(msg)
            raise

    if command:
        return batch_command(command)
    else:
        compmake_console()
        return 0