Example #1
def make_dds(config, id_symdds, outdir):
    dds = instance_dds(config, id_symdds)
    basename = os.path.join(outdir, id_symdds)
    filename_pickle = basename + '.discdds.pickle'
    logger.info('Writing to %r ' % friendly_path(filename_pickle))
    safe_pickle_dump(dds, filename_pickle)

    filename_yaml = basename + '.discdds.yaml'
    description = {
        'id': id_symdds,
        'desc': 'Synthetically generated from symbolic DDS %r.' % id_symdds,
        'code': ['diffeoplan.library.load_pickle',
                 {'file:pickle': id_symdds + '.discdds.pickle'}]
    }
    logger.info('Writing to %r ' % friendly_path(filename_yaml))
    with open(filename_yaml, 'w') as f:
        yaml.dump([description],
                  f,
                  default_flow_style=False,
                  explicit_start=True)
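For reference, with a hypothetical id_symdds of 'dds1', the yaml.dump call above would emit a document along these lines (PyYAML sorts keys alphabetically by default, and explicit_start=True adds the leading '---'):

---
- code:
  - diffeoplan.library.load_pickle
  - file:pickle: dds1.discdds.pickle
  desc: Synthetically generated from symbolic DDS 'dds1'.
  id: dds1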
Example #2
def index_directory(directory, ignore_cache=False, warn_if_longer=3):
    ''' Returns a hash filename -> list of streams. '''
    file2streams = {}
    # logger.debug('Indexing directory %r (ignore cache: %s).' % 
    #             (friendly_path(directory), ignore_cache))
    
    with warn_long_time(warn_if_longer, 'indexing directory %r' % 
                                        friendly_path(directory)):
        files = get_all_log_files(directory)
    
    # Shuffle the list so that multiple threads will index different files
    import random
    random.seed()
    random.shuffle(files)

    with warn_long_time(warn_if_longer, 'indexing %d files (use cache: %s)' % 
                        (len(files), not ignore_cache)):
        for filename in files:
            reader = LogsFormat.get_reader_for(filename)
            try:
                file2streams[filename] = \
                    reader.index_file_cached(filename, ignore_cache=ignore_cache)
                for stream in file2streams[filename]:
                    assert isinstance(stream, BootStream)
                if not file2streams[filename]:
                    logger.warning('No streams found in file %r.' % 
                                   friendly_path(filename))
            except Exception:  # XXX: narrow this to the reader's actual exceptions
                logger.error('Invalid data in file %r.' % friendly_path(filename))
                logger.error(traceback.format_exc())

    return file2streams
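A minimal usage sketch (the directory path is hypothetical):

file2streams = index_directory('/data/boot_logs')
for filename, streams in file2streams.items():
    print('%s: %d streams' % (friendly_path(filename), len(streams)))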
Example #3
def do_list_logs(index, display_logs=False, display_streams=False, display_episodes=False):
    print('Index contains %d bag files with boot data.' % 
                len(index.file2streams))
    print('In total, there are %d robots:' % len(index.robots2streams))

    for robot, streams in index.robots2streams.items():
        print('- robot %r has %d streams.' % (robot, len(streams)))
        if streams:
            total_length = 0
            total_obs = 0
            total_episodes = 0
            agents = set()
            for stream in streams:
                total_length += stream.get_length()
                total_obs += stream.get_num_observations()
                total_episodes += len(stream.get_id_episodes())
                agents.update(stream.get_id_agents())
            print('            spec: %s' % streams[0].get_spec())
            print('    total length: %.1f minutes' % 
                        (total_length / 60.0))
            print('  total episodes: %d' % (total_episodes))
            print('   total samples: %d' % (total_obs))
            print('          agents: %s' % list(agents))

        if display_logs:
            for stream in streams:
                print('   * length %5ds' % stream.get_length())
                print('     %s' % friendly_path(stream.get_filename()))

    if display_streams:
        for filename, streams in index.file2streams.items():
            if streams:
                print('In file %s:' % friendly_path(filename))

                for stream in streams:
                    print(' - there is stream: %s' % (stream))
            else:
                print('  No bootstrapping data found. ')

    if display_episodes:
        for robot, streams in index.robots2streams.items():
            print()
            print('Episodes for robot %r:' % robot)
            for stream in streams:
                print('- %s: stream %s' % (robot, stream))
                for episode in stream.get_episodes():
                    print('  contains episode: %s' % episode)
Example #4
def make_dds(config, id_symdds, outdir):
    dds = instance_dds(config, id_symdds)
    basename = os.path.join(outdir, id_symdds)
    filename_pickle = basename + '.discdds.pickle'
    logger.info('Writing to %r ' % friendly_path(filename_pickle))
    safe_pickle_dump(dds, filename_pickle)
            
    filename_yaml = basename + '.discdds.yaml'
    description = {
        'id': id_symdds,
        'desc': 'Synthetically generated from symbolic DDS %r.' % id_symdds,
        'code': ['diffeoplan.library.load_pickle',
                 {'file:pickle': id_symdds + '.discdds.pickle'}]
    }
    logger.info('Writing to %r ' % friendly_path(filename_yaml))
    with open(filename_yaml, 'w') as f:
        yaml.dump([description], f, default_flow_style=False, explicit_start=True)
Example #5
@contextmanager
def warn_long_time_reading(filename, max_wall_time=default_wait, logger=None):
    """ Warns if it takes a long time to do whatever
        you do in the context. """
    # Note: the @contextmanager decorator (from contextlib) is needed so the
    # 'yield' below delimits the managed block.
    # TODO: use timer for logging
    with warn_long_time2(max_wall_time, logger=logger) as more_info:
        more_info.write('Reading file %s of size %s' % 
                        (friendly_path(filename), friendly_filesize(filename)))
        yield
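Since the function yields once inside the 'with' block, it is used as a context manager; a usage sketch (the path is hypothetical):

with warn_long_time_reading('/data/logs/run0.raw'):
    with open('/data/logs/run0.raw', 'rb') as f:
        data = f.read()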
Example #6
    def file_is_done(self, filename_or_basename, desc=None):  # @UnusedVariable
        """
            Notifies that some file is done writing.
            Used to create a list of recent files that are done.
        """
        path = friendly_path(filename_or_basename)
        if os.path.exists(filename_or_basename):
            size = friendly_filesize(filename_or_basename)
            logger.info('Written %r (%r)' % (path, size))
        else:
            logger.info('Written %r' % path)
Example #7
def write_report(report, report_html, write_pickle=False, **kwargs): 
    
    logger.debug('Writing to %r.' % friendly_path(report_html))
#     if False:
#         # Note here they might overwrite each other
#         rd = os.path.join(os.path.dirname(report_html), 'images')
#     else:
    rd = os.path.splitext(report_html)[0]
    report.to_html(report_html,
                   write_pickle=write_pickle, resources_dir=rd, **kwargs)
    # TODO: save hdf format
    return report_html
Example #8
    def get_bo_config(self):
        if self.bo_config is None:
            self.bo_config = get_boot_config()
            
            dirs = self.dir_structure.get_config_directories()
            for dirname in dirs:
                if not os.path.exists(dirname):
                    msg = ('Warning, the config dir %r does not exist ' % 
                           friendly_path(dirname))
                    logger.info(msg)  
                else:
                    GlobalConfig.global_load_dir(dirname)
#                     self.bo_config.load(dirname)
        return self.bo_config
Example #9
    def index_file_cached(self, filename, ignore_cache=False):
        cache = '%s.index_cache' % filename
        if os.path.exists(cache) and not ignore_cache:  # TODO: mtime
            try:
                return safe_pickle_load(cache)
            except Exception as e:
                msg = 'Could not unpickle cache %r, deleting.' % friendly_path(cache)
                msg += '\n%s' % e
                logger.warning(msg)
                try:
                    os.unlink(cache)
                except OSError:
                    # Another process may have removed it already.
                    pass
        logger.debug('Indexing file %r' % friendly_path(filename))
        res = self.index_file(filename)
        for stream in res:
            assert isinstance(stream, BootStream)
            
        logger.debug('Now dumping file %r' % friendly_path(cache))
        with warn_long_time(1, 'dumping %r' % friendly_path(cache)):
            safe_pickle_dump(res, cache, protocol=2)

        return res
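The TODO above points at mtime-based cache invalidation; a minimal sketch of that check, assuming a cache written before the log file's last modification should be rebuilt (helper name hypothetical):

def cache_is_stale(filename, cache):
    # Rebuild if the log file was modified after the cache was written.
    return os.path.getmtime(cache) < os.path.getmtime(filename)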
Example #10
def get_all_log_files(directory):
    ''' Returns all log files in the directory, for all registered
        extensions. '''
    extensions = list(LogsFormat.formats.keys())

    files = []
    for extension in extensions:
        pattern = '*.%s' % extension
        files.extend(locate_files(directory, pattern))

    if not files:
        msg = ('No log files found in %r (extensions: %s).' % 
               (friendly_path(directory), extensions))
        logger.warning(msg)

    return files
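locate_files is provided by the surrounding codebase; a minimal stand-in with the assumed semantics (a recursive filename glob) could look like this:

import fnmatch
import os

def locate_files(directory, pattern):
    # Recursively yield the paths under 'directory' whose basename
    # matches the glob 'pattern'.
    for root, _, filenames in os.walk(directory):
        for f in fnmatch.filter(filenames, pattern):
            yield os.path.join(root, f)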
Example #11
def create_project_for_fcpx(dirname, pattern, project_filename=None, project_name=None, event_name=None):
    """ Creates an index Event for final cut pro X """
    if project_filename is None:
        project_filename = os.path.join(dirname, 'project.fcpxml')

    if project_name is None:
        project_name = os.path.basename(dirname) + '-project'
                        
    filenames = list(locate_files(dirname, pattern))
    videos = [get_info_for_file(f, project_filename) for f in filenames]
        
    xml = fcpx_get_xml_project(videos, project_name, event_name)

    with open(project_filename, 'w') as f:
        f.write(xml.strip())

    print('written %s' % friendly_path(project_filename))
Example #12
    def save(self, outdir):
        """ Creates outdir/<id_tc>.pickle and outdir/<>.yaml """
        filename_pickle = self.id_tc + '.tc.pickle'
        filename_yaml = self.id_tc + '.tc.yaml'
        description = {
            'id': self.id_tc,
            'desc': 'Automatically generated test case',
            'code': ['diffeoplan.library.load_pickle',
                     {'file:pickle': filename_pickle}]
        }

        filename_pickle = os.path.join(outdir, filename_pickle)
        filename_yaml = os.path.join(outdir, filename_yaml)

        logger.info('Writing to %r ' % friendly_path(filename_pickle))
        safe_pickle_dump(self, filename_pickle)

        write_entries([description], filename_yaml)
Example #13
def write_report(report,
                 report_html,
                 static_dir,
                 write_pickle=False,
                 **kwargs):
    logger.debug('Writing to %s ' % friendly_path(report_html))
    #     if False:
    #         # Note here they might overwrite each other
    #         rd = os.path.join(os.path.dirname(report_html), 'images')
    #     else:
    rd = os.path.splitext(report_html)[0]
    report.to_html(report_html,
                   write_pickle=write_pickle,
                   resources_dir=rd,
                   static_dir=static_dir,
                   **kwargs)

    # TODO: save hdf format
    return report_html
Example #14
    def save(self, outdir):
        """ Creates outdir/<id_tc>.pickle and outdir/<>.yaml """
        filename_pickle = self.id_tc + '.tc.pickle'
        filename_yaml = self.id_tc + '.tc.yaml'
        description = {
            'id': self.id_tc,
            'desc': 'Automatically generated test case',
            'code': ['diffeoplan.library.load_pickle',
                     {'file:pickle': filename_pickle}]
        }

        filename_pickle = os.path.join(outdir, filename_pickle)
        filename_yaml = os.path.join(outdir, filename_yaml)

        logger.info('Writing to %r ' % friendly_path(filename_pickle))
        safe_pickle_dump(self, filename_pickle)

        write_entries([description], filename_yaml)
Example #15
def summarize(rawlog):
    s = ""
    s += 'Resources:\n'
    for x in rawlog.get_resources():
        s += ' - %s\n' % friendly_path(x)

    s += 'Signals:\n'
    signals = rawlog.get_signals()
    names = sorted(signals.keys())
    
    for x in names:
        v = signals[x]
        t0, t1 = v.get_time_bounds()
        length = t1 - t0
        reftime = v.get_time_reference()
        s += '%-55s  %10s %4.2f %10.4f %10.4f %s\n' % (x, reftime, length, t0, t1, v)
        
    s += 'Tags: %s\n' % rawlog.get_tags()

    s += 'Annotations:\n'
    s += indent(pformat(rawlog.get_annotations()), ' | ')

    return s
Example #16
def batch_process_manager(data_central, which_sets, command=None):
    try:
        import compmake  # @UnusedImport
    except ImportError:
        logger.error('Compmake not installed; multiprocessor '
                     'processes not available.')
        raise

    from compmake import (comp_prefix, use_filesystem,
                          compmake_console, batch_command)

    batch_config = BatchConfigMaster()
    configs = data_central.get_dir_structure().get_config_directories()
    for config in configs:
        batch_config.load(config)

    sets_available = batch_config.sets.keys()

    # logger.info('Available: %r' % sets_available)
    # logger.info('Sets:      %r' % which_sets)
    which_sets_int = expand_string(which_sets, options=sets_available)

    if not which_sets_int:
        msg = 'Specified sets %r not found.' % which_sets
        msg += ' Available: %s' % sets_available
        raise UserError(msg)

    # logger.info('Expanded:  %r' % which_sets)

    for x in which_sets_int:
        if x not in sets_available:
            msg = 'Set %r not available.' % x
            raise UserError(msg)

    if len(which_sets_int) == 1:
        combid = which_sets_int[0]
    else:
        combid = '-'.join(which_sets_int)

    # Create the new root        
    root = data_central.root
    root_set = os.path.join(data_central.root, 'sets', combid)
    safe_makedirs(root_set)
    data_central_set = DataCentral(root_set)

    # add symbolic links to logs and config
    main_config = os.path.realpath(os.path.join(root, 'config'))
    set_config = os.path.join(root_set, 'config')
    safe_symlink(main_config, set_config) 

    safe_makedirs(os.path.join(root_set, 'logs'))
    safe_symlink(os.path.join(root, 'logs'),
                 os.path.join(root_set, 'logs', 'original'))

    storage = data_central_set.get_dir_structure().get_storage_dir()
    compmake_storage = os.path.join(storage, 'compmake')
    logger.debug('Using storage directory %r.' % friendly_path(compmake_storage))
    use_filesystem(compmake_storage)

    for id_set in which_sets_int:
        if len(which_sets_int) > 1:
            comp_prefix(id_set)

        try:
            spec = batch_config.sets[id_set]
            batch_set(data_central_set, id_set, spec)
        except ConfToolsException:
            msg = ('Bad configuration for the set %r with spec\n %s' % 
                   (id_set, pformat(spec)))
            logger.error(msg)
            raise

    if command:
        return batch_command(command)
    else:
        compmake_console()
        return 0
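A hypothetical invocation, assuming a configured set named 'test_all':

batch_process_manager(data_central, ['test_all'], command='make all')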