Example #1
def load_result_file(fname, success_only=False):
    ''' Read all lines from the given file and parse them into Result
    structures (or subclasses of Result). If success_only is True, discard
    results that are ResultError instances. Return the kept Results in a
    dictionary keyed by fingerprint. This function does not filter results
    by age. '''
    assert os.path.isfile(fname)
    d = {}
    num_total = 0
    num_ignored = 0
    with DirectoryLock(os.path.dirname(fname)):
        with open(fname, 'rt') as fd:
            for line in fd:
                num_total += 1
                try:
                    r = Result.from_dict(json.loads(line.strip()))
                except json.decoder.JSONDecodeError:
                    log.warning('Could not decode result %s', line.strip())
                    r = None
                if r is None:
                    num_ignored += 1
                    continue
                if success_only and isinstance(r, ResultError):
                    continue
                fp = r.fingerprint
                if fp not in d:
                    d[fp] = []
                d[fp].append(r)
    num_kept = sum([len(d[fp]) for fp in d])
    log.debug('Keeping %d/%d read lines from %s', num_kept, num_total, fname)
    if num_ignored > 0:
        log.warning(
            'Had to ignore %d results due to not knowing how to '
            'parse them.', num_ignored)
    return d
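All of the examples on this page acquire DirectoryLock as a context manager around reads and writes of a shared data directory. Its implementation is not shown here; purely as an illustrative sketch, assuming an advisory fcntl.flock on a lockfile inside the directory (the class name and lockfile name below are placeholders, not the real class), it could look like this:

import fcntl
import os


class SimpleDirectoryLock:
    """Illustrative stand-in for DirectoryLock: holds an advisory exclusive
    lock on a lockfile inside the directory for the length of the with block.
    The name and lockfile location are assumptions, not the real code."""

    def __init__(self, dname):
        self._lockfile = os.path.join(dname, '.lockfile')
        self._fd = None

    def __enter__(self):
        # Open (creating if needed) the lockfile, then block until we hold
        # an exclusive advisory lock on it.
        self._fd = open(self._lockfile, 'w')
        fcntl.flock(self._fd, fcntl.LOCK_EX)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Release the lock and close the file descriptor.
        fcntl.flock(self._fd, fcntl.LOCK_UN)
        self._fd.close()
        self._fd = None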
Example #2
def write(self, output):
    if output == '/dev/stdout':
        log.info("Writing to stdout is not supported.")
        return
    log.info('Writing v3bw file to %s', output)
    # To avoid inconsistent reads, the bandwidth data is written to an
    # archive path, then atomically symlinked to 'latest.v3bw'.
    out_dir = os.path.dirname(output)
    out_link = os.path.join(out_dir, 'latest.v3bw')
    out_link_tmp = out_link + '.tmp'
    with DirectoryLock(out_dir):
        with open(output, 'wt') as fd:
            fd.write(str(self.header))
            for line in self.bw_lines:
                fd.write(str(line))
        output_basename = os.path.basename(output)
        # To atomically symlink a file, we need to create a temporary link,
        # then rename it to the final link name. (POSIX guarantees that
        # rename is atomic.)
        log.debug('Creating symlink {} -> {}.'.format(
            out_link_tmp, output_basename))
        os.symlink(output_basename, out_link_tmp)
        log.debug('Renaming symlink {} -> {} to {} -> {}.'.format(
            out_link_tmp, output_basename, out_link, output_basename))
        os.rename(out_link_tmp, out_link)
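The temporary-link-then-rename trick in the comment above is a general POSIX pattern. As an illustration only (this helper is not part of the example's codebase), it can be written as a standalone function:

import os


def atomic_symlink(target, link_path):
    """Point link_path at target without readers ever seeing a missing or
    half-updated link: create a temporary symlink, then rename() it into
    place (rename is atomic on POSIX within one filesystem)."""
    tmp_link = link_path + '.tmp'
    # Remove a stale temporary link left over from an earlier crash, if any.
    if os.path.islink(tmp_link):
        os.remove(tmp_link)
    os.symlink(target, tmp_link)
    os.rename(tmp_link, link_path)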
Example #3
def _delete_files(dname, files, dry_run=True):
    """Delete the files passed as argument."""
    assert os.path.isdir(dname)
    assert isinstance(files, types.GeneratorType)
    with DirectoryLock(dname):
        for fname in files:
            log.info('Deleting %s', fname)
            assert os.path.commonprefix([dname, fname]) == dname
            if not dry_run:
                os.remove(fname)
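Note the isinstance check: callers must pass a generator, not a list. As a hedged usage sketch (the directory path below is a placeholder), a generator expression over os.listdir produces suitable input and keeps the commonprefix safety check satisfied:

import os

datadir = '/path/to/datadir'  # placeholder directory
# Build a generator of absolute paths; passing a list would fail the
# isinstance(files, types.GeneratorType) assertion above.
txt_files = (os.path.join(datadir, name)
             for name in os.listdir(datadir)
             if name.endswith('.txt'))
_delete_files(datadir, txt_files, dry_run=True)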
Example #4
def write_result_to_datadir(result, datadir):
    ''' Can be called from any thread '''
    assert isinstance(result, Result)
    assert os.path.isdir(datadir)
    dt = datetime.utcfromtimestamp(result.time)
    ext = '.txt'
    result_fname = os.path.join(datadir, '{}{}'.format(dt.date(), ext))
    with DirectoryLock(datadir):
        log.debug('Writing a result to %s', result_fname)
        with open(result_fname, 'at') as fd:
            fd.write('{}\n'.format(str(result)))
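Results are appended to one file per UTC day, so they can be read back with load_result_file from Example #1. A hedged usage sketch (the data directory path is a placeholder):

import os
from datetime import datetime, timezone

datadir = '/path/to/datadir'  # placeholder directory
# write_result_to_datadir() appends to one file per UTC day, so today's
# results can be loaded back into a fingerprint-keyed dictionary with
# load_result_file() from Example #1.
today = datetime.now(timezone.utc).date()
fname = os.path.join(datadir, '{}.txt'.format(today))
if os.path.isfile(fname):
    results_by_fp = load_result_file(fname)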
Example #5
def _remove_rotten_files(datadir, rotten_days, dry_run=True):
    assert os.path.isdir(datadir)
    assert isinstance(rotten_days, int)
    # Hold the lock for basically the entire time just in case someone else
    # moves files between when we get the list of files and when we try to
    # delete them.
    with DirectoryLock(datadir):
        for fname in _get_older_files_than(datadir, rotten_days,
                                           ['.txt', '.txt.gz']):
            log.info('Deleting %s', fname)
            if not dry_run:
                os.remove(fname)
Example #6
def _compress_files(dname, files, dry_run=True):
    """Compress the files passed as argument."""
    assert os.path.isdir(dname)
    assert isinstance(files, types.GeneratorType)
    with DirectoryLock(dname):
        for fname in files:
            log.info('Compressing %s', fname)
            assert os.path.commonprefix([dname, fname]) == dname
            if dry_run:
                continue
            with open(fname, 'rt') as in_fd:
                out_fname = fname + '.gz'
                with gzip.open(out_fname, 'wt') as out_fd:
                    shutil.copyfileobj(in_fd, out_fd)
            os.remove(fname)
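Since the files are compressed in text mode ('wt'), the archives can be read back line by line with gzip.open in text mode. A small illustrative read-back (the filename is a placeholder):

import gzip

archived = '/path/to/datadir/2019-03-22.txt.gz'  # placeholder archive path
with gzip.open(archived, 'rt') as fd:
    for line in fd:
        # Each line is still one of the original text records.
        print(line.rstrip())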
Example #7
def _compress_stale_files(datadir, stale_days, dry_run=True):
    assert os.path.isdir(datadir)
    assert isinstance(stale_days, int)
    # Hold the lock for basically the entire time just in case someone else
    # moves files between when we get the list of files and when we try to
    # compress them.
    with DirectoryLock(datadir):
        for fname in _get_older_files_than(datadir, stale_days, ['.txt']):
            log.info('Compressing %s', fname)
            if dry_run:
                continue
            with open(fname, 'rt') as in_fd:
                out_fname = fname + '.gz'
                with gzip.open(out_fname, 'wt') as out_fd:
                    shutil.copyfileobj(in_fd, out_fd)
            os.remove(fname)