def free(self, value):
        """Return previously allocated bytes to the /dev/shm pool.

        The tracked total is clamped at zero so over-freeing can never
        drive the counter negative.
        """
        with self.lock:
            remaining = self.allocated - value
            self.allocated = remaining if remaining > 0 else 0

        # Logging happens outside the lock to keep the critical section short.
        LOGGER.debug('Freed %s from /dev/shm (free: %s)',
                     human_readable_bytes(value),
                     human_readable_bytes(self.available))
    def allocate(self, value):
        """Allocate bytes from /dev/shm.
        """
        with self.lock:
            if self.allocated + value > self.max_allocation:
                raise Overflow(self, value)

            self.allocated += value

        LOGGER.debug('Allocated %s from /dev/shm (free: %s)',
                     human_readable_bytes(value),
                     human_readable_bytes(self.available))
    def __init__(self, max_allocation=None, lock=None):
        if max_allocation is None:
            shm_stat = statvfs('/dev/shm')
            self.max_allocation = shm_stat.f_frsize * shm_stat.f_ffree
        else:
            self.max_allocation = max_allocation

        self.allocated = 0
        self.lock = lock or Lock()

        LOGGER.debug('SHM allocator initialized: max_allocation=%s',
                     human_readable_bytes(self.max_allocation))
def main():
    """Main command line entry point.

    Compares every file under the local directory with its S3 counterpart
    and exits with a non-zero status when any comparison fails.
    """
    arguments = parse_arguments()

    configure_logging(arguments.debug)

    s3_etag_compute = S3EtagComputer(
        arguments.s3_chunk_size, ThreadPoolExecutor(arguments.md5_workers),
        shm.Allocator(arguments.max_shm_allocation))

    local_dir = arguments.local_dir
    # Strip only the *leading* directory prefix. The previous
    # str.replace() call removed every occurrence of the prefix, so a
    # path like '/data/data/x' under local_dir '/data' became '/x'
    # instead of '/data/x'.
    files = [
        filepath[len(local_dir):] if filepath.startswith(local_dir)
        else filepath
        for filepath in find_files(local_dir)
    ]
    file_count = len(files)

    LOGGER.info('Comparing %s files from %s with %s', file_count,
                local_dir, arguments.s3_url)
    LOGGER.debug('File comparison workers: %s',
                 arguments.file_comparison_workers)

    errors = 0
    total_bytes = 0

    # The context manager guarantees worker threads are joined even if an
    # unexpected exception escapes the result loop (the executor was
    # previously never shut down).
    with ThreadPoolExecutor(arguments.file_comparison_workers) as executor:
        jobs = {
            filepath: executor.submit(
                compare, filepath, local_dir, s3_etag_compute,
                arguments.s3_url.bucket, arguments.s3_url.prefix)
            for filepath in files
        }

        for filepath, job in jobs.items():
            LOGGER.debug('Waiting for comparison: %s', filepath)

            try:
                total_bytes += job.result()
            except ComparisonFailed as error:
                LOGGER.critical(str(error))
                errors += 1

    LOGGER.info('success=%s errors=%s files=%s bytes=%s', file_count - errors,
                errors, file_count, human_readable_bytes(total_bytes))

    if errors:
        # NOTE: -1 is reported to the shell as exit status 255; kept
        # unchanged for callers that test the exact code.
        sys.exit(-1)
 def __init__(self, allocator, add_value):
     """Record the failed request and build the human-readable message.

     :param allocator: The allocator that rejected the request; its
         ``available`` value is embedded in the message.
     :param add_value: The number of bytes that could not be allocated.
     """
     self.allocator = allocator
     self.add_value = add_value
     message = 'Failed to allocate %s from /dev/shm (free: %s)' % (
         human_readable_bytes(add_value),
         human_readable_bytes(allocator.available))
     super().__init__(message)