def cleanup(self):
    """Run one iteration of the cleanup loop.

    Builds the list of renderings on disk, sorts it newest first, then
    keeps popping the oldest entry off the end of the list and removing
    its files until total usage drops back below 80% of the configured
    maximum size (RENDERING_RESULT_MAX_SIZE_GB).
    """
    # Collect info for every rendering, skipping hidden files and
    # thumbnail files.
    candidates = [
        os.path.join(RENDERING_RESULT_PATH, name)
        for name in os.listdir(RENDERING_RESULT_PATH)
        if not (name.startswith('.')
                or name.endswith(render.THUMBNAIL_SUFFIX))
    ]
    files = [self.get_file_info(path) for path in candidates]

    # Total bytes consumed by renderings, and the 80% cleanup
    # threshold, in bytes.
    size = sum(entry['size'] for entry in files)
    threshold = 0.8 * RENDERING_RESULT_MAX_SIZE_GB * 1024 * 1024 * 1024

    # Nothing to clean while usage stays under the threshold.
    if size < threshold:
        return

    LOG.info("%s consumed for a %s threshold. Cleaning..."
             % (self.get_formatted_value(size),
                self.get_formatted_value(threshold)))

    # Newest entries first, so pop() always hands back the oldest one.
    files.sort(key=lambda entry: entry['time'], reverse=True)

    while size > threshold:
        if not files:
            LOG.error("No files to remove and still above threshold! "
                      "Something's wrong!")
            return

        entry = files.pop()
        LOG.debug("Considering file %s..." % entry['name'])

        job = MapRenderingJob.objects.get_by_filename(entry['name'])
        if job:
            # The file belongs to a known rendering job: delete all of
            # that job's files and account for the space reclaimed.
            LOG.debug("Found matching parent job #%d." % job.id)
            removed, saved = job.remove_all_files()
            size -= saved
            if removed:
                LOG.info("Removed %d files for job #%d (%s)."
                         % (removed, job.id,
                            self.get_formatted_details(saved, size,
                                                       threshold)))
        else:
            # No parent job found: this is an orphaned file, safe to
            # delete directly to get back some disk space.
            LOG.debug("No parent job found.")
            os.remove(entry['path'])
            size -= entry['size']
            LOG.info("Removed orphan file %s (%s)."
                     % (entry['name'],
                        self.get_formatted_details(entry['size'], size,
                                                   threshold)))
def cleanup(self):
    """Run one iteration of the cleanup loop.

    A sorted list of files from the renderings directory is first
    created, oldest files last. Files are then pop()-ed out of the
    list and removed until we're back below the size threshold.
    """
    # Gather file info for every rendering on disk, skipping hidden
    # files and thumbnails. NOTE: Python 2 map() returns a list here,
    # which the .sort()/.pop() calls below rely on.
    files = map(lambda f: self.get_file_info(f),
                [os.path.join(RENDERING_RESULT_PATH, f)
                 for f in os.listdir(RENDERING_RESULT_PATH)
                 if not (f.startswith('.')
                         or f.endswith(render.THUMBNAIL_SUFFIX))])

    # Compute the total size occupied by the renderings, and the actual
    # 80% threshold, in bytes.
    size = reduce(lambda x, y: x + y['size'], files, 0)
    threshold = 0.8 * RENDERING_RESULT_MAX_SIZE_GB * 1024 * 1024 * 1024

    # Stop here if we are below the threshold.
    if size < threshold:
        return

    LOG.info("%s consumed for a %s threshold. Cleaning..."
             % (self.get_formatted_value(size),
                self.get_formatted_value(threshold)))

    # Sort files by timestamp, oldest last, and start removing them by
    # pop()-ing the list (pop() takes from the end, i.e. the oldest).
    files.sort(lambda x, y: cmp(y['time'], x['time']))

    while size > threshold:
        if not len(files):
            LOG.error("No files to remove and still above threshold! "
                      "Something's wrong!")
            return

        f = files.pop()
        LOG.debug("Considering file %s..." % f['name'])

        job = MapRenderingJob.objects.get_by_filename(f['name'])
        if job:
            # The file belongs to a known job: remove every file that
            # job produced and account for the space it freed.
            LOG.debug("Found matching parent job #%d." % job.id)
            removed, saved = job.remove_all_files()
            size -= saved
            if removed:
                LOG.info("Removed %d files for job #%d (%s)."
                         % (removed, job.id,
                            self.get_formatted_details(saved, size,
                                                       threshold)))
        else:
            # If we didn't find a parent job, it means this is an
            # orphaned file, we can safely remove it to get back some
            # disk space.
            LOG.debug("No parent job found.")
            os.remove(f['path'])
            size -= f['size']
            LOG.info("Removed orphan file %s (%s)."
                     % (f['name'],
                        self.get_formatted_details(f['size'], size,
                                                   threshold)))
# NOTE(review): a duplicated mid-method fragment of cleanup()'s tail was
# pasted here; the complete method exists above, so only the script
# entry point is kept.
if __name__ == '__main__':
    # Refuse to start unless the renderings directory actually exists:
    # both the garbage collector and the daemon depend on it.
    if (not os.path.exists(RENDERING_RESULT_PATH)
            or not os.path.isdir(RENDERING_RESULT_PATH)):
        # Bug fix: the format string had a %s placeholder but no
        # argument, so the literal "%s" was logged instead of the path.
        LOG.error("%s does not exist or is not a directory! "
                  "Please use a valid RENDERING_RESULT_PATH."
                  % RENDERING_RESULT_PATH)
        sys.exit(1)

    cleaner = RenderingsGarbageCollector()
    daemon = MapOSMaticDaemon()

    # Start the cleanup thread first, then hand control to the daemon.
    cleaner.start()
    daemon.serve()
# NOTE(review): a duplicated mid-method fragment of cleanup()'s tail was
# pasted here; the complete method exists above, so only the script
# entry point is kept.
if __name__ == '__main__':
    # Refuse to start unless the renderings directory actually exists:
    # both the garbage collector and the daemon depend on it.
    if (not os.path.exists(RENDERING_RESULT_PATH)
            or not os.path.isdir(RENDERING_RESULT_PATH)):
        # Bug fix: the format string had a %s placeholder but no
        # argument, so the literal "%s" was logged instead of the path.
        LOG.error("%s does not exist or is not a directory! "
                  "Please use a valid RENDERING_RESULT_PATH."
                  % RENDERING_RESULT_PATH)
        sys.exit(1)

    cleaner = RenderingsGarbageCollector()
    daemon = MapOSMaticDaemon()

    # Start the cleanup thread first, then hand control to the daemon.
    cleaner.start()
    daemon.serve()