def upgrade_to_dump_truck():
    """Migrate legacy S3/Glacier backup listings into the DumpTruck database.

    Runs the Glacier backend's own upgrade hook, then walks every key in both
    the S3 and Glacier backends, parses the backup metadata out of each key
    name, and inserts one record per key via ``dump_truck_insert_backup``.
    Inserts are best-effort: failures on individual records are skipped.

    Returns:
        None. Side effects only (database inserts).
    """
    # Local import: portable epoch conversion. The original used
    # strftime("%s"), a glibc-only extension that fails on other platforms;
    # time.mktime has the same local-time interpretation of the parsed value.
    import time

    glacier_backend = GlacierBackend()
    glacier_backend.upgrade_to_dump_truck()

    s3_backend = S3Backend()

    # Keys look like "<name>.<YYYYMMDDHHMMSS>.tgz[.enc]".
    regex_key = re.compile(r"(?P<backup_name>.+)\.(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")
    # Old regex for backward compatibility (files without the dot before the
    # date component).
    old_regex_key = re.compile(r"(?P<backup_name>.+)(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

    for generator, backend in [(s3_backend, "s3"), (glacier_backend, "glacier")]:
        # The hash depends only on the backend's config, so compute it once
        # per backend rather than once per key (the original recomputed it in
        # the inner loop through a redundant s3/glacier if-chain).
        backend_hash = hashlib.sha512(
            generator.conf.get("access_key")
            + generator.conf.get(generator.container_key)
        ).hexdigest()

        for key in generator.ls():
            match = regex_key.match(key)
            # Backward compatibility: fall back to the legacy naming scheme.
            if not match:
                match = old_regex_key.match(key)

            if match:
                filename = match.group("backup_name")
                is_enc = bool(match.group("is_enc"))
                # Parse the 14-digit timestamp; interpret as local time, same
                # as the old strftime("%s") behavior.
                backup_date = int(time.mktime(datetime.strptime(
                    match.group("date_component"), "%Y%m%d%H%M%S").timetuple()))
            else:
                # Key matches neither naming convention; store it as-is with
                # no parsed metadata.
                filename = key
                is_enc = False
                backup_date = 0

            new_backup = dict(
                backend=backend,
                is_deleted=0,
                backup_date=backup_date,
                tags=[],
                stored_filename=key,
                filename=filename,
                last_updated=int(time.mktime(datetime.utcnow().timetuple())),
                metadata=dict(is_enc=is_enc),
                size=0,
                backend_hash=backend_hash,
            )
            try:
                dump_truck_insert_backup(new_backup)
            except Exception:
                # Best-effort insert: skip records that fail (e.g. duplicates),
                # but no longer swallow KeyboardInterrupt/SystemExit the way
                # the original bare "except:" did.
                pass