def upgrade_from_shelve():
    """Migrate backup metadata out of the legacy ``~/.bakthat.db`` shelve.

    When the shelve file exists, this upgrades the Glacier backend's own
    shelve data, then walks every stored key on both the S3 and Glacier
    backends, parses each key to recover the original filename, the
    encryption flag and the backup date, and upserts one ``Backups`` row
    per key.  Finally the shelve file is deleted.  No-op when the shelve
    file is absent.
    """
    # Local imports keep this one-shot migration helper self-contained
    # (the module's top-level import block may not include these).
    import calendar
    import time

    if not os.path.isfile(os.path.expanduser("~/.bakthat.db")):
        return

    glacier_backend = GlacierBackend()
    glacier_backend.upgrade_from_shelve()

    s3_backend = S3Backend()

    regex_key = re.compile(r"(?P<backup_name>.+)\.(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

    # Old regex kept for backward compatibility (keys written without a
    # dot before the date component).
    old_regex_key = re.compile(r"(?P<backup_name>.+)(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

    for generator, backend in [(s3_backend.ls(), "s3"),
                               ([ivt.filename for ivt in Inventory.select()], "glacier")]:
        # backend_hash is invariant for a given backend, so compute the
        # SHA-512 once per backend instead of once per key.
        if backend == "s3":
            backend_hash = hashlib.sha512(s3_backend.conf.get("access_key") +
                                          s3_backend.conf.get(s3_backend.container_key)).hexdigest()
        else:  # "glacier"
            backend_hash = hashlib.sha512(glacier_backend.conf.get("access_key") +
                                          glacier_backend.conf.get(glacier_backend.container_key)).hexdigest()

        for key in generator:
            match = regex_key.match(key)

            # Backward compatibility with the old key format.
            if not match:
                match = old_regex_key.match(key)

            if match:
                filename = match.group("backup_name")
                is_enc = bool(match.group("is_enc"))
                # time.mktime interprets the naive datetime as local time,
                # matching what the non-portable strftime("%s") did on
                # glibc platforms, but it works everywhere.
                parsed = datetime.strptime(match.group("date_component"), "%Y%m%d%H%M%S")
                backup_date = int(time.mktime(parsed.timetuple()))
            else:
                # Unparseable key: keep the entry with placeholder metadata.
                filename = key
                is_enc = False
                backup_date = 0

            new_backup = dict(backend=backend,
                              is_deleted=0,
                              backup_date=backup_date,
                              tags="",
                              stored_filename=key,
                              filename=filename,
                              # utcnow() is a naive UTC datetime, so convert
                              # it with calendar.timegm (UTC-based); the old
                              # strftime("%s") wrongly treated it as local
                              # time, skewing the epoch by the UTC offset.
                              last_updated=int(calendar.timegm(datetime.utcnow().timetuple())),
                              metadata=dict(is_enc=is_enc),
                              size=0,
                              backend_hash=backend_hash)
            try:
                Backups.upsert(**new_backup)
            except Exception as exc:  # "except E, e:" is Python-2-only syntax
                print(exc)

    os.remove(os.path.expanduser("~/.bakthat.db"))