Example #1
def upgrade_from_shelve(self):
    try:
        with glacier_shelve() as d:
            if "archives" in d:
                archives = d["archives"]
                for key, archive_id in list(archives.items()):
                    # Copy each legacy entry into the Inventory table, then drop it.
                    Inventory.create(filename=key, archive_id=archive_id)
                    del archives[key]
                d["archives"] = archives
    except Exception as exc:
        log.exception(exc)
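
The glacier_shelve helper is not shown on this page; a minimal sketch of what such a context manager might look like, assuming the legacy shelve lives at ~/.bakthat.db as the final example suggests (a hypothetical reconstruction, not the library's actual code):

import os
import shelve
from contextlib import contextmanager

@contextmanager
def glacier_shelve():
    # Open the legacy shelve database and guarantee it is closed afterwards.
    d = shelve.open(os.path.expanduser("~/.bakthat.db"))
    try:
        yield d
    finally:
        d.close()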
Example #2
def download(self, keyname, job_check=False):
    """Initiate a job, check its status, and download the archive if it's completed."""
    archive_id = Inventory.get_archive_id(keyname)
    if not archive_id:
        log.error("{0} not found!".format(keyname))
        # TODO: check whether the file exists on S3 instead?
        return

    job = None

    job_id = Jobs.get_job_id(keyname)
    log.debug("Job: {0}".format(job_id))

    if job_id:
        try:
            job = self.vault.get_job(job_id)
        except UnexpectedHTTPResponseError:  # Raised with a 404 when the job is no longer available
            self.delete_job(keyname)

    if not job:
        # No usable job yet: initiate a new archive retrieval and remember its id.
        job = self.vault.retrieve_archive(archive_id)
        job_id = job.id
        Jobs.update_job_id(keyname, job_id)

    log.info("Job {action}: {status_code} ({creation_date}/{completion_date})".format(**job.__dict__))

    if job.completed:
        log.info("Downloading...")
        encrypted_out = tempfile.TemporaryFile()

        # Boto internals: download the archive in 4 MiB chunks,
        # verifying hashes and retrying on transient network errors.
        chunk_size = 4 * 1024 * 1024
        num_chunks = int(math.ceil(job.archive_size / float(chunk_size)))
        job._download_to_fileob(encrypted_out, num_chunks, chunk_size,
                                True, (socket.error, httplib.IncompleteRead))

        encrypted_out.seek(0)
        return encrypted_out
    else:
        log.info("Not completed yet")
        if job_check:
            return job
        return
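
Since a Glacier retrieval job can take hours, a caller typically polls download until it returns a file object. A hypothetical polling wrapper (the wait_and_fetch name and the one-hour interval are assumptions, not part of bakthat; note the returned temporary file may still be encrypted, and decryption is out of scope here):

import time

def wait_and_fetch(backend, keyname, dest_path, poll_interval=3600):
    # Poll until the Glacier job completes, then write the archive to disk.
    while True:
        out = backend.download(keyname)
        if out is not None:
            with open(dest_path, "wb") as f:
                f.write(out.read())
            out.close()
            return dest_path
        time.sleep(poll_interval)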
Example #3
def delete(self, keyname):
    archive_id = Inventory.get_archive_id(keyname)
    if archive_id:
        # Delete the archive from the vault, then drop the local inventory row.
        self.vault.delete_archive(archive_id)
        archive_data = Inventory.get(Inventory.filename == keyname)
        archive_data.delete_instance()
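
The method performs two inventory lookups (get_archive_id, then get). Assuming Inventory is a peewee model, as the .select()/.get() calls elsewhere suggest, the local cleanup could also be a single delete query; a sketch:

def delete(self, keyname):
    archive_id = Inventory.get_archive_id(keyname)
    if archive_id:
        self.vault.delete_archive(archive_id)
        # One DELETE query instead of get() followed by delete_instance().
        Inventory.delete().where(Inventory.filename == keyname).execute()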
Example #4
def ls(self):
    return [ivt.filename for ivt in Inventory.select()]
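
A hypothetical variant that filters the listing with a shell-style glob, built on the same query:

import fnmatch

def ls_matching(pattern="*"):
    # Keep only inventory filenames matching the glob pattern.
    return [ivt.filename for ivt in Inventory.select()
            if fnmatch.fnmatch(ivt.filename, pattern)]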
Example #5
def upload(self, keyname, filename, **kwargs):
    # Upload with boto's concurrent multipart uploader and record the archive id.
    archive_id = self.vault.concurrent_create_archive_from_file(filename, keyname)
    Inventory.create(filename=keyname, archive_id=archive_id)
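
Putting the backend methods together, a round trip might look like the following (a hypothetical session; GlacierBackend construction and credentials come from the bakthat configuration and are omitted here):

backend = GlacierBackend()
backend.upload("mybackup.tgz", "/tmp/mybackup.tgz")
print(backend.ls())  # e.g. ["mybackup.tgz", ...]

out = backend.download("mybackup.tgz")  # None until the retrieval job completes
if out is not None:
    with open("restored.tgz", "wb") as f:
        f.write(out.read())

backend.delete("mybackup.tgz")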
Example #6
def upgrade_from_shelve():
    if os.path.isfile(os.path.expanduser("~/.bakthat.db")):
        glacier_backend = GlacierBackend()
        glacier_backend.upgrade_from_shelve()

        s3_backend = S3Backend()

        regex_key = re.compile(r"(?P<backup_name>.+)\.(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

        # Old regex kept for backward compatibility (files without a dot before the date component).
        old_regex_key = re.compile(r"(?P<backup_name>.+)(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

        for generator, backend in [(s3_backend.ls(), "s3"), ([ivt.filename for ivt in Inventory.select()], "glacier")]:
            for key in generator:
                match = regex_key.match(key)
                # Backward compatibility
                if not match:
                    match = old_regex_key.match(key)
                if match:
                    filename = match.group("backup_name")
                    is_enc = bool(match.group("is_enc"))
                    # NOTE: strftime("%s") is a glibc extension (epoch seconds, local timezone); see the note after this example.
                    backup_date = int(datetime.strptime(match.group("date_component"), "%Y%m%d%H%M%S").strftime("%s"))
                else:
                    filename = key
                    is_enc = False
                    backup_date = 0
                if backend == "s3":
                    backend_hash = hashlib.sha512(s3_backend.conf.get("access_key") +
                                                  s3_backend.conf.get(s3_backend.container_key)).hexdigest()
                elif backend == "glacier":
                    backend_hash = hashlib.sha512(glacier_backend.conf.get("access_key") +
                                                  glacier_backend.conf.get(glacier_backend.container_key)).hexdigest()
                new_backup = dict(backend=backend,
                                  is_deleted=0,
                                  backup_date=backup_date,
                                  tags="",
                                  stored_filename=key,
                                  filename=filename,
                                  last_updated=int(datetime.utcnow().strftime("%s")),
                                  metadata=dict(is_enc=is_enc),
                                  size=0,
                                  backend_hash=backend_hash)
                try:
                    Backups.upsert(**new_backup)
                except Exception as exc:
                    print(exc)
        os.remove(os.path.expanduser("~/.bakthat.db"))
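
One portability note on the date handling above: strftime("%s") is a glibc extension that is missing on some platforms and interprets the naive datetime in the local timezone. A portable equivalent, assuming the 14-digit stamp should be read as UTC:

import calendar
from datetime import datetime

def stamp_to_epoch(date_component):
    # Parse YYYYMMDDHHMMSS and convert to epoch seconds, treating the stamp as UTC.
    dt = datetime.strptime(date_component, "%Y%m%d%H%M%S")
    return calendar.timegm(dt.timetuple())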