def dump_truck_upsert_backup(backup):
    """Normalize a backup record and upsert it into the DumpTruck "backups" table.

    :type backup: dict
    :param backup: Backup data; passed through prepare_backup() before storage.
    """
    normalized = prepare_backup(backup)
    dump_truck.upsert(normalized, "backups")
def upgrade_to_dump_truck(self):
    """Migrate legacy shelve-stored inventory entries into DumpTruck.

    Every (filename, archive_id) pair found under the "archives" key of the
    old glacier shelve is upserted into the "inventory" table, then removed
    from the shelve so the migration is incremental and safe to re-run.
    Any failure is logged and swallowed: migration is best-effort.
    """
    try:
        with glacier_shelve() as d:
            # Bug fix: guard BEFORE reading. The original did
            # `archives = d["archives"]` first, raising KeyError when no
            # legacy data existed (masked only by the broad except below).
            if "archives" in d:
                archives = d["archives"]
                # Iterate over a snapshot so deleting keys during the
                # loop cannot invalidate the iteration.
                for key, archive_id in list(archives.items()):
                    dump_truck.upsert({"filename": key,
                                       "archive_id": archive_id},
                                      "inventory")
                    del archives[key]
                d["archives"] = archives
    except Exception as exc:  # best-effort migration: log and continue
        log.exception(exc)
def download(self, keyname, job_check=False):
    """Initiate a Job, check its status, and download the archive if it's completed.

    :type keyname: str
    :param keyname: Key of the archive to retrieve.

    :type job_check: bool
    :param job_check: When True and the job is not yet complete, return the
        job object instead of None so the caller can poll it.

    :return: A temporary file containing the (still encrypted) archive when
        the job is complete; the boto job when job_check is set and the job
        is pending; None otherwise.
    """
    archive_id = self.get_archive_id(keyname)
    if not archive_id:
        # Bug fix: the original logged the literal "{0} not found !"
        # because the placeholder was never formatted with keyname.
        log.error("{0} not found !".format(keyname))
        # check if the file exist on S3 ?
        return
    job = None
    job_id = self.get_job_id(keyname)
    log.debug("Job: {0}".format(job_id))
    if job_id:
        try:
            job = self.vault.get_job(job_id)
        except UnexpectedHTTPResponseError:
            # Glacier returns a 404 once the job has expired; forget it
            # locally so a fresh retrieval job is created below.
            self.delete_job(keyname)
    if not job:
        job = self.vault.retrieve_archive(archive_id)
        job_id = job.id
        dump_truck.upsert({"filename": keyname, "job_id": job_id}, "jobs")
    log.info("Job {action}: {status_code} ({creation_date}/{completion_date})".format(**job.__dict__))
    if job.completed:
        log.info("Downloading...")
        encrypted_out = tempfile.TemporaryFile()
        # Boto related, download the file in 4 MiB chunks, retrying on
        # transient network errors.
        chunk_size = 4 * 1024 * 1024
        num_chunks = int(math.ceil(job.archive_size / float(chunk_size)))
        job._download_to_fileob(encrypted_out, num_chunks, chunk_size,
                                True, (socket.error, httplib.IncompleteRead))
        encrypted_out.seek(0)
        return encrypted_out
    else:
        log.info("Not completed yet")
        if job_check:
            return job
        return
def upload(self, keyname, filename):
    """Upload a local file to the Glacier vault and record it in the inventory.

    :type keyname: str
    :param keyname: Key under which the archive is stored.

    :type filename: str
    :param filename: Path of the local file to upload.
    """
    new_archive_id = self.vault.concurrent_create_archive_from_file(filename, keyname)
    entry = {"filename": keyname, "archive_id": new_archive_id}
    dump_truck.upsert(entry, "inventory")