def get_notification_message(self, is_full, start_timestamp, end_timestamp,
                             success, blob_size, blob_path, error_msg):
    """Assemble the JSON message for a backup notification."""
    data = {
        "cloud": "azure",
        "hostname": self.backup_configuration.get_vm_name(),
        "instance-id": self.backup_configuration.get_system_uuid(),
        "state": "success" if success else "fail",
        "type": "fs",
        "method": "file",
        "level": "full" if is_full else "incr",
        "account-id": self.backup_configuration.get_subscription_id(),
        "customer-id": self.backup_configuration.cfg_file_value("DEFAULT.CID"),
        "system-id": self.backup_configuration.cfg_file_value("DEFAULT.SID"),
        "database-name": "",
        "database-id": "",
        "s3-path": self.backup_configuration.get_azure_storage_account_name()
                   + '.blob.core.windows.net' + blob_path,
        "timestamp-send": Timing.local_string_to_utc_epoch(Timing.now_localtime()),
        "timestamp-last-successful": Timing.local_string_to_utc_epoch(start_timestamp),
        "timestamp-bkp-begin": Timing.local_string_to_utc_epoch(start_timestamp),
        "timestamp-bkp-end": Timing.local_string_to_utc_epoch(end_timestamp),
        "backup-size": blob_size,
        "dbtype": "",
        "error-message": error_msg or '',
        "script-version": azfilebak.__version__
    }
    return json.dumps(data)
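# Usage sketch (hypothetical values; "agent" stands for an instance of this
# class, defined elsewhere in this file). The method returns a JSON string:
#
#   msg = agent.get_notification_message(
#       is_full=True,
#       start_timestamp=Timing.now_localtime(),
#       end_timestamp=Timing.now_localtime(),
#       success=True,
#       blob_size=1048576,
#       blob_path='/backups/example-blob.tar',   # hypothetical path
#       error_msg=None)
#   # json.loads(msg)["state"] == "success"
#   # json.loads(msg)["level"] == "full"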
def prune_old_backups(self, older_than, filesets):
    """Delete (prune) old backups from Azure storage."""
    minimum_deletable_age = datetime.timedelta(days=7)
    if older_than < minimum_deletable_age:
        logging.warning("Will not delete files younger than %s, ignoring",
                        minimum_deletable_age)
        return
    logging.warning("Deleting files older than %s", older_than)

    container_name = self.backup_configuration.azure_storage_container_name
    marker = None
    while True:
        results = self.backup_configuration.storage_client.list_blobs(
            container_name=container_name, marker=marker)
        for blob in results:
            parts = Naming.parse_blobname(blob.name)
            if parts is None:
                # Not a blob following the backup naming scheme; leave it alone.
                continue
            (fileset, _is_full, start_timestamp, _vmname) = parts
            if fileset is not None and fileset not in filesets:
                continue
            diff = Timing.time_diff(start_timestamp, Timing.now_localtime())
            if diff > older_than:
                logging.warning("Deleting %s", blob.name)
                self.backup_configuration.storage_client.delete_blob(
                    container_name=container_name, blob_name=blob.name)
            else:
                logging.warning("Keeping %s", blob.name)
        if results.next_marker:
            marker = results.next_marker
        else:
            break
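# The pruning loop above relies on Naming.parse_blobname() returning a
# (fileset, is_full, start_timestamp, vmname) tuple for blobs that follow
# the backup naming scheme, and None for anything else. A minimal sketch,
# assuming a hypothetical blob name produced by Naming.construct_blobname():
#
#   parts = Naming.parse_blobname('tmp_full_20180601_120000_myvm.tar')  # hypothetical name format
#   if parts is not None:
#       (fileset, is_full, start_timestamp, vmname) = parts
#
#   Naming.parse_blobname('unrelated-file.txt')  # -> None, so the blob is kept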
def backup_single_fileset(self, fileset, is_full, force, command=None, rate=None):
    """
    Back up a single fileset using the specified command.
    If no command is provided, it is looked up in the config file.
    """
    logging.info("Backup request for fileset: %s", fileset)

    # Determine whether the backup may run according to the schedule.
    start_timestamp = Timing.now_localtime()
    end_timestamp = None
    if not self.should_run_backup(fileset=fileset, is_full=is_full,
                                  force=force, start_timestamp=start_timestamp):
        logging.warning("Skipping backup of fileset %s", fileset)
        return

    # Final destination container.
    dest_container_name = self.backup_configuration.azure_storage_container_name
    vmname = self.backup_configuration.get_vm_name()

    # Name of the backup blob.
    blob_name = Naming.construct_blobname(fileset=fileset, is_full=is_full,
                                          start_timestamp=start_timestamp,
                                          vmname=vmname)
    blob_path = '/' + dest_container_name + '/' + blob_name

    # Command to run to execute the backup.
    if not command:
        command = self.backup_configuration.get_backup_command(fileset)

    storage_client = self.backup_configuration.storage_client
    try:
        # Run the backup command.
        proc = self.executable_connector.run_backup_command(command, rate)
        logging.info("Streaming backup to blob: %s in container: %s",
                     blob_name, dest_container_name)

        # Stream the backup command's stdout to blob storage.
        storage_client.create_blob_from_stream(
            container_name=dest_container_name,
            blob_name=blob_name,
            stream=proc.stdout,
            use_byte_buffer=True,
            max_connections=1)

        # Wait for the command to terminate and check its return code.
        # Return code 1 (files changed during backup) is ignored.
        retcode = proc.wait()
        if retcode == 1:
            logging.warning("ignoring tar command return code 1")
        elif retcode != 0:
            raise BackupException(
                "tar command failed with return code {}".format(retcode))
    except Exception as ex:
        logging.error("Failed to stream blob: %s", str(ex))
        end_timestamp = Timing.now_localtime()
        self.send_notification(is_full=is_full,
                               start_timestamp=start_timestamp,
                               end_timestamp=end_timestamp,
                               success=False,
                               blob_size=0,
                               blob_path=blob_path,
                               error_msg=str(ex))
        raise

    logging.info("Finished streaming blob: %s", blob_name)
    end_timestamp = Timing.now_localtime()

    # Retrieve the size of the finished blob.
    try:
        blob_props = storage_client.get_blob_properties(
            dest_container_name, blob_name)
    except Exception as ex:
        logging.error("Failed to get blob size: %s", str(ex))
        self.send_notification(is_full=is_full,
                               start_timestamp=start_timestamp,
                               end_timestamp=end_timestamp,
                               success=False,
                               blob_size=0,
                               blob_path=blob_path,
                               error_msg=str(ex))
        raise

    # Send the success notification.
    self.send_notification(is_full=is_full,
                           start_timestamp=start_timestamp,
                           end_timestamp=end_timestamp,
                           success=True,
                           blob_size=blob_props.properties.content_length,
                           blob_path=blob_path,
                           error_msg=None)

    # Return the name of the new blob.
    return blob_name
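# Usage sketch (hypothetical values; "agent" is an instance of this class).
# When no explicit command is given, the backup command configured for the
# fileset is looked up; "rate" is passed through to run_backup_command():
#
#   blob_name = agent.backup_single_fileset(
#       fileset='tmp',       # hypothetical fileset name
#       is_full=True,
#       force=False,
#       command=None,        # look the command up in the config file
#       rate=None)
#   # On success, blob_name is the name of the newly created blob;
#   # on failure a notification is sent and the exception is re-raised.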
def is_backup_allowed_now_localtime(self):
    """Check whether a backup is allowed at the current local time."""
    return self.is_backup_allowed_time(time=Timing.now_localtime())