Code example #1
 def test_time_diff(self):
     """Test time_diff."""
     self.assertEqual(
         Timing.time_diff("20180106_120000", "20180106_120010"),
         datetime.timedelta(0, 10))
     self.assertEqual(
         Timing.time_diff("20180106_110000", "20180106_120010"),
         datetime.timedelta(0, 3610))
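
The assertions above pin down the YYYYMMDD_HHMMSS timestamp format. A minimal sketch of what time_diff could look like on top of the standard library (the real Timing implementation may differ):

    import datetime

    def time_diff(start_timestamp, end_timestamp):
        """Return end minus start as a datetime.timedelta (sketch only)."""
        fmt = "%Y%m%d_%H%M%S"
        start = datetime.datetime.strptime(start_timestamp, fmt)
        end = datetime.datetime.strptime(end_timestamp, fmt)
        return end - start

    # time_diff("20180106_110000", "20180106_120010") == datetime.timedelta(0, 3610)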
Code example #2
 def get_notification_message(self, is_full, start_timestamp, end_timestamp,
                              success, blob_size, blob_path, error_msg):
     """Assemble JSON message for notification."""
     data = {
         "cloud": "azure",
         "hostname": self.backup_configuration.get_vm_name(),
         "instance-id": self.backup_configuration.get_system_uuid(),
         "state": {True: "success", False: "fail"}[success],
         "type": "fs",
         "method": "file",
         "level": {True: "full", False: "incr"}[is_full],
         "account-id": self.backup_configuration.get_subscription_id(),
         "customer-id": self.backup_configuration.cfg_file_value("DEFAULT.CID"),
         "system-id": self.backup_configuration.cfg_file_value("DEFAULT.SID"),
         "database-name": "",
         "database-id": "",
         "s3-path": (self.backup_configuration.get_azure_storage_account_name() +
                     '.blob.core.windows.net' + blob_path),
         "timestamp-send": Timing.local_string_to_utc_epoch(Timing.now_localtime()),
         "timestamp-last-successful": Timing.local_string_to_utc_epoch(start_timestamp),
         "timestamp-bkp-begin": Timing.local_string_to_utc_epoch(start_timestamp),
         "timestamp-bkp-end": Timing.local_string_to_utc_epoch(end_timestamp),
         "backup-size": blob_size,
         "dbtype": "",
         "error-message": error_msg or '',
         "script-version": azfilebak.__version__
     }
     return json.dumps(data)
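
A hypothetical call site, for illustration only; the agent object and its backup_configuration are assumed rather than shown in the excerpt:

    import json

    # Hypothetical invocation; argument values are placeholders.
    msg = agent.get_notification_message(
        is_full=True,
        start_timestamp="20180106_120000",
        end_timestamp="20180106_120500",
        success=True,
        blob_size=1048576,
        blob_path="/backups/example.tar",
        error_msg=None)

    # The method returns a JSON string; per the mapping above,
    # json.loads(msg)["state"] == "success" and json.loads(msg)["level"] == "full".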
Code example #3
 def latest_backup_timestamp(self, fileset, is_full):
     """Return the timestamp for the latest backup for a given fileset."""
     existing_blobs_dict = self.existing_backups_for_fileset(
         fileset=fileset, is_full=is_full)
     if not existing_blobs_dict.keys():
         return "19000101_000000"
     return Timing.sort(existing_blobs_dict.keys())[-1]
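
Hypothetical usage, assuming an agent object that owns the method above:

    # With no backups in storage for the fileset, the sentinel value is
    # returned, so the fileset looks as if it has never been backed up.
    ts = agent.latest_backup_timestamp(fileset="data", is_full=True)
    # ts == "19000101_000000" when existing_backups_for_fileset() finds nothing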
Code example #4
 def test_parse(self):
     """Test parse."""
     res = Timing.parse("20180605_215959")
     self.assertEqual(
         time.struct_time(
             (2018, 6, 5, 21, 59, 59, 1, 156, -1)),
         res)
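
The expected struct_time is exactly what time.strptime produces for this format, so a minimal parse could be written as follows (the real Timing.parse may differ):

    import time

    def parse(timestamp):
        """Parse a YYYYMMDD_HHMMSS string into a time.struct_time (sketch only)."""
        return time.strptime(timestamp, "%Y%m%d_%H%M%S")

    # parse("20180605_215959") -> tm_wday == 1 (Tuesday), tm_yday == 156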
Code example #5
 def test_sort(self):
     """Test sort."""
     self.assertEqual(
         Timing.sort(['20180110_120000', '20180105_120000', '20180101_120000']),
         ['20180101_120000', '20180105_120000', '20180110_120000'])
     self.assertEqual(
         Timing.sort(
             ['20180105_120000', '20180110_120000', '20180105_120000', '20180101_120000']),
         ['20180101_120000', '20180105_120000', '20180105_120000', '20180110_120000'])
     pick_start_date = lambda x: x["start_date"]
     self.assertEqual(
         Timing.sort(
             times=self.__recovery_sample_data(),
             selector=pick_start_date),
         [
             {'is_full': True,  'start_date': '20180101_010000'},
             {'is_full': True,  'start_date': '20180101_010000'},
             {'is_full': True,  'start_date': '20180101_010000'},
             {'is_full': False, 'start_date': '20180101_011000'},
             {'is_full': False, 'start_date': '20180101_012000'},
             {'is_full': False, 'start_date': '20180101_013000'},
             {'is_full': True,  'start_date': '20180101_014000'},
             {'is_full': True,  'start_date': '20180101_014000'},
             {'is_full': True,  'start_date': '20180101_014000'},
             {'is_full': False, 'start_date': '20180101_015000'},
             {'is_full': False, 'start_date': '20180101_020000'},
             {'is_full': False, 'start_date': '20180101_021000'},
             {'is_full': False, 'start_date': '20180101_021000'},
             {'is_full': True,  'start_date': '20180101_022000'},
             {'is_full': True,  'start_date': '20180101_022000'},
             {'is_full': True,  'start_date': '20180101_022000'},
             {'is_full': False, 'start_date': '20180101_023000'},
             {'is_full': False, 'start_date': '20180101_024000'},
             {'is_full': False, 'start_date': '20180101_025000'},
             {'is_full': True,  'start_date': '20180101_030000'},
             {'is_full': True,  'start_date': '20180101_030000'},
             {'is_full': True,  'start_date': '20180101_030000'},
             {'is_full': False, 'start_date': '20180101_031000'},
             {'is_full': False, 'start_date': '20180101_032000'},
             {'is_full': False, 'start_date': '20180101_032000'},
             {'is_full': False, 'start_date': '20180101_033000'}
         ])
     self.assertEqual(
         list(map(pick_start_date,
                  Timing.sort(times=self.__recovery_sample_data(),
                              selector=pick_start_date))),
         ['20180101_010000', '20180101_010000', '20180101_010000', '20180101_011000', '20180101_012000', '20180101_013000', '20180101_014000', '20180101_014000', '20180101_014000', '20180101_015000', '20180101_020000', '20180101_021000', '20180101_021000', '20180101_022000', '20180101_022000', '20180101_022000', '20180101_023000', '20180101_024000', '20180101_025000', '20180101_030000', '20180101_030000', '20180101_030000', '20180101_031000', '20180101_032000', '20180101_032000', '20180101_033000'])
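
The behaviour exercised above can be summarised by a small sketch; the real Timing.sort may differ in details, but the tests require a stable sort keyed by a selector, with the identity function as the default:

    def sort(times, selector=lambda x: x):
        """Order entries by the key the selector extracts (sketch only)."""
        return sorted(times, key=selector)

    # Plain YYYYMMDD_HHMMSS strings sort lexicographically, which is also
    # chronological order; dictionaries are ordered by their 'start_date'
    # field, and entries with equal keys keep their relative order.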
Code example #6
    def prune_old_backups(self, older_than, filesets):
        """
        Delete (prune) old backups from Azure storage.
        """
        minimum_deletable_age = datetime.timedelta(7, 0)
        logging.warn("Deleting files older than %s", older_than)
        if older_than < minimum_deletable_age:
            msg = "Will not delete files younger than {}, ignoring".format(
                minimum_deletable_age)
            logging.warn(msg)
            return

        marker = None
        while True:
            results = self.backup_configuration.storage_client.list_blobs(
                container_name=self.backup_configuration.azure_storage_container_name,
                marker=marker)
            for blob in results:
                parts = Naming.parse_blobname(blob.name)
                if parts is None:
                    continue

                (fileset, _is_full, start_timestamp, _vmname) = parts
                if fileset is not None and fileset not in filesets:
                    continue

                diff = Timing.time_diff(start_timestamp,
                                        Timing.now_localtime())
                delete = diff > older_than

                if delete:
                    logging.warn("Deleting %s", blob.name)
                    self.backup_configuration.storage_client.delete_blob(
                        container_name=self.backup_configuration.azure_storage_container_name,
                        blob_name=blob.name)
                else:
                    logging.warn("Keeping %s", blob.name)

            if results.next_marker:
                marker = results.next_marker
            else:
                break
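
Hypothetical invocation; the agent object and its Azure configuration are assumed:

    import datetime

    agent.prune_old_backups(older_than=datetime.timedelta(days=30),
                            filesets=["data", "logs"])
    # Requests below the 7-day safety floor are logged and ignored.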
Code example #7
    def should_run_tran_backup(now_time, force, latest_tran_backup_timestamp,
                               log_backup_interval_min):
        """Determine if a 'tran' backup can be performed according to backup window rules."""
        if force:
            return True

        age_of_latest_backup_in_storage = Timing.time_diff(
            latest_tran_backup_timestamp, now_time)
        min_interval_allows_backup = age_of_latest_backup_in_storage > log_backup_interval_min
        perform_tran_backup = min_interval_allows_backup
        return perform_tran_backup
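
Both sides of the interval comparison are datetime.timedelta values; a worked example with an assumed 15-minute interval:

    import datetime

    age_of_latest_backup = datetime.timedelta(minutes=30)      # from Timing.time_diff(...)
    log_backup_interval_min = datetime.timedelta(minutes=15)   # assumed configuration value
    should_backup = age_of_latest_backup > log_backup_interval_min
    # should_backup is True, so a 'tran' backup would run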
Code example #8
 def is_backup_allowed_time(self, time):
     """
     >>> sample_data = BusinessHours._BusinessHours__sample_data()
     >>> sample_hours = BusinessHours.parse_tag_str(sample_data)
     >>> some_tuesday_evening = "20180605_215959"
     >>> sample_hours.is_backup_allowed_time(some_tuesday_evening)
     True
     >>> some_tuesday_noon = "20180605_115500"
     >>> sample_hours.is_backup_allowed_time(some_tuesday_noon)
     False
     >>> some_sunday_noon = "20180610_115500"
     >>> sample_hours.is_backup_allowed_time(some_sunday_noon)
     True
     """
     # time.struct_time.tm_wday is range [0, 6], Monday is 0
     t = Timing.parse(time)
     return self.is_backup_allowed_dh(day=1 + t.tm_wday, hour=t.tm_hour)
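
For reference, the docstring's Tuesday-evening sample reduces to the following (day, hour) pair before the is_backup_allowed_dh lookup; the standard library is used here directly in place of Timing.parse:

    import time

    t = time.strptime("20180605_215959", "%Y%m%d_%H%M%S")
    day, hour = 1 + t.tm_wday, t.tm_hour
    # day == 2 (Tuesday, with Monday == 1), hour == 21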
Code example #9
    def should_run_full_backup(now_time, force, latest_full_backup_timestamp,
                               business_hours, db_backup_interval_min,
                               db_backup_interval_max):
        """
        Determine whether a backup should be executed.
        """
        allowed_by_business = business_hours.is_backup_allowed_time(now_time)
        age_of_latest_backup_in_storage = Timing.time_diff(
            latest_full_backup_timestamp, now_time)
        min_interval_allows_backup = age_of_latest_backup_in_storage > db_backup_interval_min
        max_interval_requires_backup = age_of_latest_backup_in_storage > db_backup_interval_max
        perform_full_backup = ((allowed_by_business and min_interval_allows_backup)
                               or max_interval_requires_backup or force)

        # logging.info("Full backup requested. Current time: {now}. Last backup in storage: {last}. Age of backup {age}".format(now=now_time, last=latest_full_backup_timestamp, age=age_of_latest_backup_in_storage))
        # logging.info("Backup requirements: min=\"{min}\" max=\"{max}\"".format(min=db_backup_interval_min,max=db_backup_interval_max))
        # logging.info("Forced by user: {force}. Backup allowed by business hours: {allowed_by_business}. min_interval_allows_backup={min_interval_allows_backup}. max_interval_requires_backup={max_interval_requires_backup}".format(force=force, allowed_by_business=allowed_by_business, min_interval_allows_backup=min_interval_allows_backup, max_interval_requires_backup=max_interval_requires_backup))
        # logging.info("Decision to backup: {perform_full_backup}.".format(perform_full_backup=perform_full_backup))

        return perform_full_backup
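
A worked example of the decision rule with assumed inputs: outside business hours, a backup older than the maximum interval still triggers a run:

    allowed_by_business = False          # outside the business-hours window
    min_interval_allows_backup = True
    max_interval_requires_backup = True  # latest backup is older than the max interval
    force = False
    perform_full_backup = ((allowed_by_business and min_interval_allows_backup)
                           or max_interval_requires_backup or force)
    # perform_full_backup is True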
Code example #10
    def backup_single_fileset(self,
                              fileset,
                              is_full,
                              force,
                              command=None,
                              rate=None):
        """
        Backup a single fileset using the specified command.
        If no command is provided, it will be looked up in the config file.
        """
        logging.info("Backup request for fileset: %s", fileset)

        # Determine if backup can run according to schedule
        start_timestamp = Timing.now_localtime()
        end_timestamp = None
        if not self.should_run_backup(fileset=fileset,
                                      is_full=is_full,
                                      force=force,
                                      start_timestamp=start_timestamp):
            logging.warn("Skipping backup of fileset %s", fileset)
            return

        # Final destination container
        dest_container_name = self.backup_configuration.azure_storage_container_name
        vmname = self.backup_configuration.get_vm_name()
        # Name of the backup blob
        blob_name = Naming.construct_blobname(fileset=fileset,
                                              is_full=is_full,
                                              start_timestamp=start_timestamp,
                                              vmname=vmname)

        # Command to run to execute the backup
        if not command:
            command = self.backup_configuration.get_backup_command(fileset)

        try:
            # Run the backup command
            proc = self.executable_connector.run_backup_command(command, rate)

            logging.info("Streaming backup to blob: %s in container: %s",
                         blob_name, dest_container_name)

            # Stream backup command stdout to the blob
            storage_client = self.backup_configuration.storage_client
            storage_client.create_blob_from_stream(
                container_name=dest_container_name,
                blob_name=blob_name,
                stream=proc.stdout,
                use_byte_buffer=True,
                max_connections=1)

            # Wait for the command to terminate
            retcode = proc.wait()

            # Check return code
            # Ignore return code 1 (files changed during backup)
            if retcode == 1:
                logging.warning("ignoring tar command return code 1")
            elif retcode != 0:
                raise BackupException(
                    "tar command failed with return code {}".format(retcode))

        except Exception as ex:
            logging.error("Failed to stream blob: %s", ex.message)
            end_timestamp = Timing.now_localtime()
            self.send_notification(is_full=is_full,
                                   start_timestamp=start_timestamp,
                                   end_timestamp=end_timestamp,
                                   success=False,
                                   blob_size=0,
                                   blob_path='/' + dest_container_name + '/' +
                                   blob_name,
                                   error_msg=str(ex))
            raise ex

        logging.info("Finished streaming blob: %s", blob_name)
        end_timestamp = Timing.now_localtime()

        # Get blob size
        try:
            blob_props = storage_client.get_blob_properties(
                dest_container_name, blob_name)
        except Exception as ex:
            logging.error("Failed to get blob size: %s", ex.message)

            self.send_notification(is_full=is_full,
                                   start_timestamp=start_timestamp,
                                   end_timestamp=end_timestamp,
                                   success=False,
                                   blob_size=0,
                                   blob_path='/' + dest_container_name + '/' +
                                   blob_name,
                                   error_msg=str(ex))
            raise ex

        # Send notification
        self.send_notification(is_full=is_full,
                               start_timestamp=start_timestamp,
                               end_timestamp=end_timestamp,
                               success=True,
                               blob_size=blob_props.properties.content_length,
                               blob_path='/' + dest_container_name + '/' +
                               blob_name,
                               error_msg=None)

        # Return name of new blob
        return blob_name
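
Hypothetical call; the agent wiring (configuration, storage client, executable connector) is assumed to be set up elsewhere:

    blob_name = agent.backup_single_fileset(fileset="data",
                                            is_full=True,
                                            force=False)
    # Returns the name of the new blob, or None when the scheduling
    # rules decide to skip the backup.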
Code example #11
 def is_backup_allowed_now_localtime(self):
     """Check whether a backup is allowed at the current local time."""
     return self.is_backup_allowed_time(time=Timing.now_localtime())