Example #1
def backup_data(storage, path=''):
    """ Backup Cassandra snapshot data directories/files.

    Args:
      storage: A str, the storage that is used for storing the backup.
      path: A str, the full backup filename path to use for cloud backup.
    Returns:
      The path to the backup file on success, None otherwise.
    """
    if storage not in StorageTypes().get_storage_types():
        logging.error("Storage '{0}' not supported.")
        return None

    logging.info("Starting new db backup.")
    clear_old_snapshots()

    if not create_snapshot():
        logging.error(
            "Failed to create Cassandra snapshots. Aborting backup...")
        return None

    files = get_cassandra_snapshot_file_names()
    if not files:
        logging.error(
            "No Cassandra files were found to tar up. Aborting backup...")
        return None

    tar_file = tar_backup_files(files)
    if not tar_file:
        logging.error(
            'Error while tarring up snapshot files. Aborting backup...')
        clear_old_snapshots()
        delete_local_backup_file(tar_file)
        move_secondary_backup()
        return None

    if storage == StorageTypes.LOCAL_FS:
        logging.info("Done with local db backup!")
        clear_old_snapshots()
        delete_secondary_backup()
        return tar_file
    elif storage == StorageTypes.GCS:
        return_value = path
        # Upload to GCS.
        if not gcs_helper.upload_to_bucket(path, tar_file):
            logging.error("Upload to GCS failed. Aborting backup...")
            move_secondary_backup()
            return_value = None
        else:
            logging.info("Done with db backup!")
            delete_secondary_backup()

        # Remove local backup file(s).
        clear_old_snapshots()
        delete_local_backup_file(tar_file)
        return return_value
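
The guard at the top of backup_data() assumes a StorageTypes class exposing LOCAL_FS and GCS members plus a get_storage_types() method. That class is not shown in these examples; below is a minimal sketch of what it might look like, with the member values made up for illustration (the real implementation may differ).

class StorageTypes(object):
  """ Enumerates the storage backends a backup can target (sketch only;
  the member values are placeholders, not the project's real ones). """
  LOCAL_FS = 'local-fs'
  GCS = 'gcs'

  def get_storage_types(self):
    """ Returns a list of all supported storage types. """
    return [self.LOCAL_FS, self.GCS]
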
Example #2
def backup_data(storage, path=''):
  """ Backup Cassandra snapshot data directories/files.

  Args:
    storage: A str, the storage that is used for storing the backup.
    path: A str, the full backup filename path to use for cloud backup.
  Returns:
    The path to the backup file on success, None otherwise.
  """
  if storage not in StorageTypes().get_storage_types():
    logging.error("Storage '{0}' not supported.")
    return None

  logging.info("Starting new db backup.")
  clear_old_snapshots()

  if not create_snapshot():
    logging.error("Failed to create Cassandra snapshots. Aborting backup...")
    return None

  files = get_cassandra_snapshot_file_names()
  if not files:
    logging.error("No Cassandra files were found to tar up. Aborting backup...")
    return None

  tar_file = tar_backup_files(files)
  if not tar_file:
    logging.error('Error while tarring up snapshot files. Aborting backup...')
    clear_old_snapshots()
    delete_local_backup_file(tar_file)
    move_secondary_backup()
    return None

  if storage == StorageTypes.LOCAL_FS:
    logging.info("Done with local db backup!")
    clear_old_snapshots()
    delete_secondary_backup()
    return tar_file
  elif storage == StorageTypes.GCS:
    return_value = path
    # Upload to GCS.
    if not gcs_helper.upload_to_bucket(path, tar_file):
      logging.error("Upload to GCS failed. Aborting backup...")
      move_secondary_backup()
      return_value = None
    else:
      logging.info("Done with db backup!")
      delete_secondary_backup()

    # Remove local backup file(s).
    clear_old_snapshots()
    delete_local_backup_file(tar_file)
    return return_value
Example #3
def backup_data(storage, path=''):
    """ Backup Zookeeper directories/files.

    Args:
      storage: A str, one of the StorageTypes class members.
      path: A str, the name of the backup file to be created.
    Returns:
      The path to the backup file on success, None otherwise.
    """
    if storage not in StorageTypes().get_storage_types():
        logging.error("Storage '{0}' not supported.")
        return None

    logging.info("Starting new zk backup.")
    dump_zk(TMP_ZOOKEEPER_BACKUP)

    tar_file = backup_recovery_helper.tar_backup_files(
        [TMP_ZOOKEEPER_BACKUP], ZOOKEEPER_BACKUP_FILE_LOCATION)
    if not tar_file:
        logging.error(
            'Error while tarring up Zookeeper files. Aborting backup...')
        backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
        backup_recovery_helper.delete_local_backup_file(tar_file)
        backup_recovery_helper.move_secondary_backup(tar_file)
        return None

    if storage == StorageTypes.LOCAL_FS:
        logging.info("Done with local zk backup!")
        backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
        backup_recovery_helper.\
          delete_secondary_backup(ZOOKEEPER_BACKUP_FILE_LOCATION)
        return tar_file
    elif storage == StorageTypes.GCS:
        return_value = path
        # Upload to GCS.
        if not gcs_helper.upload_to_bucket(path, tar_file):
            logging.error("Upload to GCS failed. Aborting backup...")
            backup_recovery_helper.move_secondary_backup(tar_file)
            return_value = None
        else:
            logging.info("Done with zk backup!")
            backup_recovery_helper.\
              delete_secondary_backup(ZOOKEEPER_BACKUP_FILE_LOCATION)

        # Remove local backup files.
        backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
        backup_recovery_helper.delete_local_backup_file(tar_file)
        return return_value
Example #4
def backup_data(storage, path=''):
  """ Backup Zookeeper directories/files.

  Args:
    storage: A str, one of the StorageTypes class members.
    path: A str, the name of the backup file to be created.
  Returns:
    The path to the backup file on success, None otherwise.
  """
  if storage not in StorageTypes().get_storage_types():
    logging.error("Storage '{0}' not supported.")
    return None

  logging.info("Starting new zk backup.")
  dump_zk(TMP_ZOOKEEPER_BACKUP)

  tar_file = backup_recovery_helper.tar_backup_files([TMP_ZOOKEEPER_BACKUP],
    ZOOKEEPER_BACKUP_FILE_LOCATION)
  if not tar_file:
    logging.error('Error while tarring up Zookeeper files. Aborting backup...')
    backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
    backup_recovery_helper.delete_local_backup_file(tar_file)
    backup_recovery_helper.move_secondary_backup(tar_file)
    return None

  if storage == StorageTypes.LOCAL_FS:
    logging.info("Done with local zk backup!")
    backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
    backup_recovery_helper.\
      delete_secondary_backup(ZOOKEEPER_BACKUP_FILE_LOCATION)
    return tar_file
  elif storage == StorageTypes.GCS:
    return_value = path
    # Upload to GCS.
    if not gcs_helper.upload_to_bucket(path, tar_file):
      logging.error("Upload to GCS failed. Aborting backup...")
      backup_recovery_helper.move_secondary_backup(tar_file)
      return_value = None
    else:
      logging.info("Done with zk backup!")
      backup_recovery_helper.\
        delete_secondary_backup(ZOOKEEPER_BACKUP_FILE_LOCATION)

    # Remove local backup files.
    backup_recovery_helper.remove(TMP_ZOOKEEPER_BACKUP)
    backup_recovery_helper.delete_local_backup_file(tar_file)
    return return_value
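
For context, here is a hedged sketch of how a caller might invoke this Zookeeper variant for each supported storage type. It assumes the module's own imports (logging) and definitions are in scope, and the GCS object path is a placeholder, not a real location.

# Hypothetical invocations; the GCS object path is a placeholder.
local_tar = backup_data(StorageTypes.LOCAL_FS)
if local_tar is None:
  logging.error("Local Zookeeper backup failed.")

gcs_result = backup_data(StorageTypes.GCS,
  path='gs://example-backups/zk/zk_backup.tar.gz')
if gcs_result is None:
  logging.error("Zookeeper backup to GCS failed.")
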
Example #5
def backup_data(storage, path=''):
    """ Backup Zookeeper directories/files.

    Args:
      storage: A str, one of the StorageTypes class members.
      path: A str, the name of the backup file to be created.
    Returns:
      The path to the backup file on success, None otherwise.
    """
    logging.info("Starting new zk backup.")

    # TODO: Tar up zookeeper data.
    files = get_zookeeper_snapshot_file_names()
    if not files:
        logging.error(
            "No Zookeeper files were found to tar up. Aborting backup...")
        return None

    tar_file = tar_backup_files(files)
    if not tar_file:
        logging.error(
            'Error while tarring up snapshot files. Aborting backup...')
        remove_local_backup_file(tar_file)
        return None

    if storage == StorageTypes.LOCAL_FS:
        logging.info("Done with local zk backup!")
        return tar_file
    elif storage == StorageTypes.GCS:
        return_value = path
        # Upload to GCS.
        if not gcs_helper.upload_to_bucket(path, tar_file):
            logging.error("Upload to GCS failed. Aborting backup...")
            return_value = None
        else:
            logging.info("Done with zk backup!")

        # Remove local backup file.
        remove_local_backup_file(tar_file)
        return return_value
    else:
        logging.error("Storage '{0}' not supported.")
        remove_local_backup_file()
        return None
Example #6
def backup_data(storage, path=''):
  """ Backup Zookeeper directories/files.

  Args:
    storage: A str, one of the StorageTypes class members.
    path: A str, the name of the backup file to be created.
  Returns:
    The path to the backup file on success, None otherwise.
  """
  logging.info("Starting new zk backup.")

  # TODO: Tar up zookeeper data.
  files = get_zookeeper_snapshot_file_names()
  if not files:
    logging.error("No Zookeeper files were found to tar up. Aborting backup...")
    return None

  tar_file = tar_backup_files(files)
  if not tar_file:
    logging.error('Error while tarring up snapshot files. Aborting backup...')
    remove_local_backup_file(tar_file)
    return None

  if storage == StorageTypes.LOCAL_FS:
    logging.info("Done with local zk backup!")
    return tar_file
  elif storage == StorageTypes.GCS:
    return_value = path
    # Upload to GCS.
    if not gcs_helper.upload_to_bucket(path, tar_file):
      logging.error("Upload to GCS failed. Aborting backup...")
      return_value = None
    else:
      logging.info("Done with zk backup!")

    # Remove local backup file.
    remove_local_backup_file(tar_file)
    return return_value
  else:
    logging.error("Storage '{0}' not supported.")
    remove_local_backup_file()
    return None
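
Examples #5 and #6 rely on a remove_local_backup_file() helper that is not shown. A plausible minimal implementation under that assumption (the real helper may differ):

import logging
import os

def remove_local_backup_file(local_file):
  """ Deletes the local backup tarball, tolerating a file that is
  already gone. """
  try:
    os.remove(local_file)
  except OSError:
    logging.warning("No local backup file '{0}' to remove.".format(local_file))
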
Example #7
def app_backup(storage, full_bucket_name=None):
    """ Saves the app source code at the backups location on the filesystem.

    Args:
      storage: A str, one of the StorageTypes class members.
      full_bucket_name: A str, the name of the backup file to upload to remote
        storage.
    Returns:
      True on success, False otherwise.
    """
    # Create app backups dir if it doesn't exist.
    if not makedirs(APP_BACKUP_DIR_LOCATION):
        logging.warning(
            "Dir '{0}' already exists. Skipping dir creation...".format(
                APP_BACKUP_DIR_LOCATION))

    for dir_path, _, filenames in os.walk(APP_DIR_LOCATION):
        for filename in filenames:
            # Copy source code tars to backups location.
            source = '{0}/{1}'.format(dir_path, filename)
            destination = '{0}/{1}'.format(APP_BACKUP_DIR_LOCATION, filename)
            try:
                shutil.copy(source, destination)
            except Exception:
                logging.error("Error while backing up '{0}'. ".format(source))
                delete_app_tars(APP_BACKUP_DIR_LOCATION)
                return False

            # Upload to GCS.
            if storage == StorageTypes.GCS:
                source = '{0}/{1}'.format(APP_DIR_LOCATION, filename)
                destination = '{0}/apps/{1}'.format(full_bucket_name, filename)
                logging.debug("Destination: {0}".format(destination))
                if not gcs_helper.upload_to_bucket(destination, source):
                    logging.error(
                        "Error while uploading '{0}' to GCS. ".format(source))
                    delete_app_tars(APP_BACKUP_DIR_LOCATION)
                    return False
    return True
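
Note that the makedirs() call above is treated as returning a boolean (False when the directory already exists) rather than raising, so it cannot be os.makedirs directly. A minimal sketch of such a wrapper, assuming that behavior:

import os

def makedirs(path):
  """ Creates the directory tree at path; returns False if it already
  exists instead of raising. """
  try:
    os.makedirs(path)
    return True
  except OSError:
    return False
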
Example #8
    def test_upload_to_bucket(self):
        # Suppress logging output.
        flexmock(logging).should_receive('error').and_return()

        # Test with missing local file.
        flexmock(backup_recovery_helper).should_receive('does_file_exist').\
          and_return(False)
        self.assertEquals(
            False, gcs_helper.upload_to_bucket(FakeGCSPath, 'some/file'))

        # Test with invalid GCS object name.
        flexmock(backup_recovery_helper).should_receive('does_file_exist').\
          and_return(True)
        flexmock(gcs_helper).should_receive('extract_gcs_tokens').\
          and_return(('', ''))
        self.assertEquals(
            False, gcs_helper.upload_to_bucket(FakeInvalidGCSPath,
                                               'some/file'))

        # Test with valid GCS object name.
        flexmock(gcs_helper).should_receive(
          'extract_gcs_tokens').with_args(FakeGCSPath).\
          and_return(('foo', 'bar/baz.tar.gz'))
        # Test with successful POST request.
        flexmock(gcs_helper).should_receive('gcs_post_request').\
          and_return(FakeResponse())
        flexmock(gcs_helper).should_receive('gcs_put_request').and_return()
        self.assertEquals(
            True, gcs_helper.upload_to_bucket(FakeGCSPath, 'some/file'))
        # Test with HTTPError from POST request.
        flexmock(gcs_helper).should_receive('gcs_post_request').\
          and_raise(requests.HTTPError)
        self.assertEquals(
            False, gcs_helper.upload_to_bucket(FakeGCSPath, 'some/file'))
        # Test with missing Location from POST response.
        flexmock(gcs_helper).should_receive('gcs_post_request').\
          and_return(FakeInvalidResponse())
        self.assertEquals(
            False, gcs_helper.upload_to_bucket(FakeGCSPath, 'some/file'))

        # Test with HTTPError from PUT request.
        flexmock(gcs_helper).should_receive('gcs_put_request').\
          and_raise(requests.HTTPError)
        self.assertEquals(
            False, gcs_helper.upload_to_bucket(FakeGCSPath, 'some/file'))
Example #9
def app_backup(storage, full_bucket_name=None):
  """ Saves the app source code at the backups location on the filesystem.

  Args:
    storage: A str, one of the StorageTypes class members.
    full_bucket_name: A str, the name of the backup file to upload to remote
      storage.
  Returns:
    True on success, False otherwise.
  """
  # Create app backups dir if it doesn't exist.
  if not makedirs(APP_BACKUP_DIR_LOCATION):
    logging.warning("Dir '{0}' already exists. Skipping dir creation...".
      format(APP_BACKUP_DIR_LOCATION))

  for dir_path, _, filenames in os.walk(APP_DIR_LOCATION):
    for filename in filenames:
      # Copy source code tars to backups location.
      source = '{0}/{1}'.format(dir_path, filename)
      destination = '{0}/{1}'.format(APP_BACKUP_DIR_LOCATION, filename)
      try:
        shutil.copy(source, destination)
      except Exception:
        logging.error("Error while backing up '{0}'. ".format(source))
        delete_app_tars(APP_BACKUP_DIR_LOCATION)
        return False

      # Upload to GCS.
      if storage == StorageTypes.GCS:
        source = '{0}/{1}'.format(APP_DIR_LOCATION, filename)
        destination = '{0}/apps/{1}'.format(full_bucket_name, filename)
        logging.debug("Destination: {0}".format(destination))
        if not gcs_helper.upload_to_bucket(destination, source):
          logging.error("Error while uploading '{0}' to GCS. ".format(source))
          delete_app_tars(APP_BACKUP_DIR_LOCATION)
          return False
  return True
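
On failure, app_backup() cleans up with delete_app_tars(), which is also not shown. A hedged sketch of what it likely does; the *.tar.gz pattern is an assumption:

import glob
import logging
import os

def delete_app_tars(location):
  """ Removes application tarballs previously copied under the backup
  location (assumes *.tar.gz filenames). """
  for tar_path in glob.glob('{0}/*.tar.gz'.format(location)):
    try:
      os.remove(tar_path)
    except OSError:
      logging.error("Unable to remove '{0}'.".format(tar_path))
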
Example #10
  def test_upload_to_bucket(self):
    # Suppress logging output.
    flexmock(logging).should_receive('error').and_return()

    # Test with missing local file.
    flexmock(backup_recovery_helper).should_receive('does_file_exist').\
      and_return(False)
    self.assertEquals(False, gcs_helper.upload_to_bucket(FakeGCSPath,
      'some/file'))

    # Test with invalid GCS object name.
    flexmock(backup_recovery_helper).should_receive('does_file_exist').\
      and_return(True)
    flexmock(gcs_helper).should_receive('extract_gcs_tokens').\
      and_return(('', ''))
    self.assertEquals(False, gcs_helper.upload_to_bucket(FakeInvalidGCSPath,
      'some/file'))

    # Test with valid GCS object name.
    flexmock(gcs_helper).should_receive(
      'extract_gcs_tokens').with_args(FakeGCSPath).\
      and_return(('foo', 'bar/baz.tar.gz'))
    # Test with successful POST request.
    flexmock(gcs_helper).should_receive('gcs_post_request').\
      and_return(FakeResponse())
    flexmock(gcs_helper).should_receive('gcs_put_request').and_return()
    self.assertEquals(True, gcs_helper.upload_to_bucket(FakeGCSPath,
      'some/file'))
    # Test with HTTPError from POST request.
    flexmock(gcs_helper).should_receive('gcs_post_request').\
      and_raise(requests.HTTPError)
    self.assertEquals(False, gcs_helper.upload_to_bucket(FakeGCSPath,
      'some/file'))
    # Test with missing Location from POST response.
    flexmock(gcs_helper).should_receive('gcs_post_request').\
      and_return(FakeInvalidResponse())
    self.assertEquals(False, gcs_helper.upload_to_bucket(FakeGCSPath,
      'some/file'))

    # Test with HTTPError from PUT request.
    flexmock(gcs_helper).should_receive('gcs_put_request').\
      and_raise(requests.HTTPError)
    self.assertEquals(False, gcs_helper.upload_to_bucket(FakeGCSPath,
      'some/file'))
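
The test above depends on FakeResponse and FakeInvalidResponse doubles defined elsewhere; judging by the "missing Location" case, the POST response is expected to carry a Location header pointing at the resumable upload. A hypothetical pair of doubles along those lines (the attribute layout is assumed, not taken from the real fixtures):

class FakeResponse(object):
  """ Stands in for a successful resumable-upload POST response. """
  def __init__(self):
    self.headers = {'Location': 'https://storage.example.com/upload/session'}

class FakeInvalidResponse(object):
  """ Stands in for a POST response that lacks the Location header. """
  def __init__(self):
    self.headers = {}
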
Example #11
def backup_data(storage, path=''):
  """ Backup Cassandra snapshot data directories/files.

  Args:
    storage: A str, the storage that is used for storing the backup.
    path: A str, the full backup filename path to use for cloud backup.
  Returns:
    The path to the backup file on success, None otherwise.
  """
  if storage not in StorageTypes().get_storage_types():
    logging.error("Storage '{0}' not supported.")
    return None

  logging.info("Starting new db backup.")
  clear_old_snapshots()

  if not create_snapshot():
    logging.error("Failed to create Cassandra snapshots. Aborting backup...")
    return None

  files = backup_recovery_helper.get_snapshot_paths('cassandra')
  if not files:
    logging.error("No Cassandra files were found to tar up. Aborting backup...")
    return None

  if not backup_recovery_helper.enough_disk_space('cassandra'):
    logging.error("There's not enough available space to create another db"
      "backup. Aborting...")
    return None

  tar_file = backup_recovery_helper.tar_backup_files(files,
    CASSANDRA_BACKUP_FILE_LOCATION)
  if not tar_file:
    logging.error('Error while tarring up snapshot files. Aborting backup...')
    clear_old_snapshots()
    backup_recovery_helper.delete_local_backup_file(tar_file)
    backup_recovery_helper.move_secondary_backup(tar_file)
    return None

  if storage == StorageTypes.LOCAL_FS:
    logging.info("Done with local db backup!")
    clear_old_snapshots()
    backup_recovery_helper.\
      delete_secondary_backup(CASSANDRA_BACKUP_FILE_LOCATION)
    return tar_file
  elif storage == StorageTypes.GCS:
    return_value = path
    # Upload to GCS.
    if not gcs_helper.upload_to_bucket(path, tar_file):
      logging.error("Upload to GCS failed. Aborting backup...")
      backup_recovery_helper.move_secondary_backup(tar_file)
      return_value = None
    else:
      logging.info("Done with db backup!")
      backup_recovery_helper.\
        delete_secondary_backup(CASSANDRA_BACKUP_FILE_LOCATION)

    # Remove local backup file(s).
    clear_old_snapshots()
    backup_recovery_helper.delete_local_backup_file(tar_file)
    return return_value
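
This last variant adds an enough_disk_space() check before tarring. The helper is not shown; below is a rough sketch of the check it presumably performs, assuming it compares free space on the backup volume against the combined size of the snapshot files. The backup volume path is a placeholder, and get_snapshot_paths() is assumed to live in the same module.

import os

def enough_disk_space(service):
  """ Returns True if the free space on the backup volume exceeds the
  combined size of the files that would be tarred up. """
  files = get_snapshot_paths(service)
  needed = sum(os.path.getsize(f) for f in files if os.path.isfile(f))
  stats = os.statvfs('/opt/appscale/backups')  # placeholder backup volume
  return stats.f_bavail * stats.f_frsize > needed
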