Example #1
0
  def run_defense_work(self, work_id):
    """Runs one defense work.

    Args:
      work_id: ID of the piece of work to run

    Returns:
      elapsed_time_sec, submission_id, batch_result - elapsed time, id of the
        submission and the result of analyzing the classification batch

    Raises:
      WorkerError: if error occurred during execution.
    """
    class_batch_id = (
        self.defense_work.work[work_id]['output_classification_batch_id'])
    class_batch = self.class_batches.read_batch_from_datastore(class_batch_id)
    adversarial_batch_id = class_batch['adversarial_batch_id']
    submission_id = class_batch['submission_id']
    cloud_result_path = class_batch['result_path']
    logging.info('Defense work piece: '
                 'adversarial_batch_id="%s" submission_id="%s"',
                 adversarial_batch_id, submission_id)
    if submission_id in self.blacklisted_submissions:
      raise WorkerError('Blacklisted submission')
    # get defense
    defense = DefenseSubmission(submission_id, self.submissions,
                                self.storage_bucket)
    defense.download()
    # prepare input - copy adversarial batch locally, starting from a clean
    # directory so stale files from a previous run can't leak into this one
    input_dir = os.path.join(LOCAL_INPUT_DIR, adversarial_batch_id)
    if os.path.exists(input_dir):
      sudo_remove_dirtree(input_dir)
    os.makedirs(input_dir)
    try:
      shell_call([
          'gsutil', '-m', 'cp',
          # typical location of adv batch:
          # testing-round/adversarial_images/ADVBATCH000/
          os.path.join('gs://', self.storage_bucket, self.round_name,
                       'adversarial_images', adversarial_batch_id, '*'),
          input_dir
      ])
      adv_images_files = os.listdir(input_dir)
      # an attack may have uploaded its batch as a single zip archive;
      # unpack it in place and remove the archive before counting images
      if (len(adv_images_files) == 1) and adv_images_files[0].endswith('.zip'):
        logging.info('Adversarial batch is in zip archive %s',
                     adv_images_files[0])
        shell_call([
            'unzip', os.path.join(input_dir, adv_images_files[0]),
            '-d', input_dir
        ])
        os.remove(os.path.join(input_dir, adv_images_files[0]))
        adv_images_files = os.listdir(input_dir)
      logging.info('%d adversarial images copied', len(adv_images_files))
    except (subprocess.CalledProcessError, IOError) as e:
      # NOTE: the original message was 'Can''t ...' - two adjacent string
      # literals that Python concatenates into the garbled "Cant ...".
      raise WorkerError('Can\'t copy adversarial batch locally', e)
    # prepare output directory
    if os.path.exists(LOCAL_OUTPUT_DIR):
      sudo_remove_dirtree(LOCAL_OUTPUT_DIR)
    os.mkdir(LOCAL_OUTPUT_DIR)
    output_filename = os.path.join(LOCAL_OUTPUT_DIR, 'result.csv')
    # run defense
    elapsed_time_sec = defense.run(input_dir, output_filename)
    # evaluate defense result
    batch_result = eval_lib.analyze_one_classification_result(
        storage_client=None,
        file_path=output_filename,
        adv_batch=self.adv_batches.data[adversarial_batch_id],
        dataset_batches=self.dataset_batches,
        dataset_meta=self.dataset_meta)
    # copy result of the defense into storage
    try:
      shell_call([
          'gsutil', 'cp', output_filename,
          os.path.join('gs://', self.storage_bucket, cloud_result_path)
      ])
    except subprocess.CalledProcessError as e:
      raise WorkerError('Can\'t copy result to Cloud Storage', e)
    return elapsed_time_sec, submission_id, batch_result
Example #2
0
  def run_defense_work(self, work_id):
    """Runs one defense work.

    Args:
      work_id: ID of the piece of work to run

    Returns:
      elapsed_time_sec, submission_id, batch_result - elapsed time, id of the
        submission and the result of analyzing the classification batch

    Raises:
      WorkerError: if error occurred during execution.
    """
    class_batch_id = (
        self.defense_work.work[work_id]['output_classification_batch_id'])
    class_batch = self.class_batches.read_batch_from_datastore(class_batch_id)
    adversarial_batch_id = class_batch['adversarial_batch_id']
    submission_id = class_batch['submission_id']
    cloud_result_path = class_batch['result_path']
    logging.info('Defense work piece: '
                 'adversarial_batch_id="%s" submission_id="%s"',
                 adversarial_batch_id, submission_id)
    if submission_id in self.blacklisted_submissions:
      raise WorkerError('Blacklisted submission')
    # get defense
    defense = DefenseSubmission(submission_id, self.submissions,
                                self.storage_bucket)
    defense.download()
    # prepare input - copy adversarial batch locally, starting from a clean
    # directory so stale files from a previous run can't leak into this one
    input_dir = os.path.join(LOCAL_INPUT_DIR, adversarial_batch_id)
    if os.path.exists(input_dir):
      sudo_remove_dirtree(input_dir)
    os.makedirs(input_dir)
    try:
      shell_call([
          'gsutil', '-m', 'cp',
          # typical location of adv batch:
          # testing-round/adversarial_images/ADVBATCH000/
          os.path.join('gs://', self.storage_bucket, self.round_name,
                       'adversarial_images', adversarial_batch_id, '*'),
          input_dir
      ])
      adv_images_files = os.listdir(input_dir)
      # an attack may have uploaded its batch as a single zip archive;
      # unpack it in place and remove the archive before counting images
      if (len(adv_images_files) == 1) and adv_images_files[0].endswith('.zip'):
        logging.info('Adversarial batch is in zip archive %s',
                     adv_images_files[0])
        shell_call([
            'unzip', os.path.join(input_dir, adv_images_files[0]),
            '-d', input_dir
        ])
        os.remove(os.path.join(input_dir, adv_images_files[0]))
        adv_images_files = os.listdir(input_dir)
      logging.info('%d adversarial images copied', len(adv_images_files))
    except (subprocess.CalledProcessError, IOError) as e:
      # NOTE: the original message was 'Can''t ...' - two adjacent string
      # literals that Python concatenates into the garbled "Cant ...".
      raise WorkerError('Can\'t copy adversarial batch locally', e)
    # prepare output directory
    if os.path.exists(LOCAL_OUTPUT_DIR):
      sudo_remove_dirtree(LOCAL_OUTPUT_DIR)
    os.mkdir(LOCAL_OUTPUT_DIR)
    output_filename = os.path.join(LOCAL_OUTPUT_DIR, 'result.csv')
    # run defense
    elapsed_time_sec = defense.run(input_dir, output_filename)
    # evaluate defense result
    batch_result = eval_lib.analyze_one_classification_result(
        storage_client=None,
        file_path=output_filename,
        adv_batch=self.adv_batches.data[adversarial_batch_id],
        dataset_batches=self.dataset_batches,
        dataset_meta=self.dataset_meta)
    # copy result of the defense into storage
    try:
      shell_call([
          'gsutil', 'cp', output_filename,
          os.path.join('gs://', self.storage_bucket, cloud_result_path)
      ])
    except subprocess.CalledProcessError as e:
      raise WorkerError('Can\'t copy result to Cloud Storage', e)
    return elapsed_time_sec, submission_id, batch_result