コード例 #1
0
  def DoTestAuthorize(self, specified_project=None):
    """Runs 'kms authorize' twice against a key with no prior authorization.

    Verifies that the first invocation reports a newly created authorization
    and that the second reports the project was already authorized.

    Args:
      specified_project: If given, passed to the command via the '-p' flag;
          otherwise gsutil falls back to the default project.
    """
    # Randomly pick 1 of 1000 key names.
    key_name = testcase.KmsTestingResources.MUTABLE_KEY_NAME_TEMPLATE % (
        randint(0, 9), randint(0, 9), randint(0, 9))
    # Make sure the key with that name has been created.
    key_fqn = self.kms_api.CreateCryptoKey(self.keyring_fqn, key_name)
    # The key may have already been created and used in a previous test
    # invocation; make sure it doesn't contain the IAM policy binding that
    # allows our project to encrypt/decrypt with it.
    key_policy = self.kms_api.GetKeyIamPolicy(key_fqn)
    while key_policy.bindings:
      key_policy.bindings.pop()
    self.kms_api.SetKeyIamPolicy(key_fqn, key_policy)
    # Set up the authorize command tokens.
    authorize_cmd = ['kms', 'authorize', '-k', key_fqn]
    if specified_project:
      authorize_cmd.extend(['-p', specified_project])

    stdout1 = self.RunGsUtil(authorize_cmd, return_stdout=True)
    stdout2 = self.RunGsUtil(authorize_cmd, return_stdout=True)

    # First run performs the authorization; second run is expected to be a
    # no-op that reports the pre-existing authorization.
    self.assertIn(
        'Authorized project %s to encrypt and decrypt with key:\n%s' %
        (PopulateProjectId(None), key_fqn), stdout1)
    self.assertIn(
        ('Project %s was already authorized to encrypt and decrypt with '
         'key:\n%s.' % (PopulateProjectId(None), key_fqn)), stdout2)
コード例 #2
0
 def test_create_with_k_flag_p_flag_not_authorized(self):
     """Tests that 'mb -k -p' fails and hints at authorization when unauthorized."""
     temp_name = self.MakeTempName('bucket')
     bucket_uri = boto.storage_uri('gs://%s' % temp_name.lower(),
                                   suppress_consec_slashes=False)
     key = self.GetKey()
     mb_cmd = [
         'mb', '-l', testcase.KmsTestingResources.KEYRING_LOCATION,
         '-k', key,
         '-p', PopulateProjectId(),
         suri(bucket_uri),
     ]
     # Creation should fail because the project's service agent has not been
     # authorized to use the key; the error should tell the user how to fix it.
     stderr = self.RunGsUtil(mb_cmd, return_stderr=True, expected_status=1)
     self.assertIn('To authorize, run:', stderr)
     self.assertIn('-p %s' % PopulateProjectId(), stderr)
コード例 #3
0
ファイル: test.py プロジェクト: unhooked/gsutil
def CreateTestProcesses(parallel_tests,
                        test_index,
                        process_list,
                        process_done,
                        max_parallel_tests,
                        root_coverage_file=None):
    """Creates test processes to run tests in parallel.

  Args:
    parallel_tests: List of all parallel tests.
    test_index: List index of last created test before this function call.
    process_list: List of running subprocesses. Created processes are appended
                  to this list.
    process_done: List of booleans indicating process completion. One 'False'
                  will be added per process created.
    max_parallel_tests: Maximum number of tests to run in parallel.
    root_coverage_file: The root .coverage filename if coverage is requested.

  Returns:
    Index of last created test.
  """
    orig_test_index = test_index
    # On Windows, launch the gsutil script through the Python executable.
    executable_prefix = [sys.executable
                         ] if sys.executable and IS_WINDOWS else []
    s3_argument = ['-s'] if tests.util.RUN_S3_TESTS else []
    project_id_arg = []
    try:
        project_id_arg = [
            '-o', 'GSUtil:default_project_id=%s' % PopulateProjectId()
        ]
    except ProjectIdException:
        # If we don't have a project ID, unit tests should still be able to pass.
        pass

    process_create_start_time = time.time()
    last_log_time = process_create_start_time
    # Spawn child processes until we hit the parallelism cap or run out of
    # tests to schedule.
    while (CountFalseInList(process_done) < max_parallel_tests
           and test_index < len(parallel_tests)):
        env = os.environ.copy()
        if root_coverage_file:
            # Direct the child's coverage data into the shared root file.
            env['GSUTIL_COVERAGE_OUTPUT_FILE'] = root_coverage_file
        process_list.append(
            subprocess.Popen(
                executable_prefix + [gslib.GSUTIL_PATH] + project_id_arg +
                ['test'] + s3_argument + ['--' + _SEQUENTIAL_ISOLATION_FLAG] +
                [parallel_tests[test_index][len('gslib.tests.test_'):]],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=env))
        test_index += 1
        process_done.append(False)
        # Log progress at most once every 5 seconds to avoid console spam.
        if time.time() - last_log_time > 5:
            print('Created %d new processes (total %d/%d created)' %
                  (test_index - orig_test_index, len(process_list),
                   len(parallel_tests)))
            last_log_time = time.time()
    if test_index == len(parallel_tests):
        print('Test process creation finished (%d/%d created)' %
              (len(process_list), len(parallel_tests)))
    return test_index
コード例 #4
0
ファイル: test_iam.py プロジェクト: Akiho-Yasuda/wip
  def setUp(self):
    """Creates test buckets/objects and base & altered IAM policies for tests.

    Fetches the freshly created bucket's and object's IAM policies to use as
    baselines, then derives altered policies (public-read bindings and a
    conditional binding) and writes each policy to a temp file for later
    'iam set' invocations.
    """
    super(TestIamSet, self).setUp()

    self.public_bucket_read_binding = gen_binding(IAM_BUCKET_READ_ROLE)
    self.public_object_read_binding = gen_binding(IAM_OBJECT_READ_ROLE)
    self.project_viewer_objectviewer_with_cond_binding = gen_binding(
        IAM_OBJECT_VIEWER_ROLE,
        # Note: We use projectViewer:some-project-id here because conditions
        # cannot be applied to a binding that only has allUsers in the members
        # list; the API gives back a 400 error if you try.
        members=['projectViewer:%s' % PopulateProjectId()],
        condition={
            'title': TEST_CONDITION_TITLE,
            'description': TEST_CONDITION_DESCRIPTION,
            'expression': TEST_CONDITION_EXPR_RESOURCE_IS_OBJECT,
        })

    self.bucket = self.CreateBucket()
    self.versioned_bucket = self.CreateVersionedBucket()

    # Create a bucket to fetch its policy, used as a base for other policies.
    self.bucket_iam_string = self.RunGsUtil(
        ['iam', 'get', self.bucket.uri], return_stdout=True)
    self.old_bucket_iam_path = self.CreateTempFile(
        contents=self.bucket_iam_string)

    # Using the existing bucket's policy, make an altered policy that allows
    # allUsers to be "legacyBucketReader"s. Some tests will later apply this
    # policy.
    self.new_bucket_iam_policy = self._patch_binding(
        json.loads(self.bucket_iam_string),
        IAM_BUCKET_READ_ROLE,
        self.public_bucket_read_binding)
    self.new_bucket_iam_path = self.CreateTempFile(
        contents=json.dumps(self.new_bucket_iam_policy))

    # Using the existing bucket's policy, make an altered policy that contains
    # a binding with a condition in it. Some tests will later apply this policy.
    self.new_bucket_policy_with_conditions_policy = json.loads(
        self.bucket_iam_string)
    self.new_bucket_policy_with_conditions_policy['bindings'].append(
        self.project_viewer_objectviewer_with_cond_binding[0])
    self.new_bucket_policy_with_conditions_path = self.CreateTempFile(
        contents=json.dumps(self.new_bucket_policy_with_conditions_policy))

    # Create an object to fetch its policy, used as a base for other policies.
    tmp_object = self.CreateObject(contents='foobar')
    self.object_iam_string = self.RunGsUtil(
        ['iam', 'get', tmp_object.uri], return_stdout=True)
    self.old_object_iam_path = self.CreateTempFile(
        contents=self.object_iam_string)

    # Using the existing object's policy, make an altered policy that allows
    # allUsers to be "legacyObjectReader"s. Some tests will later apply this
    # policy.
    self.new_object_iam_policy = self._patch_binding(
        json.loads(self.object_iam_string), IAM_OBJECT_READ_ROLE,
        self.public_object_read_binding)
    self.new_object_iam_path = self.CreateTempFile(
        contents=json.dumps(self.new_object_iam_policy))
コード例 #5
0
  def _GatherSubOptions(self, subcommand_name):
    """Parses suboptions shared by the kms subcommands.

    Populates self.project_id, self.kms_key, self.clear_kms_key, and
    self.warn_on_key_authorize_failure from self.sub_opts.

    Args:
      subcommand_name: The kms subcommand being run (e.g. 'encryption'),
          used to validate where the '-w' flag may appear.

    Raises:
      CommandException: If '-w' is used outside the 'encryption' subcommand
          or without the '-k' option.
    """
    self.CheckArguments()
    self.clear_kms_key = False
    self.kms_key = None
    self.warn_on_key_authorize_failure = False

    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-p':
          self.project_id = a
        elif o == '-k':
          self.kms_key = a
          ValidateCMEK(self.kms_key)
        elif o == '-d':
          self.clear_kms_key = True
        elif o == '-w':
          self.warn_on_key_authorize_failure = True

    # Bug fix: compare against the subcommand_name parameter rather than
    # self.subcommand_name — the attribute is not set by this method (the
    # parameter was previously unused), so referencing it would raise
    # AttributeError whenever '-w' was supplied.
    if self.warn_on_key_authorize_failure and (
        subcommand_name != 'encryption' or not self.kms_key):
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "-w" option should only be specified for the "encryption" '
              'subcommand and must be used with the "-k" option.')))
    # Determine the project (used in the serviceaccount and authorize
    # subcommands), either from the "-p" option's value or the default specified
    # in the user's Boto config file.
    if not self.project_id:
      self.project_id = PopulateProjectId(None)
コード例 #6
0
  def RunGsUtil(self, cmd, return_status=False,
                return_stdout=False, return_stderr=False,
                expected_status=0, stdin=None, env_vars=None):
    """Runs the gsutil command.

    Args:
      cmd: The command to run, as a list, e.g. ['cp', 'foo', 'bar']
      return_status: If True, the exit status code is returned.
      return_stdout: If True, the standard output of the command is returned.
      return_stderr: If True, the standard error of the command is returned.
      expected_status: The expected return code. If not specified, defaults to
                       0. If the return code is a different value, an exception
                       is raised.
      stdin: A string of data to pipe to the process as standard input.
      env_vars: A dictionary of variables to extend the subprocess's os.environ
                with.

    Returns:
      If multiple return_* values were specified, this method returns a tuple
      containing the desired return values specified by the return_* arguments
      (in the order those parameters are specified in the method definition).
      If only one return_* value was specified, that value is returned directly
      rather than being returned within a 1-tuple.
    """
    # Prepend the gsutil entry point, exception-trace flag, and default
    # project ID so the command runs the same way across tests.
    cmd = ([gslib.GSUTIL_PATH] + ['--testexceptiontraces'] +
           ['-o', 'GSUtil:default_project_id=' + PopulateProjectId()] +
           cmd)
    if IS_WINDOWS:
      # On Windows, launch the gsutil script through the Python interpreter.
      cmd = [sys.executable] + cmd
    env = os.environ.copy()
    if env_vars:
      env.update(env_vars)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         stdin=subprocess.PIPE, env=env)
    (stdout, stderr) = p.communicate(stdin)
    status = p.returncode

    if expected_status is not None:
      self.assertEqual(
          status, expected_status,
          msg='Expected status %d, got %d.\nCommand:\n%s\n\nstderr:\n%s' % (
              expected_status, status, ' '.join(cmd), stderr))

    # Collect requested outputs in signature order: status, stdout, stderr.
    toreturn = []
    if return_status:
      toreturn.append(status)
    if return_stdout:
      if IS_WINDOWS:
        # Normalize Windows line endings for cross-platform assertions.
        stdout = stdout.replace('\r\n', '\n')
      toreturn.append(stdout)
    if return_stderr:
      if IS_WINDOWS:
        stderr = stderr.replace('\r\n', '\n')
      toreturn.append(stderr)

    if len(toreturn) == 1:
      return toreturn[0]
    elif toreturn:
      return tuple(toreturn)
コード例 #7
0
ファイル: integration_testcase.py プロジェクト: pondix/gsutil
    def RunGsUtil(self,
                  cmd,
                  return_status=False,
                  return_stdout=False,
                  return_stderr=False,
                  expected_status=0,
                  stdin=None):
        """Invokes gsutil as a subprocess and collects the requested results.

    Args:
      cmd: Command tokens to run, as a list, e.g. ['cp', 'foo', 'bar'].
      return_status: If True, include the exit status in the result.
      return_stdout: If True, include the standard output in the result.
      return_stderr: If True, include the standard error in the result.
      expected_status: Expected exit code (defaults to 0); a different exit
                       code fails the calling test. Pass None to skip the
                       check.
      stdin: Optional string piped to the process as standard input.

    Returns:
      A tuple containing the desired return values specified by the return_*
      arguments.
    """
        # Build the full command: gsutil entry point, exception-trace flag,
        # default project override, then the caller's tokens.
        full_cmd = [gslib.GSUTIL_PATH, '--testexceptiontraces',
                    '-o', 'GSUtil:default_project_id=' + PopulateProjectId()]
        full_cmd.extend(cmd)
        if IS_WINDOWS:
            # Launch the gsutil script through the Python interpreter.
            full_cmd.insert(0, sys.executable)
        proc = subprocess.Popen(full_cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        stdout, stderr = proc.communicate(stdin)
        status = proc.returncode

        if expected_status is not None:
            self.assertEqual(
                status,
                expected_status,
                msg='Expected status %d, got %d.\nCommand:\n%s\n\nstderr:\n%s'
                % (expected_status, status, ' '.join(full_cmd), stderr))

        # Assemble results in signature order: status, stdout, stderr.
        results = []
        if return_status:
            results.append(status)
        if return_stdout:
            # Normalize Windows line endings for cross-platform assertions.
            results.append(
                stdout.replace('\r\n', '\n') if IS_WINDOWS else stdout)
        if return_stderr:
            results.append(
                stderr.replace('\r\n', '\n') if IS_WINDOWS else stderr)

        if len(results) == 1:
            return results[0]
        return tuple(results) if results else None
コード例 #8
0
  def CreateBucket(self, bucket_name=None, test_objects=0, storage_class=None,
                   provider=None, prefer_json_api=False):
    """Creates a test bucket.

    The bucket and all of its contents will be deleted after the test.

    Args:
      bucket_name: Create the bucket with this name. If not provided, a
                   temporary test bucket name is constructed.
      test_objects: The number of objects that should be placed in the bucket.
                    Defaults to 0.
      storage_class: storage class to use. If not provided we use standard.
      provider: Provider to use - either "gs" (the default) or "s3".
      prefer_json_api: If true, use the JSON creation functions where possible.

    Returns:
      StorageUri for the created bucket.
    """
    if not provider:
      provider = self.default_provider

    # The JSON path only applies to GCS; S3 always uses the XML path below.
    if prefer_json_api and provider == 'gs':
      json_bucket = self.CreateBucketJson(bucket_name=bucket_name,
                                          test_objects=test_objects,
                                          storage_class=storage_class)
      bucket_uri = boto.storage_uri(
          'gs://%s' % json_bucket.name.encode(UTF8).lower(),
          suppress_consec_slashes=False)
      self.bucket_uris.append(bucket_uri)
      return bucket_uri

    bucket_name = bucket_name or self.MakeTempName('bucket')

    bucket_uri = boto.storage_uri('%s://%s' % (provider, bucket_name.lower()),
                                  suppress_consec_slashes=False)

    if provider == 'gs':
      # Apply API version and project ID headers if necessary.
      headers = {'x-goog-api-version': self.api_version}
      headers[GOOG_PROJ_ID_HDR] = PopulateProjectId()
    else:
      headers = {}

    # Parallel tests can easily run into bucket creation quotas.
    # Retry with exponential backoff so that we create them as fast as we
    # reasonably can.
    @Retry(StorageResponseError, tries=7, timeout_secs=1)
    def _CreateBucketWithExponentialBackoff():
      bucket_uri.create_bucket(storage_class=storage_class, headers=headers)

    _CreateBucketWithExponentialBackoff()
    # Track the bucket so teardown can delete it and its contents.
    self.bucket_uris.append(bucket_uri)
    for i in range(test_objects):
      self.CreateObject(bucket_uri=bucket_uri,
                        object_name=self.MakeTempName('obj'),
                        contents='test %d' % i)
    return bucket_uri
コード例 #9
0
  def _RegisterDefaultTopicCreation(self, bucket_name):
    """Records the name of a topic we expect to create, for cleanup.

    Args:
      bucket_name: Name of the bucket whose default notification topic will
          be created.

    Returns:
      The fully-qualified name of the topic expected to be created.

    Raises:
      unittest.SkipTest: If running against the XML API, which does not
          support notifications.
    """
    if self.test_api == ApiSelector.XML:
      # Bug fix: unittest.skip() only returns a decorator; returning it here
      # handed callers a decorator object in place of a topic name and never
      # skipped anything. Raising SkipTest actually skips the running test.
      raise unittest.SkipTest('Notifications only work with the JSON API.')

    expected_topic_name = 'projects/%s/topics/%s' % (
        PopulateProjectId(None), bucket_name)
    self.created_topic = expected_topic_name
    return expected_topic_name
コード例 #10
0
 def setUp(self):
   """Ensures the shared test keyRing exists and saves its qualified name."""
   super(TestKmsSuccessCases, self).setUp()
   # Creating the keyRing is effectively idempotent: after the first creation
   # the service responds with a 409, which is treated as success. Cache the
   # fully qualified name for the key-creation helpers used by tests.
   project = PopulateProjectId(None)
   self.keyring_fqn = self.kms_api.CreateKeyRing(
       project,
       testcase.KmsTestingResources.KEYRING_NAME,
       location=testcase.KmsTestingResources.KEYRING_LOCATION)
コード例 #11
0
ファイル: hmac.py プロジェクト: Guliux10/bchacks_deepbreath
    def RunCommand(self):
        """Command entry point for the hmac command.

        Parses suboptions, resolves the project ID, and dispatches to the
        handler for the requested subcommand.

        Returns:
          0 on success.

        Raises:
          CommandException: If not using the GCS JSON API, or if the
              subcommand is not one of create/delete/get/list/update.
        """

        # HMAC key management is only exposed through the JSON API.
        if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
            raise CommandException(
                'The "hmac" command can only be used with the GCS JSON API')

        self.action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        LogCommandParams(sub_opts=self.sub_opts)

        # Defaults for all recognized suboptions.
        self.service_account_email = None
        self.state = None
        self.show_all = False
        self.long_list = False
        self.etag = None

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-u':
                    self.service_account_email = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-s':
                    self.state = a
                elif o == '-a':
                    self.show_all = True
                elif o == '-l':
                    self.long_list = True
                elif o == '-e':
                    self.etag = a

        # Fall back to the default project when '-p' was not supplied.
        if not self.project_id:
            self.project_id = PopulateProjectId(None)

        # Dispatch table mapping subcommand names to their handlers.
        method_for_arg = {
            'create': self._CreateHmacKey,
            'delete': self._DeleteHmacKey,
            'get': self._GetHmacKey,
            'list': self._ListHmacKeys,
            'update': self._UpdateHmacKey,
        }
        if self.action_subcommand not in method_for_arg:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\n'
                'See "gsutil help hmac".' %
                (self.action_subcommand, self.command_name))

        LogCommandParams(subcommands=[self.action_subcommand])
        method_for_arg[self.action_subcommand]()

        return 0
コード例 #12
0
class TestAclBase(testcase.GsUtilIntegrationTestCase):
    """Integration test case base class for acl command."""

    # Command-token prefixes shared by subclasses' tests.
    _set_acl_prefix = ['acl', 'set']
    _get_acl_prefix = ['acl', 'get']
    _set_defacl_prefix = ['defacl', 'set']
    _ch_acl_prefix = ['acl', 'ch']

    # Project-team ACL entity of the form '<team>-<project id>'. Note that
    # PopulateProjectId() runs at class-definition (module import) time.
    _project_team = 'owners'
    _project_test_acl = '%s-%s' % (_project_team, PopulateProjectId())
コード例 #13
0
 def authorize_project_to_use_testing_kms_key(
     self, key_name=testcase.KmsTestingResources.CONSTANT_KEY_NAME):
   """Ensures the testing KMS key exists and our project may use it.

   Args:
     key_name: Name of the cryptoKey to create (if absent) and authorize
         within the testing keyRing.

   Returns:
     The fully-qualified name of the cryptoKey.
   """
   # Idempotently create the keyRing, then the key itself.
   ring_fqn = self.kms_api.CreateKeyRing(
       PopulateProjectId(None), testcase.KmsTestingResources.KEYRING_NAME,
       location=testcase.KmsTestingResources.KEYRING_LOCATION)
   crypto_key_fqn = self.kms_api.CreateCryptoKey(ring_fqn, key_name)
   # Grant the default project's service account access to the key.
   self.RunGsUtil(['kms', 'authorize', '-k', crypto_key_fqn])
   return crypto_key_fqn
コード例 #14
0
 def set_default_kms_key_on_bucket(self, bucket_uri):
   """Sets the constant testing KMS key as the bucket's default key.

   Args:
     bucket_uri: StorageUri of the bucket to configure.

   Returns:
     The fully-qualified name of the cryptoKey applied to the bucket.
   """
   # Idempotently create the keyRing, then the constant testing key.
   ring_fqn = self.kms_api.CreateKeyRing(
       PopulateProjectId(None), testcase.KmsTestingResources.KEYRING_NAME,
       location=testcase.KmsTestingResources.KEYRING_LOCATION)
   crypto_key_fqn = self.kms_api.CreateCryptoKey(
       ring_fqn, testcase.KmsTestingResources.CONSTANT_KEY_NAME)
   # 'kms encryption -k' authorizes the bucket's parent project's service
   # account and sets the default key on the bucket.
   self.RunGsUtil(['kms', 'encryption', '-k', crypto_key_fqn,
                   suri(bucket_uri)])
   return crypto_key_fqn
コード例 #15
0
 def setUp(self):
     """Creates a requester-pays and a regular bucket, each holding one object."""
     super(TestRequesterPays, self).setUp()
     # Regular bucket and its test object.
     self.non_requester_pays_bucket_uri = self.CreateBucket()
     self.non_requester_pays_object_uri = self.CreateObject(
         bucket_uri=self.non_requester_pays_bucket_uri,
         contents=OBJECT_CONTENTS)
     # Requester-pays bucket and its test object.
     self.requester_pays_bucket_uri = self.CreateBucket()
     self._set_requester_pays(self.requester_pays_bucket_uri)
     self.requester_pays_object_uri = self.CreateObject(
         bucket_uri=self.requester_pays_bucket_uri,
         contents=OBJECT_CONTENTS)
     # '-u' flag naming the project billed for requester-pays access.
     self.user_project_flag = ['-u', PopulateProjectId()]
コード例 #16
0
    def test_get(self):
        """Tests that 'hmac get' reports the created key's metadata."""
        access_id = self.CreateHelper(ALT_SERVICE_ACCOUNT)
        stdout = self.RunGsUtil(['hmac', 'get', access_id],
                                return_stdout=True)

        # Always delete the key we created, even if the assertion fails.
        try:
            self.AssertKeyMetadataMatches(
                stdout,
                service_account=ALT_SERVICE_ACCOUNT,
                access_id=access_id,
                project=PopulateProjectId(None),
            )
        finally:
            self.CleanupHelper(access_id)
コード例 #17
0
ファイル: kms.py プロジェクト: rgaritta/Art-Roulette
    def _GatherSubOptions(self):
        """Parses the '-p', '-k', and '-d' suboptions shared by kms subcommands.

        Populates self.project_id, self.kms_key, and self.clear_kms_key.
        """
        self.CheckArguments()
        self.clear_kms_key = False
        self.kms_key = None

        for flag, value in self.sub_opts or []:
            if flag == '-p':
                self.project_id = value
            elif flag == '-k':
                self.kms_key = value
                ValidateCMEK(self.kms_key)
            elif flag == '-d':
                self.clear_kms_key = True
        # The project (used in the serviceaccount and authorize subcommands)
        # comes from the '-p' value when given; otherwise fall back to the
        # default in the user's Boto config file.
        if not self.project_id:
            self.project_id = PopulateProjectId(None)
コード例 #18
0
 def GetKey(self, mutable=False):
     """Returns the FQN of a testing key with no project authorization on it.

     Args:
       mutable: If True, pick one of the randomly named mutable keys instead
           of the constant do-not-authorize key.

     Returns:
       The fully-qualified name of the cryptoKey.
     """
     # The keyRing only needs creating once; subsequent attempts receive a
     # 409 from the service and are treated as a success.
     ring_fqn = self.kms_api.CreateKeyRing(
         PopulateProjectId(None),
         testcase.KmsTestingResources.KEYRING_NAME,
         location=testcase.KmsTestingResources.KEYRING_LOCATION)
     if mutable:
         # Randomly pick 1 of 1000 key names.
         name = testcase.KmsTestingResources.MUTABLE_KEY_NAME_TEMPLATE % (
             randint(0, 9), randint(0, 9), randint(0, 9))
     else:
         name = testcase.KmsTestingResources.CONSTANT_KEY_NAME_DO_NOT_AUTHORIZE
     # Idempotently create the key itself.
     key_fqn = self.kms_api.CreateCryptoKey(ring_fqn, name)
     # A previous test invocation may have left an IAM binding that lets our
     # project encrypt/decrypt with this key; strip any such bindings.
     policy = self.kms_api.GetKeyIamPolicy(key_fqn)
     if policy.bindings:
         policy.bindings = []
         self.kms_api.SetKeyIamPolicy(key_fqn, policy)
     return key_fqn
コード例 #19
0
  def _ServiceAccount(self):
    """Prints the GCS service account email for the selected project.

    Returns:
      0 on success.
    """
    self.CheckArguments()
    if not self.args:
      self.args = ['gs://']
    for flag, value in self.sub_opts or []:
      if flag == '-p':
        self.project_id = value

    # Fall back to the Boto config's default project when '-p' was absent.
    if not self.project_id:
      self.project_id = PopulateProjectId(None)

    self.logger.debug('Checking service account for project %s',
                      self.project_id)

    # Requesting the service account may create it if it doesn't yet exist.
    email = self.gsutil_api.GetProjectServiceAccount(
        self.project_id, provider='gs').email_address

    print(email)

    return 0
コード例 #20
0
  def RunGsUtil(self, cmd, return_status=False,
                return_stdout=False, return_stderr=False,
                expected_status=0, stdin=None, env_vars=None):
    """Runs the gsutil command.

    Args:
      cmd: The command to run, as a list, e.g. ['cp', 'foo', 'bar']
      return_status: If True, the exit status code is returned.
      return_stdout: If True, the standard output of the command is returned.
      return_stderr: If True, the standard error of the command is returned.
      expected_status: The expected return code. If not specified, defaults to
                       0. If the return code is a different value, an exception
                       is raised.
      stdin: A string of data to pipe to the process as standard input.
      env_vars: A dictionary of variables to extend the subprocess's os.environ
                with.

    Returns:
      If multiple return_* values were specified, this method returns a tuple
      containing the desired return values specified by the return_* arguments
      (in the order those parameters are specified in the method definition).
      If only one return_* value was specified, that value is returned directly
      rather than being returned within a 1-tuple.
    """
    # Prepend the gsutil entry point, exception-trace flag, and default
    # project ID override.
    cmd = [
        gslib.GSUTIL_PATH,
        '--testexceptiontraces',
        '-o',
        'GSUtil:default_project_id=' + PopulateProjectId()
    ] + cmd
    # Popen requires bytes for piped stdin; encode text on both PY2 and PY3.
    if stdin is not None:
      if six.PY3:
        if not isinstance(stdin, bytes):
          stdin = stdin.encode(UTF8)
      else:
        stdin = stdin.encode(UTF8)
    # checking to see if test was invoked from a par file (bundled archive)
    # if not, add python executable path to ensure correct version of python
    # is used for testing
    cmd = [str(sys.executable)] + cmd if not InvokedFromParFile() else cmd
    env = os.environ.copy()
    if env_vars:
      env.update(env_vars)
    # Ensuring correct text types
    envstr = dict()
    for k, v in six.iteritems(env):
      envstr[six.ensure_str(k)] = six.ensure_str(v)
    cmd = [six.ensure_str(part) for part in cmd]
    # executing command
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, stdin=subprocess.PIPE, env=envstr)
    c_out = p.communicate(stdin)
    # Decode output as text, falling back to the locale's preferred encoding
    # if the default (UTF-8) decode fails.
    try:
      c_out = [six.ensure_text(output) for output in c_out]
    except UnicodeDecodeError:
      c_out = [six.ensure_text(output, locale.getpreferredencoding(False))
               for output in c_out]
    # Normalize platform-specific line endings to '\n'.
    stdout = c_out[0].replace(os.linesep, '\n')
    stderr = c_out[1].replace(os.linesep, '\n')
    status = p.returncode

    if expected_status is not None:
      cmd = map(six.ensure_text, cmd)
      self.assertEqual(
        int(status), int(expected_status),
        msg='Expected status {}, got {}.\nCommand:\n{}\n\nstderr:\n{}'.format(
          expected_status, status, ' '.join(cmd), stderr))

    # Collect requested outputs in signature order: status, stdout, stderr.
    toreturn = []
    if return_status:
      toreturn.append(status)
    if return_stdout:
      toreturn.append(stdout)
    if return_stderr:
      toreturn.append(stderr)

    if len(toreturn) == 1:
      return toreturn[0]
    elif toreturn:
      return tuple(toreturn)
コード例 #21
0
ファイル: test.py プロジェクト: Guliux10/bchacks_deepbreath
def CreateTestProcesses(parallel_tests,
                        test_index,
                        process_list,
                        process_done,
                        max_parallel_tests,
                        root_coverage_file=None):
  """Creates test processes to run tests in parallel.

  Args:
    parallel_tests: List of all parallel tests.
    test_index: List index of last created test before this function call.
    process_list: List of running subprocesses. Created processes are appended
                  to this list.
    process_done: List of booleans indicating process completion. One 'False'
                  will be added per process created.
    max_parallel_tests: Maximum number of tests to run in parallel.
    root_coverage_file: The root .coverage filename if coverage is requested.

  Returns:
    Index of last created test.
  """
  orig_test_index = test_index
  # If the test was not invoked from a par file (bundled archive), prefix the
  # command with the python executable path to ensure the correct version of
  # python is used for testing.
  executable_prefix = [sys.executable] if not InvokedFromParFile() else []
  s3_argument = ['-s'] if tests.util.RUN_S3_TESTS else []
  multiregional_buckets = ['-b'] if tests.util.USE_MULTIREGIONAL_BUCKETS else []
  project_id_arg = []
  try:
    project_id_arg = [
        '-o', 'GSUtil:default_project_id=%s' % PopulateProjectId()
    ]
  except ProjectIdException:
    # If we don't have a project ID, unit tests should still be able to pass.
    pass

  last_log_time = time.time()
  while (CountFalseInList(process_done) < max_parallel_tests and
         test_index < len(parallel_tests)):
    env = os.environ.copy()
    if root_coverage_file:
      env['GSUTIL_COVERAGE_OUTPUT_FILE'] = root_coverage_file
    # subprocess requires native-str environment keys and values on both
    # Python 2 and 3.
    envstr = dict()
    for k, v in six.iteritems(env):
      envstr[six.ensure_str(k)] = six.ensure_str(v)
    # Construct the command, ensuring each part is a native str. The test
    # module short name is derived by stripping the 'gslib.tests.test_'
    # prefix from the full module path.
    cmd = [
        six.ensure_str(part) for part in (
            executable_prefix +
            [gslib.GSUTIL_PATH] +
            project_id_arg +
            ['test'] +
            s3_argument +
            multiregional_buckets +
            ['--' + _SEQUENTIAL_ISOLATION_FLAG] +
            [parallel_tests[test_index][len('gslib.tests.test_'):]]
        )
    ]  # yapf: disable
    process_list.append(
        subprocess.Popen(cmd,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=envstr))
    test_index += 1
    process_done.append(False)
    # Log progress at most once every 5 seconds while spawning processes.
    if time.time() - last_log_time > 5:
      print(('Created %d new processes (total %d/%d created)' %
             (test_index - orig_test_index, len(process_list),
              len(parallel_tests))))
      last_log_time = time.time()
  if test_index == len(parallel_tests):
    print(('Test process creation finished (%d/%d created)' %
           (len(process_list), len(parallel_tests))))
  return test_index
コード例 #22
0
    def _Create(self):
        """Creates a notification config on a GCS bucket.

        Parses sub-options, resolves the target bucket and Cloud Pub/Sub
        topic, optionally creates the topic and grants GCS permission to
        publish to it, then creates the notification config (retrying once
        if freshly-granted IAM permissions have not yet propagated).

        Returns:
          0 on success.

        Raises:
          CommandException: If the payload format is missing/invalid, a -m
            attribute is malformed, or the target is not a gs:// bucket URL.
        """
        self.CheckArguments()

        # User-specified options
        pubsub_topic = None
        payload_format = None
        custom_attributes = {}
        event_types = []
        object_name_prefix = None
        should_setup_topic = True

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-e':
                    event_types.append(a)
                elif o == '-f':
                    payload_format = a
                elif o == '-m':
                    if ':' not in a:
                        raise CommandException(
                            'Custom attributes specified with -m should be of the form '
                            'key:value')
                    # Split on the first ':' only, so attribute values may
                    # themselves contain colons (e.g. -m 'url:http://x').
                    key, value = a.split(':', 1)
                    custom_attributes[key] = value
                elif o == '-p':
                    object_name_prefix = a
                elif o == '-s':
                    should_setup_topic = False
                elif o == '-t':
                    pubsub_topic = a

        if payload_format not in PAYLOAD_FORMAT_MAP:
            raise CommandException(
                "Must provide a payload format with -f of either 'json' or 'none'"
            )
        payload_format = PAYLOAD_FORMAT_MAP[payload_format]

        bucket_arg = self.args[-1]

        bucket_url = StorageUrlFromString(bucket_arg)
        if not bucket_url.IsCloudUrl() or not bucket_url.IsBucket():
            raise CommandException(
                "%s %s requires a GCS bucket name, but got '%s'" %
                (self.command_name, self.subcommand_name, bucket_arg))
        if bucket_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        bucket_name = bucket_url.bucket_name
        self.logger.debug('Creating notification for bucket %s', bucket_url)

        # Find the project this bucket belongs to
        bucket_metadata = self.gsutil_api.GetBucket(bucket_name,
                                                    fields=['projectNumber'],
                                                    provider=bucket_url.scheme)
        bucket_project_number = bucket_metadata.projectNumber

        # If not specified, choose a sensible default for the Cloud Pub/Sub topic
        # name.
        if not pubsub_topic:
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      bucket_name)
        if not pubsub_topic.startswith('projects/'):
            # If a user picks a topic ID (mytopic) but doesn't pass the whole name (
            # projects/my-project/topics/mytopic ), pick a default project.
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      pubsub_topic)
        self.logger.debug('Using Cloud Pub/Sub topic %s', pubsub_topic)

        just_modified_topic_permissions = False
        if should_setup_topic:
            # Ask GCS for the email address that represents GCS's permission to
            # publish to a Cloud Pub/Sub topic from this project.
            service_account = self.gsutil_api.GetProjectServiceAccount(
                bucket_project_number,
                provider=bucket_url.scheme).email_address
            self.logger.debug('Service account for project %d: %s',
                              bucket_project_number, service_account)
            just_modified_topic_permissions = self._CreateTopic(
                pubsub_topic, service_account)

        # Attempt creation at most twice: once normally and, if the first try
        # hits a publish-permission error right after we changed the topic's
        # IAM policy, once more after waiting for the policy to propagate.
        for attempt_number in range(2):
            try:
                create_response = self.gsutil_api.CreateNotificationConfig(
                    bucket_name,
                    pubsub_topic=pubsub_topic,
                    payload_format=payload_format,
                    custom_attributes=custom_attributes,
                    event_types=event_types if event_types else None,
                    object_name_prefix=object_name_prefix,
                    provider=bucket_url.scheme)
                break
            except PublishPermissionDeniedException:
                if attempt_number == 0 and just_modified_topic_permissions:
                    # If we have just set the IAM policy, it may take up to 10 seconds to
                    # take effect.
                    self.logger.info(
                        'Retrying create notification in 10 seconds '
                        '(new permissions may take up to 10 seconds to take effect.)'
                    )
                    time.sleep(10)
                else:
                    raise

        notification_name = 'projects/_/buckets/%s/notificationConfigs/%s' % (
            bucket_name, create_response.id)
        self.logger.info('Created notification config %s', notification_name)

        return 0
コード例 #23
0
 def testKmsServiceaccountWithProjectOption(self):
   """Runs the serviceaccount test with the default project passed via -p."""
   default_project = PopulateProjectId(None)
   self.DoTestServiceaccount(specified_project=default_project)
コード例 #24
0
 def testKmsAuthorizeWithProjectOption(self):
   """Runs the authorize test with the default project passed via -p."""
   default_project = PopulateProjectId(None)
   self.DoTestAuthorize(specified_project=default_project)
コード例 #25
0
  def CreateBucket(self,
                   bucket_name=None,
                   test_objects=0,
                   storage_class=None,
                   retention_policy=None,
                   provider=None,
                   prefer_json_api=False,
                   versioning_enabled=False,
                   bucket_policy_only=False):
    """Creates a test bucket.

    The bucket and all of its contents will be deleted after the test.

    Args:
      bucket_name: Create the bucket with this name. If not provided, a
                   temporary test bucket name is constructed.
      test_objects: The number of objects that should be placed in the bucket.
                    Defaults to 0.
      storage_class: Storage class to use. If not provided we us standard.
      retention_policy: Retention policy to be used on the bucket.
      provider: Provider to use - either "gs" (the default) or "s3".
      prefer_json_api: If True, use the JSON creation functions where possible.
      versioning_enabled: If True, set the bucket's versioning attribute to
          True.
      bucket_policy_only: If True, set the bucket's iamConfiguration's
          bucketPolicyOnly attribute to True.

    Returns:
      StorageUri for the created bucket.
    """
    if not provider:
      provider = self.default_provider

    # Location is controlled by the -b test flag.
    if self.multiregional_buckets or provider == 's3':
      location = None
    else:
      # We default to the "us-central1" location for regional buckets, but allow
      # overriding this value in the Boto config.
      location = boto.config.get(
          'GSUtil', 'test_cmd_regional_bucket_location', 'us-central1')

    if bucket_name:
      bucket_name = util.MakeBucketNameValid(bucket_name)

    if prefer_json_api and provider == 'gs':
      json_bucket = self.CreateBucketJson(bucket_name=bucket_name,
                                          test_objects=test_objects,
                                          storage_class=storage_class,
                                          location=location,
                                          versioning_enabled=versioning_enabled,
                                          retention_policy=retention_policy,
                                          bucket_policy_only=bucket_policy_only)
      bucket_uri = boto.storage_uri(
          'gs://%s' % json_bucket.name.encode(UTF8).lower(),
          suppress_consec_slashes=False)
      return bucket_uri

    bucket_name = bucket_name or self.MakeTempName('bucket')

    bucket_uri = boto.storage_uri('%s://%s' % (provider, bucket_name.lower()),
                                  suppress_consec_slashes=False)

    if provider == 'gs':
      # Apply API version and project ID headers if necessary.
      headers = {'x-goog-api-version': self.api_version}
      headers[GOOG_PROJ_ID_HDR] = PopulateProjectId()
    else:
      headers = {}

    # Parallel tests can easily run into bucket creation quotas.
    # Retry with exponential backoff so that we create them as fast as we
    # reasonably can.
    @Retry(StorageResponseError, tries=7, timeout_secs=1)
    def _CreateBucketWithExponentialBackoff():
      try:
        bucket_uri.create_bucket(storage_class=storage_class,
                                 location=location or '',
                                 headers=headers)
      except StorageResponseError, e:
        # If the service returns a transient error or a connection breaks,
        # it's possible the request succeeded. If that happens, the service
        # will return 409s for all future calls even though our intent
        # succeeded. If the error message says we already own the bucket,
        # assume success to reduce test flakiness. This depends on
        # randomness of test naming buckets to prevent name collisions for
        # test buckets created concurrently in the same project, which is
        # acceptable because this is far less likely than service errors.
        if e.status == 409 and e.body and 'already own' in e.body:
          pass
        else:
          raise