# Example #1
    def __init__(self, bucket_name, dns_server=None):
        """Initialize storage against boto's default S3 endpoint, honoring
        any [get] host/calling-format overrides from the boto config."""
        from boto.s3.connection import S3Connection
        from boto.utils import find_class
        from boto import config

        # The boto config can override the default calling format, and since
        # get() does not go through boto, the right calling format has to be
        # passed along explicitly here.
        S3CompatibleStorage.__init__(
            self,
            bucket_name=bucket_name,
            host=S3Connection.DefaultHost,
            calling_format=S3Connection.DefaultCallingFormat,
            dns_server=dns_server
        )

        # A dedicated host/calling format for GET requests may be configured:
        #   [get]
        #   host = my.server.tld
        #   calling_format = boto.s3.connection.TheCallingFormat
        override_host = config.get('get', 'host', S3Connection.DefaultHost)
        override_format = config.get('get', 'calling_format',
                                     S3Connection.DefaultCallingFormat)

        if override_host != S3Connection.DefaultHost:
            if override_format != S3Connection.DefaultCallingFormat:
                self._calling_format = find_class(override_format)()
            self._host = self._calling_format.build_host(override_host,
                                                         self._bucket_name)
def _GetOauth2UserAccountCredentials():
  """Retrieves OAuth2 user account credentials for a refresh token."""
  if not _HasOauth2UserAccountCreds():
    return

  token_uri = _GetProviderTokenUri()
  default_client_id, default_client_secret = (
      system_util.GetGsutilClientIdAndSecret())
  # Boto config wins, then environment variables, then gsutil's built-ins.
  client_id = config.get(
      'OAuth2', 'client_id',
      os.environ.get('OAUTH2_CLIENT_ID', default_client_id))
  client_secret = config.get(
      'OAuth2', 'client_secret',
      os.environ.get('OAUTH2_CLIENT_SECRET', default_client_secret))
  # These scopes do not necessarily match the refresh token being used; they
  # are only used when obtaining the RAPT in the reauth flow, to decide which
  # challenges should be issued.
  reauth_scopes = [
      constants.Scopes.CLOUD_PLATFORM, constants.Scopes.REAUTH
  ]
  return reauth_creds.Oauth2WithReauthCredentials(
      None,  # access_token
      client_id,
      client_secret,
      config.get('Credentials', 'gs_oauth2_refresh_token'),
      None,  # token_expiry
      token_uri,
      None,  # user_agent
      scopes=reauth_scopes)
# Example #3
    def get_credentials(self, access_key=None, secret_key=None, security_token=None):
        """Resolve access key, secret key and security token.

        Precedence for each value: explicit argument, then the matching
        environment variable, then the [Credentials] section of the boto
        config. Also starts an STS agent when AWS_STSAGENT is set.
        """
        access_key_name, secret_key_name = self.CredentialMap[self.name]
        # 'name in os.environ' replaces dict.has_key(), which is deprecated
        # in Python 2 and removed in Python 3.
        access_key_env = access_key_name.upper()
        if access_key is not None:
            self._access_key = access_key
        elif access_key_env in os.environ:
            self._access_key = os.environ[access_key_env]
        elif config.has_option('Credentials', access_key_name):
            self._access_key = config.get('Credentials', access_key_name)

        secret_key_env = secret_key_name.upper()
        if secret_key is not None:
            self._secret_key = secret_key
        elif secret_key_env in os.environ:
            self._secret_key = os.environ[secret_key_env]
        elif config.has_option('Credentials', secret_key_name):
            self._secret_key = config.get('Credentials', secret_key_name)

        security_token_name = self.HeaderInfoMap[self.name][SECURITY_TOKEN_HEADER_KEY]
        security_token_name_env = security_token_name.upper().replace('-', '_')

        if security_token is not None:
            self._security_token = security_token
        elif security_token_name_env in os.environ:
            self._security_token = os.environ[security_token_name_env]
        elif config.has_option('Credentials', security_token_name):
            self._security_token = config.get('Credentials', security_token_name)

        if isinstance(self._secret_key, unicode):
            # the secret key must be bytes and not unicode to work
            #  properly with hmac.new (see http://bugs.python.org/issue5285)
            self._secret_key = str(self._secret_key)

        # An external STS agent command may supply temporary credentials.
        stsagent_command = os.environ.get('AWS_STSAGENT')
        if stsagent_command:
            self.stsagent = stsagent.STSAgent(stsagent_command, 60)
# Example #4
 def __init__(self, name, access_key=None, secret_key=None,
              security_token=None):
     """Set up a provider: record credentials, resolve headers and errors,
     and let the boto config override the default host, port and host
     header for this provider."""
     self.host = None
     self.port = None
     self.host_header = None
     self.access_key = access_key
     self.secret_key = secret_key
     self.security_token = security_token
     self.name = name
     self.acl_class = self.AclClassMap[self.name]
     self.canned_acls = self.CannedAclsMap[self.name]
     self._credential_expiry_time = None
     self.get_credentials(access_key, secret_key)
     self.configure_headers()
     self.configure_errors()
     # The [Credentials] section may override host, port and host header.
     key_prefix = self.HostKeyMap[self.name]
     if config.has_option('Credentials', '%s_host' % key_prefix):
         self.host = config.get('Credentials', '%s_host' % key_prefix)
     if config.has_option('Credentials', '%s_port' % key_prefix):
         self.port = config.getint('Credentials', '%s_port' % key_prefix)
     if config.has_option('Credentials', '%s_host_header' % key_prefix):
         self.host_header = config.get('Credentials',
                                       '%s_host_header' % key_prefix)
# Example #5
    def __init__(self, name, access_key=None, secret_key=None,
                 security_token=None, profile_name=None):
        """Set up a provider, optionally scoped to a shared-credentials
        profile, and apply host/port overrides from the boto config."""
        self.host = None
        self.port = None
        self.host_header = None
        self.access_key = access_key
        self.secret_key = secret_key
        self.security_token = security_token
        self.profile_name = profile_name
        self.name = name
        self.acl_class = self.AclClassMap[self.name]
        self.canned_acls = self.CannedAclsMap[self.name]
        self._credential_expiry_time = None

        # Pull in ~/.<name>/credentials (the shared credentials file), if any.
        shared_path = os.path.join(expanduser('~'), '.' + name, 'credentials')
        self.shared_credentials = Config(do_load=False)
        if os.path.isfile(shared_path):
            self.shared_credentials.load_from_path(shared_path)

        self.get_credentials(access_key, secret_key, security_token, profile_name)
        self.configure_headers()
        self.configure_errors()

        # The [Credentials] section may override host, port and host header.
        key_prefix = self.HostKeyMap[self.name]
        if config.has_option('Credentials', key_prefix + '_host'):
            self.host = config.get('Credentials', key_prefix + '_host')
        if config.has_option('Credentials', key_prefix + '_port'):
            self.port = config.getint('Credentials', key_prefix + '_port')
        if config.has_option('Credentials', key_prefix + '_host_header'):
            self.host_header = config.get('Credentials',
                                          key_prefix + '_host_header')
# Example #6
  def __init__(self, logger=None, credentials=None, debug=0):
    """Performs necessary setup for interacting with Google Cloud KMS.

    Args:
      logger: logging.logger for outputting log messages.
      credentials: Credentials to be used for interacting with Cloud KMS
      debug: Debug level for the API implementation (0..3).
    """
    super(KmsApi, self).__init__()
    self.logger = logger

    # Without supplied credentials, try the configured ones and fall back to
    # no-op (anonymous) credentials if none can be loaded.
    no_op_credentials = False
    if credentials:
      no_op_credentials = isinstance(credentials, NoOpCredentials)
    else:
      loaded_credentials = CheckAndGetCredentials(logger)
      if not loaded_credentials:
        loaded_credentials = NoOpCredentials()
        no_op_credentials = True

    self.credentials = credentials or loaded_credentials
    self.certs_file = GetCertsFile()
    self.http = GetNewHttp()

    # Assemble the service endpoint from config: https://host[:port].
    self.http_base = 'https://'
    self.host_base = config.get('Credentials', 'gs_kms_host',
                                'cloudkms.googleapis.com')
    gs_kms_port = config.get('Credentials', 'gs_kms_port', None)
    self.host_port = (':' + gs_kms_port) if gs_kms_port else ''
    self.url_base = self.http_base + self.host_base + self.host_port

    self.num_retries = GetNumRetries()
    self.max_retry_wait = GetMaxRetryDelay()

    # Request/response logging only at the highest debug level.
    verbose = (debug >= 3)

    self.api_client = apitools_client.CloudkmsV1(
        url=self.url_base,
        http=self.http,
        log_request=verbose,
        log_response=verbose,
        credentials=self.credentials)

    self.api_client.max_retry_wait = self.max_retry_wait
    self.api_client.num_retries = self.num_retries

    if no_op_credentials:
      # This API key is not secret and is used to identify gsutil during
      # anonymous requests.
      self.api_client.AddGlobalParam('key',
                                     u'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM')
def _GetOauth2ServiceAccountCredentials():
  """Retrieves OAuth2 service account credentials for a private key file."""
  if not _HasOauth2ServiceAccountCreds():
    return

  provider_token_uri = _GetProviderTokenUri()
  service_client_id = config.get('Credentials', 'gs_service_client_id', '')
  key_path = config.get('Credentials', 'gs_service_key_file', '')

  with io.open(key_path, 'rb') as key_file:
    key_bytes = key_file.read()

  # JSON key files are UTF-8 text; P12 key files are binary and fail to
  # decode — that is how the two formats are told apart here.
  try:
    key_text = key_bytes.decode(UTF8)
  except UnicodeDecodeError:
    key_text = None

  if key_text is not None:
    try:
      json_key_dict = json.loads(key_text)
    except ValueError:
      raise Exception('Could not parse JSON keyfile "%s" as valid JSON' %
                      key_path)
    # Key file is in JSON format.
    for required_entry in ('client_id', 'client_email', 'private_key_id',
                           'private_key'):
      if required_entry not in json_key_dict:
        raise Exception('The JSON private key file at %s '
                        'did not contain the required entry: %s' %
                        (key_path, required_entry))
    return ServiceAccountCredentials.from_json_keyfile_dict(
        json_key_dict, scopes=DEFAULT_SCOPES, token_uri=provider_token_uri)

  # Key file is in P12 format.
  if HAS_CRYPTO:
    if not service_client_id:
      raise Exception('gs_service_client_id must be set if '
                      'gs_service_key_file is set to a .p12 key file')
    key_file_pass = config.get('Credentials', 'gs_service_key_file_password',
                               GOOGLE_OAUTH2_DEFAULT_FILE_PASSWORD)
    # Hand over the bytes already read rather than re-reading the key file.
    try:
      return ServiceAccountCredentials.from_p12_keyfile_buffer(
          service_client_id,
          BytesIO(key_bytes),
          private_key_password=key_file_pass,
          scopes=DEFAULT_SCOPES,
          token_uri=provider_token_uri)
    except Exception as e:
      raise Exception(
          'OpenSSL unable to parse PKCS 12 key {}.'
          'Please verify key integrity. Error message:\n{}'.format(
              key_path, str(e)))
    def as_connection():
        """ Create and return an Auto Scale Connection """

        # Read the AWS key pair from the boto [Credentials] section; the
        # second value is the *secret* key despite the original's naming.
        aws_key_id = config.get('Credentials', 'aws_access_key_id')
        aws_secret = config.get('Credentials', 'aws_secret_access_key')

        return AutoScaleConnection(aws_key_id, aws_secret)
# Example #9
    def get_credentials(self, access_key=None, secret_key=None,
                        security_token=None):
        """Resolve access key, secret key and security token.

        Precedence for each value: explicit argument, then the matching
        environment variable, then the [Credentials] section of the boto
        config (the secret key may also come from a keyring). Falls back to
        the instance metadata server when keys are still missing and the
        provider supports it.

        NOTE(review): assignments go through self.access_key /
        self.secret_key while the final block reads self._access_key /
        self._secret_key — presumably the public names are properties backed
        by the underscored attributes (boto Provider style); confirm in the
        enclosing class before refactoring.
        """
        access_key_name, secret_key_name, security_token_name = self.CredentialMap[self.name]
        if access_key is not None:
            self.access_key = access_key
            boto.log.debug("Using access key provided by client.")
        elif access_key_name.upper() in os.environ:
            self.access_key = os.environ[access_key_name.upper()]
            boto.log.debug("Using access key found in environment variable.")
        elif config.has_option('Credentials', access_key_name):
            self.access_key = config.get('Credentials', access_key_name)
            boto.log.debug("Using access key found in config file.")

        if secret_key is not None:
            self.secret_key = secret_key
            boto.log.debug("Using secret key provided by client.")
        elif secret_key_name.upper() in os.environ:
            self.secret_key = os.environ[secret_key_name.upper()]
            boto.log.debug("Using secret key found in environment variable.")
        elif config.has_option('Credentials', secret_key_name):
            self.secret_key = config.get('Credentials', secret_key_name)
            boto.log.debug("Using secret key found in config file.")
        elif config.has_option('Credentials', 'keyring'):
            # The config can name a keyring; the secret is then looked up in
            # it under the (already resolved) access key.
            keyring_name = config.get('Credentials', 'keyring')
            try:
                import keyring
            except ImportError:
                boto.log.error("The keyring module could not be imported. "
                               "For keyring support, install the keyring "
                               "module.")
                raise
            self.secret_key = keyring.get_password(
                keyring_name, self.access_key)
            boto.log.debug("Using secret key found in keyring.")

        if security_token is not None:
            self.security_token = security_token
            boto.log.debug("Using security token provided by client.")
        elif security_token_name is not None:
            # Some providers have no security-token concept (name is None).
            if security_token_name.upper() in os.environ:
                self.security_token = os.environ[security_token_name.upper()]
                boto.log.debug("Using security token found in environment"
                               " variable.")
            elif config.has_option('Credentials', security_token_name):
                self.security_token = config.get('Credentials',
                                                 security_token_name)
                boto.log.debug("Using security token found in config file.")

        if ((self._access_key is None or self._secret_key is None) and
                self.MetadataServiceSupport[self.name]):
            self._populate_keys_from_metadata_server()
        self._secret_key = self._convert_key_to_str(self._secret_key)
    def s3_aws_driver():
        """ Obtain and return the OpenStack S3 Driver for Amazon Web Services """

        # AWS credentials come from the boto [Credentials] section.
        aws_key_id = config.get('Credentials', 'aws_access_key_id')
        aws_secret = config.get('Credentials', 'aws_secret_access_key')

        # Generic driver class for the chosen provider, then a configured
        # instance of it.
        driver_cls = get_storage_driver(StorageProvider.S3_EU_WEST)
        return driver_cls(aws_key_id, aws_secret, False)
    def ec2_aws_driver():
        """ Obtain and return the Amazon Web Services EC2 Driver for LibCloud """

        # AWS credentials come from the boto [Credentials] section.
        aws_key_id = config.get('Credentials', 'aws_access_key_id')
        aws_secret = config.get('Credentials', 'aws_secret_access_key')

        # Generic driver class for the chosen provider, then a configured
        # instance of it.
        driver_cls = get_driver(Provider.EC2_EU_WEST)
        return driver_cls(aws_key_id, aws_secret)
    def sns_connection():
        """ Create and return an SNS Connection """

        # AWS key pair from the boto [Credentials] section (the second value
        # is the secret key).
        aws_key_id = config.get('Credentials', 'aws_access_key_id')
        aws_secret = config.get('Credentials', 'aws_secret_access_key')

        # Undocumented way to connect SNS to a different zone... Create a SDBRegionInfo
        region = SDBRegionInfo(None,
                               config.get('Credentials', 'region'),
                               config.get('Boto', 'sns_endpoint'))

        # Build the connection using the keys and the new region
        return boto.connect_sns(aws_key_id, aws_secret, region=region)
# Example #13
    def get_credentials(self, access_key=None, secret_key=None):
        """Resolve the access and secret keys from, in order of precedence:
        explicit arguments, environment variables, then the [Credentials]
        section of the boto config.
        """
        access_key_name, secret_key_name = self.CredentialMap[self.name]
        # 'in os.environ' replaces dict.has_key(), which is deprecated in
        # Python 2 and removed in Python 3.
        if access_key is not None:
            self.access_key = access_key
        elif access_key_name.upper() in os.environ:
            self.access_key = os.environ[access_key_name.upper()]
        elif config.has_option('Credentials', access_key_name):
            self.access_key = config.get('Credentials', access_key_name)

        if secret_key is not None:
            self.secret_key = secret_key
        elif secret_key_name.upper() in os.environ:
            self.secret_key = os.environ[secret_key_name.upper()]
        elif config.has_option('Credentials', secret_key_name):
            self.secret_key = config.get('Credentials', secret_key_name)
    def s3_os_driver():
        """ Obtain and return the OpenStack S3 Driver for LibCloud """

        # OpenStack connection settings come from the [LibCloud] config section.
        username = config.get('LibCloud', 'username')
        password = config.get('LibCloud', 'secret_key')
        auth_url = config.get('LibCloud', 'auth_url')

        # Generic Swift driver class, then a configured instance of it. The
        # tenant name mirrors the username, as in the original configuration.
        driver_cls = get_storage_driver(StorageProvider.OPENSTACK_SWIFT)
        return driver_cls(username, password,
                          ex_force_auth_url=auth_url,
                          ex_force_auth_version='2.0_password',
                          ex_tenant_name=username,
                          ex_force_service_region='RegionOne')
# Example #15
    def get_user_data(timeout=None, url=None, num_retries=None, data=None):
        """
        Get instance user data

        :type timeout: int
        :param timeout: timeout for the request

        :type url: string
        :param url: metadata_service_url

        :type num_retries: int
        :param num_retries: how many times to retry

        :type data: string
        :param data: user-defined userdata for testing

        :rtype: dict
        :return: instance user data as a dictionary
        """
        # Injected test data short-circuits the metadata-service request.
        if data is not None:
            return data

        # Fill any unset knobs from the [Boto] config section, with defaults.
        if timeout is None:
            timeout = config.getint('Boto', 'http_socket_timeout', 70)
        if num_retries is None:
            num_retries = config.getint('Boto', 'num_retries', 5)
        if url is None:
            url = config.get(
                'Boto', 'metadata_service_url', 'http://169.254.169.254')

        return get_userdata(timeout=timeout, url=url, num_retries=num_retries)
# Example #16
    def create_as_group(conn_as, launch_config_name, as_group_name, min_instances, max_instances):
        """ This method is tasked with the creation of both a Launch Configuration and an Auto Scaling Group
        based on the user requested names and features"""

        try:
            # Build the Launch Configuration (a "how to" for the auto scaling
            # group to start its instances). The AMI and instance type are
            # hard-coded to free-tier values; the AWS key_name is read from
            # the boto config so it is not hard-coded here.
            lc = LaunchConfiguration(name=launch_config_name, image_id='ami-c6972fb5', instance_type='t2.micro',
                                     key_name=config.get('Credentials', 'key_name'), security_groups=[])

            # Commit the launch configuration to AWS.
            # Parenthesized single-argument print behaves identically under
            # Python 2 and Python 3, unlike the Python-2-only print statement.
            conn_as.create_launch_configuration(lc)
            print("Launch configuration created")

            # The Auto Scaling Group references the launch configuration above
            # (which tells it how to create new instances) plus the
            # availability zones to launch in. Load Balancer along with other
            # extra options could also be attached here.
            ag = AutoScalingGroup(group_name=as_group_name, availability_zones=['eu-west-1c'], launch_config=lc,
                                  min_size=min_instances, max_size=max_instances, connection=conn_as)
            print("Auto Scaling Group created")

            # Committing the group starts deployment of the minimum number of
            # requested instances.
            conn_as.create_auto_scaling_group(ag)
            print("Instances are being deployed")
        except exception.BotoServerError:
            print("The launch configurator name or the group name already exists")

        return True
# Example #17
def _GetOauth2UserAccountCredentials():
  """Retrieves OAuth2 user account credentials for a refresh token.

  (The previous docstring said "service account", but this builds
  user-account credentials from the configured refresh token.)

  Returns:
    oauth2client OAuth2Credentials for the configured refresh token, or None
    if no user account credentials are configured.
  """
  if not _HasOauth2UserAccountCreds():
    return

  provider_token_uri = _GetProviderTokenUri()
  gsutil_client_id, gsutil_client_secret = GetGsutilClientIdAndSecret()
  # Boto config wins, then environment variables, then gsutil's built-in
  # client id/secret.
  client_id = config.get('OAuth2', 'client_id',
                         os.environ.get('OAUTH2_CLIENT_ID', gsutil_client_id))
  client_secret = config.get('OAuth2', 'client_secret',
                             os.environ.get('OAUTH2_CLIENT_SECRET',
                                            gsutil_client_secret))
  return oauth2client.client.OAuth2Credentials(
      None, client_id, client_secret,
      config.get('Credentials', 'gs_oauth2_refresh_token'), None,
      provider_token_uri, None)
# Example #18
def needs_aws(test_item):
    """
    Use as a decorator before test classes or methods to only run them if AWS usable.
    """
    test_item = _mark_test('aws', test_item)
    try:
        # noinspection PyUnresolvedReferences
        from boto import config
    except ImportError:
        return unittest.skip("Install toil with the 'aws' extra to include this test.")(test_item)
    # The former bare 'except: raise' clause here was a no-op (any other
    # exception propagates on its own) and has been removed.
    else:
        dot_aws_credentials_path = os.path.expanduser('~/.aws/credentials')
        hv_uuid_path = '/sys/hypervisor/uuid'
        boto_credentials = config.get('Credentials', 'aws_access_key_id')
        if boto_credentials:
            return test_item
        if (os.path.exists(dot_aws_credentials_path) or
                (os.path.exists(hv_uuid_path) and file_begins_with(hv_uuid_path, 'ec2'))):
            # Assume that EC2 machines like the Jenkins slave that we run CI on will have IAM roles
            return test_item
        else:
            return unittest.skip("Configure ~/.aws/credentials with AWS credentials to include "
                                 "this test.")(test_item)
# Example #19
    def __init__(
        self,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        is_secure=True,
        port=None,
        proxy=None,
        proxy_port=None,
        proxy_user=None,
        proxy_pass=None,
        host=None,
        debug=0,
        https_connection_factory=None,
    ):
        """Open a Mechanical Turk connection, defaulting to the sandbox
        endpoint when the boto config has [MTurk] sandbox = True."""
        if not host:
            sandboxed = (config.has_option("MTurk", "sandbox")
                         and config.get("MTurk", "sandbox") == "True")
            host = ("mechanicalturk.sandbox.amazonaws.com" if sandboxed
                    else "mechanicalturk.amazonaws.com")

        AWSQueryConnection.__init__(
            self,
            aws_access_key_id,
            aws_secret_access_key,
            is_secure,
            port,
            proxy,
            proxy_port,
            proxy_user,
            proxy_pass,
            host,
            debug,
            https_connection_factory,
        )
# Example #20
    def create_instance_with_so(conn, so):
        """ Create a new instance based on AMI"""

        # Pick the Windows or Linux AMI according to the requested OS.
        if so == "windows":
            ami = "ami-c6972fb5"
        else:
            ami = "ami-f95ef58a"

        # Launch with the configured key pair; the instance type is
        # hard-coded to a free-tier machine.
        conn.run_instances(ami, key_name=config.get('Credentials', 'key_name'), instance_type="t2.micro")
# Example #21
        def inject_default( name, default ):
            # Force `default` as the value of `name` in the [Boto] section of
            # the live boto config.
            # NOTE(review): this writes `default` whenever the current value
            # differs from it — i.e. it also overrides a user-set value rather
            # than only filling in a missing one. Presumably intentional
            # ("pin this setting"), but confirm against the callers.
            section = 'Boto'
            value = config.get( section, name )

            if value != default:
                if not config.has_section( section ):
                    config.add_section( section )
                config.set( section, name, default )
# Example #22
 def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
              is_secure=True, port=None, proxy=None, proxy_port=None,
              proxy_user=None, proxy_pass=None, host='ec2.amazonaws.com', debug=0,
              https_connection_factory=None):
     """Open an EC2 query connection; the [Boto] ec2_host config option,
     when present, overrides the default host."""
     if config.has_option('Boto', 'ec2_host'):
         host = config.get('Boto', 'ec2_host')
     AWSQueryConnection.__init__(self, aws_access_key_id,
                                 aws_secret_access_key, is_secure, port,
                                 proxy, proxy_port, proxy_user, proxy_pass,
                                 host, debug, https_connection_factory)
# Example #23
  def __init__(self, logger=None, credentials=None, debug=0):
    """Performs necessary setup for interacting with Google Cloud Pub/Sub.

    Args:
      logger: logging.logger for outputting log messages.
      credentials: Credentials to be used for interacting with Google Cloud
          Pub/Sub
      debug: Debug level for the API implementation (0..3).
    """
    super(PubsubApi, self).__init__()
    self.logger = logger

    self.certs_file = GetCertsFile()
    self.http = GetNewHttp()

    # Assemble the service endpoint from config: https://host[:port].
    self.http_base = 'https://'
    self.host_base = config.get('Credentials', 'gs_pubsub_host',
                                'pubsub.googleapis.com')
    configured_port = config.get('Credentials', 'gs_pubsub_port', None)
    if configured_port:
      self.host_port = ':' + configured_port
    else:
      self.host_port = ''
    self.url_base = self.http_base + self.host_base + self.host_port

    SetUpJsonCredentialsAndCache(self, logger, credentials=credentials)

    # Request/response logging only at the highest debug level.
    verbose = (debug >= 3)

    self.api_client = apitools_client.PubsubV1(url=self.url_base,
                                               http=self.http,
                                               log_request=verbose,
                                               log_response=verbose,
                                               credentials=self.credentials)

    self.num_retries = GetNumRetries()
    self.api_client.num_retries = self.num_retries

    self.max_retry_wait = GetMaxRetryDelay()
    self.api_client.max_retry_wait = self.max_retry_wait

    if isinstance(self.credentials, NoOpCredentials):
      # This API key is not secret and is used to identify gsutil during
      # anonymous requests.
      self.api_client.AddGlobalParam('key',
                                     'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM')
# Example #24
    def get_credentials(self, access_key=None, secret_key=None):
        """Resolve keys from explicit arguments, then environment variables,
        then the [Credentials] section of the boto config."""
        key_name, secret_name = self.CredentialMap[self.name]

        if access_key is not None:
            self.access_key = access_key
        elif key_name.upper() in os.environ:
            self.access_key = os.environ[key_name.upper()]
        elif config.has_option('Credentials', key_name):
            self.access_key = config.get('Credentials', key_name)

        if secret_key is not None:
            self.secret_key = secret_key
        elif secret_name.upper() in os.environ:
            self.secret_key = os.environ[secret_name.upper()]
        elif config.has_option('Credentials', secret_name):
            self.secret_key = config.get('Credentials', secret_name)
        if isinstance(self.secret_key, unicode):
            # The secret key must be bytes and not unicode to work properly
            # with hmac.new (see http://bugs.python.org/issue5285).
            self.secret_key = str(self.secret_key)
# Example #25
def _GetOauth2ServiceAccountCredentials():
  """Retrieves OAuth2 service account credentials for a private key file."""
  if not _HasOauth2ServiceAccountCreds():
    return

  provider_token_uri = _GetProviderTokenUri()
  service_client_id = config.get('Credentials', 'gs_service_client_id', '')
  key_path = config.get('Credentials', 'gs_service_key_file', '')
  with open(key_path, 'rb') as key_file:
    key_contents = key_file.read()

  # A key file that parses as JSON is a JSON keyfile; otherwise assume P12.
  try:
    json_key_dict = json.loads(key_contents)
  except ValueError:
    json_key_dict = None

  if json_key_dict:
    # Key file is in JSON format.
    for required_entry in ('client_id', 'client_email', 'private_key_id',
                           'private_key'):
      if required_entry not in json_key_dict:
        raise Exception('The JSON private key file at %s '
                        'did not contain the required entry: %s' %
                        (key_path, required_entry))
    return ServiceAccountCredentials.from_json_keyfile_dict(
        json_key_dict, scopes=DEFAULT_SCOPES, token_uri=provider_token_uri)

  # Key file is in P12 format.
  if HAS_CRYPTO:
    if not service_client_id:
      raise Exception('gs_service_client_id must be set if '
                      'gs_service_key_file is set to a .p12 key file')
    key_file_pass = config.get(
        'Credentials', 'gs_service_key_file_password',
        GOOGLE_OAUTH2_DEFAULT_FILE_PASSWORD)
    # We use _from_p12_keyfile_contents to avoid reading the key file
    # again unnecessarily.
    return ServiceAccountCredentials.from_p12_keyfile_buffer(
        service_client_id, BytesIO(key_contents),
        private_key_password=key_file_pass, scopes=DEFAULT_SCOPES,
        token_uri=provider_token_uri)
# Example #26
    def __new__(cls, bucket_name, dns_server):
        """Return a plain S3Storage when the boto config does not route GET
        requests to a separate host; otherwise construct this class."""
        from boto.s3.connection import S3Connection
        from boto import config

        # NOTE(review): the default comes from S3Connection.DefaultHost but
        # the comparison is against S3CompatibleStorage.DefaultHost —
        # presumably the two constants are equal; confirm.
        configured_get_host = config.get('get', 'host', S3Connection.DefaultHost)
        if configured_get_host == S3CompatibleStorage.DefaultHost:
            return S3Storage(bucket_name=bucket_name, dns_server=dns_server)

        return super(BotoStorage, cls).__new__(cls)
# Example #27
    def get_credentials(self, access_key=None, secret_key=None):
        """Resolve the access and secret keys.

        Precedence for each value: explicit argument, then the matching
        environment variable, then the [Credentials] section of the boto
        config. Falls back to the instance metadata server when keys are
        still missing and the provider supports it.

        NOTE(review): assignments go through self.access_key /
        self.secret_key while the final block reads self._access_key /
        self._secret_key — presumably the public names are properties backed
        by the underscored attributes (boto Provider style); confirm in the
        enclosing class before refactoring.
        """
        access_key_name, secret_key_name = self.CredentialMap[self.name]
        if access_key is not None:
            self.access_key = access_key
        elif access_key_name.upper() in os.environ:
            self.access_key = os.environ[access_key_name.upper()]
        elif config.has_option('Credentials', access_key_name):
            self.access_key = config.get('Credentials', access_key_name)

        if secret_key is not None:
            self.secret_key = secret_key
        elif secret_key_name.upper() in os.environ:
            self.secret_key = os.environ[secret_key_name.upper()]
        elif config.has_option('Credentials', secret_key_name):
            self.secret_key = config.get('Credentials', secret_key_name)

        if ((self._access_key is None or self._secret_key is None) and
                self.MetadataServiceSupport[self.name]):
            self._populate_keys_from_metadata_server()
        self._secret_key = self._convert_key_to_str(self._secret_key)
# Example #28
    def create_volume(conn):
        """ Creating a new volume """

        try:
            # Create a 30gb volume of type gp2 with the region eu-west-1a
            conn.create_volume(30, config.get('Credentials', 'region')+"a", None, "gp2")
            return True

        # If problems arise, warn the user
        except exception.EC2ResponseError:
            # Parenthesized single-argument print behaves identically under
            # Python 2 and Python 3, unlike the Python-2-only print statement.
            print("Problems creating volume")
            return False
# Example #29
def _GetGceCreds():
  """Return GCE assertion credentials, or None when unavailable."""
  if not _HasGceCreds():
    return

  try:
    return credentials_lib.GceAssertionCredentials(
        service_account_name=config.get(
            'GoogleCompute', 'service_account', 'default'),
        cache_filename=GetGceCredentialCacheFilename())
  # 'except X as e' replaces the Python-2-only 'except X, e' syntax; the
  # 'as' form works on Python 2.6+ and Python 3.
  except apitools_exceptions.ResourceUnavailableError as e:
    # A missing service account simply means there are no GCE creds here.
    if 'service account' in str(e) and 'does not exist' in str(e):
      return None
    raise
Пример #30
0
def CreateTrackerDirIfNeeded():
  """Looks up the configured directory where gsutil keeps its resumable
     transfer tracker files, and creates it if it doesn't already exist.

  Returns:
    The pathname to the tracker directory.
  """
  import errno  # Local import: only needed for the EEXIST race handling.
  tracker_dir = config.get(
      'GSUtil', 'resumable_tracker_dir',
      os.path.expanduser('~' + os.sep + '.gsutil'))
  if not os.path.exists(tracker_dir):
    try:
      os.makedirs(tracker_dir)
    except OSError as e:
      # Ignore 'already exists': concurrent resumable transfers can race to
      # create the directory between the exists() check and makedirs().
      if e.errno != errno.EEXIST:
        raise
  return tracker_dir
Пример #31
0
    def create_as_group(conn_as, launch_config_name, as_group_name,
                        min_instances, max_instances):
        """Create a Launch Configuration and an Auto Scaling Group.

        Args:
            conn_as: Auto Scaling connection object.
            launch_config_name: Name for the new launch configuration.
            as_group_name: Name for the new auto scaling group.
            min_instances: Minimum number of instances in the group.
            max_instances: Maximum number of instances in the group.

        Returns:
            True always (a name collision is only reported, not raised).
        """
        try:
            # The launch configuration is the "how to" the auto scaling group
            # uses to start instances.  The free-tier AMI and instance type are
            # hard-coded; the AWS key_name comes from the boto config so the
            # interface works without editing the source.
            lc = LaunchConfiguration(name=launch_config_name,
                                     image_id='ami-c6972fb5',
                                     instance_type='t2.micro',
                                     key_name=config.get(
                                         'Credentials', 'key_name'),
                                     security_groups=[])

            # Commit the launch configuration to AWS.  The parenthesized print
            # form is valid on both Python 2 and 3 (bare prints were Py2-only).
            conn_as.create_launch_configuration(lc)
            print("Launch configuration created")

            # The auto scaling group references the launch configuration (its
            # instructions for creating new instances) and pins the
            # availability zones the group may launch in.
            ag = AutoScalingGroup(group_name=as_group_name,
                                  availability_zones=['eu-west-1c'],
                                  launch_config=lc,
                                  min_size=min_instances,
                                  max_size=max_instances,
                                  connection=conn_as)
            print("Auto Scaling Group created")

            # Committing the group starts deployment of the minimum number of
            # requested instances.
            conn_as.create_auto_scaling_group(ag)
            print("Instances are being deployed")
        except exception.BotoServerError:
            print("The launch configurator name or the group name already exists")

        return True
  def test_persists_custom_endpoint_through_json_sliced_download(self):
    """Sliced JSON downloads must keep using the configured custom host."""
    gs_host = config.get('Credentials', 'gs_json_host', DEFAULT_HOST)
    if gs_host == DEFAULT_HOST:
      # Nothing to verify unless a custom endpoint is configured.
      return

    tmp_dir = self.CreateTempDir()
    boto_settings = [
        ('GSUtil', 'sliced_object_download_threshold', '1B'),
        ('GSUtil', 'sliced_object_download_component_size', '1B'),
    ]
    with SetBotoConfigForTest(boto_settings):
      bucket = self.CreateBucket()
      obj = self.CreateObject(bucket_uri=bucket, contents=b'foo')

      out = self.RunGsUtil(
          ['-DD', 'cp', ObjectToURI(obj), tmp_dir],
          env_vars=PYTHON_UNBUFFERED_ENV_VAR,
          return_stdout=True)

    self.assertIn(gs_host, out)
    self.assertNotIn(DEFAULT_HOST, out)
    def _SetUploadUrl(self, url):
        """Record a new upload URL and reset per-upload progress state.

        Called when a resumable upload starts or when a new tracker URL is
        obtained for it.

        Args:
            url: URL string for the upload.

        Raises:
            InvalidUrlError: if the URL is syntactically invalid.
        """
        parts = urllib.parse.urlparse(url)
        scheme_ok = parts.scheme.lower() in ('http', 'https')
        if not (scheme_ok and parts.netloc):
            raise InvalidUrlError('Invalid upload URL (%s)' % url)
        self.upload_url = url
        # A configured gs_host overrides the host embedded in the URL.
        self.upload_url_host = (config.get('Credentials', 'gs_host', None)
                                or parts.netloc)
        self.upload_url_path = '%s?%s' % (parts.path, parts.query)
        self.service_has_bytes = 0
Пример #34
0
def needs_google(test_item):
    """
    Use as a decorator before test classes or methods to only run them if Google Storage usable.
    """
    test_item = _mark_test('google', test_item)
    projectID = os.getenv('TOIL_GOOGLE_PROJECTID')
    # 'not projectID' already covers None as well as the empty string, so the
    # former redundant 'or projectID is None' clause is dropped.
    if not projectID:
        return unittest.skip("Set TOIL_GOOGLE_PROJECTID to include this test.")(test_item)
    try:
        # noinspection PyUnresolvedReferences
        from boto import config
    except ImportError:
        return unittest.skip(
            "Install Toil with the 'google' extra to include this test.")(test_item)
    else:
        boto_credentials = config.get('Credentials', 'gs_access_key_id')
        if boto_credentials:
            return test_item
        else:
            return unittest.skip(
                "Configure ~/.boto with Google Cloud credentials to include this test.")(test_item)
Пример #35
0
 def run_scripts(self):
     """Run each comma-separated 'Pyami/scripts' entry as Module.Class."""
     scripts = config.get('Pyami', 'scripts')
     if not scripts:
         return
     for entry in scripts.split(','):
         entry = entry.strip(" ")
         try:
             dot = entry.rfind('.')
             if dot > 0:
                 # Split "pkg.module.Class" on the last dot into the module
                 # path and the class name, then instantiate and run it.
                 module_name = entry[0:dot]
                 class_name = entry[dot + 1:]
                 script_cls = find_class(module_name, class_name)
                 boto.log.info('Running Script: %s' % entry)
                 script_cls().main()
             else:
                 boto.log.warning('Trouble parsing script: %s' % entry)
         except Exception as e:
             boto.log.exception(
                 'Problem Running Script: %s. Startup process halting.'
                 % entry)
             raise e
  def test_persists_custom_endpoint_through_xml_parallel_composite_upload(self):
    """Parallel composite XML uploads must keep using the custom gs_host."""
    gs_host = config.get('Credentials', 'gs_host', DEFAULT_HOST)
    if gs_host == DEFAULT_HOST:
      # Nothing to verify unless a custom endpoint is configured.
      return

    tmp_file = self.CreateTempFile(contents=b'foo')
    boto_settings = [
        ('GSUtil', 'parallel_composite_upload_threshold', '1B'),
        ('GSUtil', 'parallel_composite_upload_component_size', '1B'),
    ]
    with SetBotoConfigForTest(boto_settings):
      bucket = self.CreateBucket()
      stdout, stderr = self.RunGsUtil(
          ['-D', 'cp', tmp_file, ObjectToURI(bucket)],
          return_stdout=True,
          return_stderr=True)

    combined = stdout + stderr
    self.assertIn(gs_host, combined)
    self.assertNotIn('hostname=' + DEFAULT_HOST, combined)
Пример #37
0
def CreateTrackerDirIfNeeded():
    """Looks up the configured directory where gsutil keeps its resumable
     transfer tracker files, and creates it if it doesn't already exist.

  Returns:
    The pathname to the tracker directory.
  """
    tracker_dir = config.get('GSUtil', 'resumable_tracker_dir',
                             os.path.expanduser('~' + os.sep + '.gsutil'))
    if not os.path.exists(tracker_dir):
        try:
            # Unfortunately, even though we catch and ignore EEXIST, this call
            # will output a (needless) error message (no way to avoid that in
            # Python).
            os.makedirs(tracker_dir)
        # Ignore 'already exists' in case user tried to start up several
        # resumable uploads concurrently from a machine where no tracker dir had
        # yet been created.
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
    return tracker_dir
  def test_persists_custom_endpoint_through_json_parallel_composite_upload(
      self):
    """Parallel composite JSON uploads must keep using the custom host."""
    gs_host = config.get('Credentials', 'gs_json_host', DEFAULT_HOST)
    if gs_host == DEFAULT_HOST:
      # Nothing to verify unless a custom endpoint is configured.
      return

    tmp_file = self.CreateTempFile(contents=b'foo')
    boto_settings = [
        ('GSUtil', 'parallel_composite_upload_threshold', '1B'),
        ('GSUtil', 'parallel_composite_upload_component_size', '1B'),
    ]
    with SetBotoConfigForTest(boto_settings):
      bucket = self.CreateBucket()
      out = self.RunGsUtil(
          ['-DD', 'cp', tmp_file, ObjectToURI(bucket)],
          env_vars=PYTHON_UNBUFFERED_ENV_VAR,
          return_stdout=True)

    self.assertIn(gs_host, out)
    self.assertNotIn(DEFAULT_HOST, out)
Пример #39
0
    def __init__(self,
                 name,
                 access_key=None,
                 secret_key=None,
                 security_token=None):
        """Initialize the provider and resolve its credentials.

        Args:
            name: Provider name, used as the key into the class-level
                AclClassMap/CannedAclsMap/HostKeyMap tables.
            access_key: Optional explicit access key.
            secret_key: Optional explicit secret key.
            security_token: Optional explicit security token.
        """
        self.host = None

        self.stsagent = None

        self._access_key = access_key
        self._secret_key = secret_key
        self._security_token = security_token

        self.name = name
        self.acl_class = self.AclClassMap[self.name]
        self.canned_acls = self.CannedAclsMap[self.name]
        # Resolve credentials (args > environment > config) and set up
        # provider-specific headers and error classes.
        self.get_credentials(access_key, secret_key, security_token)
        self.configure_headers()
        self.configure_errors()
        # allow config file to override default host
        host_opt_name = '%s_host' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_opt_name):
            self.host = config.get('Credentials', host_opt_name)
Пример #40
0
def needs_aws(test_item):
    """
    Use as a decorator before test classes or methods to only run them if AWS usable.
    """
    test_item = _mark_test('aws', test_item)
    # The former 'except: raise' clause was a no-op (a bare except that
    # immediately re-raises) and has been removed.
    try:
        # noinspection PyUnresolvedReferences
        from boto import config
    except ImportError:
        return unittest.skip("Install toil with the 'aws' extra to include this test.")(test_item)
    else:
        dot_aws_credentials_path = os.path.expanduser('~/.aws/credentials')
        boto_credentials = config.get('Credentials', 'aws_access_key_id')
        if boto_credentials:
            return test_item
        if os.path.exists(dot_aws_credentials_path) or runningOnEC2():
            # Assume that EC2 machines like the Jenkins slave that we run CI on will have IAM roles
            return test_item
        else:
            return unittest.skip("Configure ~/.aws/credentials with AWS credentials to include "
                                 "this test.")(test_item)
Пример #41
0
    def __init__(self,
                 aws_access_key_id=None,
                 aws_secret_access_key=None,
                 is_secure=False,
                 port=None,
                 proxy=None,
                 proxy_port=None,
                 proxy_user=None,
                 proxy_pass=None,
                 host=None,
                 debug=0,
                 https_connection_factory=None):
        """Create the connection, defaulting the host from the boto config.

        The [MTurk] sandbox option (string 'True') selects the sandbox
        endpoint; otherwise the production endpoint is used.
        """
        if not host:
            use_sandbox = (config.has_option('MTurk', 'sandbox') and
                           config.get('MTurk', 'sandbox') == 'True')
            host = ('mechanicalturk.sandbox.amazonaws.com' if use_sandbox
                    else 'mechanicalturk.amazonaws.com')

        AWSQueryConnection.__init__(self, aws_access_key_id,
                                    aws_secret_access_key, is_secure, port,
                                    proxy, proxy_port, proxy_user, proxy_pass,
                                    host, debug, https_connection_factory)
Пример #42
0
  def _provision_client_cert(self, cert_path):
    """Runs the certificate provider and writes cert + key to cert_path."""
    command_string = config.get('Credentials', 'cert_provider_command', None)
    # Fall back to the default provider command when the user configured none.
    cert_command = (command_string.split(' ')
                    if command_string else _default_command())

    try:
      stdout_string, _ = execution_util.ExecuteExternalCommand(cert_command)

      sections = _split_pem_into_sections(stdout_string, self.logger)
      with open(cert_path, 'w+') as f:
        f.write(sections['CERTIFICATE'])
        f.write(sections['ENCRYPTED PRIVATE KEY'])
      # Second line of the PASSPHRASE section carries the actual password.
      self.client_cert_password = sections['PASSPHRASE'].splitlines()[1]
    except OSError as e:
      raise CertProvisionError(e)
    except KeyError as e:
      raise CertProvisionError(
          'Invalid output format from certificate provider, no %s' % e)
Пример #43
0
    def test_persists_custom_endpoint_through_xml_sliced_download(self):
        """Sliced XML downloads must keep using the custom gs_host."""
        gs_host = config.get('Credentials', 'gs_host', DEFAULT_HOST)
        if gs_host == DEFAULT_HOST:
            # Also covers the case of gs_host manually set to DEFAULT_HOST.
            return

        tmp_dir = self.CreateTempDir()
        boto_settings = [
            ('GSUtil', 'sliced_object_download_threshold', '1B'),
            ('GSUtil', 'sliced_object_download_component_size', '1B'),
        ]
        with SetBotoConfigForTest(boto_settings):
            bucket = self.CreateBucket()
            obj = self.CreateObject(bucket_uri=bucket, contents=b'foo')
            stdout, stderr = self.RunGsUtil(
                ['-D', 'cp', ObjectToURI(obj), tmp_dir],
                return_stdout=True,
                return_stderr=True)

        combined = stdout + stderr

        self.assertIn(gs_host, combined)
        self.assertNotIn('hostname=' + DEFAULT_HOST, combined)
Пример #44
0
 def __init__(self, domain='mf-aap'):
     """Connect to SimpleDB in eu-west-1 and open the given domain.

     Args:
         domain: SimpleDB domain name to create/open (default 'mf-aap').
     """
     self.db = boto.sdb.connect_to_region(
         'eu-west-1',
         aws_access_key_id=config.get('Credentials', 'aws_access_key_id'),
         aws_secret_access_key=config.get('Credentials', 'aws_secret_key'))
     # Bug fix: honor the 'domain' argument instead of always opening the
     # hard-coded 'mf-aap' domain (default value keeps old behavior).
     self.domain = self.db.create_domain(domain)
Пример #45
0
 def main(self):
     # Run all configured startup scripts, then report completion via the
     # notification channel, tagged with this instance's id.
     self.run_scripts()
     self.notify('Startup Completed for %s' %
                 config.get('Instance', 'instance-id'))
Пример #46
0
 def start_new_instance(self):
     """Launch a t2.micro instance from the AMI picked by ami_chooser()."""
     ami_choice = self.ami_chooser()
     # NOTE(review): the section name 'CREDENTIALS' differs in case from the
     # 'Credentials' section used elsewhere in this file -- confirm the boto
     # config really has an uppercase section, or this lookup may not find
     # the key_name.
     global_connection.run_instances(ami_choice,
                                     key_name=config.get(
                                         'CREDENTIALS', 'key_name'),
                                     instance_type="t2.micro")
Пример #47
0
                script = script.strip(" ")
                try:
                    pos = script.rfind('.')
                    if pos > 0:
                        mod_name = script[0:pos]
                        cls_name = script[pos + 1:]
                        cls = find_class(mod_name, cls_name)
                        boto.log.info('Running Script: %s' % script)
                        s = cls()
                        s.main()
                    else:
                        boto.log.warning('Trouble parsing script: %s' % script)
                except Exception as e:
                    boto.log.exception(
                        'Problem Running Script: %s. Startup process halting.'
                        % script)
                    raise e

    def main(self):
        # Entry point: run startup scripts, then send a completion notice
        # tagged with this instance's id.
        self.run_scripts()
        self.notify('Startup Completed for %s' %
                    config.get('Instance', 'instance-id'))


if __name__ == "__main__":
    # Fall back to file logging when no logging config section exists.
    if not config.has_section('loggers'):
        boto.set_file_logger('startup', '/var/log/boto.log')
    # Make the configured working directory importable for user scripts.
    sys.path.append(config.get('Pyami', 'working_dir'))
    su = Startup()
    su.main()
Пример #48
0
def _GetImpersonateServiceAccount():
    """Return the service account to impersonate, if any is configured."""
    # Precedence: explicit constant first; otherwise the boto config value,
    # which itself defaults to the gcloud environment variable.
    if constants.IMPERSONATE_SERVICE_ACCOUNT:
        return constants.IMPERSONATE_SERVICE_ACCOUNT
    return config.get(
        'Credentials', 'gs_impersonate_service_account',
        os.environ.get('CLOUDSDK_AUTH_IMPERSONATE_SERVICE_ACCOUNT'))
Пример #49
0
  def RunCommand(self):
    """Command entry point for the rewrite command.

    Parses sub-options, expands source URLs, caches decryption-key hashes,
    and applies rewrite requests (optionally in parallel).

    Returns:
      0 on success.

    Raises:
      CommandException: on invalid flag combinations, or if any object
        could not be rewritten.
    """
    self.continue_on_error = self.parallel_operations
    self.csek_hash_to_keywrapper = {}
    self.dest_storage_class = None
    self.no_preserve_acl = False
    self.read_args_from_stdin = False
    self.supported_transformation_flags = ['-k', '-s']
    self.transform_types = set()

    self.op_failure_count = 0
    self.boto_file_encryption_keywrapper = GetEncryptionKeyWrapper(config)
    self.boto_file_encryption_sha256 = (
        self.boto_file_encryption_keywrapper.crypto_key_sha256
        if self.boto_file_encryption_keywrapper else None)

    # Parse sub-options into the flags initialized above.
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-f':
          self.continue_on_error = True
        elif o == '-k':
          self.transform_types.add(_TransformTypes.CRYPTO_KEY)
        elif o == '-I':
          self.read_args_from_stdin = True
        elif o == '-O':
          self.no_preserve_acl = True
        elif o == '-r' or o == '-R':
          self.recursion_requested = True
          self.all_versions = True
        elif o == '-s':
          self.transform_types.add(_TransformTypes.STORAGE_CLASS)
          self.dest_storage_class = NormalizeStorageClass(a)

    # URLs come either from stdin (-I) or from positional args, never both.
    if self.read_args_from_stdin:
      if self.args:
        raise CommandException('No arguments allowed with the -I flag.')
      url_strs = StdinIterator()
    else:
      if not self.args:
        raise CommandException('The rewrite command (without -I) expects at '
                               'least one URL.')
      url_strs = self.args

    if not self.transform_types:
      raise CommandException(
          'rewrite command requires at least one transformation flag. '
          'Currently supported transformation flags: %s' %
          self.supported_transformation_flags)

    self.preconditions = PreconditionsFromHeaders(self.headers or {})

    url_strs_generator = GenerationCheckGenerator(url_strs)

    # Convert recursive flag to flat wildcard to avoid performing multiple
    # listings.
    if self.recursion_requested:
      url_strs_generator = ConvertRecursiveToFlatWildcard(url_strs_generator)

    # Expand the source argument(s).
    name_expansion_iterator = NameExpansionIterator(
        self.command_name,
        self.debug,
        self.logger,
        self.gsutil_api,
        url_strs_generator,
        self.recursion_requested,
        project_id=self.project_id,
        continue_on_error=self.continue_on_error or self.parallel_operations,
        bucket_listing_fields=['name', 'size'])

    seek_ahead_iterator = None
    # Cannot seek ahead with stdin args, since we can only iterate them
    # once without buffering in memory.
    if not self.read_args_from_stdin:
      # Perform the same recursive-to-flat conversion on original url_strs so
      # that it is as true to the original iterator as possible.
      seek_ahead_url_strs = ConvertRecursiveToFlatWildcard(url_strs)
      seek_ahead_iterator = SeekAheadNameExpansionIterator(
          self.command_name,
          self.debug,
          self.GetSeekAheadGsutilApi(),
          seek_ahead_url_strs,
          self.recursion_requested,
          all_versions=self.all_versions,
          project_id=self.project_id)

    # Rather than have each worker repeatedly calculate the sha256 hash for each
    # decryption_key in the boto config, do this once now and cache the results.
    for i in range(0, MAX_DECRYPTION_KEYS):
      key_number = i + 1
      keywrapper = CryptoKeyWrapperFromKey(
          config.get('GSUtil', 'decryption_key%s' % str(key_number), None))
      if keywrapper is None:
        # Stop at first attribute absence in lexicographical iteration.
        break
      if keywrapper.crypto_type == CryptoKeyType.CSEK:
        self.csek_hash_to_keywrapper[keywrapper.crypto_key_sha256] = keywrapper
    # Also include the encryption_key, since it should be used to decrypt and
    # then encrypt if the object's CSEK should remain the same.
    if self.boto_file_encryption_sha256 is not None:
      self.csek_hash_to_keywrapper[self.boto_file_encryption_sha256] = (
          self.boto_file_encryption_keywrapper)

    if self.boto_file_encryption_keywrapper is None:
      msg = '\n'.join(
          textwrap.wrap(
              'NOTE: No encryption_key was specified in the boto configuration '
              'file, so gsutil will not provide an encryption key in its rewrite '
              'API requests. This will decrypt the objects unless they are in '
              'buckets with a default KMS key set, in which case the service '
              'will automatically encrypt the rewritten objects with that key.')
      )
      print('%s\n' % msg, file=sys.stderr)

    # Perform rewrite requests in parallel (-m) mode, if requested.
    self.Apply(_RewriteFuncWrapper,
               name_expansion_iterator,
               _RewriteExceptionHandler,
               fail_on_error=(not self.continue_on_error),
               shared_attrs=['op_failure_count'],
               seek_ahead_iterator=seek_ahead_iterator)

    if self.op_failure_count:
      plural_str = 's' if self.op_failure_count else ''
      raise CommandException('%d file%s/object%s could not be rewritten.' %
                             (self.op_failure_count, plural_str, plural_str))

    return 0
Пример #50
0
def GetMaxUploadCompressionBufferSize():
  """Get the max amount of memory compressed transport uploads may buffer."""
  configured = config.get('GSUtil', 'max_upload_compression_buffer_size',
                          '2GiB')
  return HumanReadableToBytes(configured)
Пример #51
0
def _HttpsValidateCertifcatesEnabled():
    # NOTE(review): the function name misspells "Certificates"; renaming would
    # break callers, so it is left as-is.  Also note the value presumably
    # comes back as a string when the option is set in the config file but as
    # the bool default True otherwise -- callers should treat the result as
    # truthy rather than a strict bool.  TODO: confirm against boto's
    # Config.get semantics.
    return config.get('Boto', 'https_validate_certificates', True)
Пример #52
0
def _GenSignedUrl(key,
                  api,
                  use_service_account,
                  provider,
                  client_id,
                  method,
                  duration,
                  gcs_path,
                  logger,
                  region,
                  content_type=None,
                  string_to_sign_debug=False):
  """Construct a string to sign with the provided key.

  Args:
    key: The private key to use for signing the URL.
    api: The CloudApiDelegator instance
    use_service_account: If True, use the service account credentials
        instead of using the key file to sign the url
    provider: Cloud storage provider to connect to.  If not present,
        class-wide default is used.
    client_id: Client ID signing this URL.
    method: The HTTP method to be used with the signed URL.
    duration: timedelta for which the constructed signed URL should be valid.
    gcs_path: String path to the bucket of object for signing, in the form
        'bucket' or 'bucket/object'.
    logger: logging.Logger for warning and debug output.
    region: Geographic region in which the requested resource resides.
    content_type: Optional Content-Type for the signed URL. HTTP requests using
        the URL must match this Content-Type.
    string_to_sign_debug: If true AND logger is enabled for debug level,
        print string to sign to debug. Used to differentiate user's
        signed URL from the probing permissions-check signed URL.

  Returns:
    The complete url (string).
  """
  gs_host = config.get('Credentials', 'gs_host', 'storage.googleapis.com')
  signed_headers = {'host': gs_host}

  if method == 'RESUMABLE':
    method = 'POST'
    signed_headers['x-goog-resumable'] = 'start'
    if not content_type:
      # logger.warning replaces the deprecated logger.warn alias.
      logger.warning(
          'Warning: no Content-Type header was specified with the -c '
          'flag, so uploads to the resulting Signed URL must not '
          'specify a Content-Type.')

  if content_type:
    signed_headers['content-type'] = content_type

  if use_service_account:
    final_url = api.SignUrl(provider=provider,
                            method=method,
                            duration=duration,
                            path=gcs_path,
                            logger=logger,
                            region=region,
                            signed_headers=signed_headers,
                            string_to_sign_debug=string_to_sign_debug)
  else:
    if six.PY2:
      digest = b'RSA-SHA256'
    else:
      # Your IDE may complain about this due to a bad docstring in pyOpenSsl:
      # https://github.com/pyca/pyopenssl/issues/741
      digest = 'RSA-SHA256'
    string_to_sign, canonical_query_string = CreatePayload(
        client_id=client_id,
        method=method,
        duration=duration,
        path=gcs_path,
        logger=logger,
        region=region,
        signed_headers=signed_headers,
        string_to_sign_debug=string_to_sign_debug)
    raw_signature = sign(key, string_to_sign, digest)
    final_url = GetFinalUrl(raw_signature, gs_host, gcs_path,
                            canonical_query_string)
  return final_url
Пример #53
0
    def get_credentials(self,
                        access_key=None,
                        secret_key=None,
                        security_token=None,
                        profile_name=None):
        """Resolve credentials from args, environment, shared credential file,
        boto config (optionally per-profile), keyring, or the instance
        metadata service, in that order of precedence.

        Args:
            access_key: Optional explicit access key.
            secret_key: Optional explicit secret key.
            security_token: Optional explicit security token.
            profile_name: Optional named profile to read from the shared
                credential file or boto config.

        Raises:
            ProfileNotFoundError: if a profile was requested but is present
                in neither credential source.
        """
        access_key_name, secret_key_name, security_token_name, \
            profile_name_name = self.CredentialMap[self.name]

        # Load profile from shared environment variable if it was not
        # already passed in and the environment variable exists
        if profile_name is None and profile_name_name is not None and \
           profile_name_name.upper() in os.environ:
            profile_name = os.environ[profile_name_name.upper()]

        shared = self.shared_credentials

        # Access key precedence: explicit arg > env var > requested profile
        # (shared file, then config) > shared 'default' > config Credentials.
        if access_key is not None:
            self.access_key = access_key
            boto.log.debug("Using access key provided by client.")
        elif access_key_name.upper() in os.environ:
            self.access_key = os.environ[access_key_name.upper()]
            boto.log.debug("Using access key found in environment variable.")
        elif profile_name is not None:
            if shared.has_option(profile_name, access_key_name):
                self.access_key = shared.get(profile_name, access_key_name)
                boto.log.debug("Using access key found in shared credential "
                               "file for profile %s." % profile_name)
            elif config.has_option("profile %s" % profile_name,
                                   access_key_name):
                self.access_key = config.get("profile %s" % profile_name,
                                             access_key_name)
                boto.log.debug("Using access key found in config file: "
                               "profile %s." % profile_name)
            else:
                raise ProfileNotFoundError('Profile "%s" not found!' %
                                           profile_name)
        elif shared.has_option('default', access_key_name):
            self.access_key = shared.get('default', access_key_name)
            boto.log.debug("Using access key found in shared credential file.")
        elif config.has_option('Credentials', access_key_name):
            self.access_key = config.get('Credentials', access_key_name)
            boto.log.debug("Using access key found in config file.")

        # Secret key follows the same precedence, with an extra final
        # fallback to a configured keyring.
        if secret_key is not None:
            self.secret_key = secret_key
            boto.log.debug("Using secret key provided by client.")
        elif secret_key_name.upper() in os.environ:
            self.secret_key = os.environ[secret_key_name.upper()]
            boto.log.debug("Using secret key found in environment variable.")
        elif profile_name is not None:
            if shared.has_option(profile_name, secret_key_name):
                self.secret_key = shared.get(profile_name, secret_key_name)
                boto.log.debug("Using secret key found in shared credential "
                               "file for profile %s." % profile_name)
            elif config.has_option("profile %s" % profile_name,
                                   secret_key_name):
                self.secret_key = config.get("profile %s" % profile_name,
                                             secret_key_name)
                boto.log.debug("Using secret key found in config file: "
                               "profile %s." % profile_name)
            else:
                raise ProfileNotFoundError('Profile "%s" not found!' %
                                           profile_name)
        elif shared.has_option('default', secret_key_name):
            self.secret_key = shared.get('default', secret_key_name)
            boto.log.debug("Using secret key found in shared credential file.")
        elif config.has_option('Credentials', secret_key_name):
            self.secret_key = config.get('Credentials', secret_key_name)
            boto.log.debug("Using secret key found in config file.")
        elif config.has_option('Credentials', 'keyring'):
            keyring_name = config.get('Credentials', 'keyring')
            try:
                import keyring
            except ImportError:
                boto.log.error("The keyring module could not be imported. "
                               "For keyring support, install the keyring "
                               "module.")
                raise
            self.secret_key = keyring.get_password(keyring_name,
                                                   self.access_key)
            boto.log.debug("Using secret key found in keyring.")

        if security_token is not None:
            self.security_token = security_token
            boto.log.debug("Using security token provided by client.")
        elif ((security_token_name is not None) and (access_key is None)
              and (secret_key is None)):
            # Only provide a token from the environment/config if the
            # caller did not specify a key and secret.  Otherwise an
            # environment/config token could be paired with a
            # different set of credentials provided by the caller
            if security_token_name.upper() in os.environ:
                self.security_token = os.environ[security_token_name.upper()]
                boto.log.debug("Using security token found in environment"
                               " variable.")
            elif shared.has_option(profile_name or 'default',
                                   security_token_name):
                self.security_token = shared.get(profile_name or 'default',
                                                 security_token_name)
                boto.log.debug("Using security token found in shared "
                               "credential file.")
            elif config.has_option('Credentials', security_token_name):
                self.security_token = config.get('Credentials',
                                                 security_token_name)
                boto.log.debug("Using security token found in config file.")

        # Final fallback: pull keys from the instance metadata service when
        # either key is still missing and the provider supports it.
        if ((self._access_key is None or self._secret_key is None)
                and self.MetadataServiceSupport[self.name]):
            self._populate_keys_from_metadata_server()
        self._secret_key = self._convert_key_to_str(self._secret_key)
Пример #54
0
class OAuth2UserAccountClient(OAuth2Client):
  """An OAuth2 client for user accounts.

  Exchanges a long-lived refresh token for short-lived access tokens via the
  provider's token endpoint.
  """

  def __init__(self, token_uri, client_id, client_secret, refresh_token,
               auth_uri=None, access_token_cache=None,
               datetime_strategy=datetime.datetime,
               disable_ssl_certificate_validation=False,
               proxy_host=None, proxy_port=None, proxy_user=None,
               proxy_pass=None, ca_certs_file=None):
    """Creates an OAuth2UserAccountClient.

    Args:
      token_uri: The URI used to refresh access tokens.
      client_id: The OAuth2 client ID of this client.
      client_secret: The OAuth2 client secret of this client.
      refresh_token: The token used to refresh the access token.
      auth_uri: The URI for OAuth2 authorization.
      access_token_cache: An optional instance of a TokenCache. If omitted or
          None, an InMemoryTokenCache is used.
      datetime_strategy: datetime module strategy to use.
      disable_ssl_certificate_validation: True if certifications should not be
          validated.
      proxy_host: An optional string specifying the host name of an HTTP proxy
          to be used.
      proxy_port: An optional int specifying the port number of an HTTP proxy
          to be used.
      proxy_user: An optional string specifying the user name for interacting
          with the HTTP proxy.
      proxy_pass: An optional string specifying the password for interacting
          with the HTTP proxy.
      ca_certs_file: The cacerts.txt file to use.
    """
    # The refresh token uniquely identifies the credential, so it doubles as
    # the base of the access-token cache key.
    super(OAuth2UserAccountClient, self).__init__(
        cache_key_base=refresh_token, auth_uri=auth_uri, token_uri=token_uri,
        access_token_cache=access_token_cache,
        datetime_strategy=datetime_strategy,
        disable_ssl_certificate_validation=disable_ssl_certificate_validation,
        proxy_host=proxy_host, proxy_port=proxy_port, proxy_user=proxy_user,
        proxy_pass=proxy_pass, ca_certs_file=ca_certs_file)
    self.token_uri = token_uri
    self.client_id = client_id
    self.client_secret = client_secret
    self.refresh_token = refresh_token

  def GetCredentials(self):
    """Fetches a credentials object from the provider's token endpoint.

    Returns:
      An OAuth2Credentials built around a freshly fetched access token.
    """
    access_token = self.GetAccessToken()
    credentials = OAuth2Credentials(
        access_token.token, self.client_id, self.client_secret,
        self.refresh_token, access_token.expiry, self.token_uri, None)
    return credentials

  @Retry(GsAccessTokenRefreshError,
         tries=config.get('OAuth2', 'oauth2_refresh_retries', 6),
         timeout_secs=1)
  def FetchAccessToken(self):
    """Fetches an access token from the provider's token endpoint.

    Fetches an access token from this client's OAuth2 provider's token endpoint.

    Returns:
      The fetched AccessToken.

    Raises:
      GsAccessTokenRefreshError: If the refresh got a 403 response (treated as
          rate limiting); retried by the @Retry decorator.
      GsInvalidRefreshTokenError: If the provider rejected the refresh token.
    """
    try:
      http = self.CreateHttpRequest()
      credentials = OAuth2Credentials(None, self.client_id, self.client_secret,
          self.refresh_token, None, self.token_uri, None)
      credentials.refresh(http)
      return AccessToken(credentials.access_token,
          credentials.token_expiry, datetime_strategy=self.datetime_strategy)
    # "except X as e" replaces the Python-2-only "except X, e" syntax; it is
    # valid on Python 2.6+ and required on Python 3.
    except AccessTokenRefreshError as e:
      if 'Invalid response 403' in e.message:
        # This is the most we can do at the moment to accurately detect rate
        # limiting errors since they come back as 403s with no further
        # information.
        raise GsAccessTokenRefreshError(e)
      elif 'invalid_grant' in e.message:
        LOG.info("""
Attempted to retrieve an access token from an invalid refresh token. Two common
cases in which you will see this error are:
1. Your refresh token was revoked.
2. Your refresh token was typed incorrectly.
""")
        raise GsInvalidRefreshTokenError(e)
      else:
        raise
Пример #55
0
from gslib.utils import system_util
from gslib.utils.constants import DEFAULT_GCS_JSON_API_VERSION
from gslib.utils.constants import DEFAULT_GSUTIL_STATE_DIR
from gslib.utils.constants import SSL_TIMEOUT_SEC
from gslib.utils.constants import UTF8
from gslib.utils.unit_util import HumanReadableToBytes
from gslib.utils.unit_util import ONE_MIB

import httplib2
from oauth2client.client import HAS_CRYPTO

# Python 3 removed the "long" type; alias it to int so code referencing
# long keeps working under both interpreters.
if six.PY3:
  long = int

# Globals in this module are set according to values in the boto config.
BOTO_IS_SECURE = config.get('Boto', 'is_secure', True)
CERTIFICATE_VALIDATION_ENABLED = config.get('Boto',
                                            'https_validate_certificates', True)

configured_certs_file = None  # Single certs file for use across all processes.
temp_certs_file = None  # Temporary certs file for cleanup upon exit.


def ConfigureCertsFile():
  """Configures and returns the CA Certificates file.

  If one is already configured, use it. Otherwise, use the cert roots
  distributed with gsutil.

  Returns:
    string filename of the certs file to use.
Пример #56
0
def _BotoIsSecure():
    """Returns the [Boto] is_secure config value (defaults to True)."""
    is_secure = config.get('Boto', 'is_secure', True)
    return is_secure
Пример #57
0
def GetGcsJsonApiVersion():
  """Returns the configured GCS JSON API version, or the library default."""
  configured_version = config.get('GSUtil', 'json_api_version',
                                  DEFAULT_GCS_JSON_API_VERSION)
  return configured_version
Пример #58
0
def _GetProviderTokenUri():
    """Returns the OAuth2 provider token URI (config override or default)."""
    token_uri = config.get(
        'OAuth2', 'provider_token_uri',
        DEFAULT_GOOGLE_OAUTH2_PROVIDER_TOKEN_URI)
    return token_uri
Пример #59
0
def JsonResumableChunkSizeDefined():
  """Returns True if the boto config sets a JSON resumable chunk size."""
  return config.get('GSUtil', 'json_resumable_chunk_size', None) is not None
Пример #60
0
    def __init__(self,
                 host,
                 aws_access_key_id=None,
                 aws_secret_access_key=None,
                 is_secure=True,
                 port=None,
                 proxy=None,
                 proxy_port=None,
                 proxy_user=None,
                 proxy_pass=None,
                 debug=0,
                 https_connection_factory=None,
                 path='/'):
        """
        Sets up the connection: resolves AWS credentials (explicit arguments,
        then environment variables, then the [Credentials] boto config
        section), configures proxy/SSL options, seeds the signing HMACs, and
        creates a connection pool.

        :type host: string
        :param host: The host to make the connection to

        :type aws_access_key_id: string
        :param aws_access_key_id: AWS Access Key ID (provided by Amazon)

        :type aws_secret_access_key: string
        :param aws_secret_access_key: Secret Access Key (provided by Amazon)

        :type is_secure: boolean
        :param is_secure: Whether the connection is over SSL

        :type https_connection_factory: list or tuple
        :param https_connection_factory: A pair of an HTTP connection
                                         factory and the exceptions to catch.
                                         The factory should have a similar
                                         interface to L{httplib.HTTPSConnection}.

        :type proxy:
        :param proxy:

        :type proxy_port: int
        :param proxy_port: The port to use when connecting over a proxy

        :type proxy_user: string
        :param proxy_user: The username to connect with on the proxy

        :type proxy_pass: string
        :param proxy_pass: The password to use when connection over a proxy.

        :type port: integer
        :param port: The port to use to connect

        :type debug: integer
        :param debug: Debug level; if falsy, the [Boto] debug config option
                      is used instead.

        :type path: string
        :param path: The base URL path used for requests
        """

        self.num_retries = 5
        self.is_secure = is_secure
        self.handle_proxy(proxy, proxy_port, proxy_user, proxy_pass)
        # define exceptions from httplib that we want to catch and retry
        self.http_exceptions = (httplib.HTTPException, socket.error,
                                socket.gaierror)
        # define values in socket exceptions we don't want to catch
        self.socket_exception_values = (errno.EINTR, )
        if https_connection_factory is not None:
            self.https_connection_factory = https_connection_factory[0]
            # Also retry on the factory's own exception types.
            self.http_exceptions += https_connection_factory[1]
        else:
            self.https_connection_factory = None
        if (is_secure):
            self.protocol = 'https'
        else:
            self.protocol = 'http'
        self.host = host
        self.path = path
        if debug:
            self.debug = debug
        else:
            # No explicit debug level given; fall back to the boto config.
            self.debug = config.getint('Boto', 'debug', debug)
        if port:
            self.port = port
        else:
            # Default port implied by the security setting.
            self.port = PORTS_BY_SECURITY[is_secure]

        # Credential resolution order: explicit argument, then environment
        # variable, then the [Credentials] boto config section.
        # NOTE(review): dict.has_key is Python 2 only; this module targets
        # Python 2.
        if aws_access_key_id:
            self.aws_access_key_id = aws_access_key_id
        elif os.environ.has_key('AWS_ACCESS_KEY_ID'):
            self.aws_access_key_id = os.environ['AWS_ACCESS_KEY_ID']
        elif config.has_option('Credentials', 'aws_access_key_id'):
            self.aws_access_key_id = config.get('Credentials',
                                                'aws_access_key_id')

        if aws_secret_access_key:
            self.aws_secret_access_key = aws_secret_access_key
        elif os.environ.has_key('AWS_SECRET_ACCESS_KEY'):
            self.aws_secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
        elif config.has_option('Credentials', 'aws_secret_access_key'):
            self.aws_secret_access_key = config.get('Credentials',
                                                    'aws_secret_access_key')

        # initialize an HMAC for signatures, make copies with each request
        self.hmac = hmac.new(self.aws_secret_access_key, digestmod=sha)
        if sha256:
            # sha256 may be unavailable on very old Pythons, in which case
            # hmac_256 is left as None.
            self.hmac_256 = hmac.new(self.aws_secret_access_key,
                                     digestmod=sha256)
        else:
            self.hmac_256 = None

        # cache up to 20 connections per host, up to 20 hosts
        self._pool = ConnectionPool(20, 20)
        # Presumably the (server, is_secure) key used for pool lookups —
        # verify against ConnectionPool usage.
        self._connection = (self.server_name(), self.is_secure)
        self._last_rs = None