Example #1
def _get_credentials(rse, endpoint):
    """
    Return the credentials for the given RSE and endpoint.

    :param rse:           RSE name.
    :param endpoint:      URL endpoint string.
    :returns:             Dictionary of credentials.
    """

    key = '%s_%s' % (rse, endpoint)
    result = REGION.get(key)
    if isinstance(result, NoValue):
        try:
            logging.debug("Loading account credentials")
            result = config.get_rse_credentials(None)
            if result and rse in result:
                result = result[rse]
                result['is_secure'] = result['is_secure'][endpoint]
                REGION.set(key, result)
            else:
                raise Exception("Failed to load account credentials")
            logging.debug("Loaded account credentials")
        except KeyError as e:
            raise exception.CannotAuthenticate(
                'RSE %s endpoint %s not in rse account cfg: %s' %
                (rse, endpoint, e))
        except Exception:
            # hedged completion: the original listing is truncated here, so just re-raise
            raise
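Below is a minimal usage sketch, assuming _get_credentials is in scope together with the REGION cache and config module it relies on; the RSE name and endpoint are placeholders.

# hedged usage sketch; the RSE name and endpoint are placeholders
creds = _get_credentials('MOCK-RSE', 's3.example.com')
access_key = creds.get('access_key')
secret_key = creds.get('secret_key')
endpoint_is_secure = creds.get('is_secure')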
Example #2
    def connect(self):
        """
            Establishes the actual connection to the referred RSE.

            :param: credentials needed to establish a connection with the storage.

            :raises RSEAccessDenied: if no connection could be established.
        """
        try:
            scheme, prefix = self.attributes.get(
                'scheme'), self.attributes.get('prefix')
            netloc, port = self.attributes['hostname'], self.attributes.get(
                'port', 80)
            service_url = '%(scheme)s://%(netloc)s:%(port)s' % locals()

            access_key, secret_key, is_secure = None, None, None
            if 'S3_ACCESS_KEY' in os.environ:
                access_key = os.environ['S3_ACCESS_KEY']
            if 'S3_SECRET_KEY' in os.environ:
                secret_key = os.environ['S3_SECRET_KEY']
            if 'S3_IS_SECURE' in os.environ:
                if str(os.environ['S3_IS_SECURE']).lower() == 'true':
                    is_secure = True
                elif str(os.environ['S3_IS_SECURE']).lower() == 'false':
                    is_secure = False

            if is_secure is None or access_key is None or secret_key is None:
                credentials = get_rse_credentials()
                self.rse['credentials'] = credentials.get(self.rse['rse'])

                if not access_key:
                    access_key = self.rse['credentials']['access_key']
                if not secret_key:
                    secret_key = self.rse['credentials']['secret_key']
                if not is_secure:
                    is_secure = self.rse['credentials'].get('is_secure', {}).\
                        get(service_url, False)

            self._disable_http_proxy()
            self.__conn = connect_s3(host=self.attributes['hostname'],
                                     port=int(port),
                                     aws_access_key_id=access_key,
                                     aws_secret_access_key=secret_key,
                                     is_secure=is_secure,
                                     calling_format=OrdinaryCallingFormat())
            self._reset_http_proxy()
        except Exception as e:
            self._reset_http_proxy()
            raise exception.RSEAccessDenied(e)
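For reference, connect() above only reads a few keys from self.attributes; a sketch of such a dictionary, with placeholder values, could look like the following.

# placeholder attribute dictionary covering only the keys connect() reads
attributes = {
    'scheme': 'https',              # used to build service_url
    'prefix': '/rucio/',            # read, but not used for the connection itself
    'hostname': 's3.example.com',   # required
    'port': 443,                    # defaults to 80 when absent
}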
Example #3
def get_signed_url(rse_id, service, operation, url, lifetime=600):
    """
    Get a signed URL for a particular service and operation.

    The signed URL is valid for 600 seconds (10 minutes) by default, but the lifetime can be overridden.

    :param rse_id: The ID of the RSE that the URL points to.
    :param service: The service to authorise, either 'gcs', 's3' or 'swift'.
    :param operation: The operation to sign, either 'read', 'write', or 'delete'.
    :param url: The URL to sign.
    :param lifetime: Lifetime of the signed URL in seconds.
    :returns: Signed URL as a variable-length string.
    """

    global CREDS_GCS

    if service not in ['gcs', 's3', 'swift']:
        raise UnsupportedOperation('Service must be "gcs", "s3" or "swift"')

    if operation not in ['read', 'write', 'delete']:
        raise UnsupportedOperation(
            'Operation must be "read", "write", or "delete"')

    if url is None or url == '':
        raise UnsupportedOperation('URL must not be empty')

    if lifetime:
        if not isinstance(lifetime, integer_types):
            try:
                lifetime = int(lifetime)
            except (TypeError, ValueError):
                raise UnsupportedOperation(
                    'Lifetime must be convertible to numeric.')

    signed_url = None
    if service == 'gcs':
        if not CREDS_GCS:
            CREDS_GCS = ServiceAccountCredentials.from_json_keyfile_name(
                config_get(
                    'credentials',
                    'gcs',
                    raise_exception=False,
                    default='/opt/rucio/etc/google-cloud-storage-test.json'))
        components = urlparse(url)
        host = components.netloc

        # select the correct operation
        operations = {'read': 'GET', 'write': 'PUT', 'delete': 'DELETE'}
        operation = operations[operation]

        # special case to test signature, force epoch time
        if lifetime is None:
            lifetime = 0
        else:
            # GCS is timezone-sensitive, don't use UTC
            # has to be converted to Unixtime
            lifetime = datetime.datetime.now() + datetime.timedelta(
                seconds=lifetime)
            lifetime = int(time.mktime(lifetime.timetuple()))

        # sign the path only
        path = components.path

        # assemble message to sign
        to_sign = "%s\n\n\n%s\n%s" % (operation, lifetime, path)

        # create URL-capable signature
        # first character is always a '=', remove it
        signature = urlencode(
            {'': base64.b64encode(CREDS_GCS.sign_blob(to_sign)[1])})[1:]

        # assemble final signed URL
        signed_url = 'https://%s%s?GoogleAccessId=%s&Expires=%s&Signature=%s' % (
            host, path, CREDS_GCS.service_account_email, lifetime, signature)

    elif service == 's3':
        # split URL to get hostname, bucket and key
        components = urlparse(url)
        host = components.netloc
        pathcomponents = components.path.split('/')
        if len(pathcomponents) < 3:
            raise UnsupportedOperation('Not a valid S3 URL')
        bucket = pathcomponents[1]
        key = '/'.join(pathcomponents[2:])

        # remove port number from host if present
        colon = host.find(':')
        port = '443'
        if colon >= 0:
            port = host[colon + 1:]
            host = host[:colon]

        # look up in RSE account configuration by RSE ID
        cred_name = rse_id
        cred = REGION.get('s3-%s' % cred_name)
        if cred is NO_VALUE:
            rse_cred = get_rse_credentials()
            cred = rse_cred.get(cred_name)
            REGION.set('s3-%s' % cred_name, cred)
        access_key = cred['access_key']
        secret_key = cred['secret_key']
        signature_version = cred['signature_version']
        region_name = cred['region']

        if operation == 'read':
            s3op = 'get_object'
        elif operation == 'write':
            s3op = 'put_object'
        else:
            s3op = 'delete_object'

        with record_timer_block('credential.signs3'):
            s3 = boto3.client('s3',
                              endpoint_url='https://' + host + ':' + port,
                              aws_access_key_id=access_key,
                              aws_secret_access_key=secret_key,
                              config=Config(
                                  signature_version=signature_version,
                                  region_name=region_name))

            signed_url = s3.generate_presigned_url(s3op,
                                                   Params={
                                                       'Bucket': bucket,
                                                       'Key': key
                                                   },
                                                   ExpiresIn=lifetime)

    elif service == 'swift':
        # split URL to get hostname and path
        components = urlparse(url)
        host = components.netloc

        # remove port number from host if present
        colon = host.find(':')
        if colon >= 0:
            host = host[:colon]

        # use RSE ID to look up key
        cred_name = rse_id

        # look up tempurl signing key
        cred = REGION.get('swift-%s' % cred_name)
        if cred is NO_VALUE:
            rse_cred = get_rse_credentials()
            cred = rse_cred.get(cred_name)
            REGION.set('swift-%s' % cred_name, cred)
        tempurl_key = cred['tempurl_key']

        if operation == 'read':
            swiftop = 'GET'
        elif operation == 'write':
            swiftop = 'PUT'
        else:
            swiftop = 'DELETE'

        expires = int(time.time() + lifetime)

        # create signed URL
        with record_timer_block('credential.signswift'):
            hmac_body = u'%s\n%s\n%s' % (swiftop, expires, components.path)
            # Python 3 hmac only accepts bytes or bytearray
            sig = hmac.new(bytearray(tempurl_key, 'utf-8'),
                           bytearray(hmac_body, 'utf-8'), sha1).hexdigest()
            signed_url = 'https://' + host + components.path + '?temp_url_sig=' + sig + '&temp_url_expires=' + str(
                expires)

    return signed_url
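A hedged usage sketch follows; the RSE ID and URL are placeholders, and get_signed_url is assumed to be importable from the module shown above.

# hypothetical call; the rse_id and url values are placeholders
presigned = get_signed_url(rse_id='some-rse-id',
                           service='s3',
                           operation='read',
                           url='https://s3.example.com:443/mybucket/scope/name',
                           lifetime=3600)
print(presigned)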
Example #4
    def setuphostname(self, protocols):
        """ Initializes the rclone object with information about protocols in the referred RSE.

            :param protocols: Protocols of the referred RSE.
        """
        if protocols['scheme'] in ['scp', 'rsync', 'sftp']:
            self.hostname = 'ssh_rclone_rse'
            self.host = protocols['hostname']
            self.port = str(protocols['port'])
            if protocols['extended_attributes'] is not None and 'user' in list(
                    protocols['extended_attributes'].keys()):
                self.user = protocols['extended_attributes']['user']
            else:
                self.user = None
            try:
                data = load_conf_file('rclone-init.cfg')
                key_file = data[self.host + '_ssh']['key_file']
            except KeyError:
                self.logger(
                    logging.ERROR,
                    'rclone.init: rclone-init.cfg:- Field value missing for "{}_ssh: key_file"'
                    .format(self.host))
                return False
            try:
                if self.user:
                    cmd = 'rclone config create {0} sftp host {1} user {2} port {3} key_file {4}'.format(
                        self.hostname, self.host, self.user, str(self.port),
                        key_file)
                    self.logger(logging.DEBUG,
                                'rclone.init: cmd: {}'.format(cmd))
                    status, out, err = execute(cmd)
                    if status:
                        return False
                else:
                    cmd = 'rclone config create {0} sftp host {1} port {2} key_file {3}'.format(
                        self.hostname, self.host, str(self.port), key_file)
                    self.logger(logging.DEBUG,
                                'rclone.init: cmd: {}'.format(cmd))
                    status, out, err = execute(cmd)
                    if status:
                        return False
            except Exception as e:
                raise exception.ServiceUnavailable(e)

        elif protocols['scheme'] == 'file':
            self.hostname = '%s_rclone_rse' % (protocols['scheme'])
            self.host = 'localhost'
            try:
                cmd = 'rclone config create {0} local'.format(self.hostname)
                self.logger(logging.DEBUG, 'rclone.init: cmd: {}'.format(cmd))
                status, out, err = execute(cmd)
                if status:
                    return False
            except Exception as e:
                raise exception.ServiceUnavailable(e)

        elif protocols['scheme'] in ['davs', 'https']:
            self.hostname = '%s_rclone_rse' % (protocols['scheme'])
            self.host = protocols['hostname']
            url = '%s://%s:%s%s' % (protocols['scheme'], protocols['hostname'],
                                    str(protocols['port']),
                                    protocols['prefix'])
            try:
                data = load_conf_file('rclone-init.cfg')
                bearer_token = data[self.host + '_webdav']['bearer_token']
            except KeyError:
                self.logger(
                    logging.ERROR,
                    'rclone.init: rclone-init.cfg:- Field value missing for "{}_webdav: bearer_token"'
                    .format(self.host))
                return False
            try:
                cmd = 'rclone config create {0} webdav url {1} vendor other bearer_token {2}'.format(
                    self.hostname, url, bearer_token)
                self.logger(logging.DEBUG, 'rclone.init: cmd: {}'.format(cmd))
                status, out, err = execute(cmd)
                if status:
                    return False
            except Exception as e:
                raise exception.ServiceUnavailable(e)

        elif protocols['scheme'] == 's3':
            self.hostname = '%s_rclone_rse' % (protocols['scheme'])
            self.host = protocols['hostname']
            access_key, secret_key, is_secure = None, None, None
            if 'S3_ACCESS_KEY' in os.environ:
                access_key = os.environ['S3_ACCESS_KEY']
            if 'S3_SECRET_KEY' in os.environ:
                secret_key = os.environ['S3_SECRET_KEY']

            if is_secure is None or access_key is None or secret_key is None:
                credentials = get_rse_credentials()
                self.rse['credentials'] = credentials.get(self.rse['rse'])

                if not access_key:
                    access_key = self.rse['credentials']['access_key']
                if not secret_key:
                    secret_key = self.rse['credentials']['secret_key']

            if not access_key or not secret_key:
                self.logger(
                    logging.ERROR,
                    'rclone.init: Missing key(s) for s3 host: {}'.format(
                        self.host))
                return False

            try:
                cmd = 'rclone config create {0} s3 provider AWS env_auth false access_key_id {1} secret_access_key {2} region us-east-1 acl private'.format(
                    self.hostname, access_key, secret_key)
                self.logger(logging.DEBUG, 'rclone.init: cmd: {}'.format(cmd))
                status, out, err = execute(cmd)
                if status:
                    return False
            except Exception as e:
                raise exception.ServiceUnavailable(e)
        else:
            self.logger(
                logging.DEBUG,
                'rclone.init: {} protocol impl not supported by rucio rclone'.
                format(protocols['impl']))
            return False

        return True
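A sketch of a protocols dictionary that would exercise the sftp branch above; all values are placeholders, rclone_impl stands for a hypothetical instance of the surrounding class, and the branch additionally expects an rclone-init.cfg entry named '<hostname>_ssh' with a key_file field.

# placeholder protocol description; the keys match what setuphostname() reads
protocols = {
    'scheme': 'sftp',
    'hostname': 'transfer.example.org',
    'port': 22,
    'prefix': '/data/rucio/',
    'extended_attributes': {'user': 'rucio'},
    'impl': 'rclone',
}
if not rclone_impl.setuphostname(protocols):  # rclone_impl: hypothetical instance
    raise RuntimeError('rclone remote could not be configured')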