Example #1
 def get_client(self, url):
     if RAZ.IS_ENABLED.get():
         return resource.Resource(RazHttpClient())
     else:
         return resource.Resource(
             http_client.HttpClient(url,
                                    exc_class=WebHdfsException,
                                    logger=LOG))
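Every example on this page follows the same pattern: wrap an HttpClient in resource.Resource and issue REST calls through it. As a minimal usage sketch (assuming Hue's desktop.lib.rest package is importable; the URL, directory and user name are placeholders, not taken from the source), the Resource returned by a helper like get_client could be consumed as follows:

import logging

from desktop.lib.rest import http_client, resource

LOG = logging.getLogger(__name__)

# Placeholder WebHDFS endpoint; real code would read it from configuration.
client = http_client.HttpClient('http://namenode.example.com:50070/webhdfs/v1', logger=LOG)
root = resource.Resource(client)

# WebHDFS expects the operation in the 'op' query parameter.
statuses = root.get('/user/demo', params={'op': 'LISTSTATUS', 'user.name': 'demo'})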
Example #2
    def __init__(self,
                 url,
                 fs_defaultfs,
                 logical_name=None,
                 hdfs_superuser=None,
                 security_enabled=False,
                 ssl_cert_ca_verify=True,
                 temp_dir="/tmp",
                 umask=0o1022):
        self._url = url
        self._superuser = hdfs_superuser
        self._security_enabled = security_enabled
        self._ssl_cert_ca_verify = ssl_cert_ca_verify
        self._temp_dir = temp_dir
        self._umask = umask
        self._fs_defaultfs = fs_defaultfs
        self._logical_name = logical_name

        self._client = self._make_client(url, security_enabled,
                                         ssl_cert_ca_verify)
        self._root = resource.Resource(self._client)

        # To store user info
        self._thread_local = threading.local()

        LOG.debug(
            "Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" %
            (self._url, self._security_enabled, self._superuser))
Example #3
  def __init__(self, url,
               fs_defaultfs,
               logical_name=None,
               hdfs_superuser=None,
               security_enabled=False,
               ssl_cert_ca_verify=True,
               temp_dir="/tmp",
               umask=0o1022,
               hdfs_supergroup=None,
               auth_provider=None):
    self._url = url
    self._superuser = hdfs_superuser
    self._security_enabled = security_enabled
    self._ssl_cert_ca_verify = ssl_cert_ca_verify
    self._temp_dir = temp_dir
    self._umask = umask
    self._fs_defaultfs = fs_defaultfs
    self._logical_name = logical_name
    self._supergroup = hdfs_supergroup
    self._auth_provider = auth_provider
    split = urlparse(fs_defaultfs)
    self._scheme = split.scheme
    self._netloc = split.netloc
    self._is_remote = True
    self._has_trash_support = False
    self._filebrowser_action = PERMISSION_ACTION_ABFS

    self._client = http_client.HttpClient(url, exc_class=WebHdfsException, logger=LOG)
    self._root = resource.Resource(self._client)

    # To store user info
    self._thread_local = threading.local()

    LOG.debug("Initializing ABFS : %s (security: %s, superuser: %s)" % (self._url, self._security_enabled, self._superuser))
Example #4
    def __init__(self,
                 solr_url=None,
                 user=None,
                 security_enabled=False,
                 ssl_cert_ca_verify=SSL_CERT_CA_VERIFY.get()):
        if solr_url is None:
            solr_url = SOLR_URL.get()
        self._url = solr_url
        self._user = user
        self._client = HttpClient(self._url, logger=LOG)
        self.security_enabled = security_enabled or SECURITY_ENABLED.get()

        if self.security_enabled:
            self._client.set_kerberos_auth()

        self._client.set_verify(ssl_cert_ca_verify)

        self._root = resource.Resource(self._client)

        # The Kerberos handshake requires two requests in order to authenticate,
        # but if our first request is a PUT/POST, it might flat-out reject the
        # first request if the body is too large. So, connect here in order to get
        # a cookie so future PUT/POSTs will be pre-authenticated.
        if self.security_enabled:
            self._root.invoke('HEAD', '/')
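The comment above captures a practical SPNEGO/Kerberos detail: authentication takes two round trips, so a cheap request is issued first to pick up the auth cookie before any large write. A stripped-down sketch of the same idea (endpoint, path and payload are illustrative, not from the source):

import logging

from desktop.lib.rest import http_client, resource

LOG = logging.getLogger(__name__)

client = http_client.HttpClient('https://solr.example.com:8983/solr', logger=LOG)
client.set_kerberos_auth()
root = resource.Resource(client)

# Cheap first request: completes the Kerberos handshake and stores the cookie
# on the underlying session.
root.invoke('HEAD', '/')

# Later large POSTs reuse the cookie instead of being rejected mid-handshake.
root.post('update', data='...large payload...')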
Example #5
  def __init__(
      self,
      url,
      fs_defaultfs,
      logical_name=None,
      hdfs_superuser=None,
      security_enabled=False,
      ssl_cert_ca_verify=True,
      temp_dir="/tmp",
      umask=0o1022,
      hdfs_supergroup=None):
    self._url = url
    self._superuser = hdfs_superuser
    self._security_enabled = security_enabled
    self._ssl_cert_ca_verify = ssl_cert_ca_verify
    self._temp_dir = temp_dir
    self._umask = umask
    self._fs_defaultfs = fs_defaultfs
    self._logical_name = logical_name
    self._supergroup = hdfs_supergroup
    self._scheme = ""
    self._netloc = ""
    self._is_remote = False
    self._has_trash_support = True
    self.expiration = None

    self._client = self._make_client(url, security_enabled, ssl_cert_ca_verify)
    self._root = resource.Resource(self._client)

    # To store user info
    self._thread_local = threading.local()

    LOG.debug("Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" % (self._url, self._security_enabled, self._superuser))
Example #6
  def _invoke_with_redirect(self, method, path, params=None, data=None, headers=None):
    """
    Issue a request, and expect a redirect, and then submit the data to
    the redirected location. This is used for create, write, etc.

    Returns the response from the redirected request.
    """
    next_url = None
    try:
      # Do not pass data in the first leg.
      self._root.invoke(method, path, params, headers=headers)
    except WebHdfsException as ex:
      # This is expected. We get a 307 redirect.
      # The following call may throw.
      next_url = self._get_redirect_url(ex)

    if next_url is None:
      raise WebHdfsException(_("Failed to create '%s'. HDFS did not return a redirect") % path)

    # Now talk to the real thing. The redirect url already includes the params.
    client = self._make_client(next_url, self.security_enabled, self.ssl_cert_ca_verify)

    # Make sure to reuse the session in order to preserve the Kerberos cookies.
    client._session = self._client._session
    if headers is None:
      headers = {}
    headers["Content-Type"] = 'application/octet-stream'
    return resource.Resource(client).invoke(method, data=data, headers=headers)
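This redirect helper is what WebHDFS create/append calls build on: the NameNode answers the first request with a 307 pointing at a DataNode, and the body is only sent on the second leg. A hypothetical caller (the method and parameter names below are assumptions, not taken from the source) would look roughly like this:

  def create(self, path, data=None, overwrite=False):
    # Assumed wrapper: drive the two-leg flow above for a WebHDFS CREATE.
    params = {'op': 'CREATE', 'overwrite': 'true' if overwrite else 'false'}
    return self._invoke_with_redirect('PUT', path, params, data=data)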
Example #7
    def get_access_token(cls, session_code, **options):
        remote_url = options.get(
            'remote_url', VCS[GITHUB_OFFICIAL].REMOTE_URL.get()).strip('/')
        client_id = options.get('client_id',
                                VCS[GITHUB_OFFICIAL].CLIENT_ID.get())
        client_secret = options.get('client_secret',
                                    VCS[GITHUB_OFFICIAL].CLIENT_SECRET.get())

        try:
            client = HttpClient(remote_url, logger=LOG)
            root = resource.Resource(client)
            data = {
                'client_id': client_id,
                'client_secret': client_secret,
                'code': session_code
            }
            headers = {
                'content-type': 'application/json',
                'Accept': 'application/json'
            }
            response = root.post('login/oauth/access_token',
                                 headers=headers,
                                 data=json.dumps(data))
            result = cls._get_json(response)
            return result['access_token']
        except RestException as e:
            raise GithubClientException(
                'Failed to request access token from GitHub: %s' % e)
        except KeyError:
            raise GithubClientException(
                'Failed to find access_token in GitHub oAuth response')
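A likely call site (the GithubClient class name and the Django-style request object are assumptions; only the method body is shown above) exchanges the ?code=... value that GitHub appends to the OAuth callback URL:

# Hypothetical view code; 'request' and 'GithubClient' are illustrative names.
access_token = GithubClient.get_access_token(request.GET['code'])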
Example #8
File: api.py Project: ycaihua/hue
 def __init__(self, solr_url, user):
     self._url = solr_url
     self._user = user
     self._client = HttpClient(self._url, logger=LOG)
     self.security_enabled = SECURITY_ENABLED.get()
     if self.security_enabled:
         self._client.set_kerberos_auth()
     self._root = resource.Resource(self._client)
Example #9
def azure_metadata():
  global AZURE_METADATA
  if AZURE_METADATA is None:
    from desktop.lib.rest import http_client, resource
    client = http_client.HttpClient(META_DATA_URL, logger=LOG)
    root = resource.Resource(client)
    try:
      AZURE_METADATA = root.get('/compute', params={'api-version': '2019-06-04', 'format': 'json'}, headers={'Metadata': 'true'})
    except Exception as e:
      AZURE_METADATA = False
  return AZURE_METADATA
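Because the result is memoized in the module-level AZURE_METADATA (and a failed lookup is cached as False), the instance-metadata endpoint is only queried once per process. A small usage sketch (the helper name is illustrative):

def running_on_azure():
  # dict -> True when the metadata call succeeded; False when it failed.
  return bool(azure_metadata())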
Example #10
    def is_authenticated(cls, access_token, **options):
        api_url = options.get('api_url', GITHUB_API_URL.get()).strip('/')

        try:
            client = HttpClient(api_url, logger=LOG)
            root = resource.Resource(client)
            params = (('access_token', access_token), )
            root.get('user', params=params)
            return True
        except RestException:
            return False
Example #11
    def __init__(self, api_url=None, user=None, password=None):
        self._api_url = (api_url or NAVIGATOR.API_URL.get()).strip('/')
        self._username = user or NAVIGATOR.AUTH_USERNAME.get()
        self._password = password or NAVIGATOR.AUTH_PASSWORD.get()

        self._client = HttpClient(self._api_url, logger=LOG)
        self._client.set_basic_auth(self._username, self._password)
        self._root = resource.Resource(self._client)

        self.__headers = {}
        self.__params = ()
Example #12
    def __init__(self,
                 url=None,
                 aws_access_key_id=None,
                 aws_secret_access_key=None):
        self._access_key_id = aws_access_key_id
        self._secret_access_key = aws_secret_access_key
        self._url = url

        self._client = http_client.HttpClient(url, logger=LOG)
        self._root = resource.Resource(self._client)
        self._token = None
Example #13
  def __init__(self, api_url=None, product_name=None, product_secret=None,
               ssl_cert_ca_verify=OPTIMIZER.SSL_CERT_CA_VERIFY.get(),
               product_auth_secret=None):
    self._api_url = (api_url or OPTIMIZER.API_URL.get()).strip('/')
    self._product_name = product_name if product_name else OPTIMIZER.PRODUCT_NAME.get()
    self._product_secret = product_secret if product_secret else OPTIMIZER.PRODUCT_SECRET.get()
    self._product_auth_secret = product_auth_secret if product_auth_secret else OPTIMIZER.PRODUCT_AUTH_SECRET.get()
    self._email = OPTIMIZER.EMAIL.get()
    self._email_password = OPTIMIZER.EMAIL_PASSWORD.get()

    self._client = HttpClient(self._api_url, logger=LOG)
    self._client.set_verify(ssl_cert_ca_verify)

    self._root = resource.Resource(self._client)
Example #14
    def get_client(self, url):
        if RAZ.IS_ENABLED.get():
            client = RazHttpClient(self._user,
                                   url,
                                   exc_class=WebHdfsException,
                                   logger=LOG)
        else:
            client = http_client.HttpClient(url,
                                            exc_class=WebHdfsException,
                                            logger=LOG)

        return resource.Resource(client)
Example #15
  def __init__(self, user=None):
    self._api_url = '%s/%s' % (NAVIGATOR.API_URL.get().strip('/'), VERSION)
    self._username = NAVIGATOR.AUTH_USERNAME.get()
    self._password = NAVIGATOR.AUTH_PASSWORD.get()

    self.user = user
    self._client = HttpClient(self._api_url, logger=LOG)
    self._client.set_basic_auth(self._username, self._password)
    self._root = resource.Resource(self._client, urlencode=False) # For search_entities_interactive

    self.__headers = {}
    self.__params = ()
Example #16
 def __init__(self,
              user=None,
              address=None,
              dt_path=None,
              path=None,
              security=None):
     self.user = user
     self.address = address
     self.dt_path = dt_path
     self.path = path
     self.security = security
     self._client = http_client.HttpClient(self.address, logger=LOG)
     self._root = resource.Resource(self._client)
Example #17
  def __init__(self, user=None):
    self._api_url = '%s/%s' % (NAVIGATOR.API_URL.get().strip('/'), VERSION)
    self._username = get_navigator_auth_username()
    self._password = get_navigator_auth_password()

    self.user = user
    # Navigator does not support Kerberos authentication, while other components usually require it
    self._client = UnsecureHttpClient(self._api_url, logger=LOG)
    self._client.set_basic_auth(self._username, self._password)
    self._root = resource.Resource(self._client, urlencode=False) # For search_entities_interactive

    self.__headers = {}
    self.__params = ()
Example #18
  def __init__(self, **options):
    self._github_base_url = options.get('remote_url', VCS[GITHUB_OFFICIAL].REMOTE_URL.get()).strip('/')
    self._api_url = options.get('api_url', VCS[GITHUB_OFFICIAL].API_URL.get()).strip('/')

    self._client = HttpClient(self._api_url, logger=LOG)
    self._root = resource.Resource(self._client)

    self.__headers = {}
    access_token = options.get('access_token')
    if access_token:
      self.__headers['Authorization'] = 'token %s' % access_token
      # TODO: Redact access_token from logs
    self.__params = ()
Example #19
  def __init__(self, user=None):
    super(AtlasApi, self).__init__(user)

    self._api_url = CATALOG.API_URL.get().strip('/')
    self._username = CATALOG.SERVER_USER.get()
    self._password = get_catalog_auth_password()

    # Navigator does not support Kerberos authentication, while other components usually require it
    self._client = UnsecureHttpClient(self._api_url, logger=LOG)
    self._client.set_basic_auth(self._username, self._password)
    self._root = resource.Resource(self._client, urlencode=False) # For search_entities_interactive

    self.__headers = {}
    self.__params = ()
Example #20
  def __init__(self, solr_url, user, security_enabled=SECURITY_ENABLED.get()):
    self._url = solr_url
    self._user = user
    self._client = HttpClient(self._url, logger=LOG)
    self.security_enabled = security_enabled

    if self.security_enabled:
      self._client.set_kerberos_auth()

    self._root = resource.Resource(self._client)

    # The Kerberos handshake requires two requests in order to authenticate,
    # but if our first request is a PUT/POST, it might flat-out reject the
    # first request if the body is too large. So, connect here in order to get
    # a cookie so future PUT/POSTs will be pre-authenticated.
    if self.security_enabled:
      self._root.invoke('HEAD', '/')
Example #21
    def __init__(self, user=None):
        super(AtlasApi, self).__init__(user)

        self._api_url = CATALOG.API_URL.get().strip('/') + "/api/atlas"
        self._username = CATALOG.SERVER_USER.get()
        self._password = CATALOG.SERVER_PASSWORD.get()

        self._client = HttpClient(self._api_url, logger=LOG)
        if CATALOG.KERBEROS_ENABLED.get():
            self._client.set_kerberos_auth()
        elif self._password:
            self._client.set_basic_auth(self._username, self._password)

        self._root = resource.Resource(
            self._client, urlencode=False)  # For search_entities_interactive

        self.__headers = {}
        self.__params = ()
Example #22
  def __init__(self, url,
               fs_defaultfs,
               hdfs_superuser=None,
               security_enabled=False,
               temp_dir="/tmp"):
    self._url = url
    self._superuser = hdfs_superuser
    self._security_enabled = security_enabled
    self._temp_dir = temp_dir
    self._fs_defaultfs = fs_defaultfs

    self._client = self._make_client(url, security_enabled)
    self._root = resource.Resource(self._client)

    # To store user info
    self._thread_local = threading.local()

    LOG.debug("Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" %
              (self._url, self._security_enabled, self._superuser))
Example #23
    def __init__(self, user=None):
        super(AtlasApi, self).__init__(user)

        self._api_url = None

        # Check which server is the active one if there are multiple; otherwise use the only one listed
        atlas_servers = CATALOG.API_URL.get().replace("%20", "").replace(
            "['", "").replace("']", "").replace("'", "").split(',')

        for atlas_server in atlas_servers:
            atlas_url = atlas_server.strip().strip(
                '/') + '/api/atlas/admin/status'
            response = requests.get(atlas_url)
            atlas_is_active = response.json()

            if "ACTIVE" in atlas_is_active["Status"]:
                LOG.debug('Setting Atlas API endpoint to: %s' % atlas_server)
                self._api_url = atlas_server.strip().strip('/') + "/api/atlas"
                break

        if self._api_url is None:
            self._api_url = CATALOG.API_URL.get()
            LOG.warning('No Atlas server available for use, defaulting to %s' %
                        self._api_url)

        self._username = CATALOG.SERVER_USER.get()
        self._password = CATALOG.SERVER_PASSWORD.get()

        self._client = HttpClient(self._api_url, logger=LOG)
        if CATALOG.KERBEROS_ENABLED.get():
            self._client.set_kerberos_auth()
        elif self._password:
            self._client.set_basic_auth(self._username, self._password)

        self._root = resource.Resource(
            self._client, urlencode=False)  # For search_entities_interactive

        self.__headers = {}
        self.__params = ()
Example #24
               ssl_cert_ca_verify=True,
               temp_dir="/tmp",
               umask=0o1022,
               hdfs_supergroup=None):
    self._url = url
    self._superuser = hdfs_superuser
    self._security_enabled = security_enabled
    self._ssl_cert_ca_verify = ssl_cert_ca_verify
    self._temp_dir = temp_dir
    self._umask = umask
    self._fs_defaultfs = fs_defaultfs
    self._logical_name = logical_name
    self._supergroup = hdfs_supergroup

    self._client = self._make_client(url, security_enabled, ssl_cert_ca_verify)
    self._root = resource.Resource(self._client)

    # To store user info
    self._thread_local = threading.local()

    LOG.debug("Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" % (self._url, self._security_enabled, self._superuser))

  @classmethod
  def from_config(cls, hdfs_config):
    fs_defaultfs = hdfs_config.FS_DEFAULTFS.get()

    return cls(url=_get_service_url(hdfs_config),
               fs_defaultfs=fs_defaultfs,
               logical_name=hdfs_config.LOGICAL_NAME.get(),
               security_enabled=hdfs_config.SECURITY_ENABLED.get(),
               ssl_cert_ca_verify=hdfs_config.SSL_CERT_CA_VERIFY.get(),
Example #25
 def get_client(self, url):
     return resource.Resource(
         http_client.HttpClient(url, exc_class=WebHdfsException,
                                logger=LOG))
Example #26
  def __init__(self):
    self._remote_url = VCS[GIT_READ_ONLY].REMOTE_URL.get().strip('/')
    self._api_url = VCS[GIT_READ_ONLY].API_URL.get().strip('/')

    self._client = HttpClient(self._api_url, logger=LOG)
    self._root = resource.Resource(self._client)
Example #27
    try:
      # Do not pass data in the first leg.
      self._root.invoke(method, path, params)
    except WebHdfsException as ex:
      # This is expected. We get a 307 redirect.
      # The following call may throw.
      next_url = self._get_redirect_url(ex)

    if next_url is None:
      raise WebHdfsException(
        "Failed to create '%s'. HDFS did not return a redirect" % (path,))

    # Now talk to the real thing. The redirect url already includes the params.
    client = self._make_client(next_url, self.security_enabled)
    headers = {'Content-Type': 'application/octet-stream'}
    return resource.Resource(client).invoke(method, data=data, headers=headers)


  def _get_redirect_url(self, webhdfs_ex):
    """Retrieve the redirect url from an exception object"""
    try:
      # The actual HttpError (307) is wrapped inside
      http_error = webhdfs_ex.get_parent_ex()
      if http_error is None:
        raise webhdfs_ex

      if http_error.code not in (301, 302, 303, 307):
        LOG.error("Response is not a redirect: %s" % (webhdfs_ex,))
        raise webhdfs_ex
      return http_error.headers.getheader('location')
    except Exception as ex:
Example #28
        self._superuser = hdfs_superuser
        self._security_enabled = security_enabled
        self._ssl_cert_ca_verify = ssl_cert_ca_verify
        self._temp_dir = temp_dir
        self._umask = umask
        self._fs_defaultfs = fs_defaultfs
        self._logical_name = logical_name
        self._supergroup = hdfs_supergroup
        self._scheme = ""
        self._netloc = ""
        self._is_remote = False
        self._has_trash_support = True

        self._client = self._make_client(url, security_enabled,
                                         ssl_cert_ca_verify)
        self._root = resource.Resource(self._client)

        # To store user info
        self._thread_local = threading.local()

        LOG.debug(
            "Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" %
            (self._url, self._security_enabled, self._superuser))

    @classmethod
    def from_config(cls, hdfs_config):
        fs_defaultfs = hdfs_config.FS_DEFAULTFS.get()

        return cls(url=_get_service_url(hdfs_config),
                   fs_defaultfs=fs_defaultfs,
                   logical_name=hdfs_config.LOGICAL_NAME.get(),