Пример #1
0
 def _post_data(self, path, data):
     """POST *data* to the metadata service at *path*; return True on success."""
     target = posixpath.join(CONF.metadata_base_url, path)
     LOG.debug('Posting metadata to: %s', target)
     # _get_response raises on failure, so reaching the return means success.
     self._get_response(request.Request(target, data=data))
     return True
Пример #2
0
 def delete(self, endpoint):
     """Send an HTTP DELETE request to ``self.url + endpoint``."""
     http_req = request.Request(self.url + endpoint)
     # urllib has no native DELETE support; override the method hook.
     http_req.get_method = lambda: 'DELETE'
     return self._open(http_req)
Пример #3
0
 def _get_data(self, path):
     """Fetch and return the raw metadata bytes stored under *path*."""
     full_url = posixpath.join(CONF.metadata_base_url, path)
     LOG.debug('Getting metadata from: %s', full_url)
     return self._get_response(request.Request(full_url)).read()
Пример #4
0
    def get(self,
            path,
            params=None,
            ssl=False,
            compress=True,
            try_json=True,
            is_retry=False,
            use_ordered_dict=False,
            out_folder=None,
            file_name=None,
            force_bytes=False,
            **kwargs):
        """ Returns result of an HTTP GET. Handles token timeout and all SSL mode.

        path: absolute URL or path relative to self.baseurl.
        params: optional dict of query parameters (not mutated).
        ssl: force https when True (also forced when self.all_ssl is set).
        compress: ask the server for gzip-encoded responses.
        try_json: parse the response body as JSON when True.
        is_retry: internal guard preventing endless relogin loops.
        use_ordered_dict: parse JSON objects into OrderedDict.
        out_folder / file_name: where a file response is saved on disk.
        force_bytes: return the bytes of a saved file instead of its path.
        """
        path = quote(path, ':/%')
        url = path
        if url.lower().find("https://") > -1 or\
           url.lower().find("http://") > -1:
            url = path
        elif len(url) == 0:
            url = self.baseurl
        elif (len(url) > 0 and url[0] == '/' ) == False and \
             self.baseurl.endswith('/') == False:
            url = "/{path}".format(path=url)

        if not url.startswith('http://') and \
           not url.startswith('https://'):
            url = self.baseurl + url
        if ssl or self.all_ssl:
            url = url.replace('http://', 'https://')

        # Add the token if logged in.  Copy params so the caller's dict is
        # not mutated by the additions below.
        params = {} if params is None else dict(params)
        if try_json:
            params['f'] = 'json'
        if self.is_logged_in:
            params['token'] = self.token
        if len(params.keys()) > 0:
            params = {k: jsonize_dict(v) for k, v in params.items()}
            url = "{url}?{params}".format(url=url, params=urlencode(params))

        _log.debug('REQUEST (get): ' + url)

        try:
            # Send the request and read the response
            headers = [('User-Agent', self._useragent)]
            if self._referer:
                headers.append(('Referer', self._referer))
            if compress:
                headers.append(('Accept-encoding', 'gzip'))
            if self._handlers is None:
                self._handlers = self.get_handlers()
            opener = request.build_opener(*self._handlers)
            opener.addheaders = headers

            req = request.Request(url, headers={i[0]: i[1] for i in headers})
            resp = opener.open(req)
            resp_data = self._process_response(resp,
                                               out_folder=out_folder,
                                               file_name=file_name)

            # If the response is a file saved to disk, return its path
            # (or its content when force_bytes is requested).
            if (len(resp_data) < 32767) and os.path.isfile(resp_data):
                if force_bytes:
                    # BUGFIX: close the file handle instead of leaking it.
                    with open(resp_data, 'rb') as file_handle:
                        return file_handle.read()
                return resp_data
            # If we're not trying to parse to JSON, return response as is
            if not try_json:
                return resp_data

            try:
                if use_ordered_dict:
                    resp_json = json.loads(resp_data,
                                           object_pairs_hook=OrderedDict)
                else:
                    resp_json = json.loads(resp_data)

                # Check for errors, and handle the case where the token timed
                # out during use (and simply needs to be re-generated)
                try:
                    if resp_json.get('error', None):
                        errorcode = resp_json['error']['code']
                        if errorcode == 498 and not is_retry:
                            _log.info('Token expired during get request, ' \
                                      + 'fetching a new token and retrying')
                            newtoken = self.relogin()
                            newpath = self._url_add_token(path, newtoken)
                            return self.get(path=newpath,
                                            params=params,
                                            ssl=ssl,
                                            compress=compress,
                                            try_json=try_json,
                                            is_retry=True)
                        elif errorcode == 498:
                            raise RuntimeError('Invalid token')
                        elif errorcode == 403:
                            message = resp_json['error'][
                                'message'] if 'message' in resp_json[
                                    'error'] else ''
                            if message == "SSL Required":
                                return self.get(path=path,
                                                params=params,
                                                ssl=True,
                                                compress=compress,
                                                try_json=try_json,
                                                is_retry=True)

                        self._handle_json_error(resp_json['error'])
                        return None
                except AttributeError:
                    # Top-level JSON object isnt a dict, so can't have an error
                    pass

                # If the JSON parsed correctly and there are no errors,
                # return the JSON
                return resp_json

            # If we couldnt parse the response to JSON, return it as is
            except ValueError:
                # BUGFIX: return the processed body; the original returned
                # the already-consumed response object.
                return resp_data

        # If we got an HTTPError when making the request check to see if it's
        # related to token timeout, in which case, regenerate a token
        except HTTPError as e:
            if e.code == 498 and not is_retry:
                _log.info('Token expired during get request, fetching a new ' \
                          + 'token and retrying')
                self.logout()
                newtoken = self.relogin()
                newpath = self._url_add_token(path, newtoken)
                # BUGFIX: pass arguments by keyword; the original positional
                # call bound ssl to params and try_json to ssl.
                return self.get(path=newpath,
                                params=params,
                                ssl=ssl,
                                compress=compress,
                                try_json=try_json,
                                is_retry=True)
            elif e.code == 498:
                raise RuntimeError('Invalid token')
            else:
                raise e
Пример #5
0
 def post(self, endpoint, data=None, content_type="application/json"):
     """POST *data* (JSON-serialized) to ``self.url + endpoint``.

     data: optional dict; an empty body `{}` is sent when falsy.
     content_type: value for the Content-Type header.
     """
     if not data:
         data = {}
     # BUGFIX: urllib requires a bytes body on Python 3; json.dumps
     # returns str, so encode it explicitly.
     body = json.dumps(data).encode('utf-8')
     req = request.Request(self.url + endpoint, data=body)
     req.add_header('Content-Type', content_type)
     return self._open(req)
Пример #6
0
def download_scripts(proxies=None, install_dir=None):
    """Download visdom's static JS/CSS/font dependencies into *install_dir*.

    proxies: optional proxy mapping forwarded to urllib's ProxyHandler.
    install_dir: destination directory; defaults to the directory of the
        installed ``visdom`` package.

    Individual download failures are logged and skipped, so a partial set
    of assets may result.
    """

    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        import visdom
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded (remote URL -> local filename):
    # NOTE(review): several keys below contain '[email protected]' where a
    # 'package@version' path segment is expected — this looks like text
    # mangled by an email-obfuscation filter; verify the real URLs against
    # the upstream visdom source before relying on them.
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        '%[email protected]/dist/jquery.min.js' % b:
        'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b:
        'bootstrap.min.js',
        '%[email protected]/css/styles.css' % b:
        'react-resizable-styles.css',
        '%[email protected]/css/styles.css' % b:
        'react-grid-layout-styles.css',
        '%[email protected]/dist/react-modal.min.js' % b:
        'react-modal.min.js',
        '%[email protected]/dist/react.min.js' % b:
        'react-react.min.js',
        '%[email protected]/dist/react-dom.min.js' % b:
        'react-dom.min.js',
        '%[email protected]' % b:
        'classnames',
        '%[email protected]' % b:
        'layout_bin_packer',
        'https://raw.githubusercontent.com/STRML/react-grid-layout/0.14.0/dist/' + 'react-grid-layout.min.js':
        'react-grid-layout.min.js',
        'https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_SVG':
        'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js':
        'plotly-plotly.min.js',
        '%scss/bootstrap.min.css' % bb:
        'bootstrap.min.css',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
        'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
        'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
        'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
        'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:
        'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
        else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory by substring match; '.css' wins when both
        # appear, and anything else (fonts, bare package names) lands in
        # 'fonts'. The MathJax URL contains '.js' so it goes to 'js'.
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file (skipped when already present on disk):
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename):
            # spoofed User-Agent: some CDNs reject the default urllib agent
            req = request.Request(key,
                                  headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.reason, key))
Пример #7
0
def download_tile(tile, url, pid, srtmv3, one, username, password):
    """Download one SRTM tile zip into the current directory.

    tile: tile name (e.g. 'N51E007').
    url: base download URL.
    pid: process id used only to build the temp map name.
    srtmv3: True for SRTM v3 (NASA Earthdata, authenticated download).
    one: True for 1-arcsecond (SRTMGL1) tiles, else 3-arcsecond (SRTMGL3).
    username / password: Earthdata credentials (SRTM v3 only).

    Returns 1 on success, 0 on failure (best-effort: download errors are
    swallowed and reported via the return value).
    """
    grass.debug("Download tile: %s" % tile, debug=1)
    # NOTE(review): 'output' is unused in this function — presumably the
    # caller builds the same name; confirm before removing.
    output = tile + ".r.in.srtm.tmp." + str(pid)
    if srtmv3:
        if one:
            local_tile = str(tile) + ".SRTMGL1.hgt.zip"
        else:
            local_tile = str(tile) + ".SRTMGL3.hgt.zip"
    else:
        local_tile = str(tile) + ".hgt.zip"

    urllib2.urlcleanup()

    if srtmv3:
        remote_tile = str(url) + local_tile
        goturl = 1

        try:
            # Earthdata requires basic auth plus a cookie-based redirect
            # dance, hence the cookie processor.
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(
                None, "https://urs.earthdata.nasa.gov", username, password
            )

            cookie_jar = CookieJar()

            opener = urllib2.build_opener(
                urllib2.HTTPBasicAuthHandler(password_manager),
                urllib2.HTTPCookieProcessor(cookie_jar),
            )
            urllib2.install_opener(opener)

            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)

            # BUGFIX: use a context manager; the original's 'fo.close'
            # (missing parentheses) never closed the file.
            with open(local_tile, "w+b") as fo:
                fo.write(response.read())
            time.sleep(0.5)
        except Exception:
            # BUGFIX: narrowed from a bare 'except:' so KeyboardInterrupt
            # and SystemExit are no longer swallowed.
            goturl = 0

        return goturl

    # SRTM subdirs: Africa, Australia, Eurasia, Islands, North_America, South_America
    for srtmdir in (
        "Africa",
        "Australia",
        "Eurasia",
        "Islands",
        "North_America",
        "South_America",
    ):
        remote_tile = str(url) + str(srtmdir) + "/" + local_tile
        goturl = 1

        try:
            # BUGFIX: fetch the per-directory URL; the original reused a
            # 'request' variable that is undefined on this code path, so
            # the NameError was silently eaten and the loop always failed.
            response = urllib2.urlopen(remote_tile)
            with open(local_tile, "w+b") as fo:
                fo.write(response.read())
            time.sleep(0.5)
        except Exception:
            goturl = 0

        if goturl == 1:
            return 1

    return 0
Пример #8
0
def _request_compute_metadata(path):
    """Query the GCE metadata server at *path* and return the body as bytes."""
    metadata_url = '%s/computeMetadata/v1/%s' % (_GCE_METADATA_ENDPOINT, path)
    # The Metadata-Flavor header is mandatory for the GCE metadata service.
    metadata_req = request.Request(metadata_url,
                                   headers={'Metadata-Flavor': 'Google'})
    return compat.as_bytes(request.urlopen(metadata_req).read())
Пример #9
0
    def _post(self,
              url,
              param_dict=None,
              files=None,
              securityHandler=None,
              additional_headers=None,
              custom_handlers=None,
              proxy_url=None,
              proxy_port=80,
              compress=True,
              out_folder=None,
              file_name=None):
        r"""
        Performs a POST operation on a URL.

        Inputs:
           param_dict - key/value pair of values
              ex: {"foo": "bar"}
           files - key/value pair of file objects where the key is
              the input name and the value is the file path
              ex: {"file": r"c:\temp\myfile.zip"}
           securityHandler - object that handles the token or other site
              security.  It must inherit from the base security class.
              ex: arcrest.AGOLSecurityHandler("SomeUsername", "SOMEPASSWORD")
           additional_headers - are additional key/value headers that a user
              wants to pass during the operation.
              ex: {"accept-encoding": "gzip"}
           custom_handlers - this is additional web operation handlers as a
              list of objects.
              Ex: [CustomAuthHandler]
           proxy_url - url of the proxy
           proxy_port - default 80, port number of the proxy
           compress - default true, determines if gzip should be used of not for
              the web operation.
           out_folder - if the URL requested returns a file, this will be the
              disk save location
           file_name - if the operation returns a file and the file name is not
             given in the header or a user wishes to override the return saved
             file name, provide value here.
        Output:
           returns dictionary or string depending on web operation.
        """
        # BUGFIX: the original used mutable default arguments ({} / []),
        # which are shared across calls; use None sentinels instead.
        if param_dict is None:
            param_dict = {}
        if files is None:
            files = {}
        if additional_headers is None:
            additional_headers = {}
        if custom_handlers is None:
            custom_handlers = []
        self._last_method = "POST"
        headers = {}
        return_value = None
        handlers = [RedirectHandler()]
        # The security handler may rewrite params and supply auth/cookie
        # handlers of its own.
        param_dict, handler, cj = self._processHandler(securityHandler,
                                                       param_dict)
        if handler is not None:
            handlers.append(handler)
        if cj is not None:
            handlers.append(request.HTTPCookieProcessor(cj))
        if isinstance(custom_handlers, list) and \
           len(custom_handlers) > 0:
            for h in custom_handlers:
                handlers.append(h)
        if compress:
            headers['Accept-Encoding'] = 'gzip'
        else:
            headers['Accept-Encoding'] = ''
        for k, v in additional_headers.items():
            headers[k] = v
        opener = request.build_opener(*handlers)
        request.install_opener(opener)
        opener.addheaders = [(k, v) for k, v in headers.items()]
        if len(files) == 0:
            # Plain form-encoded POST.
            data = urlencode(param_dict)
            if self.PY3:
                data = data.encode('ascii')
            opener.data = data
            resp = opener.open(url, data=data)
        else:
            # Multipart POST for file uploads.
            mpf = MultiPartForm(param_dict=param_dict, files=files)
            req = request.Request(url)
            body = mpf.make_result
            req.add_header('User-agent', self.useragent)
            req.add_header('Content-type', mpf.get_content_type())
            req.add_header('Content-length', len(body))
            req.data = body
            resp = request.urlopen(req)
        self._last_code = resp.getcode()
        self._last_url = resp.geturl()
        return_value = self._process_response(resp=resp, out_folder=out_folder)
        if isinstance(return_value, dict):
            # Retry once over https when the server demands SSL.
            if "error" in return_value and \
               'message' in return_value['error']:
                if return_value['error']['message'].lower(
                ) == 'request not made over ssl':
                    if url.startswith('http://'):
                        url = url.replace('http://', 'https://')
                        return self._post(url, param_dict, files,
                                          securityHandler, additional_headers,
                                          custom_handlers, proxy_url,
                                          proxy_port, compress, out_folder,
                                          file_name)
        # BUGFIX: collapsed the original's three redundant return
        # statements (the last of which was unreachable).
        return return_value
Пример #10
0
def download_scripts(proxies=None, install_dir=None):
    """Download visdom's static JS/CSS/font dependencies into *install_dir*.

    proxies: optional proxy mapping forwarded to urllib's ProxyHandler.
    install_dir: destination directory; defaults to the directory of the
        installed ``visdom`` package.

    A 'static/version.built' marker records the visdom version the assets
    were fetched for; when it does not match the current version, all
    files are re-downloaded. Individual download failures are logged and
    skipped.
    """
    import visdom
    print("Downloading scripts. It might take a while.")

    # location in which to download stuff:
    if install_dir is None:
        install_dir = os.path.dirname(visdom.__file__)

    # all files that need to be downloaded (remote URL -> local filename):
    # NOTE(review): several keys below contain '[email protected]' where a
    # 'package@version' path segment is expected — this looks like text
    # mangled by an email-obfuscation filter; verify the real URLs against
    # the upstream visdom source before relying on them.
    b = 'https://unpkg.com/'
    bb = '%[email protected]/dist/' % b
    ext_files = {
        # - js
        '%[email protected]/dist/jquery.min.js' % b: 'jquery.min.js',
        '%[email protected]/dist/js/bootstrap.min.js' % b: 'bootstrap.min.js',
        '%[email protected]/umd/react.production.min.js' % b: 'react-react.min.js',
        '%[email protected]/umd/react-dom.production.min.js' % b: 'react-dom.min.js',  # noqa
        '%[email protected]/dist/react-modal.min.js' % b: 'react-modal.min.js',  # noqa
        'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_SVG':  # noqa
            'mathjax-MathJax.js',
        # here is another url in case the cdn breaks down again.
        # https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
        'https://cdn.plot.ly/plotly-latest.min.js': 'plotly-plotly.min.js',
        # Stanford Javascript Crypto Library for Password Hashing
        '%[email protected]/sjcl.js' % b: 'sjcl.js',

        # - css
        '%[email protected]/css/styles.css' % b: 'react-resizable-styles.css',  # noqa
        '%[email protected]/css/styles.css' % b: 'react-grid-layout-styles.css',  # noqa
        '%scss/bootstrap.min.css' % bb: 'bootstrap.min.css',

        # - fonts
        '%[email protected]' % b: 'classnames',
        '%[email protected]' % b: 'layout_bin_packer',
        '%sfonts/glyphicons-halflings-regular.eot' % bb:
            'glyphicons-halflings-regular.eot',
        '%sfonts/glyphicons-halflings-regular.woff2' % bb:
            'glyphicons-halflings-regular.woff2',
        '%sfonts/glyphicons-halflings-regular.woff' % bb:
            'glyphicons-halflings-regular.woff',
        '%sfonts/glyphicons-halflings-regular.ttf' % bb:
            'glyphicons-halflings-regular.ttf',
        '%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb:  # noqa
            'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
    }

    # make sure all relevant folders exist:
    dir_list = [
        '%s' % install_dir,
        '%s/static' % install_dir,
        '%s/static/js' % install_dir,
        '%s/static/css' % install_dir,
        '%s/static/fonts' % install_dir,
    ]
    for directory in dir_list:
        if not os.path.exists(directory):
            os.makedirs(directory)

    # set up proxy handler:
    from six.moves.urllib import request
    from six.moves.urllib.error import HTTPError, URLError
    handler = request.ProxyHandler(proxies) if proxies is not None \
        else request.BaseHandler()
    opener = request.build_opener(handler)
    request.install_opener(opener)

    # Compare the recorded build version with the installed one; a
    # mismatch forces a full re-download below.
    # NOTE(review): 'here' is a module-level name not visible in this
    # chunk — presumably the directory of this file; confirm.
    built_path = os.path.join(here, 'static/version.built')
    is_built = visdom.__version__ == 'no_version_file'
    if os.path.exists(built_path):
        with open(built_path, 'r') as build_file:
            build_version = build_file.read().strip()
        if build_version == visdom.__version__:
            is_built = True
        else:
            os.remove(built_path)

    # download files one-by-one:
    for (key, val) in ext_files.items():

        # set subdirectory by substring match; '.css' wins when both
        # appear, everything else (fonts, bare package names) goes to
        # 'fonts'.
        sub_dir = 'fonts'
        if '.js' in key:
            sub_dir = 'js'
        if '.css' in key:
            sub_dir = 'css'

        # download file (skipped when present, unless a rebuild is forced):
        filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
        if not os.path.exists(filename) or not is_built:
            # spoofed User-Agent: some CDNs reject the default urllib agent
            req = request.Request(key,
                                  headers={'User-Agent': 'Chrome/30.0.0.0'})
            try:
                data = opener.open(req).read()
                with open(filename, 'wb') as fwrite:
                    fwrite.write(data)
            except HTTPError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.code, key))
            except URLError as exc:
                logging.error('Error {} while downloading {}'.format(
                    exc.reason, key))

    # Record the version the assets were fetched for.
    if not is_built:
        with open(built_path, 'w+') as build_file:
            build_file.write(visdom.__version__)
Пример #11
0
 def open(self,
          url,
          data=None,
          method=None,
          login=True,
          async_get=True,
          async_wait=True,
          retjson=True):
     """Send an HTTP request to ``self.base_url + url`` and return the result.

     data: dict (form-encoded) or, for method 'PUT', a string sent as UTF-8.
     method: optional HTTP verb override (e.g. 'PUT', 'DELETE').
     login: attach the session login cookie, logging in first if needed.
     async_get: when the server answers 201, poll the async task URL.
     async_wait: when the server answers 100, keep retrying while the
         remaining wait budget (self.async_wait_time) allows.
     retjson: parse the response as JSON and populate self.status,
         self.status_code and self.output; otherwise return the raw body.

     NOTE(review): relies on module-level names async_url,
     async_wait_time, async_first_wait_time and DEBUG_ASYNC that are not
     visible in this chunk — confirm their definitions.
     """
     url = self.base_url + url
     if data:
         try:
             # PUT bodies are sent raw; everything else is form-encoded.
             if method != 'PUT':
                 data = parse.urlencode(data).encode(encoding='UTF8')
             else:
                 data = data.encode('utf-8')
         except TypeError as ex:
             # Encoding failed (data presumably already bytes): send as-is.
             print(data)
             logger.debug(ex)
             pass
         req = request.Request(url, data)
     else:
         req = request.Request(url)
     if login:
         if not self.login_cookie:
             self.login()
         req.add_header("Cookie", self.login_cookie)
     for (k, v) in self.headers:
         req.add_header(k, v)
     if method:
         # urllib only issues GET/POST natively; override the verb hook.
         req.get_method = lambda: method
     try:
         resp = request.urlopen(req)
         self.resp = resp
         rdata = resp.read()
     except HTTPError as e:
         # HTTP error responses still carry a useful body; keep parsing it.
         rdata = e.fp.read()
     except URLError as e:
         # Connection-level failure: report a synthetic 500 and bail out.
         self.status_code = 500
         self.status = "ERROR"
         self.output = str(e)
         return
     except HTTPException as e:
         if six.PY3:
             rdata = e.args[0]
         else:
             raise e
     if not retjson:
         # Raw mode: stash status fields and return the body unparsed.
         logger.debug(dict(resp))
         self.data = rdata
         self.status_code = resp.code
         self.status = "OK" if resp.code == 200 else "ERROR"
         self.output = self.data
         return self.data
     if isinstance(rdata, bytes):
         rdata = rdata.decode('utf-8')
     # Extract the outermost {...} span, tolerating junk around the JSON.
     # NOTE(review): '\{.+\}' is not a raw string and triggers an invalid
     # escape-sequence warning on modern Python.
     rdata_reg = re.search('\{.+\}', rdata)
     rdata = rdata if rdata_reg is None else rdata_reg.group()
     try:
         rdata = json.loads(rdata)
         self.data = rdata
         self.status = self.data["status"]
         self.status_code = self.data["status_code"]
         self.output = self.data["output"]
     except ValueError as exc:
         # Body was not the expected JSON; fall back to best-effort
         # extraction of status fields from whatever we got.
         print('ValueError:', exc, ' ; rdata:', rdata)
         self.data = rdata
         try:
             self.status_code = self.data["status_code"]
             self.status = self.data["status"]
         except Exception as exc:
             print(exc)
             logger.debug("exe : %s" % exc)
             logger.debug("rdata : %s" % rdata)
             try:
                 self.status_code = rdata["status_code"]
                 self.status = rdata["status"]
             except Exception as ex:
                 self.status_code = 200
                 self.status = 200
             # Overrides whatever the fallbacks above assigned.
             self.status = "unknown"
         self.output = self.data
         if self.status_code == 200: self.status = "OK"
     if self.status_code == 201 and async_get:  # It's a async task !
         self.async_wait_time = async_wait_time
         time.sleep(async_first_wait_time)
         if DEBUG_ASYNC: logger.debug("jump to async")
         return self.open(async_url + '/' + self.output)
     if self.status_code == 100 and async_wait and self.async_wait_time > 0:  #  async task is running !
         time.sleep(1)
         self.async_wait_time -= 1
         if DEBUG_ASYNC: logger.debug(" async retry")
         return self.open(async_url + '/' + self.output)
     if self.async_wait_time <= 0:
         logger.error("Error : aysnc connection time out !!!")
         self.async_wait_time = async_wait_time
     logger.debug("return status  %s in( %s )  %s  secs" %
                  (self.status_code, async_wait, self.async_wait_time))
     return self.data
Пример #12
0
 def _get_data(self, path):
     """Issue a GET for *path* and return the raw response bytes."""
     response = self._get_response(request.Request(path))
     return response.read()
Пример #13
0
def _http_get(url):
    """Fetch *url* via HTTP GET and return the raw response body as bytes.

    A browser-like User-Agent is sent because some servers reject the
    default urllib agent. TLS verification uses the certifi CA bundle.
    """
    import ssl
    req = request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0')
    # BUGFIX: the 'cafile' argument to urlopen is deprecated since
    # Python 3.6 and removed in 3.13; build an SSLContext instead.
    context = ssl.create_default_context(cafile=certifi.where())
    return request.urlopen(req, context=context).read()
Пример #14
0
    def post(self,
             path,
             postdata=None,
             files=None,
             ssl=False,
             compress=True,
             is_retry=False,
             use_ordered_dict=False,
             add_token=True,
             verify_cert=True,
             token=None,
             **kwargs):
        """ Returns result of an HTTP POST. Supports Multipart requests.

        path: absolute URL or path relative to self.baseurl.
        postdata: optional dict of form fields (a new dict is created when
            None).
        files: optional dict of files to send as a multipart request.
        ssl: force https when True (also forced when self.all_ssl is set).
        compress: ask the server for gzip-encoded responses.
        is_retry: internal guard preventing endless relogin loops.
        use_ordered_dict: parse JSON objects into OrderedDict.
        add_token: attach the session token when logged in.
        verify_cert: when False, disables certificate verification
            process-wide (see warning below).
        token: explicit token overriding the session token.
        kwargs: 'out_folder' — where a file response is saved.
        """
        path = quote(path, ':/%')
        out_folder = kwargs.pop('out_folder', tempfile.gettempdir())
        # BUGFIX: postdata defaults to None but is indexed below when a
        # token is attached; normalize it to a dict up front.
        if postdata is None:
            postdata = {}

        url = path
        if url.lower().find("https://") > -1 or\
           url.lower().find("http://") > -1:
            url = path
        elif len(url) == 0:
            url = self.baseurl
        elif (len(url) > 0 and url[0] == '/' ) == False and \
           self.baseurl.endswith('/') == False:
            url = "/{path}".format(path=url)

        if not url.startswith('http://') and \
           not url.startswith('https://'):
            url = self.baseurl + url

        if ssl or self.all_ssl:
            url = url.replace('http://', 'https://')
        if verify_cert == False:
            # WARNING: this disables certificate verification for the
            # whole process, not just this request.
            # BUGFIX: import under an alias so the module does not clobber
            # the boolean 'ssl' parameter used by the retry calls below.
            import ssl as _ssl
            _ssl._create_default_https_context = _ssl._create_unverified_context
        # Add the token if logged in
        if add_token:
            if self.is_logged_in:
                postdata['token'] = self.token
        if token:
            postdata['token'] = token
        if _log.isEnabledFor(logging.DEBUG):
            msg = 'REQUEST: ' + url + ', ' + str(postdata)
            if files:
                msg += ', files=' + str(files)
            _log.debug(msg)

        # If there are files present, send a multipart request
        if files:
            mpf = MultiPartForm(param_dict=postdata, files=files)
            req = request.Request(url)
            body = mpf.make_result
            req.add_header('User-agent', self._useragent)
            req.add_header('Content-type', mpf.get_content_type())
            req.add_header('Content-length', len(body))
            req.data = body
            headers = [('User-Agent', self._useragent),
                       ('Content-type', mpf.get_content_type()),
                       ('Content-length', len(body))]
            if self._referer and \
               self._auth.lower() != 'pki':
                headers.append(('Referer', self._referer))
            if compress:
                headers.append(('Accept-encoding', 'gzip'))
            if self._handlers is None:
                self._handlers = self.get_handlers()
            opener = request.build_opener(*self._handlers)

            opener.addheaders = headers

            resp = opener.open(req)
            resp_data = self._process_response(resp)
        # Otherwise send a normal HTTP POST request
        else:
            encoded_postdata = None
            if postdata:
                postdata = {k: jsonize_dict(v) for k, v in postdata.items()}
                encoded_postdata = urlencode(postdata)
            headers = [('User-Agent', self._useragent)]
            if self._referer and \
                self._auth.lower() != 'pki':
                headers.append(('Referer', self._referer))
            if compress:
                headers.append(('Accept-encoding', 'gzip'))
            if self._handlers is None:
                self._handlers = self.get_handlers()

            opener = request.build_opener(*self._handlers)
            opener.addheaders = headers
            req = request.Request(url,
                                  data=encoded_postdata.encode('utf-8'),
                                  headers={i[0]: i[1]
                                           for i in headers})
            resp = opener.open(req)
            resp_data = self._process_response(resp,
                                               out_folder=out_folder,
                                               file_name=None)
        # Parse the response into JSON
        if _log.isEnabledFor(logging.DEBUG):
            _log.debug('RESPONSE: ' + url + ', ' + resp_data)
        if use_ordered_dict:
            resp_json = json.loads(resp_data, object_pairs_hook=OrderedDict)
        else:
            resp_json = json.loads(resp_data)

        # Check for errors, and handle the case where the token timed out
        # during use (and simply needs to be re-generated)
        try:
            if 'error' in resp_json or \
               ('status' in resp_json and \
                resp_json.get('status', None) != "success"):

                errorcode = resp_json['code'] if 'code' in resp_json else 0
                if errorcode == 498 and not is_retry:
                    _log.info(
                        'Token expired during post request, fetching a new ' +
                        'token and retrying')
                    self.logout()
                    newtoken = self.relogin()
                    postdata['token'] = newtoken
                    return self.post(path,
                                     postdata,
                                     files,
                                     ssl,
                                     compress,
                                     is_retry=True)
                elif errorcode == 498:
                    raise RuntimeError('Invalid token')
                elif errorcode == 403:
                    message = resp_json['error'][
                        'message'] if 'message' in resp_json['error'] else ''
                    if message == "SSL Required":
                        return self.post(path,
                                         postdata,
                                         files,
                                         ssl=True,
                                         compress=compress,
                                         token=token,
                                         verify_cert=verify_cert,
                                         is_retry=True)

                if 'status' in resp_json:
                    self._handle_json_error(resp_json, errorcode)
                else:
                    self._handle_json_error(resp_json['error'], errorcode)
                return None
        except AttributeError:
            # Top-level JSON object isnt a dict, so can't have an error
            pass
        return resp_json
Пример #15
0
def open_url(url,
             data=None,
             headers=None,
             method=None,
             use_proxy=True,
             force=False,
             last_mod_time=None,
             timeout=10,
             validate_certs=True,
             url_username=None,
             url_password=None,
             http_agent=None,
             force_basic_auth=False,
             follow_redirects='urllib2',
             client_cert=None,
             client_key=None,
             cookies=None):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment

    :param url: URL to fetch; ``user:pass@host`` credentials embedded in the
        netloc are extracted and stripped before the request is sent.
    :param data: payload bytes (or text, converted via ``to_bytes``).
    :param headers: dict of extra request headers; applied last, so they can
        override headers computed here.
    :param method: explicit HTTP verb; must be one of the standard methods.
    :param use_proxy: when False, an empty ProxyHandler disables proxies.
    :param force: when True, send ``cache-control: no-cache``.
    :param last_mod_time: datetime used for an ``If-Modified-Since`` header
        (ignored when ``force`` is True).
    :param timeout: socket timeout in seconds (Python >= 2.6 only).
    :param validate_certs: when False (and SSLContext is available), build a
        context that skips certificate and hostname verification.
    :param url_username/url_password: explicit credentials; fall back to
        URL-embedded credentials, then to ~/.netrc.
    :param force_basic_auth: send the Authorization header preemptively
        instead of waiting for a 401 challenge.
    :param follow_redirects: redirect policy name passed to
        RedirectHandlerFactory.
    :param client_cert/client_key: client TLS certificate and key paths.
    :param cookies: optional CookieJar for cookie handling.
    :returns: the response object from ``urlopen``.
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    parsed = generic_urlparse(urlparse(url))
    # FTP URLs get no auth handling at all; everything below is HTTP(S) only.
    if parsed.scheme != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed.netloc
        elif '@' in parsed.netloc:
            # Credentials embedded in the URL (user:pass@host) take effect
            # only when no explicit username was given.
            credentials, netloc = parsed.netloc.split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed_list = parsed.as_list()
            parsed_list[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed_list)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for  urls
            # for which `theurl` is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)
            digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)
            handlers.append(digest_authhandler)

        elif username and force_basic_auth:
            # Preemptive basic auth: send the header up front rather than
            # waiting for a 401 challenge (some APIs never send one).
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            # No credentials supplied anywhere: fall back to ~/.netrc
            # (or the file named by $NETRC) for this host.
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed.hostname)
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(
                        username, password)

    if not use_proxy:
        # An empty ProxyHandler overrides environment proxy settings.
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key,
                                   context=context))
    elif client_cert:
        handlers.append(
            HTTPSClientAuthHandler(client_cert=client_cert,
                                   client_key=client_key))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    # add some nicer cookie handling
    if cookies is not None:
        handlers.append(urllib_request.HTTPCookieProcessor(cookies))

    opener = urllib_request.build_opener(*handlers)
    # NOTE: install_opener makes this opener global for the process, so any
    # later plain urlopen() call also goes through these handlers.
    urllib_request.install_opener(opener)

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' %
                                  method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    if http_agent:
        request.add_header('User-agent', http_agent)

    # Cache control
    # Either we directly force a cache refresh
    if force:
        request.add_header('cache-control', 'no-cache')
    # or we do it if the original is more recent than our copy
    elif last_mod_time:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            # BUGFIX: the message previously named fetch_url(); this is
            # open_url(), and the wrong name misleads callers debugging it.
            raise ValueError("headers provided to open_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r
Пример #16
0
    def request(self, path, request, body=None, **kwargs):
        """Make an HTTP request and return the results.

        :param path: Path used with the initialized URL to make a request.
        :param request: HTTP request type (GET, POST, PUT, DELETE).
        :param body: HTTP body of request.
        :key accept: Set HTTP 'Accept' header with this value.
        :key base_path: Override the base_path for this request.
        :key content: Set HTTP 'Content-Type' header with this value.
        :key maxreqretries: Max attempts for busy/unauthorized retries
            (default 10).
        """
        # Copy the shared headers so per-request additions (accept,
        # content-length, auth session) never leak back into self.headers.
        out_hdrs = dict.copy(self.headers)
        if kwargs.get("accept"):
            out_hdrs['accept'] = kwargs.get("accept")

        # Serialize dict bodies to JSON text before sending.
        if body:
            if isinstance(body, dict):
                body = six.text_type(jsonutils.dumps(body))

        if body and len(body):
            out_hdrs['content-length'] = len(body)

        zfssaurl = self._path(path, kwargs.get("base_path"))
        req = urlrequest.Request(zfssaurl, body, out_hdrs)
        # urllib picks GET/POST automatically; force the caller's verb.
        req.get_method = lambda: request
        maxreqretries = kwargs.get("maxreqretries", 10)
        retry = 0
        response = None

        log_debug_msg(self, 'Request: %s %s' % (request, zfssaurl))
        log_debug_msg(self, 'Out headers: %s' % out_hdrs)
        if body and body != '':
            log_debug_msg(self, 'Body: %s' % body)

        # Retry loop: 503 (busy) and 401/500 (session expired) responses are
        # retried up to maxreqretries with a 1-second backoff; anything else
        # is returned/raised immediately.
        while retry < maxreqretries:
            try:
                response = urlrequest.urlopen(req, timeout=self.timeout)
            except urlerror.HTTPError as err:
                if err.code == http_client.NOT_FOUND:
                    log_debug_msg(self, 'REST Not Found: %s' % err.code)
                else:
                    log_debug_msg(self, ('REST Not Available: %s') % err.code)

                # Server busy: wait and retry the same request.
                if (err.code == http_client.SERVICE_UNAVAILABLE
                        and retry < maxreqretries):
                    retry += 1
                    time.sleep(1)
                    log_debug_msg(self,
                                  ('Server Busy retry request: %s') % retry)
                    continue
                # Auth failure outside the auth endpoint itself: refresh the
                # session token and retry with the new x-auth-session header.
                if ((err.code == http_client.UNAUTHORIZED
                     or err.code == http_client.INTERNAL_SERVER_ERROR)
                        and '/access/v1' not in zfssaurl):
                    try:
                        log_debug_msg(self, ('Authorizing request: '
                                             '%(zfssaurl)s'
                                             'retry: %(retry)d .') % {
                                                 'zfssaurl': zfssaurl,
                                                 'retry': retry
                                             })
                        self._authorize()
                        req.add_header('x-auth-session',
                                       self.headers['x-auth-session'])
                    except RestClientError:
                        log_debug_msg(self, ('Cannot authorize.'))
                    retry += 1
                    time.sleep(1)
                    continue

                # Non-retryable HTTP error: wrap it in a RestResult.
                return RestResult(self.log_function, err=err)

            except urlerror.URLError as err:
                # Transport-level failure (DNS, refused connection, ...).
                log_debug_msg(self, ('URLError: %s') % err.reason)
                raise RestClientError(-1,
                                      name="ERR_URLError",
                                      message=err.reason)
            break

        # Retries exhausted while the service stayed unavailable.
        if ((response
             and response.getcode() == http_client.SERVICE_UNAVAILABLE)
                and retry >= maxreqretries):
            raise RestClientError(response.getcode(),
                                  name="ERR_HTTPError",
                                  message="REST Not Available: Disabled")

        return RestResult(self.log_function, response=response)
Пример #17
0
def destral(modules,
            tests,
            all_tests=None,
            enable_coverage=None,
            report_coverage=None,
            report_junitxml=None,
            dropdb=None,
            requirements=None,
            **kwargs):
    """Run spec and unit test suites for OpenERP modules.

    When *modules* is empty, the modules to test are auto-detected from the
    files touched by the current pull request (GitHub API, using the
    CI_PULL_REQUEST / GITHUB_TOKEN / CI_REPO environment variables) or, failing
    that, from the last git commit.

    :param modules: explicit list of module names to test (may be empty).
    :param tests: specific test names passed to get_unittest_suite.
    :param all_tests: also run the unit tests of each module's dependencies.
    :param enable_coverage: save coverage data after the run.
    :param report_coverage: print a coverage report after the run.
    :param report_junitxml: directory where JUnit XML reports are written;
        falls back to the DESTRAL_JUNITXML environment variable.
    :param dropdb: drop the test database after each suite.
    :param requirements: install each module's requirements before testing.
    :key enable_lint: run the linter over the tested modules (required kwarg).

    Exits the process with status 1 if any suite fails.
    """
    enable_lint = kwargs.pop('enable_lint')
    sys.argv = sys.argv[:1]
    service = OpenERPService()
    # JUnit XML destination: explicit argument wins, else environment.
    if report_junitxml:
        os.environ['DESTRAL_JUNITXML'] = report_junitxml
    else:
        report_junitxml = os.environ.get('DESTRAL_JUNITXML', False)
    if report_junitxml:
        junitxml_directory = os.path.abspath(report_junitxml)
        if not os.path.isdir(junitxml_directory):
            os.makedirs(junitxml_directory)
    if not modules:
        # No explicit modules: derive them from changed files.
        ci_pull_request = os.environ.get('CI_PULL_REQUEST')
        token = os.environ.get('GITHUB_TOKEN')
        repository = os.environ.get('CI_REPO')
        if ci_pull_request and token and repository:
            try:
                int(ci_pull_request)
            except ValueError:
                # If CI_PULL_REQUEST contains URL instead of PR number, get it
                ci_pull_request = ci_pull_request.split('/')[-1]
            url = 'https://api.github.com/repos/{repo}/pulls/{pr_number}'.format(
                repo=repository, pr_number=ci_pull_request)
            req = urllib2.Request(url,
                                  headers={
                                      'Authorization':
                                      'token {0}'.format(token),
                                      'Accept': 'application/vnd.github.patch'
                                  })
            f = urllib2.urlopen(req)
            paths = find_files(f.read())
            logger.info('Files from Pull Request: {0}: {1}'.format(
                ci_pull_request, ', '.join(paths)))
        else:
            # universal_newlines=True makes check_output return text on
            # Python 3 (it returns bytes otherwise, which would break the
            # str split below); on Python 2 it is a no-op for this usage.
            paths = subprocess.check_output(
                ["git", "diff", "--name-only", "HEAD~1..HEAD"],
                universal_newlines=True)
            paths = [x for x in paths.split('\n') if x]
            logger.info('Files from last commit: {}'.format(', '.join(paths)))
        modules_to_test = []
        for path in paths:
            module = detect_module(path)
            if module and module not in modules_to_test:
                modules_to_test.append(module)
    else:
        # Copy so the caller's list is never mutated.
        modules_to_test = modules[:]

    results = []
    addons_path = service.config['addons_path']
    root_path = service.config['root_path']

    # Coverage scope: the whole server when no modules, else just the
    # tested modules (excluding legacy __terp__.py descriptors).
    if not modules_to_test:
        coverage_config = {'source': [root_path], 'omit': ['*/addons/*/*']}
    else:
        coverage_config = {
            'source': coverage_modules_path(modules_to_test, addons_path),
            'omit': ['*/__terp__.py']
        }

    coverage = OOCoverage(**coverage_config)
    coverage.enabled = (enable_coverage or report_coverage)

    junitxml_suites = []

    # Server-level spec suite runs first, before any module suites.
    coverage.start()
    server_spec_suite = get_spec_suite(root_path)
    if server_spec_suite:
        logging.info('Spec testing for server')
        report = run_spec_suite(server_spec_suite)
        results.append(not len(report.failed_examples) > 0)
        if report_junitxml:
            junitxml_suites += report.create_report_suites()
    coverage.stop()

    logger.info('Modules to test: {}'.format(','.join(modules_to_test)))
    for module in modules_to_test:
        with RestorePatchedRegisterAll():
            if requirements:
                install_requirements(module, addons_path)
            spec_suite = get_spec_suite(os.path.join(addons_path, module))
            if spec_suite:
                logger.info('Spec testing module %s', module)
                coverage.start()
                report = run_spec_suite(spec_suite)
                coverage.stop()
                results.append(not len(report.failed_examples) > 0)
                if report_junitxml:
                    junitxml_suites += report.create_report_suites()
            logger.info('Unit testing module %s', module)
            os.environ['DESTRAL_MODULE'] = module
            coverage.start()
            suite = get_unittest_suite(module, tests)
            suite.drop_database = dropdb
            suite.config['all_tests'] = all_tests
            if all_tests:
                # Also run the test suites of every dependency, skipping
                # tests already present in this module's suite.
                for m in get_dependencies(module, addons_path):
                    for test in get_unittest_suite(m):
                        if test not in suite:
                            suite.addTest(test)
            result = run_unittest_suite(suite)
            coverage.stop()
            results.append(result.wasSuccessful())
            if report_junitxml:
                junitxml_suites.append(result.get_test_suite(module))
    if report_junitxml:
        from junit_xml import TestSuite
        for suite in junitxml_suites:
            with open(os.path.join(report_junitxml, suite.name + '.xml'),
                      'w') as report_file:
                report_file.write(TestSuite.to_xml_string([suite]))
        logger.info('Saved report XML on {}/'.format(report_junitxml))
    if report_coverage:
        coverage.report()
    if enable_coverage:
        coverage.save()

    if enable_lint:
        modules_path = [
            '{}/{}'.format(addons_path, m) for m in modules_to_test
        ]
        if modules_path:
            run_linter(modules_path)

    # Any failed suite makes the whole run fail.
    if not all(results):
        sys.exit(1)
Пример #18
0
 def get(self, endpoint=None, cookie=None):
     """Send a GET request for *endpoint* (appended to ``self.url``).

     :param endpoint: path fragment appended to the base URL.
     :param cookie: optional cookie string to attach to the request.
     :returns: the response object from the opener.
     """
     http_request = request.Request(self.url + endpoint)
     if cookie:
         http_request.add_header('cookie', cookie)
     return self.opener.open(http_request)
Пример #19
0
def request(url,
            endpoint=IDP_ENDPOINTS['LIGO.ORG'],
            use_kerberos=None,
            debug=False):
    """Request the given URL using ECP shibboleth authentication

    This requires an active Kerberos ticket for the user, to get one:

        >>> from ligo.org import kinit
        >>> kinit('albert.einstein')

    Then request as follows

        >>> from ligo.org import request
        >>> response = request(myurl)
        >>> print(response.read())

    Adapted from
    https://wiki.shibboleth.net/confluence/download/attachments/4358416/ecp.py

    Parameters
    ----------
    url : `str`
        URL path for request

    endpoint : `str`
        ECP endpoint URL for request

    use_kerberos : `bool`, optional
        use existing kerberos credential for login, default is to try, but
        fall back to username/password prompt

    debug : `bool`, optional, default: `False`
        query in verbose debugging mode

    Returns
    -------
    response : `str`
        the raw (decoded) response from the URL, probably XML/HTML or JSON
    """
    login_host = urlparse(endpoint).netloc

    # create a cookie jar and cookie handler (and read existing cookies)
    cookie_jar = ECPCookieJar()

    if os.path.exists(COOKIE_JAR):
        try:
            cookie_jar.load(COOKIE_JAR, ignore_discard=True)
        except http_cookiejar.LoadError as e:
            # A corrupt jar is not fatal; start with an empty one.
            warnings.warn('Caught error loading ECP cookie: %s' % str(e))

    cookie_handler = urllib_request.HTTPCookieProcessor(cookie_jar)

    # need an instance of HTTPS handler to do HTTPS
    httpsHandler = urllib_request.HTTPSHandler(debuglevel=0)
    if debug:
        httpsHandler.set_http_debuglevel(1)

    # create the base opener object
    opener = urllib_request.build_opener(cookie_handler, httpsHandler)

    # get kerberos credentials if available; when use_kerberos is None we
    # auto-detect by listing the ticket cache.
    if use_kerberos is None:
        try:
            creds = klist()
        except KerberosError:
            use_kerberos = False
        else:
            if creds:
                use_kerberos = True
            else:
                use_kerberos = False
    if use_kerberos:
        opener.add_handler(
            HTTPNegotiateAuthHandler(service_principal='HTTP@%s' % login_host))

    # -- intiate ECP request --------------------

    # headers needed to indicate to the SP an ECP request
    headers = {
        'Accept':
        'text/html; application/vnd.paos+xml',
        'PAOS':
        'ver="urn:liberty:paos:2003-08";'
        '"urn:oasis:names:tc:SAML:2.0:profiles:SSO:ecp"',
    }

    # request target from SP
    request = urllib_request.Request(url=url, headers=headers)
    response = opener.open(request)

    # convert the SP resonse from string to etree Element object
    sp_response = etree.XML(response.read())

    # pick out the relay state element from the SP so that it can
    # be included later in the response to the SP
    namespaces = {
        'ecp': 'urn:oasis:names:tc:SAML:2.0:profiles:SSO:ecp',
        'S': 'http://schemas.xmlsoap.org/soap/envelope/',
        'paos': 'urn:liberty:paos:2003-08'
    }

    relay_state = sp_response.xpath("//ecp:RelayState",
                                    namespaces=namespaces)[0]

    # make a deep copy of the SP response and then remove the header
    # in order to create the package for the IdP
    idp_request = deepcopy(sp_response)
    header = idp_request[0]
    idp_request.remove(header)

    # -- authenticate with endpoint -------------

    request = urllib_request.Request(endpoint,
                                     data=etree.tostring(idp_request))
    request.get_method = lambda: 'POST'
    # BUGFIX: the MIME type was previously the typo 'test/xml'.
    request.add_header('Content-Type', 'text/xml; charset=utf-8')

    # get credentials for non-kerberos request
    if not use_kerberos:
        # prompt the user for a password
        login = input("Enter username for %s: " % login_host)
        password = getpass.getpass("Enter password for login '%s': " % login)
        # combine the login and password, base64 encode, and send
        # using the Authorization header.
        # b64encode replaces the deprecated base64.encodestring (removed in
        # Python 3.9); it also emits no newlines, so no stripping is needed.
        base64string = base64.b64encode(
            ('%s:%s' % (login, password)).encode()).decode()
        request.add_header('Authorization', 'Basic %s' % base64string)

    response = opener.open(request)
    idp_response = etree.XML(response.read())

    assertion_consumer_service = idp_response.xpath(
        "/S:Envelope/S:Header/ecp:Response/@AssertionConsumerServiceURL",
        namespaces=namespaces)[0]

    # make a deep copy of the IdP response and replace its
    # header contents with the relay state initially sent by
    # the SP
    sp_package = deepcopy(idp_response)
    sp_package[0][0] = relay_state

    headers = {'Content-Type': 'application/vnd.paos+xml'}

    # POST the package to the SP
    request = urllib_request.Request(url=assertion_consumer_service,
                                     data=etree.tostring(sp_package),
                                     headers=headers)
    request.get_method = lambda: 'POST'
    response = opener.open(request)

    # -- cache cookies --------------------------

    cookie_jar.save(COOKIE_JAR, ignore_discard=True)

    # -- actually send GET ----------------------

    # The session cookies are now established, so a plain GET succeeds.
    request = urllib_request.Request(url=url)
    response = opener.open(request)
    return response.read()
Пример #20
0
 def get(self, endpoint):
     """Open ``self.url + endpoint`` via the instance opener and return
     the response object."""
     target_request = request.Request(self.url + endpoint)
     return self._open(target_request)
Пример #21
0
def _request_compute_metadata(path):
    """Fetch *path* from the GCE metadata server and return it as text.

    The mandatory 'Metadata-Flavor: Google' header distinguishes a
    deliberate metadata request from accidental traffic.
    """
    metadata_url = '%s/computeMetadata/v1/%s' % (_gce_metadata_endpoint(),
                                                 path)
    metadata_request = request.Request(metadata_url,
                                       headers={'Metadata-Flavor': 'Google'})
    metadata_response = request.urlopen(metadata_request)
    return _as_text(metadata_response.read())
Пример #22
0
def urlopen(url, *args, **kwargs):
    """Wrapper around urlopen. Same function as 'urlopen', but with the
    ability to define headers.
    """
    wrapped_request = urlrequest.Request(url, headers=HEADERS)
    return urlrequest.urlopen(wrapped_request, *args, **kwargs)