class ConfluenceFuturesAPI(ConfluenceAPI):
    """Asynchronous Confluence API client backed by concurrent.futures."""

    def __init__(self, username, password, uri_base,
                 user_agent=ConfluenceAPI.DEFAULT_USER_AGENT,
                 executor=None, max_workers=10):
        """Set up the futures-based API client.

        :param username: Confluence username.
        :param password: Confluence password.
        :param uri_base: Base url of the Confluence wiki (e.g. myorg.atlassian.com/wiki).
        :param user_agent: (Optional) User-agent header sent on API requests.
                           Default: PythonConfluenceAPI.
        :param executor: (Optional) concurrent.futures executor that powers the
                         API calls. Default: None (a new ThreadPoolExecutor is
                         created by the session).
        :param max_workers: (Optional) Worker-thread count used when no
                            executor is supplied and the default
                            ThreadPoolExecutor is spawned.
        """
        super(ConfluenceFuturesAPI, self).__init__(username, password, uri_base, user_agent)
        self.executor = executor
        self.max_workers = max_workers

    def _start_http_session(self):
        """Create a fresh FuturesSession, discarding prior cookies and session state."""
        api_logger.debug("Starting new HTTP session...")
        self.session = FuturesSession(executor=self.executor, max_workers=self.max_workers)
        self.session.headers.update({"User-Agent": self.user_agent})
        if self.username and self.password:
            api_logger.debug("Requests will use authorization.")
            self.session.auth = HTTPBasicAuth(self.username, self.password)

    def _service_request(self, request_type, sub_uri, params=None, callback=None,
                         raise_for_status=True, raw=False, **kwargs):
        """Issue an HTTP request on the current session and return its future.

        :param request_type: Request type string (e.g. "POST", "GET", "PUT").
        :param sub_uri: REST endpoint (sub-uri) to call, joined onto uri_base.
        :param params: (Optional) HTTP request parameters. Default: None.
        :param callback: (Optional) Callable applied to the response in the
                         background; its return value becomes the future's
                         result. Default: None — the result is decoded JSON,
                         or raw content when ``raw`` is set.
        :param raise_for_status: (Optional) When True, requests.HTTPError is
                                 raised for 4xx/5xx status codes. Default: True.
        :param raw: (Optional) Without a callback, return raw response content
                    when True, otherwise parse the body as JSON. Default: False.
        :param kwargs: Extra keyword arguments forwarded to session.request.
        :return: The concurrent.futures future for the API call.
        """
        api_logger.debug("Sending request: {} ({})".format(sub_uri, request_type))
        if not self.session:
            self._start_http_session()
        uri = urljoin(self.uri_base, sub_uri)
        if params:
            kwargs["params"] = params

        # Single background callback: raise on bad status first, then either
        # delegate to the user callback or decode the body ourselves.
        def base_callback(_, response):
            if raise_for_status:
                response.raise_for_status()
            if callback:
                return callback(response)
            return response.content if raw else json.loads(response.text)

        return self.session.request(request_type, uri,
                                    background_callback=base_callback, **kwargs)
# Example #2
# 0
class ConfluenceFuturesAPI(ConfluenceAPI):
    """Confluence API variant whose calls return concurrent.futures futures."""

    def __init__(self,
                 username,
                 password,
                 uri_base,
                 user_agent=ConfluenceAPI.DEFAULT_USER_AGENT,
                 executor=None,
                 max_workers=10):
        """Initialize the async concurrent.futures API object.

        :param username: Confluence username.
        :param password: Confluence password.
        :param uri_base: Base url of the Confluence wiki (e.g. myorg.atlassian.com/wiki).
        :param user_agent: (Optional) User-agent header for API requests.
                           Default: PythonConfluenceAPI.
        :param executor: (Optional) Executor powering the API calls.
                         Default: None — a new ThreadPoolExecutor is created.
        :param max_workers: (Optional) Thread count for the default
                            ThreadPoolExecutor when no executor is given.
        """
        super(ConfluenceFuturesAPI, self).__init__(username, password,
                                                   uri_base, user_agent)
        self.executor = executor
        self.max_workers = max_workers

    def _start_http_session(self):
        """Start a new requests HTTP session, clearing cookies and session data."""
        api_logger.debug("Starting new HTTP session...")
        self.session = FuturesSession(executor=self.executor,
                                      max_workers=self.max_workers)
        self.session.headers.update({"User-Agent": self.user_agent})
        if self.username and self.password:
            api_logger.debug("Requests will use authorization.")
            self.session.auth = HTTPBasicAuth(self.username, self.password)

    def _service_request(self,
                         request_type,
                         sub_uri,
                         params=None,
                         callback=None,
                         raise_for_status=True,
                         raw=False,
                         **kwargs):
        """Dispatch an HTTP request via the current session.

        :param request_type: Request type string ("POST", "GET", "PUT", ...).
        :param sub_uri: REST endpoint (sub-uri), joined onto uri_base.
        :param params: (Optional) HTTP request parameters. Default: None.
        :param callback: (Optional) Callable run against the response in the
                         background; its return value becomes the future's
                         result. Default: None — decoded JSON or raw content.
        :param raise_for_status: (Optional) Raise requests.HTTPError on
                                 4xx/5xx when True. Default: True.
        :param raw: (Optional) Without a callback: True returns raw content,
                    False parses the body as JSON. Default: False.
        :param kwargs: Extra keyword arguments for session.request.
        :return: The concurrent.futures future holding the API call's result.
        """
        api_logger.debug("Sending request: {} ({})".format(sub_uri, request_type))
        if not self.session:
            self._start_http_session()
        target = urljoin(self.uri_base, sub_uri)
        if params:
            kwargs.update(params=params)

        # Decide up-front how the response body will be post-processed.
        if callback:
            process = callback
        else:
            def process(response):
                return response.content if raw else json.loads(response.text)

        def base_callback(_, response):
            if raise_for_status:
                response.raise_for_status()
            return process(response)

        future = self.session.request(request_type, target,
                                      background_callback=base_callback, **kwargs)
        return future
# Example #3
# 0
class THttpClient(TTransportBase):
    '''Http implementation of TTransport base.

    Supports several HTTP backends selected by the ``request`` argument:
    'httplib' (default), 'hyper', 'httpx', 'requests', 'requests-futures'
    and 'httplib2'.
    '''

    def __init__(self, uri_or_host, port=None, path=None, customThrift=False, request='httplib', http2=False, proxy_host=None, proxy_port=None, proxy_auth=None):
        '''THttpClient supports two different types constructor parameters.

        THttpClient(host, port, path) - deprecated
        THttpClient(uri)

        Only the second supports https.

        :param customThrift: when True, the HTTP connection object is created
                             eagerly here and flush() periodically recycles it;
                             otherwise the connection is created in open().
        :param request: backend library to use (see class docstring).
        :param http2: use HTTP/2 where the backend supports it (hyper/httpx).
        :param proxy_host: optional proxy hostname (stored as ``realhost``).
        :param proxy_port: optional proxy port (stored as ``realport``).
        :param proxy_auth: optional pre-built Proxy-Authorization header value.
        '''
        if port is not None:
            warnings.warn(
                'Please use the THttpClient("http://host:port/path") syntax',
                DeprecationWarning,
                stacklevel=2
            )
            self.host = uri_or_host
            self.port = port
            assert path
            self.path = path
            self.scheme = 'http'
        else:
            parsed = urllib.parse.urlparse(uri_or_host)
            self.scheme = parsed.scheme
            assert self.scheme in ('http', 'https')
            if self.scheme == 'http':
                self.port = parsed.port or http_client.HTTP_PORT
            elif self.scheme == 'https':
                self.port = parsed.port or http_client.HTTPS_PORT
            self.host = parsed.hostname
            self.path = parsed.path
            if parsed.query:
                self.path += '?%s' % parsed.query
        self.request = request
        self.http2 = http2
        self.realhost = proxy_host
        self.realport = proxy_port
        self.proxy_auth = proxy_auth
        self.__wbuf = BytesIO()
        # Proxy-Authorization headers are only pre-built for https (used by
        # the CONNECT tunnel); plain-http proxying sends the full URL instead.
        if self.scheme == 'https' and self.using_proxy() and self.proxy_auth:
            self.proxy_headers = {'Proxy-Authorization': self.proxy_auth}
        else:
            self.proxy_headers = None
        self.url = '%s://%s:%s%s' % (self.scheme, self.host, self.port, self.path)
        if customThrift:
            if self.request == 'hyper':
                if self.http2:
                    self.__http = hyper.HTTP20Connection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
                else:
                    self.__http = hyper.HTTPConnection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
            elif self.request == 'httpx':
                if self.http2:
                    self.__http = httpx.AsyncClient(base_url='%s://%s' % (self.scheme, self.host), http2=self.http2)
                else:
                    self.__http = httpx.Client(base_url='%s://%s' % (self.scheme, self.host))
            elif self.request == 'requests':
                self.__http = requests.Session()
                if self.using_proxy():
                    self.__http.proxies = urllib.request.getproxies()
            elif self.request == 'requests-futures':
                self.__http = FuturesSession()
                if self.using_proxy():
                    self.__http.proxies = urllib.request.getproxies()
            elif self.request == 'httplib2':
                self.__http = httplib2.Http()
            else:
                if self.scheme == 'http':
                    self.__http = http_client.HTTPConnection(self.host, self.port)
                elif self.scheme == 'https':
                    self.__http = http_client.HTTPSConnection(self.host, self.port)
                    if self.using_proxy():
                        self.__http.set_tunnel(self.realhost, self.realport, self.proxy_headers)
        else:
            self.__http = None
        # httpx in HTTP/2 mode is an AsyncClient; keep an event loop so the
        # synchronous flush() path can drive it (see httpx_flush()).
        # NOTE(review): asyncio.get_event_loop() outside a running loop is
        # deprecated in recent Python — consider asyncio.new_event_loop().
        self.__async_loop = asyncio.get_event_loop() if self.request == 'httpx' and self.http2 else None
        self.__http_response = None
        self.__response_data = None
        self.__last_read = 0
        self.__timeout = None
        self.__custom_headers = None
        self.__time = time.time()
        self.__custom_thrift = customThrift
        self.__loop = 0

    @staticmethod
    def basic_proxy_auth_header(proxy):
        """Build a 'Basic ...' Proxy-Authorization value from a parsed proxy URL.

        :param proxy: a urllib.parse result (or similar) with username/password.
        :return: header value string, or None when there are no credentials.
        """
        if proxy is None or not proxy.username:
            return None
        ap = '%s:%s' % (urllib.parse.unquote(proxy.username),
                        urllib.parse.unquote(proxy.password))
        # Bug fix: base64.b64encode requires bytes on Python 3, and the result
        # must be decoded back to str before concatenating with 'Basic '.
        cr = base64.b64encode(ap.encode('utf-8')).strip().decode('ascii')
        return 'Basic ' + cr

    def using_proxy(self):
        """True when a proxy host was configured."""
        return self.realhost is not None

    def open(self):
        """Create the underlying HTTP connection for the configured backend."""
        if self.request == 'hyper':
            if self.http2:
                self.__http = hyper.HTTP20Connection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
            else:
                self.__http = hyper.HTTPConnection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
        elif self.request == 'httpx':
            if self.http2:
                self.__http = httpx.AsyncClient(base_url='%s://%s' % (self.scheme, self.host), http2=self.http2)
            else:
                self.__http = httpx.Client(base_url='%s://%s' % (self.scheme, self.host))
        elif self.request == 'requests':
            self.__http = requests.Session()
            if self.using_proxy():
                self.__http.proxies = urllib.request.getproxies()
        elif self.request == 'requests-futures':
            self.__http = FuturesSession()
            if self.using_proxy():
                self.__http.proxies = urllib.request.getproxies()
        elif self.request == 'httplib2':
            self.__http = httplib2.Http()
        else:
            if self.scheme == 'http':
                self.__http = http_client.HTTPConnection(self.host, self.port)
            elif self.scheme == 'https':
                self.__http = http_client.HTTPSConnection(self.host, self.port)
                if self.using_proxy():
                    self.__http.set_tunnel(self.realhost, self.realport, self.proxy_headers)

    def close(self):
        """Close the connection (httpx clients are dropped without close()) and reset state."""
        if self.request != 'httpx':
            self.__http.close()
        self.__http = None
        self.reset()

    def reset(self):
        """Discard any buffered response state."""
        self.__http_response = None
        self.__response_data = None
        self.__last_read = 0

    def getHeaders(self):
        # NOTE(review): self.headers is only assigned during flush(); calling
        # this before the first flush raises AttributeError.
        return self.headers

    def isOpen(self):
        """True while a connection object exists."""
        return self.__http is not None

    def setTimeout(self, ms):
        """Set the socket timeout in milliseconds (None disables it)."""
        if not hasattr(socket, 'getdefaulttimeout'):
            raise NotImplementedError

        if ms is None:
            self.__timeout = None
        else:
            self.__timeout = ms / 1000.0

    def setCustomHeaders(self, headers):
        """Replace the custom headers dict sent with each request."""
        self.__custom_headers = headers

    def read(self, sz):
        """Read up to ``sz`` bytes of the buffered response body."""
        if self.request in ['httpx', 'httplib2', 'requests', 'requests-futures']:
            # These backends buffer the whole body; serve slices with a cursor.
            max_sz = self.__last_read + sz
            min_sz = self.__last_read
            self.__last_read = max_sz
            content = self.__response_data[min_sz:max_sz]
        else:
            content = self.__http_response.read(sz)
        return content

    def write(self, buf):
        """Append bytes to the pending request payload."""
        self.__wbuf.write(buf)

    def __withTimeout(f):
        # Decorator: run f with socket's default timeout temporarily set to
        # this transport's timeout, restoring the previous value afterwards.
        def _f(*args, **kwargs):
            orig_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(args[0].__timeout)
            try:
                result = f(*args, **kwargs)
            finally:
                socket.setdefaulttimeout(orig_timeout)
            return result
        return _f

    async def httpx_flush(self, data, headers):
        """Async POST via httpx; buffers the full response for read()."""
        # Building httpx request
        request = self.__http.build_request('POST', self.path, data=data, headers=headers)

        # Sending httpx request
        self.__http_response = await self.__http.send(request)
        self.code = self.__http_response.status_code
        self.message = self.__http_response.reason_phrase
        self.headers = self.__http_response.headers
        self.__response_data = self.__http_response.read()
        self.__last_read = 0

    def flush(self):
        """Send the buffered payload as an HTTP POST and capture the response.

        Sets self.code / self.message / self.headers, and for buffering
        backends also self.__response_data for subsequent read() calls.
        """
        if self.request == 'httplib': # Only when using httplib request
            if self.__custom_thrift:
                # Recycle the connection for the first few calls, then only
                # when it has been open longer than 90 seconds.
                if self.__loop <= 2:
                    if self.isOpen(): self.close()
                    self.open(); self.__loop += 1
                elif time.time() - self.__time > 90:
                    self.close(); self.open(); self.__time = time.time()
            else:
                if self.isOpen():
                    self.close()
                self.open()
        else:
            self.reset()

        # Pull data out of buffer
        data = self.__wbuf.getvalue()
        self.__wbuf = BytesIO()

        # NOTE(review): user_agent stays None when custom headers already
        # carry User-Agent; non-httplib branches still place that None in the
        # headers dict (it is then overwritten by the custom headers update).
        if not self.__custom_headers or 'User-Agent' not in self.__custom_headers:
            user_agent = 'Python/THttpClient'
            script = os.path.basename(sys.argv[0])
            if script:
                user_agent = '%s (%s)' % (user_agent, urllib.parse.quote(script))
        else:
            user_agent = None
        if self.request == 'hyper':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending request with payload
            request = self.__http.request('POST', self.path, data, headers)

            # Get reply to flush the request
            self.__http_response = self.__http.get_response(request)
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        elif self.request == 'httpx':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            if self.http2:
                self.__async_loop.run_until_complete(self.httpx_flush(data, headers))
            else:
                # Building httpx request
                request = self.__http.build_request('POST', self.path, data=data, headers=headers)

                # Sending httpx request
                self.__http_response = self.__http.send(request)
                self.__response_data = self.__http_response.read()
                self.__last_read = 0
                self.code = self.__http_response.status_code
                self.message = self.__http_response.reason_phrase
                self.headers = self.__http_response.headers
        elif self.request == 'httplib2':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending and get reply to request
            self.__http_response, self.__response_data = self.__http.request(self.url, 'POST', headers=headers, body=data)
            self.__last_read = 0
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
            self.headers = self.__http_response
        elif self.request == 'requests':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending and get reply to request
            self.__http_response = self.__http.request('POST', self.url, data=data, headers=headers)
            self.__response_data = self.__http_response.content
            self.__last_read = 0
            self.code = self.__http_response.status_code
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        elif self.request == 'requests-futures':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending request with payload
            request = self.__http.request('POST', self.url, data=data, headers=headers)

            # Get reply to flush the request (blocks on the future)
            self.__http_response = request.result()
            self.__response_data = self.__http_response.content
            self.__last_read = 0
            self.code = self.__http_response.status_code
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        else:
            # HTTP request
            if self.using_proxy() and self.scheme == 'http':
                # need full URL of real host for HTTP proxy here (HTTPS uses CONNECT tunnel)
                self.__http.putrequest('POST', 'http://%s:%s%s' %
                                    (self.realhost, self.realport, self.path))
            else:
                self.__http.putrequest('POST', self.path)

            # Write headers
            self.__http.putheader('Content-Type', 'application/x-thrift')
            self.__http.putheader('Content-Length', str(len(data)))
            if not self.__custom_headers or 'User-Agent' not in self.__custom_headers:
                self.__http.putheader('User-Agent', user_agent)

            if self.__custom_headers:
                for key, val in six.iteritems(self.__custom_headers):
                    self.__http.putheader(key, val)

            self.__http.endheaders()

            # Write payload
            self.__http.send(data)

            # Get reply to flush the request
            self.__http_response = self.__http.getresponse()
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
            self.headers = self.__http_response.msg

    # Decorate if we know how to timeout
    if hasattr(socket, 'getdefaulttimeout'):
        flush = __withTimeout(flush)
# Example #4
# 0
class HackerOneClient(object):
    """Client for the HackerOne v1 REST API built on a FuturesSession."""

    BASE_URL = "https://api.hackerone.com/v1"
    REQUEST_HEADERS = {
        "User-Agent": "HackerOne Python Client v" + __version__,
    }

    def __init__(self, identifier, token):
        self.identifier = identifier
        self.token = token
        self._init_session()

    def _init_session(self):
        """Create the futures session with default headers and basic auth."""
        self.s = FuturesSession()
        self.s.headers.update(self.REQUEST_HEADERS)
        self.s.auth = HTTPBasicAuth(self.identifier, self.token)

    def make_request(self, url, params=None, data=None, method=None):
        """Start an async request and return its future.

        Relative URLs are resolved against BASE_URL; dict params are encoded.
        """
        method = "GET" if method is None else method
        if not url.startswith("http"):
            url = self.BASE_URL + url
        if isinstance(params, dict):
            params = encode_params(params)

        return self.s.request(method, url, params=params, data=data)

    def request_json(self, url, params=None, data=None, method=None):
        """Block on the request future, raise on HTTP error, and decode JSON."""
        response = self.make_request(url, params, data, method).result()
        response.raise_for_status()
        return response.json()

    def request_object(self, url, params=None, data=None, method=None):
        """Fetch and hydrate a single API object."""
        payload = self.request_json(url, params, data, method)["data"]
        # If we're fetching a single object make sure that consumers
        # know this is the canonical version
        payload["_canonical"] = True
        return hydrate_object(payload, self)

    def request_paginated_objects(self, url, params=None, yield_pages=False):
        """Return a LazyListing over every page of results starting at *url*."""
        first_future = self.make_request(url, params)
        return LazyListing(self._request_paginated_inner(url, first_future, yield_pages))

    def _request_paginated_inner(self, url, future, yield_pages):
        """Generator following 'links.next' pagination, prefetching each next page."""
        while url:
            response = future.result()
            response.raise_for_status()
            body = response.json()
            url = body["links"].get('next')
            if url:
                # Kick off the next page while the caller consumes this one.
                future = self.make_request(url)

            hydrated = hydrate_objects(body["data"], self)
            if yield_pages:
                yield hydrated
            else:
                yield from hydrated

    def get_resource(self, rsrc_type, obj_id):
        """Fetch one resource of *rsrc_type* by id."""
        return rsrc_type.get(self, obj_id)

    def find_resources(self, rsrc_type, sort=None, yield_pages=False, **kwargs):
        """Find instances of `rsrc_type` that match the filter in `**kwargs`"""
        return rsrc_type.find(self, sort=sort, yield_pages=yield_pages, **kwargs)
# Example #5
# 0
class HackerOneClient(object):
    """Thin HackerOne v1 API wrapper.

    Wraps a requests-futures session with basic auth plus helpers that turn
    futures into JSON bodies, hydrated objects, and lazy paginated listings.
    """

    BASE_URL = "https://api.hackerone.com/v1"
    REQUEST_HEADERS = {
        "User-Agent": "HackerOne Python Client v" + __version__,
    }

    def __init__(self, identifier, token):
        self.identifier = identifier
        self.token = token
        self._init_session()

    def _init_session(self):
        """(Re)build the underlying futures session with auth and headers."""
        self.s = FuturesSession()
        self.s.headers.update(self.REQUEST_HEADERS)
        self.s.auth = HTTPBasicAuth(self.identifier, self.token)

    def make_request(self, url, params=None, data=None, method=None):
        """Fire an async HTTP request; defaults to GET, resolves relative URLs."""
        if method is None:
            method = "GET"
        if not url.startswith("http"):
            url = self.BASE_URL + url
        if isinstance(params, dict):
            params = encode_params(params)

        return self.s.request(method, url, params=params, data=data)

    def request_json(self, url, params=None, data=None, method=None):
        """Resolve the request and return its decoded JSON body."""
        resp = self.make_request(url, params, data, method).result()
        resp.raise_for_status()
        return resp.json()

    def request_object(self, url, params=None, data=None, method=None):
        """Fetch a single object, mark it canonical, and hydrate it."""
        obj_data = self.request_json(url, params, data, method)["data"]
        # If we're fetching a single object make sure that consumers
        # know this is the canonical version
        obj_data["_canonical"] = True
        return hydrate_object(obj_data, self)

    def request_paginated_objects(self, url, params=None, yield_pages=False):
        """Lazily iterate every page of results reachable from *url*."""
        initial_future = self.make_request(url, params)
        pages = self._request_paginated_inner(url, initial_future, yield_pages)
        return LazyListing(pages)

    def _request_paginated_inner(self, url, future, yield_pages):
        """Walk 'links.next' pagination, prefetching the next page each round."""
        while url:
            page_resp = future.result()
            page_resp.raise_for_status()
            page = page_resp.json()
            url = page["links"].get('next')
            if url:
                future = self.make_request(url)

            objects = hydrate_objects(page["data"], self)
            if yield_pages:
                yield objects
            else:
                for item in objects:
                    yield item

    def get_resource(self, rsrc_type, obj_id):
        """Retrieve a single resource of *rsrc_type* by its id."""
        return rsrc_type.get(self, obj_id)

    def find_resources(self,
                       rsrc_type,
                       sort=None,
                       yield_pages=False,
                       **kwargs):
        """Find instances of `rsrc_type` that match the filter in `**kwargs`"""
        return rsrc_type.find(self,
                              sort=sort,
                              yield_pages=yield_pages,
                              **kwargs)
# Example #6
# 0
class DataProcessing:
    """Process the Bigquery Data."""

    def __init__(self, s3_client=None):
        """Initialize DataProcessing object.

        :param s3_client: (Optional) pre-connected S3 client; when omitted one
                          is created lazily in update_s3_bucket().
        """
        self.data = None
        self.cache = CacheDict(max_len=50000)
        self.pkg_counter = Counter()
        self.s3_client = s3_client
        self.req_session = FuturesSession(session=Session())
        # Bug fix: async_fetch()/is_fetch_done() use these collections but
        # they were never initialized, raising AttributeError on first use.
        self.responses = []
        self.process_queue = []

    def async_fetch(self, url, payload=None,
                    headers=None,
                    method='GET',
                    others=None):
        """Fetch urls asynchronously.

        Cached URLs are answered immediately into self.responses; otherwise
        the request future is queued for collection by is_fetch_done().

        NOTE(review): ``payload`` and ``headers`` are accepted but never
        forwarded to the request call; they are kept for interface
        compatibility. Their mutable defaults ({}) were replaced with None
        to avoid the shared-mutable-default pitfall.
        """
        if payload is None:
            payload = {}
        if headers is None:
            headers = {"Accept": "application/json"}
        if url in self.cache:
            self.responses.append(self.cache[url])
        else:
            self.process_queue.append(
                (others, url, self.req_session.request(method, url)))

    def is_fetch_done(self, callback=lambda x: x):
        """Check whether all the requests are processed or not.

        Completed futures are removed from the queue; each (others, result)
        tuple is cached and appended to self.responses. Returns True only
        when the queue was already empty at call time.
        """
        _flag = True
        # Bug fix: iterate over a snapshot — removing items from a list while
        # iterating it skips the element after each removal.
        for resp in list(self.process_queue):
            _flag = False
            others, url, req_obj = resp
            logger.info("other:{}, url:{}, req_obj:{}".format(others, url, req_obj))

            if url in self.cache:
                req_obj.cancel()
                self.process_queue.remove(resp)
                self.responses.append(self.cache[url])
            elif req_obj.done():
                req_obj.cancel()
                self.process_queue.remove(resp)
                # Invoke the callback once (it was previously called twice,
                # doubling any side effects / work).
                result = (others, callback(req_obj))
                self.cache[url] = result
                self.responses.append(result)
        return _flag

    def update_s3_bucket(self, data,
                         bucket_name,
                         filename='collated.json'):
        """Merge *data* into a JSON file stored in the given S3 bucket.

        :raises ValueError: if the connection fails or existing data is unreadable.
        """
        if self.s3_client is None:
            # Create an s3 client if one was not supplied.
            self.s3_client = AmazonS3(
                    bucket_name=bucket_name,
                    aws_access_key_id=os.getenv('AWS_S3_ACCESS_KEY_ID'),
                    aws_secret_access_key=os.getenv('AWS_S3_SECRET_ACCESS_KEY'),
                    )
            self.s3_client.connect()

        if not self.s3_client.is_connected():
            raise ValueError("Unable to connect to s3.")

        json_data = dict()

        if self.s3_client.object_exists(filename):
            logger.info("{} exists, updating it.".format(filename))
            json_data = self.s3_client.read_json_file(filename)
            if not json_data:
                raise ValueError("Unable to get the json data path:{}/{}"
                                 .format(bucket_name, filename))

        json_data.update(data)
        self.s3_client.write_json_file(filename, json_data)
        logger.info("Updated file Successfully!")
# Example #7
# 0
def searchTickets(request, params):
    """Search every configured YATS server for tickets matching *params*.

    Fires a 'SEARCH' request at each Server concurrently via a FuturesSession,
    then collects the results, parsing date fields and computing a per-ticket
    lateness flag. Servers that fail or answer non-200 add a user-facing error
    message instead of contributing tickets.

    :param request: Django request (supplies the API user and message store).
    :param params: search filters, JSON-serialized into the request body.
    :return: combined list of ticket dicts from all reachable servers.
    """
    session = FuturesSession()
    async_list = []
    tic = []
    headers = {
        'user-agent': 'yatse/0.0.1',
        'api-key': settings.API_KEY,
        'api-user': request.user.username
    }
    for Srv in Server.objects.all():
        url = '%s/yatse/' % Srv.url
        # , hooks={'response': do_something}
        req = session.request('SEARCH',
                              url,
                              data=json.dumps(params),
                              headers=headers)
        # Tag the future with server metadata for use when collecting results.
        setattr(req, 'serverName', Srv.name)
        setattr(req, 'serverID', Srv.id)
        setattr(req, 'serverShortName', Srv.short)
        async_list.append(req)

    for req in async_list:
        try:
            result = req.result()
            if result.status_code != 200:
                # Typo fix: message previously read "respoded width".
                messages.add_message(
                    request, messages.ERROR,
                    _(u'%s responded with: %s' %
                      (req.serverName, result.status_code)))

            else:
                data = json.loads(result.content)
                for date in data:
                    date['YATSServer'] = req.serverShortName
                    date['serverID'] = req.serverID
                    date['c_date'] = dateutil.parser.parse(date['c_date'])
                    date['last_action_date'] = dateutil.parser.parse(
                        date['last_action_date'])
                    if is_naive(date['last_action_date']):
                        date['last_action_date'] = make_aware(
                            date['last_action_date'])
                    if 'close_date' in date and date['close_date']:
                        date['close_date'] = dateutil.parser.parse(
                            date['close_date'])
                    date['is_late'] = 0
                    # NOTE: 'daedline' is the (misspelled) key the upstream
                    # YATS API actually sends — do not "fix" the spelling here.
                    if 'daedline' in date and date['daedline']:
                        date['daedline'] = dateutil.parser.parse(
                            date['daedline'])
                        if date['daedline'] < datetime.date.today():
                            date['is_late'] = 2
                        if date['daedline'] < datetime.date.today(
                        ) + datetime.timedelta(days=7):
                            date['is_late'] = 1

                tic = tic + data

        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any request/parsing error is reported to the user.
        except Exception:
            messages.add_message(
                request, messages.ERROR,
                _(u'YATS nicht erreichbar: %s' % req.serverName))

    return tic
# Example #8
# 0
class AsyncConnection(AbstractConnection):
    """Asynchronous HTTP connection backed by a thread-pool FuturesSession.

    ``post``/``put`` return futures rather than blocking. A response hook
    reacts to 401 responses by renewing the access token at most once even
    under concurrent requests (serialized via ``_access_token_lock``).
    """

    def __init__(self,
                 *,
                 base_url,
                 disable_ssl_certificate,
                 token_manager,
                 retries,
                 max_requests_workers=6,
                 proxy_url=None):
        """
        :param base_url: Base URL that relative request paths are joined to.
        :param disable_ssl_certificate: When true, TLS verification is skipped.
        :param token_manager: Access-token provider (held by the
            AbstractConnection base class).
        :param retries: Retry configuration forwarded to the HTTPAdapter.
        :param max_requests_workers: Thread-pool size; also used to size the
            connection pool so each worker thread can hold one connection.
        :param proxy_url: Optional proxy applied to both http and https.
        """
        super().__init__(base_url=base_url,
                         disable_ssl_certificate=disable_ssl_certificate,
                         token_manager=token_manager,
                         retries=retries)

        executor = cf.ThreadPoolExecutor(max_workers=max_requests_workers)
        # pool_block=True makes a worker wait for a free connection instead of
        # opening connections beyond the pool size.
        adapter_kwargs = {
            'pool_connections': max_requests_workers,
            'pool_maxsize': max_requests_workers,
            'max_retries': self._retries,
            'pool_block': True
        }
        self._asession = FuturesSession(executor=executor)
        self._asession.mount('https://', HTTPAdapter(**adapter_kwargs))
        self._asession.mount('http://', HTTPAdapter(**adapter_kwargs))
        if proxy_url is not None:
            self._asession.proxies = {
                'http': proxy_url,
                'https': proxy_url,
            }
        self._access_token_lock = Lock()
        self._max_requests_workers = max_requests_workers

    @property
    def executor(self):
        """Executor driving the asynchronous session."""
        return self._asession.executor

    @property
    def max_request_workers(self):
        """Configured maximum number of request worker threads."""
        return self._max_requests_workers

    def _add_authorization_maybe(self, headers: dict, url: str):
        # Serialize with the 401-renewal path so a request never signs with a
        # token that is in the middle of being replaced.
        with self._access_token_lock:
            super()._add_authorization_maybe(headers, url)

    def post(self, path, headers=None, callback=None, data=None, timeout=30.0):
        """Asynchronously POST ``data`` to ``path``; returns a future."""
        url = urljoin(self._base_url, path)
        params = {
            'method': 'POST',
            'url': url,
            'headers': headers,
            'data': data,
            'verify': (not self._disable_ssl_certificate),
            'timeout': timeout
        }
        return self._send_request(params, on_finish_callback=callback)

    def put(self, path, headers=None, callback=None, files=None, timeout=30.0):
        """Asynchronously PUT ``files`` to ``path``; returns a future."""
        url = urljoin(self._base_url, self._encode_spaces(path))
        params = {
            'method': 'PUT',
            'url': url,
            'headers': headers,
            'files': files,
            'verify': (not self._disable_ssl_certificate),
            'timeout': timeout
        }
        return self._send_request(params=params, on_finish_callback=callback)

    def _send_request(self, params, on_finish_callback):
        """Sign, dispatch and hook a request built by post()/put()."""
        params['headers'] = params['headers'] or {}
        self._add_authorization_maybe(params['headers'], params['url'])
        self._add_user_agent(params['headers'])
        try:
            # Remember which token this request was signed with, so the 401
            # hook can tell whether another thread already renewed it.
            token = params['headers']['Authorization'].split(
                'Bearer')[1].strip()
        except (KeyError, IndexError):
            # No Authorization header, or one that is not a Bearer token
            # (the previous bare `except KeyError` let IndexError escape
            # when the header existed without a 'Bearer ' prefix).
            token = None

        def extended_callback(response, *args, **kwargs):
            if response.status_code == 401:
                LOGGER.debug('Got a 401 status')
                skip = self._skip_token_renewal(params['url'])
                if not skip:
                    with self._access_token_lock:  # block concurrent send requests
                        renewed = (token !=
                                   self._token_manager.token.access_token)
                        if renewed:
                            LOGGER.debug('Token already renewed')
                        else:
                            self._renew_token()

            if on_finish_callback:
                on_finish_callback(response)

        c_params = params
        c_params['hooks'] = {'response': extended_callback}
        LOGGER.debug('Making request {} to {}'.format(params['method'],
                                                      params['url']))
        return self._asession.request(**c_params)
Example #9
0
from requests_html import HTML
from requests_futures.sessions import FuturesSession

# Base URL of the sample archive; relative hrefs are resolved against it.
BASE = 'http://theremin.music.uiowa.edu/'

# Parse the locally saved index page (downloaded from `MISPiano.html`).
with open('mispiano.html') as page:
    index = HTML(html=page.read())

# Keep only the fortissimo piano sample links.
links = [href for href in index.links if 'Piano.ff' in href]
total = len(links)
print(f'total: {total} files')

session = FuturesSession(max_workers=10)

# Kick off every download concurrently, then collect the results in order.
futures = [session.request('get', BASE + href) for href in links]
for idx, future in enumerate(futures, start=1):
    response = future.result()
    name = response.url.split('/')[-1]

    if response.status_code != 200:
        print(f'Failed to download {name}')
        continue

    with open(name, 'wb') as out:
        out.write(response.content)

    print(f'[{idx}/{total}] {name} done')
Example #10
0
class Client(object):
    """Client for the Crossbar.io HTTP bridge (publish and call endpoints)."""

    def __init__(self,
                 url,
                 key=None,
                 secret=None,
                 verbose=False,
                 do_async=False,
                 max_workers=8,
                 session=None,
                 response_hook=None,
                 **kwargs):
        """
        Creates a client to connect to the HTTP bridge services
        :param url: The URL to connect to to access the Crossbar
        :param do_async: Run request in async mode
        :param key: The key for the API calls
        :param secret: The secret for the API calls
        :param verbose: True if you want debug messages printed
        :param max_workers: Worker-thread count for the async session
        :param session: Optional underlying session for the async FuturesSession
        :param response_hook: Default response hook installed on the async session
        :param kwargs: Extra kwargs passed to requests.request e.g proxies, auth, verify etc.
        :return: Nothing
        """
        assert url is not None

        self.url = url
        self.do_async = do_async
        self.key = key
        self.secret = secret
        self.verbose = verbose
        self.sequence = 1  # incremented on every API call (see _make_api_call)
        self.kwargs = kwargs
        if self.do_async is True:
            # Async mode only: lazily created sync requests are made per call.
            self.session = FuturesSession(max_workers=max_workers,
                                          session=session)
            self.session.hooks['response'] = response_hook

    def get_url(self, ext_url):
        """Return the full endpoint URL, appending ext_url unless it is
        already contained in the base URL."""
        url = self.url
        if ext_url not in self.url:
            url = '{}{}'.format(self.url, ext_url)
        return url

    def publish(self, topic, *args, **kwargs):
        """
        Publishes the request to the bridge service
        :param topic: The topic to publish to
        :param args: The arguments
        :param kwargs: The key/word arguments
        :return: The ID of the publish (or the future in async mode)
        """
        assert topic is not None
        ext_url = kwargs.pop('transport_path', 'publish')
        response_hook = kwargs.pop('response_hook', None)
        params = {"topic": topic, "args": args, "kwargs": kwargs}
        response = self._make_api_call("POST",
                                       self.get_url(ext_url),
                                       json_params=params,
                                       response_hook=response_hook)
        if self.do_async is True:
            return response
        return response["id"]

    def call(self, procedure, *args, **kwargs):
        """
        Calls a procedure from the bridge service
        :param procedure: The remote procedure to call
        :param args: The arguments
        :param kwargs: The key/word arguments
        :return: The response from calling the procedure (or the future in
                 async mode)
        :raises ClientNoCalleeRegistered: if the procedure is not registered
        :raises ClientCallRuntimeError: on any other WAMP error
        """
        assert procedure is not None
        ext_url = kwargs.pop('transport_path', 'call')
        response_hook = kwargs.pop('response_hook', None)
        params = {"procedure": procedure, "args": args, "kwargs": kwargs}

        response = self._make_api_call("POST",
                                       self.get_url(ext_url),
                                       json_params=params,
                                       response_hook=response_hook)
        if self.do_async is True:
            return response
        value = None
        if "args" in response and len(response["args"]) > 0:
            value = response["args"][0]

        if "error" in response:
            error = response["error"]
            if "wamp.error.no_such_procedure" in error:
                raise ClientNoCalleeRegistered(value)
            else:
                raise ClientCallRuntimeError(value)

        return value

    def _compute_signature(self, body):
        """
        Computes the signature for a signed API call.

        Described at:
        http://crossbar.io/docs/HTTP-Bridge-Services-Caller/

        Reference code is at:
        https://github.com/crossbario/crossbar/blob/master/crossbar/adapter/rest/common.py

        :param body: The JSON-serialized request body to sign (str or bytes)
        :return: (signature, nonce, timestamp)
        """

        timestamp = datetime.datetime.utcnow().strftime(
            "%Y-%m-%dT%H:%M:%S.%fZ")
        nonce = randint(0, 2**53)

        def _as_bytes(value):
            # hmac only accepts bytes on Python 3; accept str transparently.
            return value.encode("utf-8") if isinstance(value, str) else value

        # Compute signature: HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
        hm = hmac.new(_as_bytes(self.secret), None, hashlib.sha256)
        hm.update(_as_bytes(self.key))
        hm.update(_as_bytes(timestamp))
        hm.update(_as_bytes(str(self.sequence)))
        hm.update(_as_bytes(str(nonce)))
        hm.update(_as_bytes(body))
        # Decode so urlencode() receives text rather than a bytes repr.
        signature = base64.urlsafe_b64encode(hm.digest()).decode("ascii")

        return signature, nonce, timestamp

    def _make_api_call(self,
                       method,
                       url,
                       json_params=None,
                       response_hook=None):
        """
        Performs the REST API Call
        :param method: HTTP Method
        :param url:  The URL
        :param json_params: The parameters intended to be JSON serialized
        :param response_hook: Per-call response hook (async mode only)
        :return: Decoded JSON response (sync mode) or a future (async mode)
        :raises ClientMissingParams: on HTTP 400
        :raises ClientSignatureError: on HTTP 401
        :raises ClientBadUrl: on any other non-200 status
        :raises ClientBadHost: if the request itself fails
        """
        if self.verbose is True:
            print("\ncrossbarhttp: Request: %s %s" % (method, url))

        if json_params is not None and self.verbose is True:
            print("crossbarhttp: Params: " + json.dumps(json_params))

        if self.key is not None and self.secret is not None and json_params is not None:
            signature, nonce, timestamp = self._compute_signature(
                json.dumps(json_params))
            params = urlencode({
                "timestamp": timestamp,
                "seq": str(self.sequence),
                "nonce": nonce,
                "signature": signature,
                "key": self.key
            })
            if self.verbose is True:
                print("crossbarhttp: Signature Params: " + params)
            url += "?" + params

        # TODO: I can't figure out what this is.  Guessing it is a number you increment on every call
        self.sequence += 1

        try:
            if self.do_async is True:
                return self.session.request(method,
                                            url=url,
                                            json=json_params,
                                            hooks={'response': response_hook},
                                            **self.kwargs)
            response = requests.request(method,
                                        url=url,
                                        json=json_params,
                                        **self.kwargs)
            if response.status_code == 200:
                data = response.json()
                if self.verbose is True:
                    print("crossbarhttp: Response: " + str(data))
                return data
            elif response.status_code == 400:
                raise ClientMissingParams(str(response.text))
            elif response.status_code == 401:
                raise ClientSignatureError(str(response.text))
            else:
                raise ClientBadUrl(str(response.text))
        except requests.exceptions.RequestException as e:
            raise ClientBadHost(str(e))
Example #11
0
from polyswarmd.utils.response import success, failure, install_error_handlers

logger = logging.getLogger(__name__)
# In-process cache with a 30-second default timeout (per the config keys below).
cache: Cache = Cache(config={"CACHE_TYPE": "simple", "CACHE_DEFAULT_TIMEOUT": 30})

# Set up our app object
app = Flask(__name__)
app.url_map.strict_slashes = False
# NOTE(review): PolySwarmd.auto() presumably discovers configuration
# automatically (env/files) — confirm against the PolySwarmd class.
_config = PolySwarmd.auto()
app.config['POLYSWARMD'] = _config
# Setting this value works even when Content-Length is omitted, we must have it
app.config['MAX_CONTENT_LENGTH'] = _config.artifact.max_size * _config.artifact.limit

# Shared async HTTP session: 4 worker threads, adapters retry up to 2 times.
session = FuturesSession(executor=ThreadPoolExecutor(4), adapter_kwargs={'max_retries': 2})

# Give every request made through the session a 10-second default timeout.
session.request = functools.partial(session.request, timeout=10)

# Expose the session and related knobs to the view modules via app config.
app.config['REQUESTS_SESSION'] = session
app.config['CHECK_BLOCK_LIMIT'] = True
app.config['THREADPOOL'] = ThreadPoolExecutor()

install_error_handlers(app)

from polyswarmd.views.eth import misc
from polyswarmd.views.artifacts import artifacts
from polyswarmd.views.balances import balances
from polyswarmd.views.bounties import bounties
from polyswarmd.views.relay import relay
from polyswarmd.views.offers import offers
from polyswarmd.views.staking import staking
from polyswarmd.views.event_message import init_websockets