    def fetch_comments_parallelly(self,
                                  bug_ids: List[int],
                                  n_max_workers: int = 16) -> Dict[int, List[Comment]]:
        # Returns a mapping of bug id -> list of comments (requires typing.Dict).
        headers = {
            "User-Agent":
            "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
            "Accept": "application/json"
        }

        def exception_handler(request, exception):
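            # Note: defined but never wired up; FuturesSession surfaces request
            # exceptions only when Future.result() is called.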
            print("ERROR: An exception occurred! {}".format(exception))

        urls = list(
            map(lambda bug_id: "{}/{}/comment".format(self._base_url, bug_id),
                bug_ids))
        n_workers = min(len(urls), n_max_workers)
        session = FuturesSession(max_workers=n_workers)
        futures = [session.get(u, headers=headers) for u in urls]
        done, incomplete = wait(futures)
        session.close()

        if len(incomplete) > 0:
            print("ERROR: failed to process at least one request")

        bug_comments = {}
        for f in done:
            r = f.result()
            r.raise_for_status()
            parsed_data = r.json()
            bugs_data = parsed_data["bugs"]
            included_bug_ids = list(bugs_data.keys())
            assert len(included_bug_ids) == 1
            bug_id = included_bug_ids[0]
            bug_comments[int(bug_id)] = bugs_data[bug_id]["comments"]
        return bug_comments
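A minimal self-contained sketch of the same fan-out-and-wait pattern, assuming requests-futures is installed (httpbin.org is only a stand-in endpoint):

from concurrent.futures import wait
from requests_futures.sessions import FuturesSession

urls = ["https://httpbin.org/get?id={}".format(i) for i in range(4)]  # illustrative URLs
session = FuturesSession(max_workers=min(len(urls), 16))
futures = [session.get(u) for u in urls]
done, not_done = wait(futures)  # blocks until every future completes
for f in done:
    r = f.result()        # re-raises any exception from the worker thread
    r.raise_for_status()  # surfaces HTTP-level errors
    print(r.json()["args"])
session.close()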
Example No. 2
    def get(self, url_paths, timeout=300, stream=False, use_proxy=False):
        print(datetime.datetime.now())
        url_paths = [u.replace('://www.', '://') for u in url_paths]
        session = FuturesSession(max_workers=self._max_workers)

        if use_proxy:
            futures = [
                session.get(url,
                            headers=_get_headers(self._ua_generator.random),
                            proxies=_get_proxy(random.choice(
                                self._proxy_list)),
                            timeout=timeout,
                            stream=stream) for url in url_paths
            ]
        else:
            futures = [
                session.get(url, timeout=timeout, stream=stream)
                for url in url_paths
            ]

        scrapers = []
        for f, u in zip(futures, url_paths):
            try:
                r = f.result()
                s = SCRAPERS[url_path_to_dict(u)['host']](r)
            except Exception as e:
                # TODO: replace with proper logging of the failure
                print("FAILURE: %s" % e)
                s = SCRAPERS[url_path_to_dict(u)['host']](None)
            scrapers.append(s)

        session.close()
        print(datetime.datetime.now())
        return scrapers
Example No. 3
async def getlyrics(artistsong):
    percents = {" ": "+", "!": "%21", '"': "%22", "#": "%23", "$": "%24", "%": "%25", "&": "%26", "'": "%27",
                "(": "%28", ")": "%29", "*": "%2A", "+": "%2B", "`": "%60", ",": "%2C", "-": "%2D", ".": "%2E",
                "/": "%2F"}
    searchquery = ""
    for char in artistsong:
        if char in percents:
            char = percents[char]
        searchquery += char
    session = FuturesSession()
    future = session.get("https://google.com/search?q=" + searchquery + "+lyrics")
    response_one = future.result()
    soup = BeautifulSoup(response_one.text, 'html.parser')
    bouncer = "Our systems have detected unusual traffic from your computer network"
    if bouncer in soup.get_text():
        title_ = ""
        artist_ = ""
        lyrics_ = "Google has detected us being suspicious, try again later."
        source_ = ""
    else:
        try:
            title_ = soup.find('span', class_="BNeawe tAd8D AP7Wnd").get_text()
            artist_ = soup.find_all('span', class_="BNeawe s3v9rd AP7Wnd")[-1].get_text()
            lyrics_ = soup.find_all('div', class_="BNeawe tAd8D AP7Wnd")[-1].get_text()
            source_ = soup.find_all('span', class_="uEec3 AP7Wnd")[-1].get_text()
        except AttributeError:
            title_, artist_, lyrics_, source_ = "", "", "Not able to find the lyrics for {}.".format(searchquery), ""
    session.close()
    return title_, artist_, lyrics_, source_
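The hand-rolled percents table above duplicates what the standard library already provides; a sketch of the same encoding with urllib.parse.quote_plus (note that the surrounding async def gains nothing here, since future.result() blocks):

from urllib.parse import quote_plus

artistsong = "Queen Don't Stop Me Now"  # example input
searchquery = quote_plus(artistsong)    # ' ' -> '+', reserved characters -> %XX
url = "https://google.com/search?q=" + searchquery + "+lyrics"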
Example No. 4
class GraphDatabase(GraphDatabase):
    def __init__(self,
                 url,
                 timeout=None,
                 username="",
                 password="",
                 loop=None,
                 verify_ssl=False,
                 future_class=None):
        super().__init__(url,
                         timeout=timeout,
                         username=username,
                         password=password,
                         loop=loop,
                         validate_cert=verify_ssl,
                         future_class=futures.Future)
        self._session = FuturesSession()

    def session(self):
        raise NotImplementedError

    def connect(self,
                session=None,
                force_close=False,
                force_release=False,
                pool=None):
        """
        Get a connection to the graph database.

        :param str session: Session id (optional). Typically a uuid
        :param bool force_close: force connection to close after read.
        :param bool force_release: If possible, force release to pool after
            read.
        :param gremlinclient.pool.Pool pool: Associated connection pool.

        :returns: :py:class:`gremlinclient.connection.Connection`
        """
        return self._connect(Connection, session, force_close, force_release,
                             pool)

    def _connect(self, conn_type, session, force_close, force_release, pool):
        future = self._future_class()
        resp = Response(self._url,  # was bare `url`, which is undefined in this scope
                        self._session,
                        self._future_class,
                        loop=self._loop)
        gc = conn_type(resp, self._future_class, self._timeout, self._username,
                       self._password, self._loop, self._validate_cert, False,
                       pool, force_release, session)
        future.set_result(gc)
        return future

    def close(self):
        self._session.close()
Example No. 5
class GraphDatabase(GraphDatabase):

    def __init__(self, url, timeout=None, username="", password="",
                 loop=None, verify_ssl=False, future_class=None):
        super().__init__(url, timeout=timeout, username=username,
                         password=password, loop=loop,
                         validate_cert=verify_ssl, future_class=futures.Future)
        self._session = FuturesSession()

    def session(self):
        raise NotImplementedError

    def connect(self,
                session=None,
                force_close=False,
                force_release=False,
                pool=None):
        """
        Get a connection to the graph database.

        :param str session: Session id (optional). Typically a uuid
        :param bool force_close: force connection to close after read.
        :param bool force_release: If possible, force release to pool after
            read.
        :param gremlinclient.pool.Pool pool: Associated connection pool.

        :returns: :py:class:`gremlinclient.connection.Connection`
        """
        return self._connect(
            Connection, session, force_close, force_release, pool)

    def _connect(self,
                 conn_type,
                 session,
                 force_close,
                 force_release,
                 pool):
        future = self._future_class()
        resp = Response(self._url,  # was bare `url`, which is undefined in this scope
                        self._session, self._future_class,
                        loop=self._loop)
        gc = conn_type(resp, self._future_class, self._timeout,
                       self._username, self._password, self._loop,
                       self._validate_cert, False, pool, force_release,
                       session)
        future.set_result(gc)
        return future

    def close(self):
        self._session.close()
Example No. 6
    def _async_request(self, method_name, store_id=None, json_data=None):
        tasks = []
        session = FuturesSession()
        session.headers['Content-Type'] = 'application/json'
        for store in self.stores:
            func = getattr(store, method_name)
            request_task = RequestTask(store, func(session, json_data=json_data, store_id=store_id), method_name)
            tasks.append(request_task)

        results = []
        for task in tasks:
            result = self._get_result_object(task, method_name)
            results.append(result)
            print(result)

        session.close()
        return results
Example No. 7
class Tracker:

    def __init__(self, app_id: str) -> None:
        self._app_id = app_id
        self._session = FuturesSession()

    def send_async(self, teacher: Teacher) -> Future:
        payload = {
            "v": 1,
            "tid": "UA-2241989-17",
            "cid": 555,
            "t": "pageview",
            "dh": self._app_id,
            "dp": teacher.id,
            "dt": teacher.name,
        }
        return self._session.post("http://www.google-analytics.com/collect", payload)

    def close(self) -> None:
        self._session.close()
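A hypothetical usage sketch for the Tracker above; the Teacher stand-in is illustrative, not the project's real type:

from collections import namedtuple

Teacher = namedtuple("Teacher", ["id", "name"])  # stand-in for the real Teacher class

tracker = Tracker(app_id="example.app")
future = tracker.send_async(Teacher(id="/teachers/42", name="Ada"))
future.result()  # optional: block only if delivery confirmation matters
tracker.close()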
Example No. 8
def makeAsyncLoop(links):  # Image frame distributor
    video_capture = cv2.VideoCapture(0)
    sess = FuturesSession()
    counter = 0
    scale = 1
    print(str(links) + ' - links are looped')
    while True:
        if len(links) == 0:
            return 0
        ret, frame = video_capture.read()  # Read a frame from the camera

        small_frame = cv2.resize(frame, (0, 0), fx=scale, fy=scale)  # Downscale the frame if necessary

        img = pickle.dumps(small_frame)  # Serialize the image
        # try:
        sess.post(links[counter] + 'handle', data=img)  # Try to send the next frame
        # except:
        #     links.remove(links[counter])  # Remove the link from the list if it is unavailable
        counter += 1
        if counter > len(links) - 1:
            counter = 0
            sess.close()
            sess = FuturesSession()
Example No. 9
async def getlyrics(artistsong):
    percents = {" ": "+", "!": "%21", '"': "%22", "#": "%23", "$": "%24", "%": "%25", "&": "%26", "'": "%27",
                "(": "%28", ")": "%29", "*": "%2A", "+": "%2B", "`": "%60", ",": "%2C", "-": "%2D", ".": "%2E",
                "/": "%2F"}
    searchquery = ""
    for char in artistsong:
        if char in percents:
            char = percents[char]
        searchquery += char
    session = FuturesSession()
    future = session.get("https://google.com/search?q=" + searchquery + "+lyrics")
    response_one = future.result()
    soup = BeautifulSoup(response_one.text, 'html.parser')
    try:
        title_ = soup.find('span', class_="BNeawe tAd8D AP7Wnd").get_text()
        artist_ = soup.find_all('span', class_="BNeawe s3v9rd AP7Wnd")[-1].get_text()
        lyrics_ = soup.find_all('div', class_="BNeawe tAd8D AP7Wnd")[-1].get_text()
        source_ = soup.find_all('span', class_="uEec3 AP7Wnd")[-1].get_text()
    except AttributeError:
        title_ = artist_ = lyrics_ = source_ = "Not Found: {}".format(artistsong)
    session.close()
    return title_, artist_, lyrics_, source_
Example No. 10
class TelegramNotifier(_MetricMonitorCallback):
    def __init__(self,
                 bot_id: str,
                 chat_id: str,
                 metric_name: str,
                 delta,
                 max_workers: int = 1):
        self.bot_id = bot_id
        self.chat_id = chat_id
        self.metric_name = metric_name
        self.session = FuturesSession(max_workers=max_workers)
        self.futures = []
        self.delta = delta
        self.previous_metric = np.inf

    def on_epoch_end(self, epoch: int, logs: ModelHistory) -> None:
        def _response_hook(resp, *args, **kwargs):
            if resp.status_code != 200:
                logging.warning('Failed to deliver Telegram message with ' +
                                f'error code {resp.status_code}')

        current_metric = self.get_metric('patience', self.metric_name, logs)
        current_delta = self.previous_metric - current_metric

        if current_delta >= self.delta:
            msg = '{} improved from {} to {}'.format(
                self.metric_name, self.previous_metric, current_metric)
            msg_url = 'https://api.telegram.org/bot{}/sendMessage'.format(
                self.bot_id)

            future = self.session.get(msg_url,
                                      params={'chat_id': self.chat_id,
                                              'text': msg},
                                      hooks={'response': _response_hook})
            self.futures.append(future)
            self.previous_metric = current_metric

    def on_train_end(self) -> None:
        self.session.close()
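The hooks={'response': ...} callback above runs on the worker thread as soon as the response arrives, before .result() is ever called; a stripped-down sketch of the same mechanism (httpbin.org is only a stand-in):

from requests_futures.sessions import FuturesSession

def on_response(resp, *args, **kwargs):
    # Executed in the worker thread when the response arrives.
    print("got", resp.status_code, "from", resp.url)

session = FuturesSession(max_workers=1)
future = session.get("https://httpbin.org/status/200",
                     hooks={"response": on_response})
future.result()
session.close()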
Example No. 11
class BaseConnection(object):
    """Base Connection Class."""
    def __init__(self,
                 debug=False,
                 method='GET',
                 proxy_host=None,
                 timeout=20,
                 proxy_port=80,
                 parallel=None,
                 escape_xml=False,
                 **kwargs):

        if debug:
            set_stream_logger()

        self.response = None
        self.request = None
        self.verb = None
        self.config = None
        self.debug = debug
        self.method = method
        self.timeout = timeout
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.escape_xml = escape_xml
        self.datetime_nodes = []
        self._list_nodes = []

        self.proxies = dict()
        if self.proxy_host:
            proxy = 'http://%s:%s' % (self.proxy_host, self.proxy_port)
            self.proxies = {'http': proxy, 'https': proxy}

        self.session = FuturesSession()
        self.session.mount('http://', HTTPAdapter(max_retries=3))
        self.session.mount('https://', HTTPAdapter(max_retries=3))

        self.parallel = parallel

        self.base_list_nodes = []
        self.datetime_nodes = []

        self._reset()

    def debug_callback(self, debug_type, debug_message):
        log.debug('type: ' + str(debug_type) + ' message: ' + str(debug_message))

    def v(self, *args, **kwargs):
        return getValue(self.response.dict(), *args, **kwargs)

    def getNodeText(self, nodelist):
        return getNodeTextUtils(nodelist)

    def _reset(self):
        self.response = None
        self.request = None
        self.verb = None
        self._list_nodes = []
        self._request_id = None
        self._request_dict = {}
        self._time = time.time()
        self._response_content = None
        self._response_dom = None
        self._response_obj = None
        self._response_soup = None
        self._response_dict = None
        self._response_error = None
        self._resp_body_errors = []
        self._resp_body_warnings = []
        self._resp_codes = []

    def _add_prefix(self, nodes, verb):
        if verb:
            for i, v in enumerate(nodes):
                if not nodes[i].startswith(verb.lower()):
                    nodes[i] = "%sresponse.%s" % (verb.lower(),
                                                  nodes[i].lower())

    def execute(self,
                verb,
                data=None,
                list_nodes=[],
                verb_attrs=None,
                files=None):
        "Executes the HTTP request."
        log.debug('execute: verb=%s data=%s' % (verb, data))

        self._reset()

        self._list_nodes += list_nodes
        self._add_prefix(self._list_nodes, verb)

        if hasattr(self, 'base_list_nodes'):
            self._list_nodes += self.base_list_nodes

        self.build_request(verb, data, verb_attrs, files)
        self.execute_request()

        if hasattr(self.response, 'content'):
            self.process_response()
            self.error_check()

        log.debug('total time=%s' % (time.time() - self._time))

        return self.response

    def build_request(self, verb, data, verb_attrs, files=None):

        self.verb = verb
        self._request_dict = data
        self._request_id = uuid.uuid4()

        url = self.build_request_url(verb)

        headers = self.build_request_headers(verb)
        headers.update({
            'User-Agent': UserAgent,
            'X-EBAY-SDK-REQUEST-ID': str(self._request_id)
        })

        # if we are adding files, we ensure there is no Content-Type header already defined
        # otherwise Request will use the existing one which is likely not to be multipart/form-data
        # data must also be a dict so we make it so if needed

        requestData = self.build_request_data(verb, data, verb_attrs)
        if files:
            del headers['Content-Type']
            if isinstance(requestData, str):  # pylint: disable-msg=E0602
                requestData = {'XMLPayload': requestData}

        request = Request(
            self.method,
            url,
            data=smart_encode_request_data(requestData),
            headers=headers,
            files=files,
        )

        self.request = request.prepare()

    def build_request_headers(self, verb):
        return {}

    def build_request_data(self, verb, data, verb_attrs):
        return ""

    def build_request_url(self, verb):
        url = "%s://%s%s" % (HTTP_SSL[self.config.get('https', False)],
                             self.config.get('domain'), self.config.get('uri'))
        return url

    def execute_request(self):

        log.debug("REQUEST (%s): %s %s" %
                  (self._request_id, self.request.method, self.request.url))
        log.debug('headers=%s' % self.request.headers)
        log.debug('body=%s' % self.request.body)

        if self.parallel:
            self.parallel._add_request(self)
            return None

        self.response = self.session.send(self.request,
                                          verify=True,
                                          proxies=self.proxies,
                                          timeout=self.timeout,
                                          allow_redirects=True)

        log.debug('RESPONSE (%s):' % self._request_id)
        log.debug('elapsed time=%s' % self.response.elapsed)
        log.debug('status code=%s' % self.response.status_code)
        log.debug('headers=%s' % self.response.headers)
        log.debug('content=%s' % self.response.text)

    def process_response(self, parse_response=True):
        """Post processing of the response"""

        self.response = Response(self.response,
                                 verb=self.verb,
                                 list_nodes=self._list_nodes,
                                 datetime_nodes=self.datetime_nodes,
                                 parse_response=parse_response)

        self.session.close()
        # set for backward compatibility
        self._response_content = self.response.content

        if self.response.status_code != 200:
            self._response_error = self.response.reason

    def error_check(self):
        estr = self.error()

        if estr and self.config.get('errors', True):
            log.error(estr)
            raise ConnectionError(estr, self.response)

    def response_codes(self):
        return self._resp_codes

    def response_status(self):
        "Retuns the HTTP response status string."

        return self.response.reason

    def response_code(self):
        "Returns the HTTP response status code."

        return self.response.status_code

    def response_content(self):
        return self.response.content

    def response_soup(self):
        "Returns a BeautifulSoup object of the response."

        if not self._response_soup:
            try:
                from bs4 import BeautifulStoneSoup
            except ImportError:
                from BeautifulSoup import BeautifulStoneSoup
                log.warn(
                    'DeprecationWarning: BeautifulSoup 3 or earlier is deprecated; install bs4 instead\n'
                )

            self._response_soup = BeautifulStoneSoup(
                smart_decode(self.response_content))

        return self._response_soup

    def response_obj(self):
        log.warn('response_obj() DEPRECATED, use response.reply instead')
        return self.response.reply

    def response_dom(self):
        """ Deprecated: use self.response.dom() instead
        Returns the response DOM (xml.dom.minidom).
        """
        log.warn('response_dom() DEPRECATED, use response.dom instead')

        if not self._response_dom:
            dom = None
            content = None

            try:
                if self.response.content:
                    regex = re.compile(b'xmlns="[^"]+"')
                    content = regex.sub(b'', self.response.content)
                else:
                    content = "<%sResponse></%sResponse>" % (self.verb,
                                                             self.verb)

                dom = parseString(content)
                self._response_dom = dom.getElementsByTagName(self.verb +
                                                              'Response')[0]

            except ExpatError as e:
                raise ConnectionResponseError(
                    "Invalid Verb: %s (%s)" % (self.verb, e), self.response)
            except IndexError:
                self._response_dom = dom

        return self._response_dom

    def response_dict(self):
        "Returns the response dictionary."
        log.warn(
            'response_dict() DEPRECATED, use response.dict() or response.reply instead'
        )

        return self.response.reply

    def response_json(self):
        "Returns the response JSON."
        log.warn('response_json() DEPRECATED, use response.json() instead')

        return self.response.json()

    def _get_resp_body_errors(self):
        """Parses the response content to pull errors.

        Child classes should override this method based on what the errors in the
        XML response body look like. They can choose to look at the 'ack',
        'Errors', 'errorMessage' or whatever other fields the service returns.
        the implementation below is the original code that was part of error()
        """

        if self._resp_body_errors and len(self._resp_body_errors) > 0:
            return self._resp_body_errors

        errors = []

        if self.verb is None:
            return errors

        dom = self.response.dom()
        if dom is None:
            return errors

        return []

    def error(self):
        "Builds and returns the api error message."

        error_array = []
        if self._response_error:
            error_array.append(self._response_error)

        error_array.extend(self._get_resp_body_errors())

        if len(error_array) > 0:
            # Force all errors to be unicode in a proper way
            error_array = [smart_decode(smart_encode(e)) for e in error_array]
            error_string = u"{verb}: {message}".format(
                verb=self.verb, message=u", ".join(error_array))

            return error_string

        return None

    def opendoc(self):
        webbrowser.open(self.config.get('doc_url'))
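The build_request/execute_request split above follows the standard requests prepared-request flow; a minimal sketch with a plain Session (retry mounting and proxies omitted):

import requests

session = requests.Session()
request = requests.Request("GET", "https://httpbin.org/get",
                           headers={"X-Demo": "1"})  # illustrative request
prepared = request.prepare()  # freezes URL, headers and body, like self.request above
response = session.send(prepared, timeout=20, allow_redirects=True)
print(response.status_code)
session.close()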
Example No. 12
class HTTPDriver(BaseDriver):
    """HTTPDriver

    The :class:`HTTPDriver` class reads SBP messages from an HTTP
    service for a device and writes out to a stream. This driver is like
    a file-handle with reads and writes over two separate HTTP
    connections, but can also be enabled and disabled by its consumer.

    Parameters
    ----------
    device_uid : uid
      Device unique id
    url : str
      HTTP endpoint
    retries : tuple
      Configure connect and read retry count. Defaults to
      (MAX_CONNECT_RETRIES, MAX_READ_RETRIES).
    timeout : tuple
      Configure connect and read timeouts. Defaults to
      (DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT).

    """
    def __init__(
        self,
        device_uid=None,
        url="https://broker.staging.skylark.swiftnav.com",
        retries=DEFAULT_RETRIES,
        timeout=DEFAULT_TIMEOUT,
    ):
        # Use the retries argument (the original ignored it in favor of the defaults).
        self._retry = Retry(connect=retries[0],
                            read=retries[1],
                            redirect=MAX_REDIRECTS,
                            status_forcelist=[500],
                            backoff_factor=DEFAULT_BACKOFF_FACTOR)
        self.url = url
        self.read_session = requests.Session()
        self.read_session.mount(
            "http://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.read_session.mount(
            "https://",
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
        self.write_session = None
        self.device_uid = device_uid
        self.timeout = timeout
        self.read_response = None
        self.write_response = None
        self.source = None

    def flush(self):
        """File-flush wrapper (noop).

        """
        pass

    def close(self):
        """File-handle close wrapper (noop).

        """
        try:
            self.read_close()
            self.write_close()
        except:
            pass

    @property
    def write_ok(self):
        """
        Are we connected for writes?
        """
        # Note that self.write_response is either None or a Response
        # object, which cast to False for 4xx and 5xx HTTP codes.
        return bool(self.write_response)

    def connect_write(self, source, whitelist, device_uid=None, pragma=None):
        """Initialize a streaming write HTTP response. Manually connects the
        underlying file-handle. In the event of a network disconnection,
        use to manually reinitiate an HTTP session.

        Parameters
        ----------
        source : sbp.client.handler.Handler
          Iterable source of SBP messages.
        whitelist : [int]
          Whitelist of messages to write

        """
        header_device_uid = device_uid or self.device_uid
        headers = {
            'Device-Uid': header_device_uid,
            'Content-Type': BROKER_SBP_TYPE,
            'Pragma': pragma
        }
        if not pragma:
            del headers['Pragma']
        try:
            self.executor = ThreadPoolExecutor(max_workers=DEFAULT_POOLSIZE)
            self.write_session = FuturesSession(executor=self.executor)
            self.write_session.mount(
                "http://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.write_session.mount(
                "https://",
                HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                            pool_maxsize=DEFAULT_POOLSIZE,
                            pool_block=DEFAULT_POOLBLOCK,
                            max_retries=self._retry))
            self.source = source.filter(whitelist)
            gen = (msg.pack() for msg, _ in self.source)
            self.write_session.put(self.url, data=gen, headers=headers)
            self.write_response = True
        except requests.exceptions.ConnectionError:
            msg = "Client connection error to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout:
            msg = "Client connection timeout to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.RetryError:
            msg = "Client retry error to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout:
            msg = "Client read timeout to %s with [PUT] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        return self.write_ok

    def write(self, data):
        """Write wrapper (noop). Actual stream is initiated by the write
        connection.

        Parameters
        ----------
        data : object
          Data to write.

        """
        pass

    def write_close(self):
        """File-handle close wrapper (noop).

        """
        try:
            self.write_session.close()
            self.executor.shutdown(wait=False)
            self.source.breakiter()
            self.source = None
            self.executor = None
            self.write_session = None
        except:
            pass

    @property
    def read_ok(self):
        """
        Are we connected for reads?
        """
        return bool(self.read_response)

    def connect_read(self, device_uid=None, pragma=None):
        """Initialize a streaming read/write HTTP response. Manually connects
        the underlying file-handle. In the event of a network
        disconnection, use to manually reinitiate an HTTP session.

        """
        header_device_uid = device_uid or self.device_uid
        headers = {
            'Device-Uid': header_device_uid,
            'Accept': BROKER_SBP_TYPE,
            'Pragma': pragma
        }
        if not pragma:
            del headers['Pragma']
        try:
            self.read_response = self.read_session.get(self.url,
                                                       stream=True,
                                                       headers=headers,
                                                       timeout=self.timeout)
        except requests.exceptions.ConnectionError:
            msg = "Client connection error to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ConnectTimeout:
            msg = "Client connection timeout to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.RetryError:
            msg = "Client retry error to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        except requests.exceptions.ReadTimeout:
            msg = "Client read timeout to %s with [GET] headers %s" \
                  % (self.url, headers)
            warnings.warn(msg)
        return self.read_ok

    def read(self, size):
        """Read wrapper. If the client connection is closed or some other
        exception is thrown, raises an IOError.

        Parameters
        ----------
        size : int
          Size to read (in bytes).

        Returns
        ----------
        bytearray, or None

        """
        if self.read_response is None or not self.device_uid:
            raise ValueError("Invalid/insufficient HTTP request parameters!")
        elif not self.read_ok or self.read_response.raw.closed:
            raise IOError("HTTP read closed?!")
        try:
            return self.read_response.raw.read(size)
        except:
            raise IOError("HTTP read error!")

    def read_close(self):
        """File-handle close wrapper (noop).

        """
        try:
            self.read_response.close()
            self.read_response = None
        except:
            pass
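The adapter-and-retry mounting used by both the read and write sessions above, shown in isolation (the numeric values are illustrative, not the driver's defaults):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry = Retry(connect=3, read=3, redirect=16,
              status_forcelist=[500], backoff_factor=0.2)
adapter = HTTPAdapter(pool_connections=10, pool_maxsize=10,
                      pool_block=False, max_retries=retry)
session = requests.Session()
session.mount("http://", adapter)   # a single adapter instance can serve both schemes
session.mount("https://", adapter)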
Example No. 13
class HTTPDriver(BaseDriver):
  """HTTPDriver

  The :class:`HTTPDriver` class reads SBP messages from an HTTP
  service for a device and writes out to a stream. This driver is like
  a file-handle with reads and writes over two separate HTTP
  connections, but can also be enabled and disabled by its consumer.

  Parameters
  ----------
  device_uid : uid
    Device unique id
  url : str
    HTTP endpoint
  retries : tuple
    Configure connect and read retry count. Defaults to
    (MAX_CONNECT_RETRIES, MAX_READ_RETRIES).
  timeout : tuple
    Configure connect and read timeouts. Defaults to
    (DEFAULT_CONNECT_TIMEOUT, DEFAULT_READ_TIMEOUT).

  """

  def __init__(self,
               device_uid=None,
               url="https://broker.staging.skylark.swiftnav.com",
               retries=DEFAULT_RETRIES,
               timeout=DEFAULT_TIMEOUT,):
    # Use the retries argument (the original ignored it in favor of the defaults).
    self._retry = Retry(connect=retries[0],
                        read=retries[1],
                        redirect=MAX_REDIRECTS,
                        status_forcelist=[500],
                        backoff_factor=DEFAULT_BACKOFF_FACTOR)
    self.url = url
    self.read_session = requests.Session()
    self.read_session.mount("http://",
                            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                                        pool_maxsize=DEFAULT_POOLSIZE,
                                        pool_block=DEFAULT_POOLBLOCK,
                                        max_retries=self._retry))
    self.read_session.mount("https://",
                            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                                        pool_maxsize=DEFAULT_POOLSIZE,
                                        pool_block=DEFAULT_POOLBLOCK,
                                        max_retries=self._retry))
    self.write_session = None
    self.device_uid = device_uid
    self.timeout = timeout
    self.read_response = None
    self.write_response = None
    self.source = None

  def flush(self):
    """File-flush wrapper (noop).

    """
    pass

  def close(self):
    """File-handle close wrapper (noop).

    """
    try:
      self.read_close()
      self.write_close()
    except:
      pass

  @property
  def write_ok(self):
    """
    Are we connected for writes?
    """
    # Note that self.write_response is either None or a Response
    # object, which cast to False for 4xx and 5xx HTTP codes.
    return bool(self.write_response)

  def connect_write(self, source, whitelist, device_uid=None, pragma=None):
    """Initialize a streaming write HTTP response. Manually connects the
    underlying file-handle. In the event of a network disconnection,
    use to manually reinitiate an HTTP session.

    Parameters
    ----------
    source : sbp.client.handler.Handler
      Iterable source of SBP messages.
    whitelist : [int]
      Whitelist of messages to write

    """
    header_device_uid = device_uid or self.device_uid
    headers = {'Device-Uid': header_device_uid, 'Content-Type': BROKER_SBP_TYPE, 'Pragma': pragma}
    if not pragma:
      del headers['Pragma']
    try:
      self.executor = ThreadPoolExecutor(max_workers=DEFAULT_POOLSIZE)
      self.write_session = FuturesSession(executor=self.executor)
      self.write_session.mount("http://",
                               HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                                           pool_maxsize=DEFAULT_POOLSIZE,
                                           pool_block=DEFAULT_POOLBLOCK,
                                           max_retries=self._retry))
      self.write_session.mount("https://",
                               HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                                           pool_maxsize=DEFAULT_POOLSIZE,
                                           pool_block=DEFAULT_POOLBLOCK,
                                           max_retries=self._retry))
      self.source = source.filter(whitelist)
      gen = (msg.pack() for msg, _ in self.source)
      self.write_session.put(self.url, data=gen, headers=headers)
      self.write_response = True
    except requests.exceptions.ConnectionError as err:
      msg = "Client connection error to %s with [PUT] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.ConnectTimeout as err:
      msg = "Client connection timeout to %s with [PUT] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.RetryError as err:
      msg = "Client retry error to %s with [PUT] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.ReadTimeout as err:
      msg = "Client read timeout to %s with [PUT] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    return self.write_ok

  def write(self, data):
    """Write wrapper (noop). Actual stream is initiated by the write
    connection.

    Parameters
    ----------
    data : object
      Data to write.

    """
    pass

  def write_close(self):
    """File-handle close wrapper (noop).

    """
    try:
      self.write_session.close()
      self.executor.shutdown(wait=False)
      self.source.breakiter()
      self.source = None
      self.executor = None
      self.write_session = None
    except:
      pass

  @property
  def read_ok(self):
    """
    Are we connected for reads?
    """
    return bool(self.read_response)

  def connect_read(self, device_uid=None, pragma=None):
    """Initialize a streaming read/write HTTP response. Manually connects
    the underlying file-handle. In the event of a network
    disconnection, use to manually reinitiate an HTTP session.

    """
    header_device_uid = device_uid or self.device_uid
    headers = {'Device-Uid': header_device_uid, 'Accept': BROKER_SBP_TYPE, 'Pragma': pragma}
    if not pragma:
      del headers['Pragma']
    try:
      self.read_response = self.read_session.get(self.url,
                                                 stream=True,
                                                 headers=headers,
                                                 timeout=self.timeout)
    except requests.exceptions.ConnectionError as err:
      msg = "Client connection error to %s with [GET] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.ConnectTimeout as err:
      msg = "Client connection timeout to %s with [GET] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.RetryError as err:
      msg = "Client retry error to %s with [GET] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    except requests.exceptions.ReadTimeout as err:
      msg = "Client read timeout to %s with [GET] headers %s: msg=%s" \
            % (self.url, headers, str(err))
      warnings.warn(msg)
    return self.read_ok

  def read(self, size):
    """Read wrapper. If the client connection is closed or some other
    exception is thrown, raises an IOError.

    Parameters
    ----------
    size : int
      Size to read (in bytes).

    Returns
    ----------
    bytearray, or None

    """
    if self.read_response is None or not self.device_uid:
      raise ValueError("Invalid/insufficient HTTP request parameters!")
    elif not self.read_ok or self.read_response.raw.closed:
      raise IOError("HTTP read closed?!")
    try:
      return self.read_response.raw.read(size)
    except:
      raise IOError("HTTP read error!")

  def read_close(self):
    """File-handle close wrapper (noop).

    """
    try:
      self.read_response.close()
      self.read_response = None
    except:
      pass
Example No. 14
class MozDefMessage(object):
    # Supported message types
    MSGTYPE_NONE = 0
    MSGTYPE_EVENT = 1
    MSGTYPE_COMPLIANCE = 2
    MSGTYPE_VULNERABILITY = 3
    MSGTYPE_ASSETHINT = 4
    MSGTYPE_RRA = 5

    def __init__(self, url):
        """This class is the new base class for MozDef messages. All other
        classes besides MozDefMsg derive from this class or from classes
        derived from this class (like MozDefEvent). This class shouldn't be
        used directly and the derived classes should be used instead.

        Note the very similar name between this class and the MozDefMsg
        class but the differing purposes between the two classes (see the
        MozDefMsg docstring)
        """
        self._msgtype = self.MSGTYPE_NONE

        self.log = {}
        self._sendlog = {}

        self._httpsession = Session()
        self._httpsession.trust_env = False
        self._httpsession.hooks['response'].append(self._httpsession_cb)
        self._url = url
        self.hostname = socket.getfqdn()
        # This is due to some systems incorrectly
        # setting the hostname field to localhost.localdomain
        # so, we add logic to use a different 'hostname' method
        # if that's the case
        if self.hostname == 'localhost.localdomain':
            self.hostname = socket.gethostname()

        # Set some default options
        self._send_to_syslog = False
        self._send_to_sqs = False
        self._syslog_only = False
        self._fire_and_forget = False
        self._verify_certificate = False
        self._verify_path = None

    def __del__(self):
        '''
            Close out any Sessions we started.
        '''
        self._httpsession.close()

    def validate(self):
        return True

    def validate_log(self):
        return True

    def set_verify(self, f):
        self._verify_certificate = f

    def set_verify_path(self, p):
        self._verify_path = p

    def set_fire_and_forget(self, f):
        self._fire_and_forget = f

    def set_sqs_queue_name(self, f):
        self._sqs_queue_name = f

    def set_sqs_aws_account_id(self, f):
        self._sqs_aws_account_id = f

    def set_sqs_region(self, f):
        self._sqs_region = f

    def set_send_to_sqs(self, f):
        self._send_to_sqs = f

    def set_send_to_syslog(self, f, only_syslog=False):
        self._send_to_syslog = f
        self._syslog_only = only_syslog

    def syslog_convert(self):
        raise MozDefError('message type does not support syslog conversion')

    def construct(self):
        raise MozDefError('subclass of MozDefMessage must override construct()')

    def _httpsession_cb(self, session, response):
        if response.result().status_code != 200:
            if not self._fire_and_forget:
                raise MozDefError('POST failed with code %r' % \
                    response.result().status_code)

    def send_syslog(self):
        raise MozDefError('message type does not support syslog submission')

    def send(self):
        if not self.validate():
            raise MozDefError('message failed validation')
        self.construct()
        if not self.validate_log():
            raise MozDefError('message failed post construct validation')

        if self._send_to_syslog:
            self.send_syslog()
            if self._syslog_only:
                return

        if self._send_to_sqs:
            self.send_sqs()
            return

        vflag = self._verify_certificate
        if vflag:
            if self._verify_path is not None:
                vflag = self._verify_path

        buf = json.dumps(self._sendlog, sort_keys=True, indent=4)
        # Compatibility notes:
        # When updating either path (futures loaded or not loaded) please ensure both have the same functionality
        # The futures_loaded branch is used by Python 2; the non-loaded branch is for Python 3
        if futures_loaded:
            self._httpsession.post(self._url, buf, verify=vflag)
        else:
            response = self._httpsession.post(self._url, buf, verify=vflag)
            if not response.ok:
                if not self._fire_and_forget:
                    raise MozDefError('POST failed with code %r msg %s' %
                                      (response.status_code, response.text))
Example No. 15
class ManagedPersister(Persister):
    def __init__(self,
                 workflow_name: str,
                 wf_start_time: float,
                 service_url: str,
                 wf_exec_id=None,
                 context: str = None,
                 with_validation: bool = False,
                 db_name: str = None,
                 bag_size: int = 1,
                 should_send_to_file: bool = False,
                 should_send_to_service: bool = True):
        super().__init__(workflow_name, wf_start_time, wf_exec_id)
        self.retrospective_url = urljoin(service_url,
                                         "retrospective-provenance")
        self.prospective_url = urljoin(service_url, "prospective-provenance")
        self.context = context
        self.with_validation = with_validation
        self.db_name = db_name
        self.requests_queue = list()
        self.bag_size = bag_size
        self.should_send_to_service = should_send_to_service
        self.should_send_to_file = should_send_to_file

        self.session = None
        if self.should_send_to_service:
            logger.debug("You are using the Service URL: " + service_url)
            self.session = FuturesSession()

    def add_request(self, persistence_request: ProvRequestObj):
        try:
            request_data = persistence_request.as_dict()
            if self.context:
                request_data["context"] = self.context
            self.requests_queue.append(request_data)
            if len(self.requests_queue) >= self.bag_size:
                self._flush()
            # if `configuration` is present this object should be persisted synchronously
            # if "configuration" in prov_obj:
            #     self.__flush__(True)
        except Exception:
            logger.error("[Prov] Unexpected exception")
            traceback.print_exc()

    def _close(self):
        if self.session:
            logger.info(
                "Waiting to get response from all submitted provenance tasks..."
            )
            while not self.session.executor._work_queue.empty():
                # wait to guarantee that all provenance requests have been sent (fired) to collector service
                sleep(0.1)
        # Persist remaining tasks synchronously
        self._flush(all_and_wait=True)
        if self.session:
            self.session.close()

    def _flush(self, all_and_wait: bool = False):
        if len(self.requests_queue) > 0:
            if all_and_wait:
                logger.debug("Going to flush everything. Flushing " +
                             str(len(self.requests_queue)))
                if self.should_send_to_file:
                    offline_prov_log.debug(json.dumps(self.requests_queue))
                if self.should_send_to_service:
                    self._send_to_service(self.requests_queue)
                self.requests_queue = list()
            else:
                to_flush = self.requests_queue[:self.bag_size]
                logger.debug("Going to flush a part. Flushing " +
                             str(len(to_flush)) + " out of " +
                             str(len(self.requests_queue)))
                del self.requests_queue[:self.bag_size]
                if self.should_send_to_file:
                    offline_prov_log.debug(json.dumps(to_flush))
                if self.should_send_to_service:
                    self._send_to_service(to_flush)

    def _send_to_service(self, to_flush: List[dict]):
        params = {
            "with_validation": str(self.with_validation),
            "db_name": self.db_name
        }
        try:
            logger.debug("[Prov-Persistence]" + json.dumps(to_flush))
            # TODO: check whether we need this result() below
            r = self.session.post(self.retrospective_url,
                                  json=to_flush,
                                  params=params,
                                  verify=False).result()
        except ConnectionError as ex:
            logger.error(
                "[Prov][ConnectionError] There is a communication error between client and server -> "
                + str(ex))
            r = None
        except Exception as ex:
            traceback.print_exc()
            logger.error(
                "[Prov] Unexpected exception while adding retrospective provenance: "
                + type(ex).__name__ + "->" + str(ex))
            r = None
        # If requests were validated, check for errors
        if r and self.with_validation:
            self._log_validation_message(r)

    def persist_prospective(self, json_data: dict):
        try:
            if self.should_send_to_file:
                # Log the prospective document itself (the original logged
                # self.requests_queue, i.e. the retrospective queue).
                offline_prov_log.debug(json.dumps(json_data))
            if self.should_send_to_service:
                logger.debug("[Prov-Persistence][Prospective]" +
                             json.dumps(json_data))
                try:
                    r = self.session.post(self.prospective_url,
                                          json=json_data,
                                          params={
                                              'overwrite': True
                                          },
                                          verify=False).result()
                    if 200 <= r.status_code <= 209:
                        logger.debug(
                            "Prospective provenance inserted successfully.")
                    elif r.status_code == 406:
                        error_parsed = json.loads(r._content.decode('utf-8'))
                        error_obj = error_parsed['error'].replace("'", '"')
                        logger.error(error_obj)
                    elif r.status_code == 500:
                        r = self.session.put(self.prospective_url,
                                             json=json_data).result()
                        try:
                            assert 200 <= r.status_code <= 209
                        except AssertionError:
                            logger.error(
                                "Prospective provenance was not inserted correctly. Status code = "
                                + str(r.status_code))
                    elif r.status_code > 300:
                        logger.error(
                            "Prospective provenance was not inserted correctly. Status code = "
                            + str(r.status_code))
                except ConnectionError as ex:
                    traceback.print_exc()
                    logger.error(
                        "[Prov][ConnectionError] There is a communication error between client and server -> "
                        + str(ex))
                except Exception as ex:
                    logger.error(
                        "[Prov] Unexpected exception while adding prospective provenance: "
                        + type(ex).__name__)
        except Exception as ex:
            logger.error("[Prov] Unexpected exception " + type(ex).__name__)
            traceback.print_exc()

    @staticmethod
    def _log_validation_message(response):
        error_obj = json.loads(response._content.decode('utf-8'))
        if len(error_obj['error']) > 0:
            for error_list in error_obj['error']:
                for error in error_list:
                    if error['code'][0] == 'W':
                        logger.warning('{} {}{}'.format(
                            error['type'], error['explanation'], '\n'))
                    else:
                        logger.error('{} {}{}'.format(error['type'],
                                                      error['explanation'],
                                                      '\n'))
Example No. 16
    def run(self):
        settings = QSettings()
        pref_target_path = settings.value(Settings.SETTINGS_SAVE_PATH, Settings.DEFAULT_TARGET_PATH, type=str)
        pref_max_pool_cnt = settings.value(Settings.SETTINGS_MAX_POOL_CNT, Settings.DEFAULT_MAX_POOL, type=int)
        gallery_save_path = pref_target_path+'/'+self.gallery.path
        if not os.path.exists(gallery_save_path):
            os.makedirs(gallery_save_path)

        # Cloudflare Authorization
        self.state.emit('Authorize..')
        Logger.LOGGER.info("Wait for Cloudflare Authorization..")
        self.driver.get(URL_HIYOBI)
        while "Just a moment..." in self.driver.page_source:
            pass
        user_agent = self.driver.execute_script("return navigator.userAgent;")

        try:
            cookie_value = '__cfduid=' + self.driver.get_cookie('__cfduid')['value'] + \
                           '; cf_clearance=' + self.driver.get_cookie('cf_clearance')['value']
            headers = {'User-Agent': user_agent}
            cookies = {'session_id': cookie_value}
        except TypeError:
            Logger.LOGGER.warning("Not apply cookies to requests")
            headers = None
            cookies = None

        # Fetch image data from gallery page
        self.state.emit('Fetch..')
        Logger.LOGGER.info("Connect to Gallery page..")
        self.driver.get(self.gallery.url)
        sleep(1)
        soup = BeautifulSoup(self.driver.page_source, "html.parser")

        # Start download multi-thread
        Logger.LOGGER.info("Download Start..")
        img_urls = soup.find_all('div', class_="img-url")
        self.total_cnt = len(img_urls)
        session = FuturesSession(max_workers=pref_max_pool_cnt)
        if headers is not None:
            session.headers.update(headers)
        if cookies is not None:
            # Update the jar; assigning a plain dict breaks requests' cookie merging.
            session.cookies.update(cookies)
        responses = {}
        for url_path in img_urls:
            url = READER_URL+url_path.text
            name = url.split('/')[-1]
            responses[name] = session.get(url)
        for filename in responses:
            self.response_to_file(response=responses[filename].result(), name=filename, path=gallery_save_path)
        session.close()

        # Compress Zip Files
        self.state.emit('Compressing..')
        if self.gallery.original != "":
            zip_path = pref_target_path+'/'+self.gallery.type+'/'+self.gallery.original+'/'+self.gallery.path+'.zip'
        else:
            zip_path = pref_target_path+'/'+self.gallery.type+'/'+self.gallery.path+'.zip'

        try:
            if not os.path.exists(zip_path[:zip_path.rfind('/')]):
                os.makedirs(zip_path[:zip_path.rfind('/')])
            FileUtil.make_zip(gallery_save_path, zip_path)
            shutil.rmtree(gallery_save_path)
        except:
            print(traceback.format_exc())
            Logger.LOGGER.error("Compressing Process Error... pass")
        # Save to Firebase
        # TODO Enable next line on Build
        FirebaseClient.fbclient.insert_data(self.gallery)
Example No. 17
class THttpClient(TTransportBase):
    '''Http implementation of TTransport base.'''

    def __init__(self, uri_or_host, port=None, path=None, customThrift=False, request='httplib', http2=False, proxy_host=None, proxy_port=None, proxy_auth=None):
        '''THttpClient supports two different types constructor parameters.

        THttpClient(host, port, path) - deprecated
        THttpClient(uri)

        Only the second supports https.
        '''
        if port is not None:
            warnings.warn(
                'Please use the THttpClient("http://host:port/path") syntax',
                DeprecationWarning,
                stacklevel=2
            )
            self.host = uri_or_host
            self.port = port
            assert path
            self.path = path
            self.scheme = 'http'
        else:
            parsed = urllib.parse.urlparse(uri_or_host)
            self.scheme = parsed.scheme
            assert self.scheme in ('http', 'https')
            if self.scheme == 'http':
                self.port = parsed.port or http_client.HTTP_PORT
            elif self.scheme == 'https':
                self.port = parsed.port or http_client.HTTPS_PORT
            self.host = parsed.hostname
            self.path = parsed.path
            if parsed.query:
                self.path += '?%s' % parsed.query
        proxy = None
        self.request = request
        self.http2 = http2
        self.realhost = proxy_host
        self.realport = proxy_port
        self.proxy_auth = proxy_auth
        self.__wbuf = BytesIO()
        if self.scheme == 'https' and self.using_proxy() and self.proxy_auth:
            self.proxy_headers = {'Proxy-Authorization': self.proxy_auth}
        else:
            self.proxy_headers = None
        self.url = '%s://%s:%s%s' % (self.scheme, self.host, self.port, self.path)
        if customThrift:
            if self.request == 'hyper':
                if self.http2:
                    self.__http = hyper.HTTP20Connection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
                else:
                    self.__http = hyper.HTTPConnection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
            elif self.request == 'httpx':
                if self.http2:
                    self.__http = httpx.AsyncClient(base_url='%s://%s' % (self.scheme, self.host), http2=self.http2)
                else:
                    self.__http = httpx.Client(base_url='%s://%s' % (self.scheme, self.host))
            elif self.request == 'requests':
                self.__http = requests.Session()
                if self.using_proxy():
                    self.__http.proxies = urllib.request.getproxies()
            elif self.request == 'requests-futures':
                self.__http = FuturesSession()
                if self.using_proxy():
                    self.__http.proxies = urllib.request.getproxies()
            elif self.request == 'httplib2':
                self.__http = httplib2.Http()
            else:
                if self.scheme == 'http':
                    self.__http = http_client.HTTPConnection(self.host, self.port)
                elif self.scheme == 'https':
                    self.__http = http_client.HTTPSConnection(self.host, self.port)
                    if self.using_proxy():
                        self.__http.set_tunnel(self.realhost, self.realport, self.proxy_headers)
        else:
            self.__http = None
        self.__async_loop = asyncio.get_event_loop() if self.request == 'httpx' and self.http2 else None
        self.__http_response = None
        self.__response_data = None
        self.__last_read = 0
        self.__timeout = None
        self.__custom_headers = None
        self.__time = time.time()
        self.__custom_thrift = customThrift
        self.__loop = 0

    @staticmethod
    def basic_proxy_auth_header(proxy):
        if proxy is None or not proxy.username:
            return None
        ap = '%s:%s' % (urllib.parse.unquote(proxy.username),
                        urllib.parse.unquote(proxy.password))
        # b64encode operates on bytes in Python 3: encode first, decode the result
        cr = base64.b64encode(ap.encode()).strip()
        return 'Basic ' + cr.decode()

    def using_proxy(self):
        return self.realhost is not None

    def open(self):
        if self.request == 'hyper':
            if self.http2:
                self.__http = hyper.HTTP20Connection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
            else:
                self.__http = hyper.HTTPConnection(self.host, self.port, proxy_host=self.realhost, proxy_port=self.realport, proxy_headers=self.proxy_headers)
        elif self.request == 'httpx':
            if self.http2:
                self.__http = httpx.AsyncClient(base_url='%s://%s' % (self.scheme, self.host), http2=self.http2)
            else:
                self.__http = httpx.Client(base_url='%s://%s' % (self.scheme, self.host))
        elif self.request == 'requests':
            self.__http = requests.Session()
            if self.using_proxy():
                self.__http.proxies = urllib.request.getproxies()
        elif self.request == 'requests-futures':
            self.__http = FuturesSession()
            if self.using_proxy():
                self.__http.proxies = urllib.request.getproxies()
        elif self.request == 'httplib2':
            self.__http = httplib2.Http()
        else:
            if self.scheme == 'http':
                self.__http = http_client.HTTPConnection(self.host, self.port)
            elif self.scheme == 'https':
                self.__http = http_client.HTTPSConnection(self.host, self.port)
                if self.using_proxy():
                    self.__http.set_tunnel(self.realhost, self.realport, self.proxy_headers)

    def close(self):
        if self.request != 'httpx':  # httpx clients manage their own lifecycle (AsyncClient needs aclose())
            self.__http.close()
        self.__http = None
        self.reset()

    def reset(self):
        self.__http_response = None
        self.__response_data = None
        self.__last_read = 0

    def getHeaders(self):
        return self.headers

    def isOpen(self):
        return self.__http is not None

    def setTimeout(self, ms):
        if not hasattr(socket, 'getdefaulttimeout'):
            raise NotImplementedError

        if ms is None:
            self.__timeout = None
        else:
            self.__timeout = ms / 1000.0

    def setCustomHeaders(self, headers):
        self.__custom_headers = headers

    def read(self, sz):
        if self.request in ['httpx', 'httplib2', 'requests', 'requests-futures']:
            # These clients buffer the whole body up front, so emulate a
            # file-like read by slicing from the last read offset.
            max_sz = self.__last_read + sz
            min_sz = self.__last_read
            self.__last_read = max_sz
            content = self.__response_data[min_sz:max_sz]
        else:
            content = self.__http_response.read(sz)
        return content

    def write(self, buf):
        self.__wbuf.write(buf)

    def __withTimeout(f):
        # Swap this transport's timeout in as the socket default for the
        # duration of the call, restoring the original afterwards.
        def _f(*args, **kwargs):
            orig_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(args[0].__timeout)
            try:
                result = f(*args, **kwargs)
            finally:
                socket.setdefaulttimeout(orig_timeout)
            return result
        return _f

    async def httpx_flush(self, data, headers):
        # Building httpx request
        request = self.__http.build_request('POST', self.path, data=data, headers=headers)

        # Sending httpx request
        self.__http_response = await self.__http.send(request)
        self.code = self.__http_response.status_code
        self.message = self.__http_response.reason_phrase
        self.headers = self.__http_response.headers
        self.__response_data = self.__http_response.read()
        self.__last_read = 0

    def flush(self):
        if self.request == 'httplib': # Only when using httplib request
            if self.__custom_thrift:
                if self.__loop <= 2:
                    if self.isOpen():
                        self.close()
                    self.open()
                    self.__loop += 1
                elif time.time() - self.__time > 90:
                    self.close()
                    self.open()
                    self.__time = time.time()
            else:
                if self.isOpen():
                    self.close()
                self.open()
        else:
            self.reset()

        # Pull data out of buffer
        data = self.__wbuf.getvalue()
        self.__wbuf = BytesIO()

        if not self.__custom_headers or 'User-Agent' not in self.__custom_headers:
            user_agent = 'Python/THttpClient'
            script = os.path.basename(sys.argv[0])
            if script:
                user_agent = '%s (%s)' % (user_agent, urllib.parse.quote(script))
        else:
            user_agent = None
        if self.request == 'hyper':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending request with payload
            request = self.__http.request('POST', self.path, data, headers)

            # Get reply to flush the request
            self.__http_response = self.__http.get_response(request)
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        elif self.request == 'httpx':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            if self.http2:
                self.__async_loop.run_until_complete(self.httpx_flush(data, headers))
            else:
                # Building httpx request
                request = self.__http.build_request('POST', self.path, data=data, headers=headers)

                # Sending httpx request
                self.__http_response = self.__http.send(request)
                self.__response_data = self.__http_response.read()
                self.__last_read = 0
                self.code = self.__http_response.status_code
                self.message = self.__http_response.reason_phrase
                self.headers = self.__http_response.headers
        elif self.request == 'httplib2':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending and get reply to request
            self.__http_response, self.__response_data = self.__http.request(self.url, 'POST', headers=headers, body=data)
            self.__last_read = 0
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
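            # httplib2's Response object doubles as a dict of the reply headers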
            self.headers = self.__http_response
        elif self.request == 'requests':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending and get reply to request
            self.__http_response = self.__http.request('POST', self.url, data=data, headers=headers)
            self.__response_data = self.__http_response.content
            self.__last_read = 0
            self.code = self.__http_response.status_code
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        elif self.request == 'requests-futures':
            headers = {'Content-Type': 'application/x-thrift', 'Content-Length': str(len(data)), 'User-Agent': user_agent}
            if self.__custom_headers:
                headers.update(self.__custom_headers)

            # Sending request with payload
            request = self.__http.request('POST', self.url, data=data, headers=headers)

            # Get reply to flush the request
            self.__http_response = request.result()
            self.__response_data = self.__http_response.content
            self.__last_read = 0
            self.code = self.__http_response.status_code
            self.message = self.__http_response.reason
            self.headers = self.__http_response.headers
        else:
            # HTTP request
            if self.using_proxy() and self.scheme == 'http':
                # need full URL of real host for HTTP proxy here (HTTPS uses CONNECT tunnel)
                self.__http.putrequest('POST', 'http://%s:%s%s' %
                                    (self.realhost, self.realport, self.path))
            else:
                self.__http.putrequest('POST', self.path)

            # Write headers
            self.__http.putheader('Content-Type', 'application/x-thrift')
            self.__http.putheader('Content-Length', str(len(data)))
            if not self.__custom_headers or 'User-Agent' not in self.__custom_headers:
                self.__http.putheader('User-Agent', user_agent)

            if self.__custom_headers:
                for key, val in six.iteritems(self.__custom_headers):
                    self.__http.putheader(key, val)

            self.__http.endheaders()

            # Write payload
            self.__http.send(data)

            # Get reply to flush the request
            self.__http_response = self.__http.getresponse()
            self.code = self.__http_response.status
            self.message = self.__http_response.reason
            self.headers = self.__http_response.msg

    # Decorate if we know how to timeout
    if hasattr(socket, 'getdefaulttimeout'):
        flush = __withTimeout(flush)
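
# A minimal usage sketch (not from the source): driving the transport with the
# requests-futures backend. The endpoint URL and header below are placeholders.
transport = THttpClient('http://example.com/thrift', customThrift=True,
                        request='requests-futures')
transport.setCustomHeaders({'X-App': 'demo'})
transport.write(b'payload')   # normally written by the Thrift protocol layer
transport.flush()             # one POST; the future is resolved inside flush()
print(transport.code, transport.read(64))
transport.close()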
Example No. 18
class DatadogHTTPClient(object):
    """
    Client that sends a batch of logs over HTTP.
    """

    _POST = "POST"
    if DD_USE_COMPRESSION:
        _HEADERS = {
            "Content-type": "application/json",
            "Content-Encoding": "gzip"
        }
    else:
        _HEADERS = {"Content-type": "application/json"}

    def __init__(self,
                 host,
                 port,
                 no_ssl,
                 skip_ssl_validation,
                 api_key,
                 scrubber,
                 timeout=10):
        protocol = "http" if no_ssl else "https"
        self._url = "{}://{}:{}/v1/input/{}".format(protocol, host, port,
                                                    api_key)
        self._scrubber = scrubber
        self._timeout = timeout
        self._session = None
        self._ssl_validation = not skip_ssl_validation
        self._futures = []
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(
                f"Initialized http client for logs intake: "
                f"<host: {host}, port: {port}, url: {self._url}, no_ssl: {no_ssl}, "
                f"skip_ssl_validation: {skip_ssl_validation}, timeout: {timeout}>"
            )

    def _connect(self):
        self._session = FuturesSession(max_workers=DD_MAX_WORKERS)
        self._session.headers.update(self._HEADERS)

    def _close(self):
        # Resolve all the futures and log exceptions if any
        for future in as_completed(self._futures):
            try:
                future.result()
            except Exception:
                logger.exception("Exception while forwarding logs")

        self._session.close()

    def send(self, logs):
        """
        Sends a batch of logs; retries only on server and network errors.
        """
        try:
            data = self._scrubber.scrub("[{}]".format(",".join(logs)))
        except ScrubbingException:
            raise Exception("could not scrub the payload")
        if DD_USE_COMPRESSION:
            data = compress_logs(data, DD_COMPRESSION_LEVEL)

        # FuturesSession returns immediately with a future object
        future = self._session.post(self._url,
                                    data,
                                    timeout=self._timeout,
                                    verify=self._ssl_validation)
        self._futures.append(future)

    def __enter__(self):
        self._connect()
        return self

    def __exit__(self, ex_type, ex_value, traceback):
        self._close()
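
# A minimal usage sketch (not from the source): the context manager connects on
# entry and resolves every pending future on exit. The host, port, and scrubber
# below are placeholders/assumptions, not part of the original module.
class _NoopScrubber:
    def scrub(self, data):
        return data

with DatadogHTTPClient("http-intake.logs.datadoghq.com", 443, False, False,
                       "<DD_API_KEY>", _NoopScrubber()) as client:
    client.send(['{"message": "hello"}'])  # batch of pre-serialized JSON logs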
Example No. 19
        # Downscale the frame and JPEG-encode it for the request payload
        small_frame = cv2.resize(frame, (256, 256), interpolation=cv2.INTER_CUBIC)
        _, jpg = cv2.imencode(".jpg", small_frame)
        saved_data = base64.b64encode(jpg).decode('ascii')
        payload = json.dumps({"image": saved_data})
        future = session.post(url, data=payload, timeout=5, hooks={
            "response": response_hook
        })
        waiting = True

    if waiting: 
        if future.done(): 
            try: 
                response = future.result() 
            except Exception:
                # Clean up and retry on the next pass
                session.close()
                waiting = False
                print("Stopped again")
                continue
            waiting = False 
            try: 
                res = response.data['images'][0]['faces'][0]['attributes']
                res.pop('age') 
                res.pop('glasses')
                res.pop('gender')
                res.pop('lips')
                order = sorted(res.items(), key=lambda x: x[1], reverse=True)
                if order[0][0] == 'white': 
                    if random.random() < .9: 
                        slap_enable = True
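
# response_hook is not defined in this fragment. A plausible sketch, assuming
# the common requests-futures pattern of parsing the JSON body inside the hook
# and attaching it as response.data (which the attribute access above relies on):
def response_hook(response, *args, **kwargs):
    try:
        response.data = response.json()  # parsed in the session's worker thread
    except ValueError:
        response.data = None  # non-JSON reply; the caller must handle this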