Example No. 1
    def update(self, obj, callback=None):
        """
        Updates the value for a key atomically. Typical usage would be:

        c = etcd.Client()
        o = c.read("/somekey")
        o.value += 1
        c.update(o)

        Args:
            obj (etcd.EtcdResult):  The object that needs updating.

        """

        assert isinstance(obj, EtcdResult), "obj is not an EtcdResult."

        _log.debug("Updating %s to %s.", obj.key, obj.value)
        kwdargs = {
            'dir': obj.dir,
            'ttl': obj.ttl,
            'prevExist': True
        }

        if not obj.dir:
            # prevIndex on a dir causes a 'not a file' error. d'oh!
            kwdargs['prevIndex'] = obj.modifiedIndex
        return self.write(obj.key, obj.value, callback=callback, **kwdargs)
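A minimal sketch of the documented read-modify-update flow, extended with the callback parameter this port accepts; the key name and the handler are illustrative assumptions, and int() is used because etcd stores values as strings:

    c = etcd.Client()
    o = c.read("/somekey")
    o.value = int(o.value) + 1
    # non-blocking variant: the callback receives the resulting EtcdResult
    c.update(o, callback=lambda result: _log.debug("updated %s", result.key))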
Example No. 2
 def init_mysql_pool(self, jdbc_url, max_idle_conn=1, max_open_conn=10, max_recycle_sec=60):
     """
     :param jdbc_url: mysql://root:xxx@xiaoqiang-zdm:3306/mysql
     :return:
     """
     if not jdbc_url:
         return
 
     gen_log.debug("jdbc_url: %s", jdbc_url)
     conf = urlparse.urlparse(jdbc_url)
     gen_log.debug("hostname: %s, db: %s, user: %s, passwd: %s, port: %s",
                   conf.hostname, conf.path, conf.username, conf.password, conf.port)
 
     db = ''
     if len(conf.path) > 1:
         db = conf.path[1:]
 
     return tornado_mysql.pools.Pool(
         dict(
             host=conf.hostname, port=conf.port, db=db,
             user=conf.username, passwd=conf.password,
             charset='utf8',
             cursorclass=tornado_mysql.cursors.DictCursor
         ),
         max_idle_connections=max_idle_conn,
         max_open_connections=max_open_conn,
         max_recycle_sec=max_recycle_sec
     )
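A hypothetical call from inside a Tornado coroutine, assuming a reachable MySQL server; pool.execute is the same tornado_mysql pool API used in Example No. 29 below:

    pool = self.init_mysql_pool("mysql://root:xxx@localhost:3306/test")
    cur = yield pool.execute("SELECT 1")
    print(cur.fetchone())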
Example No. 3
    def watch(self, key, index=None, timeout=None, recursive=None, callback=None):
        # todo
        """
        Blocks until a new event has been received, starting at index 'index'

        Args:
            key (str):  Key.

            index (int): Index to start from.

            timeout (int):  max seconds to wait for a read.

        Returns:
            client.EtcdResult

        Raises:
            KeyValue:  If the key doesn't exist.

            urllib3.exceptions.TimeoutError: If timeout is reached.

        >>> print client.watch('/key').value
        'value'

        """
        _log.debug("About to wait on key %s, index %s", key, index)
        if index:
            return self.read(key, wait=True, waitIndex=index, timeout=timeout,
                             recursive=recursive, callback=callback)
        else:
            return self.read(key, wait=True, timeout=timeout,
                             recursive=recursive, callback=callback)
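A hedged sketch of a typical watch loop built on this method: resuming from modifiedIndex + 1 avoids missing events that fire between reads (the key name is illustrative):

    result = client.read("/key")
    while True:
        result = client.watch("/key", index=result.modifiedIndex + 1)
        print(result.value)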
Example No. 4
 def remove_handler(self, fd):
     self._handlers.pop(fd, None)
     self._events.pop(fd, None)
     try:
         self._impl.unregister(fd)
     except Exception:
         gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
Example No. 5
 def _callback(fut):
     exc = fut.exc_info()
     if exc:
         if not isinstance(exc[1], etcdexcept.EtcdException):
             # We can't get the list of machines, if one server is in the
             # machines cache, try on it
             _log.error("Failed to get list of machines from %s%s: %r; retrying.",
                        uri, self.version_prefix, exc)
             if self._machines_cache:
                 self._base_url = self._machines_cache.pop(0)
                 _log.debug("Retrying on %s", self._base_url)
                 # Call myself
                 self.ioloop.add_future(self.search_machine(), _callback)
                 return
             else:
                 raise etcdexcept.EtcdException("Could not get the list of servers, "
                                                "maybe you provided the wrong "
                                                "host(s) to connect to?")
     else:
         response = fut.result()
         machines = [
             node.strip() for node in
             self._handle_server_response(response).body.decode('utf-8').split(',')
             ]
         _log.debug("Retrieved list of machines: %s", machines)
         self._machines_cache = machines
         if self._base_url not in self._machines_cache:
             self._base_url = self._choice_machine()
     callback(fut.result())
Example No. 6
    def close(self, all_fds=False):
        """Closes the IOLoop, freeing any resources used.

        If ``all_fds`` is true, all file descriptors registered on the
        IOLoop will be closed (not just the ones created by the IOLoop itself).

        Many applications will only use a single IOLoop that runs for the
        entire lifetime of the process.  In that case closing the IOLoop
        is not necessary since everything will be cleaned up when the
        process exits.  `IOLoop.close` is provided mainly for scenarios
        such as unit tests, which create and destroy a large number of
        IOLoops.

        An IOLoop must be completely stopped before it can be closed.  This
        means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must
        be allowed to return before attempting to call `IOLoop.close()`.
        Therefore the call to `close` will usually appear just after
        the call to `start` rather than near the call to `stop`.
        """
        self.remove_handler(self._waker.fileno())
        if all_fds:
            for fd in self._handlers.keys()[:]:
                try:
                    os.close(fd)
                except Exception:
                    gen_log.debug("error closing fd %s", fd, exc_info=True)
        self._waker.close()
        self._impl.close()
Example No. 7
 def fetch_impl(self, request, callback):
     self.queue.append((request, callback))
     self._process_queue()
     if self.queue:
         gen_log.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." % (
                           len(self.active), len(self.queue)))
Example No. 8
 def _on_content_headers(self, data, buf=b""):
     self._content_length_left -= len(data)
     data = self._boundary_buffer + data
     gen_log.debug("file header is %r", data)
     self._boundary_buffer = buf
     header_data = data[self._boundary_len + 2 :].decode("utf-8")
     headers = tornado.httputil.HTTPHeaders.parse(header_data)
     disp_header = headers.get("Content-Disposition", "")
     disposition, disp_params = tornado.httputil._parse_header(disp_header)
     if disposition != "form-data":
         gen_log.warning("Invalid multipart/form-data")
         self._read_content_body(None)
         return
     if not disp_params.get("name"):
         gen_log.warning("multipart/form-data value missing name")
         self._read_content_body(None)
         return
     name = disp_params["name"]
     if disp_params.get("filename"):
         ctype = headers.get("Content-Type", "application/unknown")
         fd, tmp_filename = tempfile.mkstemp(suffix=".tmp", prefix="tornado")
         self._request.files.setdefault(name, []).append(
             tornado.httputil.HTTPFile(
                 filename=disp_params["filename"], tmp_filename=tmp_filename, content_type=ctype
             )
         )
         self._read_content_body(os.fdopen(fd, "wb"))
     else:
         gen_log.warning("multipart/form-data is not file upload, skipping...")
         self._read_content_body(None)
Example No. 9
    def process_message(self, client, msg):
        try:
            gen_log.debug("Received from WS: %s" % msg)
            msg = json.loads(msg)
        except ValueError:
            gen_log.warning("Received message not in json from client %s: %s"
                            % (client.remote_address, msg))
            return

        try:
            service_name = msg['service']
        except KeyError:
            gen_log.warning("Malformed message from client %s: %s"
                           % (client.remote_address, msg))
            return

        try:
            service = self.services[service_name]
        except KeyError:
            gen_log.warning(
                'Message for non existing service "%s" from client %s'
                % (service_name, client.remote_address))
            return

        service.process_message(client, msg)
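For reference, a payload that passes all three checks above; the service name "echo" and the extra field are assumptions about what self.services contains:

    import json

    msg = json.dumps({"service": "echo", "payload": "hello"})
    # e.g. sent from a Tornado client:
    # conn = yield tornado.websocket.websocket_connect("ws://localhost:8888/ws")
    # conn.write_message(msg)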
Example No. 10
    def init_mysql_pool(self, jdbc_url, max_connections=10, idle_seconds=60, wait_connection_timeout=3):
        """
        :param jdbc_url: mysql://root:xxx@xiaoqiang-zdm:3306/mysql
        :return:
        """
        if not jdbc_url:
            return
    
        gen_log.debug("jdbc_url: %s", jdbc_url)
        conf = urlparse.urlparse(jdbc_url)
        gen_log.debug("hostname: %s, db: %s, user: %s, passwd: %s, port: %s",
                      conf.hostname, conf.path, conf.username, conf.password, conf.port)
    
        db = ''
        if len(conf.path) > 1:
            db = conf.path[1:]

        return tormysql.ConnectionPool(
            max_connections=int(max_connections),  # max open connections
            idle_seconds=int(idle_seconds),  # connection idle timeout in seconds; 0 disables the timeout
            wait_connection_timeout=int(wait_connection_timeout),  # seconds to wait for a free connection
            host=conf.hostname,
            user=conf.username,
            passwd=conf.password,
            db=db,
            charset="utf8"
        )
Example No. 11
 def resume_reading(self):
     """
     Alias for adding the read interest to the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Resuming recv events for stream(fd:{0})'.format(
             self.fd))
     self._add_event_interest(self._io_loop.READ)
Example No. 12
 def enable_errors(self):
     """
     Alias for adding the error interest from the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Resuming error events for stream(fd:{})'.format(
             self.fileno))
     self._add_event_interest(ERROR)
Example No. 13
 def enable_writes(self):
     """
     Alias for adding the send interest to the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Resuming write events for stream(fd:{})'.format(
             self.fileno))
     self._add_event_interest(WRITE)
Example No. 14
 def _handle_challenge(self, challenge):
     try:
         challenge_response = self.challenge_response(challenge)
     except ValueError:
         gen_log.debug("Malformed key data in WebSocket request")
         self._abort()
         return
     self._write_response(challenge_response)
Example No. 15
 def enable_reads(self):
     """
     Alias for adding the read interest to the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Resuming read events for stream(fd:{0})'.format(
             self.fileno))
     self._add_event_interest(READ)
Example No. 16
 def accept_connection(self):
     try:
         self._handle_websocket_headers()
         self._accept_connection()
     except ValueError:
         gen_log.debug("Malformed WebSocket request received", exc_info=True)
         self._abort()
         return
Example No. 17
 def disable_reading(self):
     """
     Alias for removing the read interest from the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Halting read events for stream(fd:{})'.format(
             self.fd))
     self._drop_event_interest(self._io_loop.READ)
Example No. 18
 def remove_handler(self, fd):
     """Stop listening for events on fd."""
     self._handlers.pop(fd, None)
     self._events.pop(fd, None)
     try:
         self._impl.unregister(fd)
     except (OSError, IOError):
         gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
Example No. 19
 def disable_errors(self):
     """
     Alias for removing the error interest from the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Halting error events for stream(fd:{})'.format(
             self.fileno))
     self._drop_event_interest(ERROR)
Example No. 20
 def disable_writes(self):
     """
     Alias for removing the send interest from the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Halting write events for stream(fd:{})'.format(
             self.fileno))
     self._drop_event_interest(WRITE)
Example No. 21
 def resume_writing(self):
     """
     Alias for adding the send interest to the event handler.
     """
     if _SHOULD_LOG_DEBUG_OUTPUT:
         gen_log.debug('Resuming send events for stream(fd:{})'.format(
             self.fd))
     self._add_event_interest(self._io_loop.WRITE)
Example No. 22
    def on_finish(self):
        _id = self.get_query_argument('id')

        try:
            del self.application.storage[_id]
        except KeyError:
            pass

        gen_log.debug('#{_id}: cleanup.'.format(_id=_id))
Example No. 23
 def _execute(self, sql, args=None):
     pool = choice(SLAVE_TOR_MYSQL_POOL)
     gen_log.debug("TorMysqlClient _execute sql: %s, args: %s, pool: %s", sql, str(args), pool)
     with (yield pool.Connection()) as conn:
         with conn.cursor() as cursor:
             res = yield cursor.execute(sql, args)
             yield conn.commit()
             gen_log.debug("TorMysqlClient _execute res: %s", res)
             raise gen.Return(cursor)
Example No. 24
 def close(self, all_fds=False):
     self.remove_handler(self._waker.fileno())
     if all_fds:
         for fd in self._handlers.keys()[:]:
             try:
                 os.close(fd)
             except Exception:
                 gen_log.debug("error closing fd %s", fd, exc_info=True)
     self._waker.close()
     self._impl.close()
Example No. 25
 def api_request(self, *path, **kwargs):
     """Make an API request"""
     url = url_path_join(self.url, 'api', *path)
     kwargs.setdefault('method', 'GET')
     kwargs['url'] = url
     gen_log.debug("%s %s", kwargs['method'], url)
     r = requests.request(**kwargs)
     r.raise_for_status()
     if r.text != '':
         return r.json()
Example No. 26
 def get_access_token(self):
     token = self.get_argument("access_token", "")
     if not token:
         try:
             token_str = self.request.headers.get("Authorization")
             token = token_str.replace("token ", "")
         except AttributeError:
             token = ""
     gen_log.debug('token: "{}"'.format(token))
     return token
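The two token sources the handler checks, seen from the client side; the URL and token value are placeholders:

    import requests

    requests.get("http://localhost:8888/api", params={"access_token": "abc123"})
    requests.get("http://localhost:8888/api", headers={"Authorization": "token abc123"})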
Example No. 27
 def fetchall(self, sql, args=None):
     """
     desc: 获取所有的数据
     :param sql:
     :param args:
     :return:
     """
     with (yield self._execute(sql, args)) as cursor:
         msg = cursor.fetchall()
         gen_log.debug("TorMysqlClient fetchall msg: %s", msg)
         raise gen.Return(msg)
Example No. 28
 def remove_handler(self, fd):
     """
     移除主循环中已存在的socket
     """
     fd, obj = self.split_fd(fd)
     self._handlers.pop(fd, None)
     self._events.pop(fd, None)
     try:
         self._impl.unregister(fd)
     except Exception:
         gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
Example No. 29
 def execute(self, sql, args=None):
     """
     desc: 插入/更新/删除操作
     :param sql:
     :param args:
     :return:
     """
     pool = choice(MASTER_TORNADO_MYSQL_POOL)
     res = yield pool.execute(sql, args)
      gen_log.debug("TornadoMysqlClient execute res: %s", res)
     raise gen.Return(res)
Example No. 30
    def _close(self):
        """Close this stream."""
        if self._socket is not None:
            gen_log.debug('Closing stream(fd: {})'.format(self.handle.fd))

            self.handle.remove_handler()

            self._socket.close()
            self._socket = None

            if self._close_cb:
                self._run_callback(self._close_cb)
Example No. 31
 def close(self, all_fds=False):
     with self._callback_lock:
         self._closing = True
     self.remove_handler(self._waker.fileno())
     if all_fds:
         for fd in self._handlers.keys():
             try:
                 close_method = getattr(fd, 'close', None)
                 if close_method is not None:
                     close_method()
                 else:
                     os.close(fd)
             except Exception:
                 gen_log.debug("error closing fd %s", fd, exc_info=True)
     self._waker.close()
     self._impl.close()
Example No. 32
 def fetch_impl(self, request, callback):
     key = object()
     self.queue.append((key, request, callback))
     if not len(self.active) < self.max_clients:
         timeout_handle = self.io_loop.add_timeout(
             self.io_loop.time() +
             min(request.connect_timeout, request.request_timeout),
             functools.partial(self._on_timeout, key, "in request queue"))
     else:
         timeout_handle = None
     self.waiting[key] = (request, callback, timeout_handle)
     self._process_queue()
     if self.queue:
         gen_log.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." %
                       (len(self.active), len(self.queue)))
Example No. 33
    def _on_headers(self, data):
      try:
        data = data.decode('latin1')
        eol = data.find("\r\n")
        start_line = data[:eol]
        try:
          method, uri, version = start_line.split(" ")
        except ValueError:
          raise tornado.httpserver._BadRequestException("Malformed HTTP request line")
        if not version.startswith("HTTP/"):
          raise tornado.httpserver._BadRequestException("Malformed HTTP version in HTTP Request-Line")
        headers = tornado.httputil.HTTPHeaders.parse(data[eol:])

        # HTTPRequest wants an IP, not a full socket address
        if self.address_family in (socket.AF_INET, socket.AF_INET6):
          remote_ip = self.address[0]
        else:
          # Unix (or other) socket; fake the remote address
          remote_ip = '0.0.0.0'

        self._request = tornado.httpserver.HTTPRequest(
          connection=self, method=method, uri=uri, version=version,
          headers=headers, remote_ip=remote_ip, protocol=self.protocol)

        content_length = headers.get("Content-Length")
        if content_length:
          content_length = int(content_length)
          use_tmp_files = self._get_handler_info()
          if not use_tmp_files and content_length > self.stream.max_buffer_size:
            raise tornado.httpserver._BadRequestException("Content-Length too long")
          if headers.get("Expect") == "100-continue":
            self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
          if use_tmp_files:
            gen_log.debug('using temporary files for uploading')
            self._receive_content(content_length)
          else:
            gen_log.debug('using memory for uploading')
            self.stream.read_bytes(content_length, self._on_request_body)
          return

        self.request_callback(self._request)
      except tornado.httpserver._BadRequestException as e:
        gen_log.info("Malformed HTTP request from %s: %s",
              self.address[0], e)
        self.close()
        return
Example No. 34
    def _handle_server_response(self, response):
        """ Handles the server response """
        if response.code in [200, 201]:
            return response

        else:
            _log.debug("Response %s", response.body)
            resp = response.body.decode('utf-8')

            # throw the appropriate exception
            try:
                r = json.loads(resp)
                r['status'] = response.code
            except (TypeError, ValueError):
                # Bad JSON, make a response locally.
                r = {"message": "Bad response", "cause": str(resp)}
            etcdexcept.EtcdError.handle(r)
Example No. 35
            def _callback(fut):
                exc = fut.exc_info()
                if exc:
                    exc_obj = exc[1]
                    if isinstance(exc_obj, HTTPError) and exc_obj.code < 500:
                        result = self._handle_server_response(exc_obj.response)
                        new_future.set_result(result)
                        return
                    elif (isinstance(exc_obj, HTTPError)
                          and exc_obj.code >= 500) or isinstance(
                              exc_obj, socket.error):
                        _log.error("Request to server %s failed: %r",
                                   self._base_url, exc_obj)
                        if isinstance(params,
                                      dict) and params.get("wait") == "true":
                            _log.debug("Watch timed out.")
                            raise etcdexcept.EtcdWatchTimedOut(
                                "Watch timed out: %r" % exc_obj, cause=exc_obj)
                        if self._allow_reconnect and fut._retry:
                            _log.info(
                                "Reconnection allowed, looking for another "
                                "server.")
                            self._base_url = fut._retry.pop(0)
                            fut_reconnect = func(self,
                                                 path,
                                                 method,
                                                 params=params,
                                                 timeout=timeout)
                            fut_reconnect._retry = fut._retry
                            self.ioloop.add_future(fut_reconnect, _callback)
                            return

                        else:
                            _log.debug("Reconnection disabled, giving up.")
                            raise etcdexcept.EtcdConnectionFailed(
                                "Connection to etcd failed due to %r" %
                                exc_obj,
                                cause=exc_obj)

                    else:
                        _log.exception("Unexpected request failure.")

                result = fut.result()
                self._check_cluster_id(result)
                result = self._handle_server_response(fut.result())
                new_future.set_result(result)
Example No. 36
    def get(self):
        """
        Return file upload progress
        """
        _id = self.get_query_argument('id')

        gen_log.debug('#{_id}: progress request.'.format(_id=_id))

        try:
            progress = self.application.storage[_id]

            self.write({
                'id': _id,
                'progress': progress,
            })
        except KeyError:
            self.send_error(404)
Example No. 37
 async def init_container_info(self) -> bool:
     """
     必须在执行容器客户端之前显示调用
     初始化容器信息
     """
     if self.container_id is None:
         self.container_is = False
         return True
     try:
         self.container_client = await self.docker_client.containers.get(
             self.container_id)
         return True
      except Exception:
          # if the given id doesn't match any container, refuse access outright
         gen_log.debug("The container could not be found")
         await self.send_message(code="400")
         return False
Example No. 38
    def accept_connection(self):
        try:
            self._handle_websocket_headers()
        except ValueError:
            self.handler.set_status(400)
            log_msg = "Missing/Invalid WebSocket headers"
            self.handler.finish(log_msg)
            gen_log.debug(log_msg)
            return

        try:
            self._accept_connection()
        except ValueError:
            gen_log.debug("Malformed WebSocket request received",
                          exc_info=True)
            self._abort()
            return
Example No. 39
def load_gettext_translations(directory: str, domain: str) -> None:
    """Loads translations from `gettext`'s locale tree

    Locale tree is similar to system's ``/usr/share/locale``, like::

        {directory}/{lang}/LC_MESSAGES/{domain}.mo

    Three steps are required to have your app translated:

    1. Generate POT translation file::

        xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc

    2. Merge against existing POT file::

        msgmerge old.po mydomain.po > new.po

    3. Compile::

        msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
    """
    import gettext

    global _translations
    global _supported_locales
    global _use_gettext
    _translations = {}
    for lang in os.listdir(directory):
        if lang.startswith("."):
            continue  # skip .svn, etc
        if os.path.isfile(os.path.join(directory, lang)):
            continue
        try:
            os.stat(
                os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
            _translations[lang] = gettext.translation(domain,
                                                      directory,
                                                      languages=[lang])
        except Exception as e:
            gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
            continue
    _supported_locales = frozenset(
        list(_translations.keys()) + [_default_locale])
    _use_gettext = True
    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
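A short usage sketch, assuming compiled .mo catalogs already exist under ./locale/{lang}/LC_MESSAGES/mydomain.mo:

    import tornado.locale

    tornado.locale.load_gettext_translations("locale", "mydomain")
    user_locale = tornado.locale.get("pt_BR")
    print(user_locale.translate("Sign out"))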
Example No. 40
def load_gettext_translations(directory, domain):
    """从 `gettext` 的区域树加载翻译

    区域树和系统的 ``/usr/share/locale`` 很类似, 例如::

        {directory}/{lang}/LC_MESSAGES/{domain}.mo

    让你的应用程序翻译有三步是必须的:

    1. 生成POT翻译文件::

        xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc

    2. 合并现有的POT文件::

        msgmerge old.po mydomain.po > new.po

    3. 编译::

        msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
    """
    import gettext
    global _translations
    global _supported_locales
    global _use_gettext
    _translations = {}
    for lang in os.listdir(directory):
        if lang.startswith('.'):
            continue  # skip .svn, etc
        if os.path.isfile(os.path.join(directory, lang)):
            continue
        try:
            os.stat(
                os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
            _translations[lang] = gettext.translation(domain,
                                                      directory,
                                                      languages=[lang])
        except Exception as e:
            gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
            continue
    _supported_locales = frozenset(
        list(_translations.keys()) + [_default_locale])
    _use_gettext = True
    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
Example No. 41
    def send(self,
             request,
             stream=False,
             timeout=None,
             verify=True,
             cert=None,
             proxies=None):
        """Sends Request object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: trip.adapters.MessageDelegate
        """
        future = Future()

        def callback(response):
            if isinstance(response, Exception):
                future.set_exception(response)
            else:
                future.set_result(response)

        key = object()
        request = (request, stream, timeout, verify, cert, proxies)
        self.queue.append((key, request, callback))
        if not len(self.active) < self.max_clients:
            timeout_handle = self.io_loop.add_timeout(
                self.io_loop.time() + min(parse_timeout(timeout)),
                functools.partial(self._on_timeout, key, 'in request queue'))
        else:
            timeout_handle = None
        self.waiting[key] = (request, callback, timeout_handle)
        self._process_queue()
        if self.queue:
            gen_log.debug('max_clients limit reached, request queued. '
                          '%d active, %d queued requests.' %
                          (len(self.active), len(self.queue)))
        return future
Example No. 42
    def fetch_impl(self, request, callback):
        key = object()
        self.queue.append((key, request, callback))  # queue the request together with its callback

        if not len(self.active) < self.max_clients:  # too many active requests
            # at capacity: park the request with a timeout and process it later
            timeout_handle = self.io_loop.add_timeout(
                self.io_loop.time() +
                min(request.connect_timeout, request.request_timeout),
                functools.partial(self._on_timeout, key))
        else:  # capacity available
            timeout_handle = None
        # register the request as waiting
        self.waiting[key] = (request, callback, timeout_handle)
        self._process_queue()
        if self.queue:  # requests still queued: the max_clients limit has been reached
            gen_log.debug("max_clients limit reached, request queued. "
                          "%d active, %d queued requests." %
                          (len(self.active), len(self.queue)))
Example No. 43
    def watch(self,
              key,
              index=None,
              timeout=None,
              recursive=None,
              callback=None):
        # todo
        """
        Blocks until a new event has been received, starting at index 'index'

        Args:
            key (str):  Key.

            index (int): Index to start from.

            timeout (int):  max seconds to wait for a read.

        Returns:
            client.EtcdResult

        Raises:
            KeyValue:  If the key doesn't exist.

            urllib3.exceptions.TimeoutError: If timeout is reached.

        >>> print client.watch('/key').value
        'value'

        """
        _log.debug("About to wait on key %s, index %s", key, index)
        if index:
            return self.read(key,
                             wait=True,
                             waitIndex=index,
                             timeout=timeout,
                             recursive=recursive,
                             callback=callback)
        else:
            return self.read(key,
                             wait=True,
                             timeout=timeout,
                             recursive=recursive,
                             callback=callback)
Example No. 44
 def _insert(cls, entity: object) -> ResultCode:
     '''Insert wrapper.
     :param cls:
     :param entity: :class:`BaseModel`
     '''
     if not SESSION:
         gen_log.error('session is null')
         return ResultCode(0, 'Cannot obtain a session object; the database connection may be broken')
     session = SESSION()
     try:
         session.add(entity)
         session.commit()
         session.close()
         gen_log.debug('add {entity} succeeded'.format(entity=entity))
         return ResultCode(1, 'Insert succeeded')
     except Exception as e:
         gen_log.warning('add {entity} failed, error: {e}'.format(
             entity=entity, e=e))
     session.close()
     return ResultCode(0, 'Insert failed')
Example No. 45
    def post(self):
        def _get_stix_package(ioc_xml):
            if ioc_xml is not None and len(ioc_xml) > 0:
                ns = namespaces.Namespace("http://openioc.org/openioc",
                                          "openioc", "")
                idgen.set_id_namespace(ns)

                stix_obj = to_stix(BytesIO(ioc_xml))

                stix_package = STIXPackage_v1.from_xml(
                    etree.fromstring(stix_obj.to_xml()))

                return stix_package

            else:
                raise RuntimeError('request body is empty.')

        try:
            msg = None
            for field_name, files in self.request.files.items():
                for file in files:
                    filename, content_type = file['filename'], file[
                        'content_type']
                    body = file['body']

                    gen_log.debug('POST "%s" "%s" %d bytes', filename,
                                  content_type, len(body))
                    gen_log.debug('POST file body:\n"%s"', body)

                    stix_package = _get_stix_package(body)
                    if stix_package is not None:
                        patterns = common.get_search_items(stix_package)
                        msg = {'fields': patterns}

            self.http_normal(200, msg=msg if msg is not None else u'OK')

        except:
            trace_msg = traceback.format_exc().decode('utf-8')
            emsg = u'request_msg:{0} {1}'.format(self.request.body, trace_msg)
            gen_log.error(u',[session-id:{0}],{1}'.format(None, emsg))
            self.http_error(400, msg=trace_msg)
Example No. 46
 def fetch_impl(self, request: HTTPRequest,
                callback: Callable[[HTTPResponse],
                                   None], future: asyncio.Future) -> None:
     key = object()
     self.queue.append((key, request, callback))
     if not len(self.active) < self.max_clients:
         assert request.connect_timeout is not None
         assert request.request_timeout is not None
         timeout_handle = self.io_loop.add_timeout(
             self.io_loop.time() +
             min(request.connect_timeout, request.request_timeout),
             functools.partial(self._on_timeout, key, "in request queue"),
         )
     else:
         timeout_handle = None
     self.waiting[key] = (request, callback, timeout_handle)
     self._process_queue()
     if self.queue:
         gen_log.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." %
                       (len(self.active), len(self.queue)))
Example No. 47
def execute(cell, kernel, session):
    """Run a single cell, waiting for its output"""
    msg = session.msg('execute_request',
                      content={
                          'code': cell.source,
                          'user_expressions': [],
                          'silent': False,
                          'allow_stdin': False,
                      })
    msg['channel'] = 'shell'

    parent_id = msg['header']['msg_id']

    ws = kernel['channels']
    gen_log.debug("Executing:\n%s", cell.source)
    ws.write_message(json.dumps(msg, default=date_default))

    output_done = False
    shell_done = False
    while not (output_done and shell_done):
        jmsg = yield ws.read_message()
        msg = json.loads(jmsg)
        if msg['channel'] == 'iopub':
            gen_log.debug("output:\n%s", json.dumps(msg['content'], indent=1))
            if msg['msg_type'] == 'status' \
                and msg['content']['execution_state'] == 'idle' \
                and msg['parent_header']['msg_id'] == parent_id:
                output_done = True
        elif msg['channel'] == 'shell':
            gen_log.debug("reply:\n%s", json.dumps(msg['content'], indent=1))
            shell_done = True
        else:
            gen_log.warning("Unrecognized channel: %s\n%s", msg['channel'],
                            json.dumps(msg['content'], indent=1))
Example No. 48
 def password(cls, user_name, user_upwd, user_npwd, reset=0):
     """
     #修改密码
     :param user_name: 帐号
     :param user_upwd: 原密码
     :param user_npwd: 新密码
     :param reset: 是否重置密码
     """
     if not SESSION:
         return 0, 'Database connection error; please contact the administrator', None
     err_msg = ''
     session = SESSION()
     try:
         if reset:  # reset the password
             ret = session.query(cls).filter(cls.user_name == user_name).update(
                 {
                     cls.user_upwd: user_npwd,
                     cls.user_dpwd: user_npwd
                 },
                 synchronize_session=False)
         else:  # change the password
             ret = session.query(cls).filter(
                 and_(
                     cls.user_name == user_name,
                     or_(cls.user_upwd == user_upwd,
                         cls.user_dpwd == user_upwd))).update(
                             {
                                 cls.user_upwd: user_npwd,
                                 cls.user_dpwd: user_npwd
                             },
                             synchronize_session=False)
         session.commit()
         session.close()
         gen_log.debug('ret: {0}'.format(ret))
         return 200, 'Password changed successfully; please remember it, and contact the system administrator if you forget it', ret
     except Exception as e:
         err_msg = str(e)
         gen_log.error(str(e))
     session.close()
     return 0, 'Password change failed: {0}'.format(err_msg), None
Example No. 49
    def accept_connection(self):
        try:
            self._handle_websocket_headers()
        except ValueError:
            gen_log.debug("Malformed WebSocket request received")
            self._abort()
            return

        scheme = self.handler.get_websocket_scheme()

        # draft76 only allows a single subprotocol
        subprotocol_header = ''
        subprotocol = self.request.headers.get("Sec-WebSocket-Protocol", None)
        if subprotocol:
            selected = self.handler.select_subprotocol([subprotocol])
            if selected:
                assert selected == subprotocol
                subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected

        # Write the initial headers before attempting to read the challenge.
        # This is necessary when using proxies (such as HAProxy), which
        # need to see the Upgrade headers before passing through the
        # non-HTTP traffic that follows.
        self.stream.write(
            tornado.escape.utf8(
                "HTTP/1.1 101 WebSocket Protocol Handshake\r\n"
                "Upgrade: WebSocket\r\n"
                "Connection: Upgrade\r\n"
                "Server: TornadoServer/%(version)s\r\n"
                "Sec-WebSocket-Origin: %(origin)s\r\n"
                "Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
                "%(subprotocol)s"
                "\r\n" % (dict(version=tornado.version,
                               origin=self.request.headers["Origin"],
                               scheme=scheme,
                               host=self.request.host,
                               uri=self.request.uri,
                               subprotocol=subprotocol_header))))
        self.stream.read_bytes(8, self._handle_challenge)
Example No. 50
    def get(self, *args, **kwargs):
        self.open_args = args
        self.open_kwargs = kwargs

        # Upgrade header should be present and should be equal to WebSocket
        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
            self.set_status(400)
            log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
            self.finish(log_msg)
            gen_log.debug(log_msg)
            return

        # Connection header should be upgrade.
        # Some proxy servers/load balancers
        # might mess with it.
        # WebSocket handshake
        headers = self.request.headers
        connection = map(lambda s: s.strip().lower(),
                         headers.get("Connection", "").split(","))
        if 'upgrade' not in connection:
            self.set_status(400)
            log_msg = "\"Connection\" must be \"Upgrade\"."
            self.finish(log_msg)
            gen_log.debug(log_msg)
            return

        # Handle WebSocket Origin naming convention differences
        # The difference between version 8 and 13 is that in 8 the
        # client sends a "Sec-Websocket-Origin" header and in 13 it's
        # simply "Origin".
        if "Origin" in self.request.headers:
            origin = self.request.headers.get("Origin")
        else:
            origin = self.request.headers.get("Sec-Websocket-Origin", None)

        # If there was an origin header, check to make sure it matches
        # according to check_origin. When the origin is None, we assume it
        # did not come from a browser and that it can be passed on.
        if origin is not None and not self.check_origin(origin):
            self.set_status(403)
            log_msg = "Cross origin websockets not allowed"
            self.finish(log_msg)
            gen_log.debug(log_msg)
            return
        # detach the HTTP connection and hand its stream over to the WebSocket
        self.stream = self.request.connection.detach()
        self.stream.set_close_callback(self.on_connection_close)

        self.ws_connection = self.get_websocket_protocol()
        if self.ws_connection:
            self.ws_connection.accept_connection()
        else:
            if not self.stream.closed():
                self.stream.write(
                    tornado.escape.utf8(
                        "HTTP/1.1 426 Upgrade Required\r\n"
                        "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n"))
                self.stream.close()
Example No. 51
def main() -> None:

    parse_command_line()

    io_loop = IOLoop.instance()

    if options.debug:

        tornado.autoreload.start()

        for option, value in sorted(options.as_dict().items()):
            gen_log.debug(f"Option: {option}: {value}")

    ari_client = AriClient()

    io_loop.add_callback(ari_client.run)

    if options.ping_interval:
        ping_periodic_callback = PeriodicCallback(do_ping,
                                                  options.ping_interval * 1000)
        ping_periodic_callback.start()

    io_loop.start()
Example No. 52
    def update(self, obj, callback=None):
        """
        Updates the value for a key atomically. Typical usage would be:

        c = etcd.Client()
        o = c.read("/somekey")
        o.value += 1
        c.update(o)

        Args:
            obj (etcd.EtcdResult):  The object that needs updating.

        """

        assert isinstance(obj, EtcdResult), "obj is not an EtcdResult."

        _log.debug("Updating %s to %s.", obj.key, obj.value)
        kwdargs = {'dir': obj.dir, 'ttl': obj.ttl, 'prevExist': True}

        if not obj.dir:
            # prevIndex on a dir causes a 'not a file' error. d'oh!
            kwdargs['prevIndex'] = obj.modifiedIndex
        return self.write(obj.key, obj.value, callback=callback, **kwdargs)
Example No. 53
 def fetch_impl(self, request: HTTPRequest,
                callback: Callable[[HTTPResponse], None]) -> None:
     key = object()
     self.queue.append((key, request, callback))
     assert request.connect_timeout is not None
     assert request.request_timeout is not None
     timeout_handle = None
     if len(self.active) >= self.max_clients:
         timeout = (min(request.connect_timeout, request.request_timeout)
                    or request.connect_timeout
                    or request.request_timeout)  # min but skip zero
         if timeout:
             timeout_handle = self.io_loop.add_timeout(
                 self.io_loop.time() + timeout,
                 functools.partial(self._on_timeout, key,
                                   "in request queue"),
             )
     self.waiting[key] = (request, callback, timeout_handle)
     self._process_queue()
     if self.queue:
         gen_log.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." %
                       (len(self.active), len(self.queue)))
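The "min but skip zero" expression above is worth unpacking: a timeout of 0 means "no limit" in Tornado, so a zero value must not win the min. A self-contained illustration:

    def queue_timeout(connect_timeout, request_timeout):
        # min of the two timeouts, except that a zero ("unlimited") value is skipped
        return (min(connect_timeout, request_timeout)
                or connect_timeout
                or request_timeout)

    assert queue_timeout(5, 10) == 5
    assert queue_timeout(0, 10) == 10  # zero loses: fall back to the non-zero timeout
    assert queue_timeout(0, 0) == 0    # both unlimited: no queue timeout at all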
Example No. 54
 def close(self, all_fds=False):
     """override to use *method* to close FDs
     
     instead of os.close on everything, which doesn't work on zmq Sockets.
     
     Should be fixed in a future tornado release.
     """
     with self._callback_lock:
         self._closing = True
     self.remove_handler(self._waker.fileno())
     if all_fds:
         for fd in self._handlers.keys():
             try:
                 # begin patch
                 try:
                     fd.close()
                 except AttributeError:
                     os.close(fd)
                 # end patch
             except Exception:
                 gen_log.debug("error closing fd %s", fd, exc_info=True)
     self._waker.close()
     self._impl.close()
Example No. 55
    def close(self, all_fds=False):
        with self._callback_lock:
            self._closing = True
        if all_fds:
            for fd in self._handlers:
                obj, _ = self._handlers[fd]
                if obj is not None and hasattr(obj, 'close'):
                    try:
                        obj.close()
                    except Exception:
                        gen_log.debug("error closing socket object %s",
                                      obj,
                                      exc_info=True)
                try:
                    os.close(fd)
                except Exception:
                    gen_log.debug("error closing fd %s", fd, exc_info=True)

        self._fdwaker.close()
        self._close_loop_handles()
        # Run the loop so the close callbacks are fired and memory is freed
        self._loop.run()
        self._loop = None
Example No. 56
    def post(self):
        """
        Upload file
        """
        _id = self.get_query_argument('id')
        uploaded_dir = self.application.settings['uploaded_dir']
        buf_size = 4096

        try:
            self.parser.finish_receive()

            for part in self.parser.parts:
                filename = part['headers'][0]['params']['filename']
                tmpfile = part['tmpfile']

                full_path = os.path.join(uploaded_dir, filename)

                with open(full_path, 'wb') as fp:
                    tmpfile.seek(0)
                    while True:
                        buf = tmpfile.read(buf_size)
                        if not buf:
                            break
                        fp.write(buf)

            self.set_header('Content-type', 'text/plain; charset=UTF-8')
            self.write(
                json.dumps({
                    'id': _id,
                    'status': 'complete',
                    'download_url': u'/uploaded/{}'.format(filename)
                }))
            self.finish()
        finally:
            self.parser.release_parts()

        gen_log.debug('#{_id}: upload complete.'.format(_id=_id))
Example No. 57
def _curl_debug(debug_type, debug_msg):
    debug_types = ('I', '<', '>', '<', '>')
    if debug_type == 0:
        gen_log.debug('%s', debug_msg.strip())
    elif debug_type in (1, 2):
        for line in debug_msg.splitlines():
            gen_log.debug('%s %s', debug_types[debug_type], line)
    elif debug_type == 4:
        gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
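A hedged sketch of wiring this helper into a curl handle: pycurl only invokes DEBUGFUNCTION once VERBOSE is enabled, and it passes (debug_type, debug_msg) exactly as _curl_debug expects:

    import pycurl
    from io import BytesIO

    buf = BytesIO()
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, "http://example.com/")
    curl.setopt(pycurl.WRITEFUNCTION, buf.write)
    curl.setopt(pycurl.VERBOSE, 1)                  # required for debug callbacks to fire
    curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)  # route curl's chatter through the helper above
    curl.perform()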
Example No. 58
 def lock(cls, user_name, user_state):
     """
     #加解锁
     :param cls:
     :param user_name: 帐号
     :param user_state: 1-解锁,0-加锁
     """
     if not SESSION:
         return 'Database connection error; please contact the administrator', None
     err_msg = ''
     session = SESSION()
     try:
         # update the account state
         ret = session.query(cls).filter(cls.user_name == user_name).update(
             {cls.user_state: user_state}, synchronize_session=False)
         session.commit()
         session.close()
         gen_log.debug('ret: {0}'.format(ret))
         return 'Lock/unlock succeeded', ret
     except Exception as e:
         err_msg = str(e)
         gen_log.error(str(e))
     session.close()
     return 'Lock/unlock failed: {0}'.format(err_msg), None
Example No. 59
def _curl_setup_request(curl, request, buffer, headers):
    curl.setopt(pycurl.URL, native_str(request.url))

    # libcurl's magic "Expect: 100-continue" behavior causes delays
    # with servers that don't support it (which include, among others,
    # Google's OpenID endpoint).  Additionally, this behavior has
    # a bug in conjunction with the curl_multi_socket_action API
    # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
    # which increases the delays.  It's more trouble than it's worth,
    # so just turn off the feature (yes, setting Expect: to an empty
    # value is the official way to disable this)
    if "Expect" not in request.headers:
        request.headers["Expect"] = ""

    # libcurl adds Pragma: no-cache by default; disable that too
    if "Pragma" not in request.headers:
        request.headers["Pragma"] = ""

    # Request headers may be either a regular dict or HTTPHeaders object
    if isinstance(request.headers, httputil.HTTPHeaders):
        curl.setopt(pycurl.HTTPHEADER,
                    [native_str("%s: %s" % i) for i in request.headers.get_all()])
    else:
        curl.setopt(pycurl.HTTPHEADER,
                    [native_str("%s: %s" % i) for i in request.headers.items()])

    if request.header_callback:
        curl.setopt(pycurl.HEADERFUNCTION,
                    lambda line: request.header_callback(native_str(line)))
    else:
        curl.setopt(pycurl.HEADERFUNCTION,
                    lambda line: _curl_header_callback(headers,
                                                       native_str(line)))
    if request.streaming_callback:
        write_function = request.streaming_callback
    else:
        write_function = buffer.write
    if bytes_type is str:  # py2
        curl.setopt(pycurl.WRITEFUNCTION, write_function)
    else:  # py3
        # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
        # a fork/port.  That version has a bug in which it passes unicode
        # strings instead of bytes to the WRITEFUNCTION.  This means that
        # if you use a WRITEFUNCTION (which tornado always does), you cannot
        # download arbitrary binary data.  This needs to be fixed in the
        # ported pycurl package, but in the meantime this lambda will
        # make it work for downloading (utf8) text.
        curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
    curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
    curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
    curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
    curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
    if request.user_agent:
        curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
    else:
        curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
    if request.network_interface:
        curl.setopt(pycurl.INTERFACE, request.network_interface)
    if request.use_gzip:
        curl.setopt(pycurl.ENCODING, "gzip,deflate")
    else:
        curl.setopt(pycurl.ENCODING, "none")
    if request.proxy_host and request.proxy_port:
        curl.setopt(pycurl.PROXY, request.proxy_host)
        curl.setopt(pycurl.PROXYPORT, request.proxy_port)
        if request.proxy_username:
            credentials = '%s:%s' % (request.proxy_username,
                                     request.proxy_password)
            curl.setopt(pycurl.PROXYUSERPWD, credentials)
    else:
        curl.setopt(pycurl.PROXY, '')
        curl.unsetopt(pycurl.PROXYUSERPWD)
    if request.validate_cert:
        curl.setopt(pycurl.SSL_VERIFYPEER, 1)
        curl.setopt(pycurl.SSL_VERIFYHOST, 2)
    else:
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    if request.ca_certs is not None:
        curl.setopt(pycurl.CAINFO, request.ca_certs)
    else:
        # There is no way to restore pycurl.CAINFO to its default value
        # (Using unsetopt makes it reject all certificates).
        # I don't see any way to read the default value from python so it
        # can be restored later.  We'll have to just leave CAINFO untouched
        # if no ca_certs file was specified, and require that if any
        # request uses a custom ca_certs file, they all must.
        pass

    if request.allow_ipv6 is False:
        # Curl behaves reasonably when DNS resolution gives an ipv6 address
        # that we can't reach, so allow ipv6 unless the user asks to disable.
        # (but see version check in _process_queue above)
        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
    else:
        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

    # Set the request method through curl's irritating interface which makes
    # up names for almost every single method
    curl_options = {
        "GET": pycurl.HTTPGET,
        "POST": pycurl.POST,
        "PUT": pycurl.UPLOAD,
        "HEAD": pycurl.NOBODY,
    }
    custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
    for o in curl_options.values():
        curl.setopt(o, False)
    if request.method in curl_options:
        curl.unsetopt(pycurl.CUSTOMREQUEST)
        curl.setopt(curl_options[request.method], True)
    elif request.allow_nonstandard_methods or request.method in custom_methods:
        curl.setopt(pycurl.CUSTOMREQUEST, request.method)
    else:
        raise KeyError('unknown method ' + request.method)

    # Handle curl's cryptic options for every individual HTTP method
    if request.method in ("POST", "PUT"):
        if request.body is None:
            raise AssertionError(
                'Body must not be empty for "%s" request'
                % request.method)

        request_buffer = BytesIO(utf8(request.body))
        curl.setopt(pycurl.READFUNCTION, request_buffer.read)
        if request.method == "POST":
            def ioctl(cmd):
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
        else:
            curl.setopt(pycurl.INFILESIZE, len(request.body))
    elif request.method == "GET":
        if request.body is not None:
            raise AssertionError('Body must be empty for GET request')

    if request.auth_username is not None:
        userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')

        if request.auth_mode is None or request.auth_mode == "basic":
            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        elif request.auth_mode == "digest":
            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
        else:
            raise ValueError("Unsupported auth_mode %s" % request.auth_mode)

        curl.setopt(pycurl.USERPWD, native_str(userpwd))
        gen_log.debug("%s %s (username: %r)", request.method, request.url,
                      request.auth_username)
    else:
        curl.unsetopt(pycurl.USERPWD)
        gen_log.debug("%s %s", request.method, request.url)

    if request.client_cert is not None:
        curl.setopt(pycurl.SSLCERT, request.client_cert)

    if request.client_key is not None:
        curl.setopt(pycurl.SSLKEY, request.client_key)

    if threading.activeCount() > 1:
        # libcurl/pycurl is not thread-safe by default.  When multiple threads
        # are used, signals should be disabled.  This has the side effect
        # of disabling DNS timeouts in some environments (when libcurl is
        # not linked against ares), so we don't do it when there is only one
        # thread.  Applications that use many short-lived threads may need
        # to set NOSIGNAL manually in a prepare_curl_callback since
        # there may not be any other threads running at the time we call
        # threading.activeCount.
        curl.setopt(pycurl.NOSIGNAL, 1)
    if request.prepare_curl_callback is not None:
        request.prepare_curl_callback(curl)
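A hypothetical prepare_curl_callback, the escape hatch honored at the very end of _curl_setup_request for options the wrapper does not expose (the low-speed limits are illustrative):

    import pycurl
    from tornado.httpclient import HTTPRequest

    def prepare(curl):
        curl.setopt(pycurl.LOW_SPEED_LIMIT, 1)  # abort transfers slower than 1 byte/s...
        curl.setopt(pycurl.LOW_SPEED_TIME, 30)  # ...sustained for 30 seconds

    request = HTTPRequest("http://example.com/", prepare_curl_callback=prepare)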
Example No. 60
    def post(self):
        def _get_stix_package(in_json):
            if in_json is not None and len(in_json) > 0:
                initialize_options()
                set_option_value("version_of_stix2x", stix_version)

                container = stixmarx.new()
                stix_package = container.package
                json_content = json.loads(in_json.decode())
                if type(json_content) == list:
                    for json_data in json_content:
                        if "type" in json_data and json_data[
                                "type"] == "indicator":
                            indicator = convert_indicator(json_data)
                            stix_package.add_indicator(indicator)
                else:
                    if "type" in json_content and json_content[
                            "type"] == "bundle":
                        if "objects" in json_content and json_content[
                                "objects"] and type(
                                    json_content["objects"]) == list:
                            for json_data in json_content["objects"]:
                                if "type" in json_data and json_data[
                                        "type"] == "indicator":
                                    indicator = convert_indicator(json_data)
                                    stix_package.add_indicator(indicator)

                    elif "type" in json_content and json_content[
                            "type"] == "indicator":
                        indicator = convert_indicator(json_content)
                        stix_package.add_indicator(indicator)

                container.flush()
                container = None

                return stix_package

            else:
                raise RuntimeError('request body is empty.')

        try:
            msg = None
            for field_name, files in self.request.files.items():
                for file in files:
                    filename, content_type = file['filename'], file[
                        'content_type']
                    body = file['body']
                    gen_log.debug('POST "%s" "%s" %d bytes', filename,
                                  content_type, len(body))
                    gen_log.debug('POST file body:\n"%s"', body)

                    stix_package = _get_stix_package(body)
                    if stix_package is not None:
                        patterns = common.get_search_items(stix_package)
                        msg = {'fields': patterns}
            self.http_normal(200, msg=msg if msg is not None else u'OK')

        except:
            trace_msg = traceback.format_exc()
            emsg = u'request_msg:{0} {1}'.format(self.request.body, trace_msg)
            gen_log.error(u',[session-id:{0}],{1}'.format(None, emsg))
            self.http_error(400, msg=trace_msg)