Exemple #1
0
def _next():
    """
    Return the next article for the token-authenticated user as JSON.

    Responds 403 when the token is missing or unknown, 404 when no article
    is available.  Users with fewer than 3 likes get a random article;
    otherwise a suggested article is served (falling back to random when no
    suggestion exists).
    """
    from models.users import User

    token = request.args.get("token")
    nsfw = request.args.get("nsfw") == 'true'
    if not token:
        return Response(status=403)
    user = User.q.filter_by(token=token).first()
    if not user:
        return Response(status=403)

    def _serve(article):
        # Shared response path: record the visit and return the article.
        user.visit(article)
        return Response(json_encode({'article': article.serialize()}),
                        mimetype="application/json")

    if len(user.likes) < 3:
        # Too few likes to compute suggestions; fall back to a random pick.
        # (The original comment claimed a threshold of 5 but the code uses 3.)
        article = user.random_article(nsfw=nsfw)
        if not article:
            return Response(status=404)
        return _serve(article)

    suggested_article = user.suggested_articles(nsfw=nsfw)
    if not suggested_article:
        article = user.random_article(nsfw=nsfw)
        if not article:
            return Response(status=404)
        return _serve(article)

    visited_article = user.visit(suggested_article)
    return Response(json_encode({"article": visited_article.serialize()}),
                    mimetype="application/json")
Exemple #2
0
def example_websocket_action(self, message):
    """
    Example WebSocket *action* exposed to the client via the 'WebSocket'
    hook at the bottom of this file.

    Incoming WebSocket messages are JSON-decoded into a dict whose keys are
    looked up as registered actions; the matching action is called with the
    corresponding value as its argument (or with no argument when the value
    is null in JavaScript).  Since the decoded value is typically a dict,
    actions should take a single argument or only keyword arguments.  The
    *self* argument is bound automatically by :class:`TerminalApplication`
    using the `utils.bind` method.

    The conventional action name is `<plugin name>_<action>`; what matters
    is the action/function pairing in `hooks['WebSocket']` (see below).
    This action duplicates Gate One's built-in
    :func:`gateone.TerminalWebSocket.pong`; example.js in this plugin's
    'static' dir shows the client-side call.
    """
    # Reply with a pong-style message first:
    self.write_message(json_encode({'terminal:example_pong': timestamp}))
    # WebSockets are asynchronous; additional messages may follow at will:
    notice = {'go:notice': 'You just executed the "example_action" action.'}
    self.write_message(json_encode(notice))
    # Multiple actions can also be bundled into a single message:
    bundle = {
        'go:notice': 'Hurray!',
        'terminal:bell': {'term': self.current_term},
    }
    self.write_message(json_encode(bundle))
Exemple #3
0
def example_websocket_action(self, message):
    """
    Demonstration WebSocket *action* (registered through the 'WebSocket'
    hook at the bottom of this file).

    Gate One JSON-decodes every incoming WebSocket message into a dict and
    treats its keys as action names, calling the registered function with
    the associated value (or with no argument when the value is null).  The
    value is usually a dict, so take one argument or 100% keyword
    arguments.  *self* is bound by :class:`TerminalApplication` via
    `utils.bind`.

    Action names conventionally follow `<plugin name>_<action>`; the
    action-to-function mapping lives in `hooks['WebSocket']` (see below).
    This action mirrors Gate One's built-in
    :func:`gateone.TerminalWebSocket.pong`; see example.js (this plugin's
    'static' dir) for the client side.
    """
    pong = {'terminal:example_pong': timestamp}
    self.write_message(json_encode(pong))
    # Being asynchronous, a WebSocket can carry any number of messages:
    self.write_message(json_encode(
        {'go:notice': 'You just executed the "example_action" action.'}))
    # Several actions may also ride together in one combined message:
    self.write_message(json_encode({
        'go:notice': 'Hurray!',
        'terminal:bell': {'term': self.current_term},
    }))
Exemple #4
0
def get_connect_string(self, term):
    """
    Attached to the (server-side) `terminal:ssh_get_connect_string`
    WebSocket action; looks up the SSH connection string recorded for
    *term* in the session directory and sends it to the client as::

        {'terminal:sshjs_reconnect': {*term*: <connection string>}}

    ssh.js handles 'terminal:sshjs_reconnect' by storing the string in
    `GateOne.Terminal.terminals[*term*]['sshConnectString']`.
    """
    logging.debug("get_connect_string() term: %s" % term)
    session_path = os.path.join(
        self.ws.settings['session_dir'], self.ws.session)
    for entry in os.listdir(session_path):
        if not entry.startswith('ssh:'):
            continue
        term_str, _, connect_string = entry[4:].partition(':')
        if int(term_str) == term:
            # TODO: Make it so we don't have to use json_encode below...
            self.write_message({
                'terminal:sshjs_reconnect': json_encode({term: connect_string})
            })
            return  # All done
Exemple #5
0
def get_connect_string(term, tws):
    """
    Sends the SSH connection string recorded for *term* to the WebSocket as::

        {'sshjs_reconnect': {*term*: <connection string>}}

    ssh.js registers an action for 'sshjs_reconnect' in
    :js:attr:`GateOne.Net.actions` that stores the string in
    `GateOne.terminals[*term*]['sshConnectString']`
    """
    logging.debug("get_connect_string() term: %s" % term)
    session_path = os.path.join(tws.settings['session_dir'], tws.session)
    for name in os.listdir(session_path):
        if not name.startswith('ssh:'):
            continue
        term_num, _, connect_string = name[4:].partition(':')
        if int(term_num) == term:
            # TODO: Make it so we don't have to use json_encode below...
            tws.write_message(
                {'sshjs_reconnect': json_encode({term: connect_string})})
            return  # All done
Exemple #6
0
def get_connect_string(self, term):
    """
    Sends the connection string recorded for *term* over the WebSocket as::

        {'sshjs_reconnect': {*term*: <connection string>}}

    ssh.js attaches a handler to :js:attr:`GateOne.Net.actions` for
    'sshjs_reconnect' messages that stores the string in
    `GateOne.Terminal.terminals[*term*]['sshConnectString']`
    """
    logging.debug("get_connect_string() term: %s" % term)
    base = os.path.join(self.ws.settings['session_dir'], self.ws.session)
    for fname in os.listdir(base):
        if fname.startswith('ssh:'):
            num, _, connect_string = fname[4:].partition(':')
            if int(num) == term:
                # TODO: Make it so we don't have to use json_encode below...
                payload = {
                    'terminal:sshjs_reconnect':
                        json_encode({term: connect_string})
                }
                self.write_message(payload)
                return  # All done
Exemple #7
0
def get_connect_string(self, term):
    """
    Attached to the (server-side) `terminal:ssh_get_connect_string`
    WebSocket action; replies with::

        {'terminal:sshjs_reconnect': {*term*: <connection string>}}

    On the client, ssh.js assigns the received string to
    `GateOne.Terminal.terminals[*term*]['sshConnectString']`.
    """
    self.term_log.debug("get_connect_string() term: %s" % term)
    session_path = os.path.join(
        self.ws.settings['session_dir'], self.ws.session)
    for entry in os.listdir(session_path):
        if not entry.startswith('ssh:'):
            continue
        term_text, _, connect_string = entry[4:].partition(':')
        if int(term_text) == term:
            # TODO: Make it so we don't have to use json_encode below...
            self.write_message({
                'terminal:sshjs_reconnect':
                    json_encode({term: connect_string})
            })
            return  # All done
Exemple #8
0
 def send_message(fd, event):
     """
     Callback for IOLoop file-descriptor events; relays log enumeration
     results to the client.  Necessary because IOLoop only passes *fd* and
     *event* to file descriptor handlers.
     """
     message = q.get()
     #logging.debug('message: %s' % message)
     if message == 'complete':
         # Enumeration finished: summarize the log store and tell the client.
         io_loop.remove_handler(fd)
         logs_dir = os.path.join(users_dir, "logs")
         log_files = os.listdir(logs_dir)
         total_bytes = 0
         for log in log_files:
             total_bytes += os.stat(os.path.join(logs_dir, log)).st_size
         out_dict = {
             'total_logs': len(log_files),
             'total_bytes': total_bytes
         }
         # This signals to the client that we're done
         self.write_message({'terminal:logging_logs_complete': out_dict})
         return
     message = json_encode(message)
     if message not in results:
         # Only the most recent message is retained for bookkeeping.
         if results:
             results.pop()
         results.append(message)
         self.write_message(message)
Exemple #9
0
def datatables():
    """
    DataTables endpoint: applies the request's filter arguments to an
    OrderListQuery, paginates it, and returns the JSON payload DataTables
    expects (recordsTotal/recordsFiltered/data/draw).
    """
    args = request.args
    query = OrderListQuery()
    query.apply_filters(customer_name=args.get('customer_name'),
                        tracking_number=args.get('tracking_number'),
                        from_date=args.get('from_date'),
                        to_date=args.get('to_date'))
    # Count before pagination so the client sees the full match total.
    total_records = len(query)
    query.paginate(args.get('length'), args.get('start'))
    return json_encode({
        'recordsTotal': total_records,
        'recordsFiltered': total_records,
        'data': list(query),
        'draw': args.get('draw'),
    })
Exemple #10
0
def save_bookmarks(self, bookmarks):
    """
    Synchronizes the client's *bookmarks* into the user's bookmarks DB and
    reports the outcome over the WebSocket.
    """
    out_dict = {'updates': [], 'count': 0, 'errors': []}
    try:
        user = self.current_user['upn']
        bookmarks_db = BookmarksDB(self.ws.settings['user_dir'], user)
        out_dict['updates'] = bookmarks_db.sync_bookmarks(bookmarks)
        out_dict['count'] = len(bookmarks)
        out_dict['updateSequenceNum'] = bookmarks_db.get_highest_USN()
    except Exception as e:
        import traceback
        logging.error("Got exception synchronizing bookmarks: %s" % e)
        traceback.print_exc(file=sys.stdout)
        out_dict['errors'].append(str(e))
    if out_dict['errors']:
        out_dict['result'] = "Upload completed but errors were encountered."
    else:
        out_dict['result'] = "Upload successful"
    self.write_message(
        json_encode({'terminal:bookmarks_save_result': out_dict}))
Exemple #11
0
def json_return(obj='', status=200):
    """Build the API's HTTP response: *obj* JSON-encoded with the given
    *status* and an 'application/json' mimetype."""
    body = json_encode(obj)
    return HttpResponse(body, mimetype='application/json', status=status)
Exemple #12
0
 def send_message(fd, event):
     """
     IOLoop file-descriptor callback that relays log enumeration results to
     the client; needed because IOLoop only supplies *fd* and *event*.
     """
     message = q.get()
     #logging.debug('message: %s' % message)
     if message == 'complete':
         # All results are in: detach the handler and send a summary.
         io_loop.remove_handler(fd)
         logs_dir = os.path.join(users_dir, "logs")
         log_files = os.listdir(logs_dir)
         total_bytes = 0
         for log in log_files:
             total_bytes += os.stat(os.path.join(logs_dir, log)).st_size
         summary = {
             'total_logs': len(log_files),
             'total_bytes': total_bytes
         }
         # This signals to the client that we're done
         tws.write_message({'logging_logs_complete': summary})
         return
     message = json_encode(message)
     if message not in results:
         # Keep only the latest message around for bookkeeping.
         if results:
             results.pop()
         results.append(message)
         tws.write_message(message)
Exemple #13
0
def save_bookmarks(bookmarks, tws):
    """
    Synchronizes the client's *bookmarks* into the user's bookmarks DB and
    writes the result back over the WebSocket *tws*.
    """
    out_dict = {'updates': [], 'count': 0, 'errors': []}
    try:
        user = tws.get_current_user()['upn']
        bookmarks_db = BookmarksDB(tws.settings['user_dir'], user)
        out_dict['updates'] = bookmarks_db.sync_bookmarks(bookmarks)
        out_dict['count'] = len(bookmarks)
        out_dict['updateSequenceNum'] = bookmarks_db.get_highest_USN()
    except Exception as e:
        import traceback
        logging.error("Got exception synchronizing bookmarks: %s" % e)
        traceback.print_exc(file=sys.stdout)
        out_dict['errors'].append(str(e))
    if out_dict['errors']:
        out_dict['result'] = "Upload completed but errors were encountered."
    else:
        out_dict['result'] = "Upload successful"
    tws.write_message(json_encode({'bookmarks_save_result': out_dict}))
Exemple #14
0
 def send_message(fd, event):
     """
     Forwards the queued log enumeration result to the client.  Needed
     because IOLoop only hands file-descriptor callbacks *fd* and *event*.
     """
     io_loop.remove_handler(fd)
     tws.write_message(json_encode(q.get()))
Exemple #15
0
 def send_message(fd, event):
     """
     Relays the queued log enumeration result to the client; exists because
     IOLoop fd handlers receive only *fd* and *event*.
     """
     io_loop.remove_handler(fd)
     result = q.get()
     tws.write_message(json_encode(result))
Exemple #16
0
def opt_esc_handler(text, tws):
    """
    Handles *text* passed from the special optional escape sequence handler;
    tells ssh.js what the SSH connection string is so it can duplicate
    sessions (if the user so desires).  For reference, the terminal escape
    sequence that triggers this function is:
        \\x1b]_;ssh|<whatever>\\x07
    """
    tws.write_message(json_encode({'sshjs_connect': text}))
Exemple #17
0
def opt_esc_handler(text, tws):
    """
    Receives *text* from the optional escape sequence handler and relays it
    to ssh.js as the SSH connection string (used for duplicating sessions).
    A terminal app triggers this with:
        \\x1b]_;ssh|<whatever>\\x07
    """
    payload = {'sshjs_connect': text}
    tws.write_message(json_encode(payload))
Exemple #18
0
 def get(self):
     """
     Kicks off an EventCountPipeline for ?user_id=<int> and redirects to its
     status page; without a user_id, writes a JSON error instead.
     """
     user_id = self.get_argument("user_id", None)
     if user_id:
         pipeline = precompute.EventCountPipeline(user_id)
         pipeline.start()
         self.redirect(pipeline.base_path + "/status?root=" + pipeline.pipeline_id)
     else:
         err = {"message": "Please append query parameter /?user_id=<int>",
                "user_id": user_id}
         self.write(utils.json_encode(err))
Exemple #19
0
    def get(self):
        """Batch delete all Event entities and report the outcome as JSON.

        Writes a JSON message plus the elapsed load time; the client is
        asked to retry when entities remain after the delete.
        """
        start = timer.time()
        count = int(models.Event.all().count())

        # Only issue a delete when there is something to remove.
        if count > 0:
            db.delete([item for item in models.Event.all()])

        load_time = timer.time() - start
        if models.Event.all().count() == 0:
            # Fixed user-facing typo: 'succesfully' -> 'successfully'.
            self.write(utils.json_encode({
                'message': 'All events successfully deleted.',
                'load_time': load_time,
            }))
        else:
            self.write(utils.json_encode({
                'message': 'Delete failed. Try again.',
                'load_time': load_time,
            }))
Exemple #20
0
def rename_tags(self, renamed_tags):
    """
    Applies each "old,new" pair in *renamed_tags* to the user's bookmarks
    DB and reports the rename count over the WebSocket.
    """
    user = self.get_current_user()["upn"]
    bookmarks_db = BookmarksDB(self.ws.settings["user_dir"], user)
    out_dict = {"result": "", "count": 0, "errors": [], "updates": []}
    for pair in renamed_tags:
        old_name, new_name = pair.split(",")
        bookmarks_db.rename_tag(old_name, new_name)
        out_dict["count"] += 1
    self.write_message(json_encode({"bookmarks_renamed_tags": out_dict}))
Exemple #21
0
def rename_tags(self, renamed_tags):
    """
    Renames bookmark tags: every entry of *renamed_tags* is an "old,new"
    pair applied to the user's BookmarksDB; the count is sent back over the
    WebSocket.
    """
    user = self.current_user['upn']
    bookmarks_db = BookmarksDB(self.ws.settings['user_dir'], user)
    out_dict = {'result': "", 'count': 0, 'errors': [], 'updates': []}
    for pair in renamed_tags:
        old_name, new_name = pair.split(',')
        bookmarks_db.rename_tag(old_name, new_name)
        out_dict['count'] += 1
    self.write_message(
        json_encode({'terminal:bookmarks_renamed_tags': out_dict}))
Exemple #22
0
def get_connect_string(term, tws):
    """
    Writes the connection string associated with *term* to the websocket like
    so:
        {'sshjs_reconnect': json_encode({*term*: <connection string>})}

    In ssh.js we attach an action (aka handler) to GateOne.Net.actions for
    'sshjs_reconnect' messages that attaches the connection string to
    GateOne.terminals[*term*]['sshConnectString']
    """
    session = tws.session
    session_dir = tws.settings['session_dir']
    # os.path.join is portable and avoids doubled separators (the original
    # concatenated the path with '/' by hand).
    for f in os.listdir(os.path.join(session_dir, session)):
        if f.startswith('ssh:'):
            terminal, a_colon, connect_string = f[4:].partition(':')
            terminal = int(terminal)
            if terminal == term:
                message = {
                    'sshjs_reconnect': json_encode({term: connect_string})
                }
                tws.write_message(json_encode(message))
                return  # All done
Exemple #23
0
def get_connect_string(term, tws):
    """
    Writes the connection string associated with *term* to the websocket like
    so:
        {'sshjs_reconnect': json_encode({*term*: <connection string>})}

    In ssh.js we attach an action (aka handler) to GateOne.Net.actions for
    'sshjs_reconnect' messages that attaches the connection string to
    GateOne.terminals[*term*]['sshConnectString']
    """
    session = tws.session
    session_dir = tws.settings['session_dir']
    # Use os.path.join (portable, no doubled separators) instead of manual
    # '/'-concatenation.
    for f in os.listdir(os.path.join(session_dir, session)):
        if f.startswith('ssh:'):
            terminal, a_colon, connect_string = f[4:].partition(':')
            terminal = int(terminal)
            if terminal == term:
                message = {
                    'sshjs_reconnect': json_encode({term: connect_string})
                }
                tws.write_message(json_encode(message))
                return  # All done
Exemple #24
0
def run_phantomjs(url, strategy=dict(), phantomjs_config=dict()):
    driver = None
    try:
        driver = get_phantomjs_driver(strategy)
        driver.get(url)
    except Exception, e:
        if b_debug:
            print traceback.format_exc()
        if driver:
            driver.service.process.send_signal(signal.SIGTERM)
            driver.quit()
        print(cf.head + json_encode(
            {'error': 'PhantomjsException', 'message': str(e)}))
        return
 def get(self):
     """
     GET /clients handler.  With a 'mac' argument, looks up that client and
     writes it out as JSON; the unfiltered listing path is not implemented.
     """
     arguments = self.request.arguments()
     if 'mac' not in arguments:
         #GET /clients/(:t1)(:limit)
         return
     #GET /clients/:mac
     mac_string = self.request.get("mac")
     client = get_client_with_mac(utils.mac_string_to_int(mac_string))
     if client is None:
         client = []
     self.response.headers['Content-Type'] = 'application/json'
     self.response.out.write(utils.json_encode(utils.query_to_array(client), True))
Exemple #26
0
def updateserver(request):
    """
    Updates a server record from the POSTed form fields and returns a JSON
    {"result": "ok"} response.
    """
    server = QateServerInfo()
    # Every field arrives as a form value and is stored UTF-8 encoded.
    for field in ('ip', 'name', 'env', 'pd', 'role', 'desc'):
        setattr(server, field, request.POST.get(field).encode("utf-8"))

    update_server_info(server)

    return HttpResponse(json_encode({"result": "ok"}),
                        content_type="application/json")
Exemple #27
0
def like():
    """
    Marks the article identified in the request body as liked by the
    token-authenticated user; responds 403 when the token is missing or
    unknown.
    """
    from models.users import User
    token = request.args.get("token")
    url_id = request.json.get("url")
    if not token:
        return Response(status=403)
    user = User.q.filter_by(token=token).first()
    if not user:
        return Response(status=403)

    from models.users import Article
    user.like(Article.q.fetch_by_id(url_id))
    return Response(json_encode({"message": "liked"}))
Exemple #28
0
    def post(self):
        """
        Generate randomized events for a user.

        :Arguments:
            user_id : int
                User id
            num_of_events : int
                Number of events to generate, max of 100,000 per request
            time : str
                One of ``minutes``, ``hours``, ``days``, ``weeks`` for
                datetime.timedelta() eg. datetime.timedelta(days=7)
            delta : int
                Any int > 1 for datetime.timedelta() eg.
                datetime.timedelta(days=7)
        """
        start = timer.time()
        time = self.get_argument("time", None)
        delta = self.get_argument("delta", 0)
        num_of_events = self.get_argument("num_of_events", 0)
        user_id = self.get_argument("user_id", 0)

        time = str(time) if time in ['minutes','hours','days','weeks'] else None

        if not time:
            raise tornado.web.HTTPError(404)

        # random.randrange(1, delta) requires delta > 1; reject bad input up
        # front instead of crashing with ValueError in the loop below (the
        # default delta of 0 used to do exactly that).
        if int(delta) <= 1:
            raise tornado.web.HTTPError(404)

        user = models.User.all().filter("id =", int(user_id)).get()

        if not user:
            raise tornado.web.HTTPError(404)

        if int(num_of_events) > MAX_NUMBER_OF_EVENTS:
            num_of_events = MAX_NUMBER_OF_EVENTS

        now = datetime.now()

        # Each event gets a random name and a created time pushed back by a
        # random multiple of the requested time unit.
        for i in xrange(1, int(num_of_events) + 1):
            r = random.randrange(1, int(delta))
            e = models.Event(user=user,
                             name='Event' + str(r),
                             created=now - utils.timedelta_wrapper(time, int(r)))
            e.put()

        d = {}
        d["load_time"] = timer.time() - start
        d["count"] = models.Event.all().count()

        return self.write(utils.json_encode(d))
Exemple #29
0
def update_records():
    """
    Bulk-inserts orders from the JSON request body, rejecting the whole
    batch (code 400) when any tracking number already exists.  Rolls back
    and re-raises on unexpected errors.
    """
    try:
        payload = request.json
        duplicates = []
        fresh_items = []
        # Partition the incoming items into duplicates and new orders.
        for item in payload:
            existing = Order.query.filter(
                Order.tracking_number ==
                item['trackingNumber']).first()  # type: Order
            if existing:
                duplicates.append(existing.tracking_number)
            else:
                fresh_items.append(item)

        if duplicates:
            return json_encode({
                'code': 400,
                'message': 'Invalid data provided',
                'data': duplicates
            })

        for item in fresh_items:
            order = Order()
            order.tracking_number = item['trackingNumber']
            order.customer_name = item['customer']
            order.size = item['orderSize']
            order.quantity = item['quantity']
            if item['orderDate']:
                order.order_date = datetime.strptime(item['orderDate'],
                                                     '%d/%m/%y')
            order.shop = item['shop']
            db.session.add(order)
            db.session.flush()

        return jsonify({
            'code': 200,
            'message': 'Insert successfully',
            'data': None
        })
    except Exception as e:
        _logger.error(e)
        db.session.rollback()
        raise
Exemple #30
0
def get_bookmarks(self, updateSequenceNum):
    """
    Sends the client a JSON-encoded list of bookmarks changed since
    *updateSequenceNum* (all bookmarks when it resolves to False).
    """
    user = self.current_user['upn']
    bookmarks_db = BookmarksDB(self.settings['user_dir'], user)
    # A falsy value becomes 0, which forces a full download.
    usn = int(updateSequenceNum) if updateSequenceNum else 0
    updated = bookmarks_db.get_bookmarks(usn)
    self.write_message(json_encode({'terminal:bookmarks_updated': updated}))
Exemple #31
0
def likes():
    """
    Returns the liked articles (url/title pairs) for the token-authenticated
    user, newest first, starting at the given offset; 403 on auth failure.
    """
    from models.users import User
    token = request.args.get("token")
    offset = request.args.get("offset", 0)

    if not token:
        return Response(status=403)
    user = User.q.filter_by(token=token).first()
    if not user:
        return Response(status=403)

    from models.articles import ArticleLike
    liked = (ArticleLike.q.filter({"user": user.id})
             .sort([("_id", -1)]).skip(offset).all())

    bundle = [{"url": item.url, "title": item.title} for item in liked]
    return Response(json_encode({"articles": bundle}))
Exemple #32
0
def get_bookmarks(updateSequenceNum, tws):
    """
    Writes the client a JSON-encoded list of bookmarks updated after
    *updateSequenceNum*; a falsy value triggers a full download.
    """
    user = tws.get_current_user()['upn']
    bookmarks_db = BookmarksDB(tws.settings['user_dir'], user)
    usn = int(updateSequenceNum) if updateSequenceNum else 0
    updated_bookmarks = bookmarks_db.get_bookmarks(usn)
    tws.write_message(json_encode({'bookmarks_updated': updated_bookmarks}))
Exemple #33
0
def getpageserverinfos(request):
    """
    Paged server-info listing for the grid UI; returns JSON rows plus
    cpu/mem/disk totals.
    """
    env = request.POST.get('env').encode("utf-8")
    pd = request.POST.get('pd').encode("utf-8")
    key = request.POST.get('key').encode("utf-8")
    pageNum = int(request.POST.get('page')) - 1  # the grid's pages are 1-based
    pageSize = int(request.POST.get('rows'))

    (total, page_server_infos, cpu_total,
     mem_total, disk_total) = get_page_server_infos_from_db(
        env, pd, key, pageNum, pageSize)

    payload = json_encode({
        "total": total,
        "rows": page_server_infos,
        "cpu_total": cpu_total,
        "mem_total": mem_total,
        "disk_total": disk_total
    })
    return HttpResponse(payload, content_type="application/json")
Exemple #34
0
 def execute_query_and_send_results(self, query_string, query_options):
     """
     Runs *query_string* against the MAP_INDEX search index and writes the
     matching documents back as a JSON array; responds 500 on search errors.
     """
     try:
         index = search.Index(name=MAP_INDEX)
         results = index.search(search.Query(query_string=query_string,
                                             options=query_options))
         # Convert each scored document into a plain dict for JSON encoding.
         output = [utils.document_to_dict(doc) for doc in results]
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(output))
     except search.Error:
         logging.exception('Search failed')
         self.response.status = '500 - Search failed'
Exemple #35
0
 def post(self):
     """
     Example Handler for an HTTP POST request.  Doesn't actually do anything
     useful; it demonstrates reading POSTed data and writing JSON back.
     (Docstring previously said "PUT"; this is the POST handler.)
     """
     # If data is POSTed to this handler via an XMLHTTPRequest send() it
     # will show up like this:
     posted_as_a_whole = self.request.body # xhr.send()
     # If data was POSTed as arguments (i.e. traditional form) it will show
     # up as individual arguments like this:
     posted_as_argument = self.get_argument("arg") # Form elem 'name="arg"'
     # This is how you can parse JSON (fixed: previously referenced the
     # undefined name 'posted_as_an_argument', which raised NameError):
     parsed = tornado.escape.json_decode(posted_as_argument)
     # For writing JSON it is recommended to use the json_encode() function
     # from Gate One's utils.py (since it takes care of Python 3 support):
     from utils import json_encode
     json_output = {'result': 'Success!'}
     self.write(json_encode(json_output))
Exemple #36
0
def get_bookmarks(self, updateSequenceNum):
    """
    Returns a JSON-encoded list of bookmarks updated since the last
    *updateSequenceNum*.

    If *updateSequenceNum* resolves to False, all bookmarks will be sent to
    the client (a sequence number of 0 forces a full download).
    """
    user = self.get_current_user()["upn"]
    bookmarks_db = BookmarksDB(self.settings["user_dir"], user)
    # A falsy sequence number means "send everything".
    usn = int(updateSequenceNum) if updateSequenceNum else 0
    updated = bookmarks_db.get_bookmarks(usn)
    self.write_message(json_encode({"bookmarks_updated": updated}))
Exemple #37
0
def main():
    """
    Entry point: decode the JSON parameters passed as ``sys.argv[1]`` and
    hand them to ``run_phantomjs()``.

    Supplying three or more command-line arguments enables debug output.
    On failure a JSON-encoded error (prefixed with ``cf.head``) is printed
    so the calling process can detect and report the problem.
    """
    global b_debug
    try:
        if len(sys.argv) >= 3:
            b_debug = True
        if b_debug:
            # Parenthesized single-argument print works identically on
            # Python 2 and 3.
            print(json.dumps(
                json_decode(sys.argv[1]), indent=4, ensure_ascii=False))
        params_json = json_decode(sys.argv[1])
        url = params_json['url']
        strategy = params_json['strategy']
        phantomjs_config = params_json['phantomjs_config']
        run_phantomjs(url, strategy, phantomjs_config)
    except Exception:
        # NOTE: was a bare ``except:``, which would also swallow
        # SystemExit/KeyboardInterrupt; Exception lets those propagate.
        if b_debug:
            print(traceback.format_exc())
        print(cf.head + json_encode(
            {'error': 'ParmasParse', 'message': traceback.format_exc()}))
Exemple #38
0
 def post(self):
     """
     Example Handler for an `HTTP POST <http://en.wikipedia.org/wiki/POST_(HTTP)>`_
     request.  Doesn't actually do anything beyond demonstrating how to
     read POSTed data and write JSON back.
     """
     # If data is POSTed to this handler via an XMLHTTPRequest send() it
     # will show up like this:
     posted_as_a_whole = self.request.body  # xhr.send()
     # If data was POSTed as arguments (i.e. traditional form) it will show
     # up as individual arguments like this:
     posted_as_argument = self.get_argument("arg")  # Form elem 'name="arg"'
     # This is how you can parse JSON:
     # NOTE: the original referenced the undefined name
     # 'posted_as_an_argument' here, which raised a NameError at runtime.
     parsed = tornado.escape.json_decode(posted_as_argument)
     # For writing JSON it is recommended to use the json_encode() function
     # from Gate One's utils.py (since it takes care of Python 3 support):
     from utils import json_encode
     json_output = {'result': 'Success!'}
     self.write(json_encode(json_output))
Exemple #39
0
def rename_tags(renamed_tags, tws):
    """
    Handles renaming tags.

    *renamed_tags* is a list of "old,new" strings; each pair is applied to
    the user's bookmarks database and a summary is sent back over the
    websocket.
    """
    user = tws.get_current_user()['upn']
    bookmarks_db = BookmarksDB(tws.settings['user_dir'], user)
    out_dict = {'result': "", 'count': 0, 'errors': [], 'updates': []}
    for renamed in renamed_tags:
        old_name, new_name = renamed.split(',')
        bookmarks_db.rename_tag(old_name, new_name)
        out_dict['count'] += 1
    tws.write_message(json_encode({'bookmarks_renamed_tags': out_dict}))
Exemple #40
0
    def get(self, user_id):
        """
        Return the number of events of a user.

        :Arguments:
            user_id : int
                User id
            time : str
                Time unit for datetime.timedelta(): ``minutes``, ``hours``,
                ``days`` or ``weeks``, e.g. datetime.timedelta(days=7)
            delta : int
                Delta is any int value for datetime.timedelta() eg. datetime.timedelta(days=7)

        If no valid time query argument is provided, it will show the total
        user events count instead of a time-bucketed grouping.
        """

        # Wall-clock start so the response can report its own load time.
        start = timer.time()
        user = models.User.all().filter("id =",int(user_id)).get()

        if not user:
            raise tornado.web.HTTPError(404)

        time = self.get_argument("time", None)
        delta = self.get_argument("delta", 0)

        # get last x hours, days, weeks, months
        # NOTE(review): timedelta_wrapper is called even when `time` is None —
        # presumably it tolerates that; confirm against utils.
        last_x_time = datetime.now() - utils.timedelta_wrapper(time, int(delta))

        # get events from the last x time
        events_from_last_x_time = filter(lambda x: x.created >= last_x_time, [event for event in user.user_events] )

        data = {}

        if not time:
            # show all events for user
            data["description"] = "Number of events for User %s" % (str(user_id))
            data["load_time"] = timer.time() - start
            data["events"] = user.user_events.count()
        else:
            # Time window supplied: group the filtered events by that unit.
            data["description"] = "Number of events for User %s for the last %s %s" % (str(user_id), str(delta), str(time))
            data["load_time"] = timer.time() - start
            data["grouping"] = utils.filter_by(time, events_from_last_x_time, last_x_time)

        self.write(utils.json_encode(data))
Exemple #41
0
def delete_bookmarks(self, deleted_bookmarks):
    """
    Delete every bookmark in the *deleted_bookmarks* list from the user's
    bookmarks database and report the outcome over the websocket.
    """
    upn = self.get_current_user()["upn"]
    db = BookmarksDB(self.ws.settings["user_dir"], upn)
    out_dict = {"result": "", "count": 0, "errors": []}
    try:
        for bm in deleted_bookmarks:
            out_dict["count"] += 1
            db.delete_bookmark(bm)
        out_dict["result"] = "Success"
    except Exception as e:  # TODO: Make this more specific
        logging.error("delete_bookmarks error: %s" % e)
        import traceback

        traceback.print_exc(file=sys.stdout)
        out_dict["result"] = "Errors"
        out_dict["errors"].append(str(e))
    self.write_message(json_encode({"bookmarks_delete_result": out_dict}))
Exemple #42
0
def save_term_settings(term, location, session, settings):
    """
    Persist *settings* for the given *term* and *location* into the
    'term_settings.json' file inside the *session* directory, merging with
    whatever is already stored there.
    """
    key = str(term) # JSON wants strings as keys
    merged = RUDict()
    merged[location] = {key: settings}
    settings_path = os.path.join(
        options.session_dir, session, 'term_settings.json')
    if os.path.exists(settings_path):
        # Pull in the on-disk settings, then re-apply the new values so
        # they win over what was stored.
        with io.open(settings_path, encoding='utf-8') as f:
            merged.update(json_decode(f.read()))
        merged[location][key].update(settings)
    with io.open(settings_path, 'w', encoding='utf-8') as f:
        f.write(json_encode(merged))
Exemple #43
0
def save_term_settings(term, location, session, settings):
    """
    Saves the *settings* associated with the given *term*, *location*, and
    *session* in the 'term_settings.json' file inside the user's session
    directory.

    Existing settings on disk are merged in first (so other terminals'
    settings are preserved); the new *settings* then override whatever was
    stored for this term.
    """
    term = str(term)  # JSON wants strings as keys
    term_settings = RUDict()
    term_settings[location] = {term: settings}
    session_dir = options.session_dir
    session_dir = os.path.join(session_dir, session)
    settings_path = os.path.join(session_dir, 'term_settings.json')
    # First we read in the existing settings and then update them.
    if os.path.exists(settings_path):
        with io.open(settings_path, encoding='utf-8') as f:
            term_settings.update(json_decode(f.read()))
        # Re-apply the new settings so they win over the stored ones.
        term_settings[location][term].update(settings)
    with io.open(settings_path, 'w', encoding='utf-8') as f:
        f.write(json_encode(term_settings))
Exemple #44
0
def authenticate():
    """
    Authenticate a user by email/password, or create a fresh anonymous
    user with a new token when no credentials are supplied.

    Returns a JSON response containing the token and email, or a 403
    response when credentials are supplied but invalid.
    """
    from models.users import User
    email = password = None
    if request.json:
        email = request.json.get("email")
        password = request.json.get("password")
    if email and password:
        user = User.q.filter_by(email=email).first()
        if not user:
            return Response(status=403)
        if not user.check_password(password):
            return Response(status=403)
        token = user.token
    else:
        # Missing credentials: make an anonymous user with a fresh token.
        token = "tok-%s" % uuid.uuid4().hex
        user = User()
        user.save()
        user.set_token(token)
    return Response(json_encode({"token": token,
                                 "email": email}))
Exemple #45
0
def save_bookmarks(self, bookmarks):
    """
    Synchronize the client-supplied *bookmarks* into the user's bookmarks
    database and report the result (updates, count, errors) back over the
    websocket.
    """
    out_dict = {"updates": [], "count": 0, "errors": []}
    try:
        upn = self.get_current_user()["upn"]
        db = BookmarksDB(self.ws.settings["user_dir"], upn)
        synced = db.sync_bookmarks(bookmarks)
        out_dict["updates"] = synced
        out_dict["count"] = len(bookmarks)
        out_dict["updateSequenceNum"] = db.get_highest_USN()
    except Exception as e:
        import traceback

        logging.error("Got exception synchronizing bookmarks: %s" % e)
        traceback.print_exc(file=sys.stdout)
        out_dict["errors"].append(str(e))
    if not out_dict["errors"]:
        out_dict["result"] = "Upload successful"
    else:
        out_dict["result"] = "Upload completed but errors were encountered."
    self.write_message(json_encode({"bookmarks_save_result": out_dict}))
Exemple #46
0
def get_connect_string(term, tws):
    """
    Writes the connection string associated with *term* to the websocket like
    so:
        {'sshjs_reconnect': {*term*: <connection string>}}

    In ssh.js we attach an action (aka handler) to GateOne.Net.actions for
    'sshjs_reconnect' messages that attaches the connection string to
    GateOne.terminals[*term*]['sshConnectString']
    """
    logging.debug("get_connect_string() term: %s" % term)
    session_path = os.path.join(tws.settings['session_dir'], tws.session)
    # Session files are named 'ssh:<terminal>:<connect string>'.
    for entry in os.listdir(session_path):
        if not entry.startswith('ssh:'):
            continue
        term_str, _, connect_string = entry[4:].partition(':')
        if int(term_str) != term:
            continue
        # TODO: Make it so we don't have to use json_encode below...
        tws.write_message({
            'sshjs_reconnect': json_encode({term: connect_string})
        })
        return  # All done
Exemple #47
0
def delete_bookmarks(deleted_bookmarks, tws):
    """
    Delete each bookmark in the *deleted_bookmarks* list from the user's
    bookmarks database and send a result summary over the websocket.
    """
    user = tws.get_current_user()['upn']
    bookmarks_db = BookmarksDB(tws.settings['user_dir'], user)
    out_dict = {'result': "", 'count': 0, 'errors': []}
    try:
        for bm in deleted_bookmarks:
            out_dict['count'] += 1
            bookmarks_db.delete_bookmark(bm)
    except Exception as e: # TODO: Make this more specific
        logging.error("delete_bookmarks error: %s" % e)
        import traceback
        traceback.print_exc(file=sys.stdout)
        out_dict['result'] = "Errors"
        out_dict['errors'].append(str(e))
    else:
        out_dict['result'] = "Success"
    tws.write_message(json_encode({'bookmarks_delete_result': out_dict}))
Exemple #48
0
def delete_bookmarks(self, deleted_bookmarks):
    """
    Delete each bookmark in the *deleted_bookmarks* list from the user's
    bookmarks database and send a result summary over the websocket.
    """
    upn = self.current_user['upn']
    db = BookmarksDB(self.ws.settings['user_dir'], upn)
    out_dict = {'result': "", 'count': 0, 'errors': []}
    try:
        for bm in deleted_bookmarks:
            out_dict['count'] += 1
            db.delete_bookmark(bm)
    except Exception as e: # TODO: Make this more specific
        logging.error("delete_bookmarks error: %s" % e)
        import traceback
        traceback.print_exc(file=sys.stdout)
        out_dict['result'] = "Errors"
        out_dict['errors'].append(str(e))
    else:
        out_dict['result'] = "Success"
    self.write_message(
        json_encode({'terminal:bookmarks_delete_result': out_dict}))
Exemple #49
0
def _retrieve_log_playback(queue, settings):
    """
    Writes a JSON-encoded message to the client containing the log in a
    self-contained HTML format similar to::

        ./logviewer.py log_filename

    *settings* - A dict containing the *log_filename*, *colors*, and *theme* to
    use when generating the HTML output.
    *settings['log_filename']* - The name of the log to display.
    *settings['colors']* - The CSS color scheme to use when generating output.
    *settings['theme']* - The CSS theme to use when generating output.
    *settings['where']* - Whether or not the result should go into a new window or an iframe.

    The output will look like this::

        {
            'result': "Success",
            'html': <HTML rendered output>,
            'metadata': {<metadata of the log>}
        }
    It is expected that the client will create a new window with the result of
    this method.  The message is delivered by putting it on *queue*.
    """
    #print("Running retrieve_log_playback(%s)" % settings);
    if 'where' not in settings:  # Avoids a KeyError if it is missing
        settings['where'] = None
    out_dict = {
        'result': "",
        'html': "",  # Will be replaced with the rendered template
        'metadata': {},
        'where': settings['where']  # Just gets passed as-is back to the client
    }
    # Local variables
    gateone_dir = settings['gateone_dir']
    user = settings['user']
    users_dir = settings['users_dir']
    container = settings['container']
    prefix = settings['prefix']
    url_prefix = settings['url_prefix']
    log_filename = settings['log_filename']
    theme = "%s.css" % settings['theme']
    colors = "%s.css" % settings['colors']
    # Important paths
    # NOTE: Using os.path.join() in case Gate One can actually run on Windows
    # some day.
    logs_dir = os.path.join(users_dir, "logs")
    log_path = os.path.join(logs_dir, log_filename)
    templates_path = os.path.join(gateone_dir, 'templates')
    colors_path = os.path.join(templates_path, 'term_colors')
    themes_path = os.path.join(templates_path, 'themes')
    plugins_path = os.path.join(gateone_dir, 'plugins')
    logging_plugin_path = os.path.join(plugins_path, 'logging')
    template_path = os.path.join(logging_plugin_path, 'templates')
    # recording format:
    # {"screen": [log lines], "time":"2011-12-20T18:00:01.033Z"}
    # Actual method logic
    if os.path.exists(log_path):
        # First we setup the basics
        out_dict['metadata'] = get_or_update_metadata(log_path, user)
        out_dict['metadata']['filename'] = log_filename
        try:
            rows = out_dict['metadata']['rows']
            cols = out_dict['metadata']['cols']
        except KeyError:
            # Log was created before rows/cols metadata was included via termio.py
            # Use some large values to ensure nothing wraps and hope for the best:
            rows = 40
            cols = 500
        out_dict['result'] = "Success"  # TODO: Add more error checking
        # Next we render the theme and color templates so we can pass them to
        # our final template
        with open(os.path.join(colors_path, colors)) as f:
            colors_file = f.read()
        colors_template = tornado.template.Template(colors_file)
        rendered_colors = colors_template.generate(container=container,
                                                   prefix=prefix,
                                                   url_prefix=url_prefix)
        with open(os.path.join(themes_path, theme)) as f:
            theme_file = f.read()
        theme_template = tornado.template.Template(theme_file)
        # Setup our 256-color support CSS:
        # Generates foreground/background (and reverse-video) rules for each
        # of the 256 terminal colors, scoped to this client's container.
        colors_256 = ""
        from gateone import COLORS_256
        for i in xrange(256):
            fg = "#%s span.fx%s {color: #%s;}" % (container, i, COLORS_256[i])
            bg = "#%s span.bx%s {background-color: #%s;} " % (container, i,
                                                              COLORS_256[i])
            fg_rev = "#%s span.reverse.fx%s {background-color: #%s; color: inherit;}" % (
                container, i, COLORS_256[i])
            bg_rev = "#%s span.reverse.bx%s {color: #%s; background-color: inherit;} " % (
                container, i, COLORS_256[i])
            colors_256 += "%s %s %s %s\n" % (fg, bg, fg_rev, bg_rev)
        colors_256 += "\n"
        rendered_theme = theme_template.generate(container=container,
                                                 prefix=prefix,
                                                 colors_256=colors_256,
                                                 url_prefix=url_prefix)
        # NOTE: 'colors' are customizable but colors_256 is universal.  That's
        # why they're separate.
        # Lastly we render the actual HTML template file
        # NOTE: Using Loader() directly here because I was getting strange EOF
        # errors trying to do it the other way :)
        loader = tornado.template.Loader(template_path)
        playback_template = loader.load('playback_log.html')
        preview = 'false'
        if settings['where']:
            # Iframe preview: only render the first 50 frames to keep it fast.
            preview = 'true'
            recording = retrieve_log_frames(log_path, rows, cols, limit=50)
        else:
            recording = retrieve_log_frames(log_path, rows, cols)
        playback_html = playback_template.generate(
            prefix=prefix,
            container=container,
            theme=rendered_theme,
            colors=rendered_colors,
            preview=preview,
            recording=json_encode(recording),
            url_prefix=url_prefix)
        out_dict['html'] = playback_html
    else:
        out_dict['result'] = _("ERROR: Log not found")
    message = {'logging_log_playback': out_dict}
    queue.put(message)
 def get(self):
     """
     Query scans by time range (``t1``/``t2``), client MAC, base station
     MAC, or map position (``map_id`` + ``x``/``y``/``radius``) and write
     the matches to the response as JSON.

     Responds with HTTP 400 when a numeric argument fails to parse.
     NOTE(review): if none of the recognized argument combinations are
     present, no response body is written at all — confirm intended.
     """
     arguments = self.request.arguments()
     try:
         limit = int(self.request.get('limit', DEFAULT_LIMIT))
     except ValueError:
         utils.send_400(self.response, "Limit must be an integer")
         return
     if 't1' in arguments and 't2' in arguments:
         # Time-range query: both endpoints are unix epoch seconds.
         try:
             t1 = int(self.request.get('t1'))
             t2 = int(self.request.get('t2'))
         except ValueError:
             utils.send_400(self.response, "t1 and t2 should be integers representing unix epoch in seconds")
             return
         [start_date, end_date] = utils.parse_timestamps(t1, t2)
         query = model.Scan.all() \
             .filter("timestamp >= ", start_date) \
             .filter("timestamp <=", end_date) \
             .order("-timestamp")
         matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'client_mac' in arguments:
         # All scans for a single client, newest first.
         mac_string = self.request.get("client_mac")
         client = clients.get_client_with_mac(utils.mac_string_to_int(mac_string))
         matched_scans = []
         if client is not None:
             query = model.Scan.all() \
                 .filter("client = ", client.key()) \
                 .order("-timestamp")
             matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'base_station_mac' in arguments:
         # All scans seen by a single base station, newest first.
         base_station_mac = self.request.get("base_station_mac")
         base_station = base_stations.get_base_station_with_mac(utils.mac_string_to_int(base_station_mac))
         matched_scans = []
         if base_station is not None:
             query = model.Scan.all() \
                 .filter("base_station = ", base_station.key()) \
                 .order("-timestamp")
             matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'map_id' in arguments:
         map_id = self.request.get("map_id")  #TODO check if map exists
         if 'x' in arguments and 'y' in arguments and 'radius' in arguments:
             try:
                 x = int(self.request.get('x'))
                 y = int(self.request.get('y'))
                 radius = float(self.request.get('radius'))
             except ValueError:
                 utils.send_400(self.response, "x and y must be integers, radius must be float in meters")
                 return
             # Convert the radius (meters) into map units via the map scale.
             scale = maps.get_scale(map_id)
             delta = scale * radius
             #TODO FOR NOW THE X/Y DISTANCE FILTERING IS VERY INEFFICIENT!!!
             # Datastore can only range-filter on one axis (x); the true
             # circle test is applied in memory below.
             query = model.Scan.all() \
                 .filter("map_id = ", map_id) \
                 .filter("x <= ", x + delta) \
                 .filter('x >= ', x - delta)
             matched_scans = query.fetch(limit)
             matched_scans[:] = [scan for scan in matched_scans
                                 if utils.point_in_circle(scan.location.x, scan.location.y, x, y, delta)]
             self.response.headers['Content-Type'] = 'application/json'
             self.response.out.write(utils.json_encode(utils.query_to_array(matched_scans), True))
             return
Exemple #51
0
def _save_log_playback(queue, settings):
    """
    Writes a JSON-encoded message to the client containing the log in a
    self-contained HTML format similar to::

        ./logviewer.py log_filename

    The difference between this function and :py:meth:`_retrieve_log_playback`
    is that this one instructs the client to save the file to disk instead of
    opening it in a new window.

    :arg settings['log_filename']: The name of the log to display.
    :arg settings['colors']: The CSS color scheme to use when generating output.
    :arg settings['theme']: The CSS theme to use when generating output.
    :arg settings['where']: Whether or not the result should go into a new window or an iframe.

    The output will look like this::

        {
            'result': "Success",
            'data': <HTML rendered output>,
            'mimetype': 'text/html'
            'filename': <filename of the log recording>
        }

    It is expected that the client will create a new window with the result of
    this method.  The message is delivered by putting it on *queue*.
    """
    #print("Running retrieve_log_playback(%s)" % settings);
    out_dict = {
        'result': "Success",
        'mimetype': 'text/html',
        'data': "",  # Will be replaced with the rendered template
    }
    # Local variables
    gateone_dir = settings['gateone_dir']
    user = settings['user']
    users_dir = settings['users_dir']
    container = settings['container']
    prefix = settings['prefix']
    url_prefix = settings['url_prefix']
    log_filename = settings['log_filename']
    # The saved file is named after the log, with .html instead of .golog:
    short_logname = log_filename.split('.golog')[0]
    out_dict['filename'] = "%s.html" % short_logname
    theme = "%s.css" % settings['theme']
    colors = "%s.css" % settings['colors']
    # Important paths
    # NOTE: Using os.path.join() in case Gate One can actually run on Windows
    # some day.
    logs_dir = os.path.join(users_dir, "logs")
    log_path = os.path.join(logs_dir, log_filename)
    templates_path = os.path.join(gateone_dir, 'templates')
    colors_path = os.path.join(templates_path, 'term_colors')
    themes_path = os.path.join(templates_path, 'themes')
    plugins_path = os.path.join(gateone_dir, 'plugins')
    logging_plugin_path = os.path.join(plugins_path, 'logging')
    template_path = os.path.join(logging_plugin_path, 'templates')
    # recording format:
    # {"screen": [log lines], "time":"2011-12-20T18:00:01.033Z"}
    # Actual method logic
    if os.path.exists(log_path):
        # Next we render the theme and color templates so we can pass them to
        # our final template
        out_dict['metadata'] = get_or_update_metadata(log_path, user)
        try:
            rows = out_dict['metadata']['rows']
            cols = out_dict['metadata']['cols']
        except KeyError:
            # Log was created before rows/cols metadata was included via termio.py
            # Use some large values to ensure nothing wraps and hope for the best:
            rows = 40
            cols = 500
        with open(os.path.join(colors_path, colors)) as f:
            colors_file = f.read()
        colors_template = tornado.template.Template(colors_file)
        rendered_colors = colors_template.generate(container=container,
                                                   prefix=prefix,
                                                   url_prefix=url_prefix)
        with open(os.path.join(themes_path, theme)) as f:
            theme_file = f.read()
        theme_template = tornado.template.Template(theme_file)
        # Setup our 256-color support CSS:
        # Generates foreground/background rules for each of the 256 terminal
        # colors, scoped to this client's container.
        colors_256 = ""
        from gateone import COLORS_256
        for i in xrange(256):
            fg = "#%s span.fx%s {color: #%s;}" % (container, i, COLORS_256[i])
            bg = "#%s span.bx%s {background-color: #%s;} " % (container, i,
                                                              COLORS_256[i])
            colors_256 += "%s %s" % (fg, bg)
        colors_256 += "\n"
        rendered_theme = theme_template.generate(container=container,
                                                 prefix=prefix,
                                                 colors_256=colors_256,
                                                 url_prefix=url_prefix)
        # NOTE: 'colors' are customizable but colors_256 is universal.  That's
        # why they're separate.
        # Lastly we render the actual HTML template file
        # NOTE: Using Loader() directly here because I was getting strange EOF
        # errors trying to do it the other way :)
        loader = tornado.template.Loader(template_path)
        playback_template = loader.load('playback_log.html')
        recording = retrieve_log_frames(log_path, rows, cols)
        preview = 'false'
        playback_html = playback_template.generate(
            prefix=prefix,
            container=container,
            theme=rendered_theme,
            colors=rendered_colors,
            preview=preview,
            recording=json_encode(recording),
            url_prefix=url_prefix)
        out_dict['data'] = playback_html
    else:
        out_dict['result'] = _("ERROR: Log not found")
    message = {'save_file': out_dict}
    queue.put(message)
 def get(self):
     """
     Query scans by time range (``t1``/``t2``), client MAC, base station
     MAC, or map position (``map_id`` + ``x``/``y``/``radius``) and write
     the matches to the response as JSON.

     Responds with HTTP 400 when a numeric argument fails to parse.
     NOTE(review): if none of the recognized argument combinations are
     present, no response body is written at all — confirm intended.
     """
     arguments = self.request.arguments()
     try:
         limit = int(self.request.get('limit', DEFAULT_LIMIT))
     except ValueError:
         utils.send_400(self.response, "Limit must be an integer")
         return
     if 't1' in arguments and 't2' in arguments:
         # Time-range query: both endpoints are unix epoch seconds.
         try:
             t1 = int(self.request.get('t1'))
             t2 = int(self.request.get('t2'))
         except ValueError:
             utils.send_400(
                 self.response,
                 "t1 and t2 should be integers representing unix epoch in seconds"
             )
             return
         [start_date, end_date] = utils.parse_timestamps(t1, t2)
         query = model.Scan.all() \
             .filter("timestamp >= ", start_date) \
             .filter("timestamp <=", end_date) \
             .order("-timestamp")
         matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(
             utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'client_mac' in arguments:
         # All scans for a single client, newest first.
         mac_string = self.request.get("client_mac")
         client = clients.get_client_with_mac(
             utils.mac_string_to_int(mac_string))
         matched_scans = []
         if client is not None:
             query = model.Scan.all() \
                 .filter("client = ", client.key()) \
                 .order("-timestamp")
             matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(
             utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'base_station_mac' in arguments:
         # All scans seen by a single base station, newest first.
         base_station_mac = self.request.get("base_station_mac")
         base_station = base_stations.get_base_station_with_mac(
             utils.mac_string_to_int(base_station_mac))
         matched_scans = []
         if base_station is not None:
             query = model.Scan.all() \
                 .filter("base_station = ", base_station.key()) \
                 .order("-timestamp")
             matched_scans = query.fetch(limit)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(
             utils.json_encode(utils.query_to_array(matched_scans), True))
         return
     if 'map_id' in arguments:
         map_id = self.request.get("map_id")  #TODO check if map exists
         if 'x' in arguments and 'y' in arguments and 'radius' in arguments:
             try:
                 x = int(self.request.get('x'))
                 y = int(self.request.get('y'))
                 radius = float(self.request.get('radius'))
             except ValueError:
                 utils.send_400(
                     self.response,
                     "x and y must be integers, radius must be float in meters"
                 )
                 return
             # Convert the radius (meters) into map units via the map scale.
             scale = maps.get_scale(map_id)
             delta = scale * radius
             #TODO FOR NOW THE X/Y DISTANCE FILTERING IS VERY INEFFICIENT!!!
             # Datastore can only range-filter on one axis (x); the true
             # circle test is applied in memory below.
             query = model.Scan.all() \
                 .filter("map_id = ", map_id) \
                 .filter("x <= ", x + delta) \
                 .filter('x >= ', x - delta)
             matched_scans = query.fetch(limit)
             matched_scans[:] = [
                 scan for scan in matched_scans if utils.point_in_circle(
                     scan.location.x, scan.location.y, x, y, delta)
             ]
             self.response.headers['Content-Type'] = 'application/json'
             self.response.out.write(
                 utils.json_encode(utils.query_to_array(matched_scans),
                                   True))
             return
0
    def list_current(self, request):
        """
        List the schedule information for the current betting period.

        :param request: the incoming request; ``request.user`` identifies
            the current user so their existing guesses can be merged in.
        :return: a ``CommonReturn`` carrying ``{"schedules": [...]}``.
        """
        # Betting opens at 10:00 each day.  Compare the current time with
        # today's 10:00 to pick the start/end bounds of the current match day.
        now_date = datetime.now()
        today_time = now_date.strftime("%Y-%m-%d 00:00:00")
        today_date = datetime.strptime(today_time, "%Y-%m-%d 00:00:00")
        today_10date = today_date + timedelta(hours=10)
        yesterday_10date = today_10date - timedelta(days=1)
        tomorrow_10date = today_10date + timedelta(days=1)
        if now_date >= today_10date:
            start_time = today_10date.strftime("%Y-%m-%d 10:00:00")
            end_time = tomorrow_10date.strftime("%Y-%m-%d 10:00:00")
        else:
            start_time = yesterday_10date.strftime("%Y-%m-%d 10:00:00")
            end_time = today_10date.strftime("%Y-%m-%d 10:00:00")
        logger.info('now_time="%s" -> today start_time="%s", end_time="%s"' %
                    (dateTimeToStr(now_date), start_time, end_time))
        schedules = wcup_models.Schedule.objects.select_related(
            'country_a', 'country_b').filter(
                start_time__gte=start_time,
                start_time__lte=end_time).order_by('start_time')
        schedule_list = list()
        for schedule in schedules:
            res = {
                "schedule_id": schedule.pk,
                "country_a": schedule.country_a.name,
                "country_a_cn": schedule.country_a.name_zh_cn,
                "country_b": schedule.country_b.name,
                "country_b_cn": schedule.country_b.name_zh_cn,
                "start_time": dateTimeToStr(schedule.start_time),
                "guess_end_time": dateTimeToStr(schedule.guess_end_time),
                "type": schedule.type_cn,
                "pay_for": 0.0,
            }

            join_record = wcup_models.GuessRecord.objects.select_related(
                'condition').filter(schedule_id=schedule.pk,
                                    user_id=request.user.pk).first()
            if join_record:
                # The user has already joined the betting for this match.
                res.update({
                    "flag": 'joined',
                    "pay_for": ut.json_encode(join_record.pay_for),
                    "support_country_cn":
                    join_record.support_country.name_zh_cn,
                    "support_odds": join_record.support_odds,
                    "odds_a": join_record.condition.odds_a,
                    "odds_b": join_record.condition.odds_b,
                    "handicap_num": join_record.condition.handicap_num,
                    "handicap_disc": join_record.condition.handicap_disc,
                    "condition_id": join_record.condition.pk,
                })
            else:
                # The user has not joined the betting; show the latest odds.
                last_condition = wcup_models.GuessCondition.objects.filter(
                    schedule_id=schedule.pk,
                    is_valid=1).order_by('-create_time').first()
                res.update({
                    "odds_a": last_condition.odds_a,
                    "odds_b": last_condition.odds_b,
                    "handicap_num": last_condition.handicap_num,
                    "handicap_disc": last_condition.handicap_disc,
                    "condition_id": last_condition.pk,
                })
                if now_date > schedule.guess_end_time:
                    # Between the betting cutoff and the match start:
                    # no more bets can be placed.
                    res.update({"flag": "cannot_join"})
                else:
                    # Betting deadline not reached yet; bets are allowed.
                    res.update({"flag": "unjoined"})

            schedule_list.append(res)

        return CommonReturn(CodeMsg.SUCCESS, '成功获取本期赛程信息',
                            {"schedules": schedule_list})
def _save_log_playback(queue, settings):
    """
    Renders the log named by ``settings['log_filename']`` into a
    self-contained HTML document (the same format produced by::

        ./logviewer.py log_filename

    ) and puts a 'go:save_file' message on *queue*.  Unlike
    :py:meth:`_retrieve_log_playback` -- which opens the result in a new
    window -- this instructs the client to save the file to disk.

    :arg settings['log_filename']: The name of the log to render.
    :arg settings['theme_css']: The CSS theme to use when generating output.
    :arg settings['colors_css']: The CSS color scheme to use when generating output.
    :arg settings['256_colors']: The 256-color CSS to use when generating output.

    The message placed on *queue* looks like::

        {
            'result': "Success",
            'data': <HTML rendered output>,
            'mimetype': 'text/html',
            'filename': <filename of the log recording>
        }
    """
    response = {
        'result': "Success",
        'mimetype': 'text/html',
        'data': "",  # Filled in below once the template has been rendered
    }
    # Pull everything we need out of *settings* up front
    user = settings['user']
    users_dir = settings['users_dir']
    container = settings['container']
    prefix = settings['prefix']
    log_filename = settings['log_filename']
    # "foo.golog" gets saved client-side as "foo.html"
    short_logname = log_filename.split('.golog')[0]
    response['filename'] = "%s.html" % short_logname
    # Figure out where the log lives and where our templates live
    log_path = os.path.join(os.path.join(users_dir, "logs"), log_filename)
    template_path = os.path.join(PLUGIN_PATH, 'templates')
    if os.path.exists(log_path):
        # get_or_update_metadata() may also fix up the log in place (it adds
        # an end_date the first time a log is enumerated)
        response['metadata'] = get_or_update_metadata(log_path, user)
        try:
            rows = response['metadata']['rows']
            cols = response['metadata']['columns']
        except KeyError:
            # Log predates the rows/columns metadata added via termio.py;
            # fall back to values large enough that nothing should wrap:
            rows, cols = 40, 500
        # NOTE: Loader() is used directly here because the usual rendering
        # path produced strange EOF errors :)
        template_loader = tornado.template.Loader(template_path)
        playback_template = template_loader.load('playback_log.html')
        frames = render_log_frames(log_path, rows, cols)
        response['data'] = playback_template.generate(
            prefix=prefix,
            container=container,
            theme=settings['theme_css'],
            colors=settings['colors_css'],
            colors_256=settings['256_colors'],
            preview='false',
            recording=json_encode(frames),
        )
    else:
        response['result'] = _("ERROR: Log not found")
    queue.put({'go:save_file': response})
Exemple #55
0
 def get(self):
     """
     GET handler for location queries.  Supported argument combinations:

     * ``t1`` + ``t2`` (optionally ``mac`` and ``map_id``): locations within
       a UNIX-timestamp range, newest first.
     * ``map_id`` (optionally ``x`` + ``y`` + ``radius``): locations on a
       given map, optionally restricted to a circle around (x, y).
     * ``mac`` (optionally ``limit``): locations from a single client's scans.

     Writes a JSON array of matched locations to the response, or a 400
     error when arguments are malformed or the combination is unsupported.
     """
     try:
         limit = int(self.request.get('limit', DEFAULT_LIMIT))
     except ValueError:
         send_400(self.response, "Limit must be an int")
         return
     arguments = self.request.arguments()
     if 't1' in arguments and 't2' in arguments:
         #GET /locations/:t1:t2
         t1 = self.request.get('t1', None)
         t2 = self.request.get('t2', None)
         [start_date, end_date] = parse_timestamps(t1, t2)
         if start_date is None and end_date is None:
             send_400(self.response, "Timestamp format issue, t1 and t2 must be UNIX times in seconds")
             return
         query = model.Location.all() \
             .filter("timestamp >= ", start_date) \
             .filter("timestamp <= ", end_date) \
             .order('-timestamp')
         matched_locations = query.fetch(limit)
         if 'mac' in arguments:
             #GET /locations/:mac:t1:t2
             # NOTE(review): this branch treats mac as a string while the
             # plain 'mac' branch below parses it as an int -- confirm which
             # format model.Client.mac actually stores.
             mac_address = self.request.get('mac')
             #get the clients with the mac_address
             # Fix: fetch(1) returns a *list*; the old code called .scans on
             # the list itself, which would raise AttributeError.  Mirror the
             # plain 'mac' branch below and handle the no-match case too.
             client = model.Client.all().filter("mac = ", mac_address).fetch(1)
             if len(client) == 0:
                 matching_scans = []
             else:
                 #get the client's scans
                 # Fix: order() takes the property name as a string
                 # ("-timestamp"); -model.Scan.timestamp is not a valid
                 # argument (negating a Property raises TypeError).
                 matching_scans = client[0].scans.order("-timestamp").fetch(limit)
             map_filter = 'map_id' in arguments
             map_id = self.request.get('map_id')
             matched_locations = []
             # Keep only scans on the requested map (or all, if no map given)
             for scan in matching_scans:
                 if map_filter and scan.location.map_id == map_id:
                     matched_locations.append(scan.location)
                 elif not map_filter:
                     matched_locations.append(scan.location)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(matched_locations), True))
         return
     elif 'map_id' in arguments:
         #GET /locations/:map_id
         map_id = self.request.get('map_id')  #TODO check if map exists
         query = model.Location.all().filter("map_id = ", map_id)
         needs_y_filtering = False
         if 'x' in arguments and 'y' in arguments:
             #GET /locations/:map_id:x:y:radius
             try:
                 x = int(self.request.get('x'))
                 y = int(self.request.get('y'))
                 radius = float(self.request.get('radius', 100))
             except ValueError:
                 send_400(self.response, "x and y must be integers, radius must be float in meters")
                 return
             # Convert the radius in meters to map units via the map's scale
             scale = maps.get_scale(map_id)
             delta = scale * radius
             #TODO FOR NOW THE X/Y DISTANCE FILTERING IS VERY INEFFICIENT!!!
             # Datastore can only range-filter one axis; x is filtered here
             # and y is filtered in Python below.
             query.filter("x <= ", x + delta).filter('x >= ', x - delta)
             needs_y_filtering = True
         locations = query.fetch(limit)
         if needs_y_filtering:
             # In-place filter to the circle of radius *delta* around (x, y)
             locations[:] = [location for location in locations
                             if utils.point_in_circle(location.x, location.y, x, y, delta)]
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(locations), True))
         return
     elif 'mac' in arguments:
         #GET /locations/:mac(:limit)
         try:
             mac_address = int(self.request.get('mac'))
         except ValueError:
             send_400(self.response, "mac_address must be formatted as an int")
             return
         #get the clients with the mac_address
         client = model.Client.all().filter("mac = ", mac_address).fetch(1)
         if len(client) == 0:
             matching_scans = []
         else:
             #get the client's scans
             matching_scans = client[0].scans.order("-timestamp").fetch(limit)
         matched_locations = []
         for scan in matching_scans:
             matched_locations.append(scan.location)
         self.response.headers['Content-Type'] = 'application/json'
         self.response.out.write(utils.json_encode(utils.query_to_array(matched_locations), True))
         return
     #nothing matched
     send_400(self.response, "Argument combination not supported or not enough arguments")
Exemple #56
0
            driver.service.process.send_signal(signal.SIGTERM)
            driver.quit()
        print(cf.head + json_encode(
            {'error': 'PhantomjsException', 'message': str(e)}))
        return
    current_url = copy.copy(driver.current_url)
    page_source = copy.copy(driver.page_source)
    # logger.info('phantomjs go {}'.format(current_url))
    driver.service.process.send_signal(signal.SIGTERM)
    driver.quit()
    save_tmp_web_html('web.html', page_source)
    if (u'403 - 禁止访问: 访问被拒绝' in page_source or
            'ERROR: The requested URL could not be retrieved' in
            page_source) and strategy.get('proxy'):
        print(cf.head + json_encode(
            {'error': 'ProxyForbiddenException', 'current_url': current_url,
             'page_source': page_source}))
        return
    print(cf.head + json_encode(
        {'current_url': current_url, 'page_source': page_source}))


def main():
    global b_debug
    try:
        if len(sys.argv) >= 3:
            b_debug = True
        if b_debug:
            print json.dumps(
                json_decode(sys.argv[1]), indent=4, ensure_ascii=False)
        params_json = json_decode(sys.argv[1])
Exemple #57
0
def get_or_update_metadata(golog_path, user, force_update=False):
    """
    Retrieves or creates/updates the metadata inside of *golog_path*.

    If *force_update* the metadata inside the golog will be updated even if it
    already exists.

    .. note::

        All logs will need "fixing" the first time they're enumerated like
        this since they won't have an end_date.  Fortunately we only need to
        do this once per golog.
    """
    first_frame, distance = retrieve_first_frame(golog_path)
    metadata = {}
    # Frames start with a 13-char timestamp followed by ':'; if what follows
    # is '{' the first frame is JSON metadata written by a previous run.
    if first_frame[14:].startswith('{'):
        # This is JSON, capture existing metadata
        metadata = json_decode(first_frame[14:])
        # end_date gets added by this function
        if not force_update and 'end_date' in metadata:
            return metadata  # All done
    # '\xf3\xb0\xbc\x8f' <--UTF-8 encoded SEPARATOR (for reference)
    encoded_separator = SEPARATOR.encode('UTF-8')
    # Loop over the file in big chunks (which is faster than read() by an
    # order of magnitude).  Sadly, we have to read the whole thing into
    # memory (log_data) in order to perform this important work (creating
    # proper metadata).
    # On the plus side re-compressing the log can save a _lot_ of disk space.
    # Why?  Because termio.py writes the frames using gzip.open() in append
    # mode which is a lot less efficient than compressing all the data in
    # one go.
    chunk_size = 1024 * 128  # 128k should be enough for a 100x300 terminal
    # full of 4-byte unicode characters. That would be one BIG frame (i.e.
    # unlikely).
    chunks = []
    total_frames = 0
    # Fix: context manager ensures the gzip handle is closed (the old code
    # leaked it).
    with gzip.open(golog_path) as golog:
        while True:
            chunk = golog.read(chunk_size)
            total_frames += chunk.count(encoded_separator)
            chunks.append(chunk)
            if len(chunk) < chunk_size:
                break
    # Joining once avoids the quadratic cost of repeated += concatenation
    log_data = ''.join(chunks)
    start_date = first_frame[:13]  # Getting the start date is easy
    last_frame = retrieve_last_frame(golog_path)  # This takes some work
    end_date = last_frame[:13]
    version = u"1.0"
    connect_string = None
    from gateone import PLUGINS
    if 'ssh' in PLUGINS['py']:
        # Try to find the host that was connected to by looking for the SSH
        # plugin's special optional escape sequence.  It looks like this:
        #   "\x1b]_;ssh|%s@%s:%s\007"
        match_obj = RE_OPT_SSH_SEQ.match(log_data[:(chunk_size * 10)])
        if match_obj:
            connect_string = match_obj.group(1).split('|')[1]
    if not connect_string:
        # Try guessing it by looking for a title escape sequence
        match_obj = RE_TITLE_SEQ.match(log_data[:(chunk_size * 10)])
        if match_obj:
            # The split() here is an attempt to remove the tail end of
            # titles like this:  'someuser@somehost: ~'
            connect_string = match_obj.group(1)
    # TODO: Add some hooks here for plugins to add their own metadata
    metadata.update({
        u'user': user,
        u'start_date': start_date,
        u'end_date': end_date,
        u'frames': total_frames,
        u'version': version,
        u'connect_string': connect_string,
        u'filename': os.path.split(golog_path)[1]
    })
    # Make a *new* first_frame containing the fresh metadata, splice it in
    # front of the remaining frames (distance skips the old first frame),
    # and re-save the log.
    first_frame = u"%s:%s" % (start_date, json_encode(metadata))
    log_data = (first_frame.encode('UTF-8') + encoded_separator +
                log_data[distance:])
    # Fix: context manager ensures the output handle is flushed and closed
    with gzip.open(golog_path, 'w') as golog:
        golog.write(log_data)
    return metadata
Exemple #58
0
 def save_bookmarks(self):
     """
     Serializes ``self.bookmarks`` to a JSON-encoded list and writes it to
     ``self.bookmarks_path``, overwriting any existing file.
     """
     serialized = json_encode(self.bookmarks)
     with open(self.bookmarks_path, 'w') as bookmarks_file:
         bookmarks_file.write(serialized)
Exemple #59
0
def _retrieve_log_playback(queue, settings):
    """
    Writes a JSON-encoded message to the client containing the log in a
    self-contained HTML format similar to::

        ./logviewer.py log_filename

    *settings* - A dict containing the *log_filename*, *colors_css*, and
    *theme_css* to use when generating the HTML output.

    :arg settings['log_filename']: The name of the log to display.
    :arg settings['colors_css']: The CSS color scheme to use when generating output.
    :arg settings['theme_css']: The entire CSS theme <style> to use when generating output.
    :arg settings['256_colors']: The 256-color CSS to use when generating output.
    :arg settings['where']: Whether or not the result should go into a new window or an iframe.

    The output will look like this::

        {
            'result': "Success",
            'html': <HTML rendered output>,
            'metadata': {<metadata of the log>}
        }

    It is expected that the client will create a new window with the result of
    this method.
    """
    settings.setdefault('where', None)  # Avoids a KeyError if it is missing
    out_dict = {
        'result': "",
        'html': "",  # Will be replaced with the rendered template
        'metadata': {},
        'where': settings['where']  # Just gets passed as-is back to the client
    }
    # Local variables
    user = settings['user']
    users_dir = settings['users_dir']
    container = settings['container']
    prefix = settings['prefix']
    url_prefix = settings['url_prefix']
    log_filename = settings['log_filename']
    # Important paths
    # NOTE: Using os.path.join() in case Gate One can actually run on Windows
    # some day.
    logs_dir = os.path.join(users_dir, "logs")
    log_path = os.path.join(logs_dir, log_filename)
    # (Removed the unused gateone_dir/templates/term_colors/themes path
    # locals; the theme and color CSS arrive pre-rendered via *settings*.)
    template_path = os.path.join(PLUGIN_PATH, 'templates')
    # recording format:
    # {"screen": [log lines], "time":"2011-12-20T18:00:01.033Z"}
    # Actual method logic
    if os.path.exists(log_path):
        # First we setup the basics
        out_dict['metadata'] = get_or_update_metadata(log_path, user)
        out_dict['metadata']['filename'] = log_filename
        try:
            rows = out_dict['metadata']['rows']
            # NOTE(review): _save_log_playback() reads 'columns' here instead
            # of 'cols' -- confirm which key termio.py actually writes.
            cols = out_dict['metadata']['cols']
        except KeyError:
            # Log was created before rows/cols metadata was included via
            # termio.py.  Use some large values to ensure nothing wraps and
            # hope for the best:
            rows = 40
            cols = 500
        out_dict['result'] = "Success"  # TODO: Add more error checking
        # NOTE: Using Loader() directly here because I was getting strange EOF
        # errors trying to do it the other way :)
        loader = tornado.template.Loader(template_path)
        playback_template = loader.load('playback_log.html')
        preview = 'false'
        if settings['where']:
            # Preview (iframe) mode only needs the first 50 frames
            preview = 'true'
            recording = retrieve_log_frames(log_path, rows, cols, limit=50)
        else:
            recording = retrieve_log_frames(log_path, rows, cols)
        playback_html = playback_template.generate(
            prefix=prefix,
            container=container,
            theme=settings['theme_css'],
            colors=settings['colors_css'],
            colors_256=settings['256_colors'],
            preview=preview,
            recording=json_encode(recording),
            url_prefix=url_prefix)
        out_dict['html'] = playback_html
    else:
        out_dict['result'] = _("ERROR: Log not found")
    message = {'terminal:logging_log_playback': out_dict}
    queue.put(message)