def main():
    """Load the symbol dictionary, wire up the routes, and serve forever."""
    # TODO : Prettify this code
    logging.info("Loading Dictionary from: %s" % dict_path)
    d = Mojifi.SymbolDictionary(dict_path, None)
    t = Mojifi.Translator(d)

    # Server Config
    settings = dict(template_path=abs_path("html/"), debug=True)
    server_settings = dict(xheaders=True)
    tornado.options.parse_command_line()

    # Server Startup
    logging.info("Running Tornado at http://localhost:%s" % options.port)
    application = tornado.web.Application([
        (r"/js/(.*)", StaticFileHandler, {'path': abs_path('js/')}),
        (r"/css/(.*)", StaticFileHandler, {'path': abs_path('css/')}),
        (r"/(.*)", MainHandler, dict(translator=t)),
    ], **settings)
    application.listen(options.port, **server_settings)
    tornado.ioloop.IOLoop.instance().start()
def get(self):
    """Report whether two users are "related" (appear to share IP history).

    Reads first_user_id/second_user_id from the query arguments, consults
    the Redis "Related" cache when options.USE_RC is set, and falls back to
    the SQL ip-log table when options.USE_DB is set.
    """
    first_user_id = int(self.request.arguments["first_user_id"][0])
    second_user_id = int(self.request.arguments["second_user_id"][0])
    # Key into the Redis "Related" set. Defined unconditionally: the
    # original only assigned it under options.USE_RC, so the DB branch
    # below raised NameError whenever USE_RC was disabled.
    sstring = "%s#%s" % (first_user_id, second_user_id)
    result = False
    if options.USE_RC:
        result = self.rc.sismember("Related", sstring)
    if not result:
        logging.info("No matches for %i and %i found before"
                     % (first_user_id, second_user_id))
        if options.USE_DB:
            logging.info("Trying to analyse DB...")
            for uid in (first_user_id, second_user_id):
                # NOTE(review): indentation reconstructed from a collapsed
                # source line — assumes the cached per-user IP set gets its
                # TTL refreshed when present and the DB is re-queried each
                # time; confirm against the original layout.
                if self.rc.scard(uid) > 0:
                    self.rc.expire(uid, 300)
                cursor = yield self.db.execute(
                    "SELECT host(ip) FROM %s where user_id=%i;"
                    % (options.TABLE_NAME, uid))
                for ips in cursor.fetchall():
                    self.rc.sadd(uid, ips[0])
            # NOTE(review): cardinality comparison looks like a stand-in for
            # a set intersection test — left as-is, confirm the intent.
            if self.rc.scard(first_user_id) > self.rc.scard(second_user_id):
                self.rc.sadd("Related", sstring)
                result = True
    self.write("Users %i and %i are related: %s."
               % (first_user_id, second_user_id, result))
    self.finish()
def sendPushMessage(token, message, badge, sound, custom=None):
    """Send a push notification through the Parse REST API.

    NOTE(review): the request body is hard-coded sample data; the
    message/badge/sound/custom arguments are currently unused — confirm
    whether that is intended.
    """
    if custom is None:
        # Replaces the shared mutable default argument `custom={}`.
        custom = {}
    # Build the payload before logging it: the original logged a `payload`
    # name that was never defined, raising NameError on every call.
    payload = {
        "channels": ["Indians"],
        "data": {
            "action": "com.example.UPDATE_STATUS",
            "alert": "Ricky Vaughn was injured during the game last night!",
            "name": "Vaughn",
            "newsItem": "Man bites dog"
        }
    }
    logging.debug("Sending notification " + str(payload) + " to " + str(token))
    print("Sending notification " + str(payload) + " to " + str(token))
    connection = httplib.HTTPSConnection('api.parse.com', 443)
    connection.connect()
    connection.request(
        'POST', '/1/push', json.dumps(payload),
        {
            "X-Parse-Application-Id": config["X-Parse-Application-Id"],
            "X-Parse-REST-API-Key": config["X-Parse-REST-API-Key"],
            "Content-Type": "application/json"
        })
    result = json.loads(connection.getresponse().read())
    print("Result: {}".format(result))
    logging.info("Result: {}".format(result))
def add_user(user_doc):
    """Insert a user document into the 'user' collection.

    inputs:
        user_doc - dict with keys such as 'username', 'email', 'phone',
                   'password', 'avatar', 'intro', 'user_type'
    returns:
        False/None - failure (whatever db_backend.insert reports)
        Others     - user info data
    """
    logging.info('add_user Entering...')
    result = db_backend.insert(collect_name='user', data=user_doc)
    # The original placed this log (and a stray `pass`) after `return`,
    # making them unreachable dead code.
    logging.info('add_user Leaving...')
    return result
def get(self):
    """Report whether two users are "related" (appear to share IP history).

    Same contract as the sibling handler: query args supply the two user
    ids; Redis is the cache (options.USE_RC), SQL the fallback
    (options.USE_DB).
    """
    first_user_id = int(self.request.arguments['first_user_id'][0])
    second_user_id = int(self.request.arguments['second_user_id'][0])
    # Defined unconditionally — the original assigned sstring only inside
    # the USE_RC branch, so the DB path raised NameError with USE_RC off.
    sstring = '%s#%s' % (first_user_id, second_user_id)
    result = False
    if options.USE_RC:
        result = self.rc.sismember('Related', sstring)
    if not result:
        logging.info('No matches for %i and %i found before' % (
            first_user_id, second_user_id
        ))
        if options.USE_DB:
            logging.info('Trying to analyse DB...')
            for uid in (first_user_id, second_user_id):
                # NOTE(review): layout reconstructed from a collapsed line;
                # assumes TTL refresh when the cached set exists, then a DB
                # re-query either way — confirm.
                if self.rc.scard(uid) > 0:
                    self.rc.expire(uid, 300)
                cursor = yield self.db.execute(
                    "SELECT host(ip) FROM %s where user_id=%i;"
                    % (options.TABLE_NAME, uid))
                for ips in cursor.fetchall():
                    self.rc.sadd(uid, ips[0])
            if self.rc.scard(first_user_id) > self.rc.scard(second_user_id):
                self.rc.sadd('Related', sstring)
                result = True
    self.write("Users %i and %i are related: %s." % (
        first_user_id, second_user_id, result
    ))
    self.finish()
def add_user(user_doc):
    """Insert a user document into the 'user' collection.

    inputs:
        user_doc - dict with keys such as 'username', 'email', 'phone',
                   'password', 'avatar', 'intro', 'user_type'
    returns:
        False/None - failure (whatever db_backend.insert reports)
        Others     - user info data
    """
    logging.info('add_user Entering...')
    result = db_backend.insert(collect_name='user', data=user_doc)
    # Previously logged after `return` — unreachable dead code, now fixed.
    logging.info('add_user Leaving...')
    return result
async def inner(self, *args, **kwargs):
    """Decorator body: run handler *f*, answering 500 on any exception.

    The original awaited the handler into `data` and then discarded it;
    returning it makes the decorated handler transparent on success.
    """
    try:
        data = await f(self, *args, **kwargs)
    except Exception:
        # Broad catch is deliberate at this top-level handler boundary:
        # log the traceback and return a generic error body.
        self.set_status(500)
        self.render_json({"error": "Invalid Request"})
        self.finish()
        logging.info(format_exc())
    else:
        return data
def __getitem__(self, key):
    """Fetch *key* from the pickled session blob stored under this id."""
    raw = self.r.get(self._id)
    # An absent/empty blob means a brand-new, empty session.
    session = pickle.loads(raw) if raw else {}
    logging.info(type(session))
    return session.get(key)
def wrapper(self, *args, **kwargs):
    """Log entry and exit around the wrapped handler method.

    The original logged "Leaving" *before* invoking the method, so the
    exit log lied about ordering; it now brackets the actual call.
    """
    handler_name = self.__class__.__name__
    logging.info("%s Entering..." % (handler_name))
    result = method(self, *args, **kwargs)
    logging.info("%s Leaving..." % (handler_name))
    return result
def get(self):
    """Render the login page, carrying the 'next' redirect target through."""
    logging.info(self.get_argument('next', 'default'))
    target = self.get_argument('next', '/')
    self.render("login.html", action="register", next=target)
def __init__(self): """ Class constructor. Sets up logging, active handlers and application server """ # Set up log file, level and formatting options.log_file_prefix = config.get('logging', 'path') options.logging = config.get('logging', 'level') options.log_to_stderr = config.getboolean('logging', 'stderr') # Port and arguments port = config.get('server', 'port') define('port', default=port, help='Port to be used') parse_command_line([]) # Override default logging format and date format log_format = config.get('logging', 'fmt', raw=True) date_format = config.get('logging', 'datefmt', raw=True) if date_format: formatter = tornado.log.LogFormatter(fmt=log_format, datefmt=date_format) for logger in logging.getLogger('').handlers: logging.info('Overriding log format for %s' % (logger)) logger.setFormatter(formatter) # Defining handlers # Removing optional handlers from handler list filtered_handlers = self.__handler_filter(handlers, config, optionalConfig) logging.info("Defining application (url, handler) pairs") application = tornado.web.Application(filtered_handlers, debug=config.getboolean( 'tornado', 'debug')) # Configuring server and SSL logging.info("Configuring HTTP server") if (config.has_section('ssl')): http_server = HTTPServer(application, ssl_options={ "certfile": config.get('ssl', 'hostcert'), "keyfile": config.get('ssl', 'hostkey'), }) else: http_server = HTTPServer(application) logging.info("Host certificate undefined, SSL is DISABLED") # Listening port http_server.listen(options.port) # Starting logging.info("Starting application on port: " + str(port)) tornado.ioloop.IOLoop.instance().start()
def get(self, slug):
    """Render the markdown "picky" page for *slug*; 404 if no such file."""
    logging.info(slug)
    mdfile = PICKY_DIR + "/" + str(slug) + ".md"
    try:
        # Context manager guarantees the file is closed even if read()
        # raises; the original used bare open()/close() and leaked the
        # handle on a read error.
        with open(mdfile) as md:
            markdown = md.read()
    except IOError:
        self.abort(404)
    render = RenderMarkdownPost(markdown)
    post = render.get_render_post()
    self.render("picky.html", post=post, slug=slug)
def __init__(self, db_parm):
    '''
    数据库连接初始化 (database connection initialisation)

    db_parm - dict with 'host', 'port' and optional 'db_file' (database
              name, defaulting to "falcon").
    '''
    try:
        # Stored on the class so every instance shares one mongo connection.
        self.__class__._db_conn = Connection(db_parm['host'], db_parm['port'])
        self.__class__._database = self.__class__._db_conn[db_parm.get(
            "db_file", "falcon")]
    except Exception, e:
        # NOTE(review): failure is only logged — the instance is still
        # created with no usable _database; confirm callers expect that.
        logging.critical('Failed to connect mongodb: %s' % (e))
    # NOTE(review): trailing-statement placement reconstructed from a
    # collapsed source line — confirm whether these sat inside the except.
    logging.info('Leaving DbBackend....')
    return None
def query(sql, args):
    """Execute *sql* with *args*.

    Returns lastrowid for INSERT statements, fetchall() rows otherwise,
    or None when the statement raised (the error is logged).
    """
    data = None
    try:
        logging.info('query: {} {}'.format(sql, args))
        with conn:
            cursor.execute(sql, args)
            is_insert = sql.strip().startswith('INSERT ')
            data = cursor.lastrowid if is_insert else cursor.fetchall()
        logging.info('query: {}'.format(data))
    except Exception as e:
        logging.error(e)
    return data
def find(self, collect_name, condition={}, skip=0, limit=config.DEFAULT_PAGE_SIZE, fields=None): """ search in collect by a condition dict collect_name - collection name to be searched condition - a dict for the search condition skip - skips the top [skip] elements for pagination limit - number of documents to be returned zero for unlimited returns: None - failed to enquiry Others - the documents list """ logging.info('Enter database find ...') check_objectId(condition) logging.info("collect_name=%s, condition=%s, skip=%s, limit=%s" % (collect_name, condition, skip, limit)) try: if limit < 0: logging.error("limit=%d is not acceptable!" % limit) return None else: ret = self._database[collect_name].find(condition, fields=fields) # sort dataset if type(sort) is list: ret.sort(sort) if skip: ret.skip(int(skip)) if limit != 0: logging.info("Leaving find_collect") return list(ret.limit(int(limit))) else: logging.info("Leaving find_collect") return list(ret) except Exception, e: logging.error("findone in collection '%s' error: %s" % (collect_name, e)) logging.info("Leaving findone_collect...") return None
def post(self):
    """Create a new post from submitted markdown and redirect to it."""
    markdown = self.get_argument("markdown", None)
    comment = self.get_argument("comment", 1)
    if not markdown:
        self.redirect("/post/new")
        # The original fell through after redirect() and tried to render
        # None as a post; redirect() does not stop execution by itself.
        return
    render = RenderMarkdownPost(markdown)
    post = render.get_render_post()
    # Checkbox arrives as the string '0' when comments are disabled.
    if comment == '0':
        comment = 0
    post.update({"comment": comment})
    post_id = self.create_new_post(**post)
    logging.info(post_id)
    self.redirect("/post/%s" % post_id)
    return
def __init__(self): """ Class constructor. Sets up logging, active handlers and application server """ # Set up log file, level and formatting options.log_file_prefix = config.get('logging', 'path') options.logging = config.get('logging', 'level') options.log_to_stderr = config.getboolean('logging', 'stderr') # Port and arguments port = config.get('server', 'port') define('port', default=port, help='Port to be used') parse_command_line([]) # Override default logging format and date format log_format = config.get('logging', 'fmt', raw = True) date_format = config.get('logging', 'datefmt', raw = True) if date_format: formatter = tornado.log.LogFormatter(fmt = log_format, datefmt = date_format) for logger in logging.getLogger('').handlers: logging.info('Overriding log format for %s' % (logger)) logger.setFormatter(formatter) # Defining handlers # Removing optional handlers from handler list filtered_handlers = self.__handler_filter(handlers, config, optionalConfig) logging.info("Defining application (url, handler) pairs") application = tornado.web.Application(filtered_handlers, debug = config.getboolean('tornado', 'debug')) # Configuring server and SSL logging.info("Configuring HTTP server") if (config.has_section('ssl')): http_server = HTTPServer(application, ssl_options = { "certfile" : config.get('ssl', 'hostcert') , "keyfile" : config.get('ssl', 'hostkey'), }) else: http_server = HTTPServer(application) logging.info("Host certificate undefined, SSL is DISABLED") # Listening port http_server.listen(options.port) # Starting logging.info("Starting application on port: " + str(port)) tornado.ioloop.IOLoop.instance().start()
def _fill_db(db):
    """Recreate options.TABLE_NAME and seed it with random user/IP rows."""
    import random
    import socket
    import struct
    try:
        db.execute("DROP TABLE IF EXISTS %s;" % options.TABLE_NAME)
        db.execute("CREATE TABLE %s (user_id INT, ip INET, dt TIMESTAMP);"
                   % options.TABLE_NAME)
        for n in range(1, options.TEST_COUNT):
            # Random 32-bit value rendered as a dotted-quad IP address.
            packed = struct.pack(">I", random.randint(1, 0xFFFFFFFF))
            randip = socket.inet_ntoa(packed)
            randuser = random.randint(1000, 10000)
            db.execute("INSERT INTO %s VALUES(%i, '%s', NOW());"
                       % (options.TABLE_NAME, randuser, randip))
            # logging.info('%i %s' % (n,randip))
        logging.info("Done - %i", n)
    except (psycopg2.Warning, psycopg2.Error) as error:
        logging.info(str(error))
def post(self):
    """Authenticate the posted credentials and start a session."""
    username = self.get_argument("name")
    password = self.get_argument("password")
    next_url = self.get_argument("next")
    #email = self.get_argument("email")
    # SECURITY(review): this logs a raw password and compares it in plain
    # text against the stored value — passwords should be hashed; confirm.
    logging.info({"username": username, "password": password})
    db = self.application.db
    user = db.users.find_one({"name": username})
    if user is None:
        # The original redirected without returning, then dereferenced
        # user['name'] on None and raised TypeError.
        self.redirect("login")
        return
    if username == user['name'] and password == user['password']:
        self.my_session['user'] = username
        self.my_session['pwd'] = password
        self.set_secure_cookie("user", self.get_argument("name"))
        self.redirect(next_url)
    else:
        self.redirect('/login')
async def get(self, hashed_name):
    """Stream the stored file *hashed_name* to the client in chunks."""
    file_path = '{}/{}'.format(self.file_path, hashed_name)
    file_size = os.path.getsize('{}/{}'.format(self.file_path, hashed_name))
    logging.info('download handler: {} {} bytes'.format(
        file_path, str(file_size)))
    self.set_header('Content-length', file_size)
    self.flush()
    with open(file_path, 'rb') as src:
        while True:
            chunk = src.read(config.CHUNK_SIZE)
            logging.debug('download chunk: {} bytes'.format(len(chunk)))
            # Empty read marks end of file — stop streaming.
            if not chunk:
                break
            self.write(chunk)
            await gen.Task(self.flush)
def _fill_db(db):
    """Drop, recreate and populate options.TABLE_NAME with random rows."""
    import random
    import socket
    import struct
    try:
        db.execute('DROP TABLE IF EXISTS %s;' % options.TABLE_NAME)
        db.execute('CREATE TABLE %s (user_id INT, ip INET, dt TIMESTAMP);'
                   % options.TABLE_NAME)
        for n in range(1, options.TEST_COUNT):
            # 32-bit random value → dotted-quad IP string.
            raw = struct.pack('>I', random.randint(1, 0xffffffff))
            randip = socket.inet_ntoa(raw)
            randuser = random.randint(1000, 10000)
            db.execute("INSERT INTO %s VALUES(%i, '%s', NOW());" % (
                options.TABLE_NAME, randuser, randip
            ))
            #logging.info('%i %s' % (n,randip))
        logging.info('Done - %i', n)
    except (psycopg2.Warning, psycopg2.Error) as error:
        logging.info(str(error))
def auth_user(name, password):
    """Validate *password* against the stored bcrypt hash for *name*.

    Yields the bcrypt work onto the executor; returns the user name on a
    match, None otherwise.
    """
    rows = query(
        """
        SELECT hashed_password
        FROM users
        WHERE name=?
        """, (name, ))
    hashed_password = rows[0]['hashed_password'] if rows else None  # noqa
    logging.debug("hashed_password {}".format(hashed_password))
    if not hashed_password:
        return None
    # Re-hash the candidate with the stored salt off the IO loop.
    p = yield executor.submit(bcrypt.hashpw,
                              tornado.escape.utf8(password),
                              tornado.escape.utf8(hashed_password))
    if hashed_password == p:
        logging.info('db.get_user: {}'.format(name))
        return name
    return None
def wrap(*args, **kwargs):
    """Time the wrapped call, persist the duration in Timing, and log it."""
    time1 = time.time()
    ret = function(*args, **kwargs)
    time2 = time.time()
    user = 0
    try:
        user = request.POST.get('auth_token').split('_')[1]
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); unauthenticated calls keep user=0.
        pass
    Timing.objects.create(user=user, func=function.func_name,
                          time=int((time2 - time1) * 1000.0), args=args)
    logging.info('%s function took %0.3f ms'
                 % (function.func_name, (time2 - time1) * 1000.0))
    return ret
def _zlog(self, level, message):
    """
    Actual log sender! Sends the message through zmq to Loggr.

    level   - log level carried in the LogMessage envelope
    message - the log line to ship
    """
    # Package the line with daemon/host metadata for the Loggr service.
    log_message = LogMessage(
        log_level=level,
        daemon_name=self.daemon_name,
        host_name=self.host,
        log_line=message
    )
    try:
        if self.verbose:
            logging.info(log_message.as_dict())
        self.client.send(service=self.service, request=log_message.as_json())
        # Synchronous round-trip: wait for the broker's reply.
        self.client.recv()
    except AttributeError, e:
        # NOTE(review): only AttributeError is caught and `e` is unused —
        # presumably guards a message that cannot as_json(); confirm.
        logging.error("Received illegal message: {}".format(message))
def post(self, *args, **kwargs):
    """Accept uploaded files, queue them for processing, then redirect home.

    NOTE(review): indentation reconstructed from a collapsed source line —
    redirect/return placed inside the field loop is the only layout under
    which the final render remains reachable; confirm against the original.
    """
    items = self.request.files.items()
    if not items:
        error = "Please select file first"
    for field, files in items:
        for info in files:
            filename = info['filename']
            content_type = info['content_type']
            body = info['body']
            logging.info('POST {}: {} {} bytes'.format(
                field, content_type, len(body)))
            if content_type.lower() == 'application/pdf':
                # PDFs get the page-rendering pipeline; everything else
                # is stored as-is.
                threadpool.submit(pdf.save_pdf_file, body, filename,
                                  self.current_user.decode())
            else:
                threadpool.submit(pdf.save_file, body, filename,
                                  self.current_user.decode())
        self.redirect('/')
        return
    # Reached only when no files were posted: re-render with the error.
    self.render("home.html", files_list=db.get_file_list(), error=error)
async def get(self, hashed_name, **params):
    """Stream one rendered PNG page of document *hashed_name*.

    params['page'] selects the page (defaults to 1 when empty); a missing
    page file redirects back to the index.
    """
    page = int(params['page']) if params['page'] else 1
    logging.info('page: {}'.format(page))
    file_path = '{}/{}{}.png'.format(self.file_path, hashed_name, page)
    try:
        file_size = os.path.getsize(file_path)
        # 'application/png' (original value) is not a registered MIME
        # type; PNG images are served as image/png.
        self.set_header('Content-Type', 'image/png')
        self.set_header('Content-length', file_size)
        self.flush()
        with open(file_path, 'rb') as f:
            complete_download = False
            while not complete_download:
                data = f.read(config.CHUNK_SIZE)
                logging.info('download chunk: {} bytes'.format(len(data)))
                if len(data) > 0:
                    self.write(data)
                    await gen.Task(self.flush)
                complete_download = (len(data) == 0)
    except FileNotFoundError:
        self.redirect("/")
def main():
    """Configure logging, build the Application, and run the IOLoop."""
    tornado.options.log_file_prefix = config.LOG_FILE  # set log file
    tornado.options.options.logging = "debug"  # set log level
    tornado.options.parse_command_line()
    try:
        # start server / 启动服务
        app = Application()
        app.settings['DEBUG_TEMPLATE'] = tornado.options.options.debug_template
        logging.info("Start %s HTTP server on port:%d ...\n\n"
                     % (config.PROJECT_NAME, options.port))
        server = tornado.httpserver.HTTPServer(app, xheaders=True)
        server.listen(port=options.port, address=options.ip_address)
        tornado.ioloop.IOLoop.instance().start()
    except Exception as e:
        # start server fail
        logging.error('Failed to start HTTP server, due to : %s' % (e))
        logging.error("HTTP server is terminated.")
def sendPushMessage(app_flavor, token, message, badge, sound, custom=None,
                    sandbox=config["apn_sandbox"]):
    """Send an APNs push notification.

    app_flavor - unused here; kept for interface compatibility — confirm.
    token      - APNs device token
    message/badge/sound/custom - standard APNs payload fields
    sandbox    - use the development gateway and certs when truthy; the
                 default is captured from config at import time.
    """
    if custom is None:
        # Replaces the shared mutable default argument `custom={}`.
        custom = {}
    cert_file = config["apn_cert"]
    key_file = config["apn_key"]
    if sandbox:  # was `== True`, which mishandles truthy non-bool configs
        cert_file = config["apn_cert_dev"]
        key_file = config["apn_key_dev"]
    #token = '963c3f72abe5dee900f066e88486272dd7e2648948abb4352ecbb52294b7317e'
    apns = APNs(use_sandbox=sandbox, cert_file=cert_file, key_file=key_file)
    payload = Payload(alert=message, sound=sound, badge=badge, custom=custom)
    logging.info("Sending notification " + str(payload) + " to " + str(token))
    print("Sending notification " + str(payload) + " to " + str(token))
    if sandbox:
        print("sandboxmode")
    apns.gateway_server.send_notification(token, payload)
def get(self):
    """Render the index page with a timestamped banner string."""
    cls_name = self.__class__.__name__
    logging.info('%s Entering...' % (cls_name))
    logging.info('write IndexHandler')
    render_str = 'IndexHandler at %s' % (datetime.datetime.now())
    self.render('index.html', render_str=render_str)
    logging.info('%s Leaving...' % (cls_name))
    return
def update(self, collect_name, condition, data):
    '''
    update collect data:
    collect_name - collection name to be updated
    condition - a dict for the search condition
    data - a dict for data to be updated
    returns:
        None - Nothing being updated
        Other - The updated object_id
    '''
    logging.info('Enter database update ...')
    # Normalise string '_id' values to ObjectId (in place) in both dicts.
    check_objectId(condition)
    check_objectId(data)
    try:
        logging.debug("condition=%s, data=%s" % (condition, data))
        # new=True returns the post-update document rather than the original.
        ret = self._database[collect_name].find_and_modify(query=condition,
                                                           update=data,
                                                           new=True)
        logging.info("Leaving update...")
        return ret
    except Exception, e:
        logging.error("update collection '%s' error: %s" % (collect_name, e))
        logging.info("Leaving update_collect...")
        return None
def delete(self, collect_name, condition={}):
    """
    delete collect by the specified condition
    collect_name - collection name
    condition - a dict for the search condition
    returns:
        None - failed to delete
        True - success
    """
    logging.info('enter database delete....')
    # Normalise a string '_id' to ObjectId (in place) before matching.
    check_objectId(condition)
    logging.debug("collect_name=%s, condition=%s" % (collect_name, condition))
    try:
        # Only remove when a matching document actually exists.
        collect = self.findone_collect(collect_name, condition)
        if collect is not None:
            # NOTE(review): `result` is never inspected — the remove
            # outcome is not checked; confirm partial failures don't matter.
            result = self._database[collect_name].remove(condition)
            logging.info('leaving delete_collect....')
            return True
        return None
    except Exception, e:
        logging.error("delete collection '%s', error:%s" % (collect_name, e))
        logging.info('Leaving delete_collect....')
        return None
def main():
    """Set up logging options, then start the HTTP server on the IOLoop."""
    tornado.options.log_file_prefix = config.LOG_FILE  # set log file
    tornado.options.options.logging = "debug"  # set log level
    tornado.options.parse_command_line()
    try:
        # start server / 启动服务
        application = Application()
        debug_template = tornado.options.options.debug_template
        application.settings['DEBUG_TEMPLATE'] = debug_template
        logging.info("Start %s HTTP server on port:%d ...\n\n"
                     % (config.PROJECT_NAME, options.port))
        http_server = tornado.httpserver.HTTPServer(application,
                                                    xheaders=True)
        http_server.listen(port=options.port, address=options.ip_address)
        tornado.ioloop.IOLoop.instance().start()
    except Exception as e:
        # start server fail
        logging.error('Failed to start HTTP server, due to : %s' % (e))
        logging.error("HTTP server is terminated.")
def main():
    """Build the translator, register the routes, and serve forever."""
    # TODO : Prettify this code
    logging.info("Loading Dictionary from: %s" % dict_path)
    dictionary = Mojifi.SymbolDictionary(dict_path, None)
    translator = Mojifi.Translator(dictionary)

    # Server Config
    settings = {"template_path": abs_path("html/"), "debug": True}
    server_settings = {"xheaders": True}
    tornado.options.parse_command_line()

    # Server Startup
    logging.info("Running Tornado at http://localhost:%s" % options.port)
    routes = [
        (r"/js/(.*)", StaticFileHandler, {'path': abs_path('js/')}),
        (r"/css/(.*)", StaticFileHandler, {'path': abs_path('css/')}),
        (r"/(.*)", MainHandler, dict(translator=translator)),
    ]
    app = tornado.web.Application(routes, **settings)
    app.listen(options.port, **server_settings)
    tornado.ioloop.IOLoop.instance().start()
def get_current_user(self):
    """Return the secure-cookie user if a live Redis session exists, else None."""
    #return self.get_cookie(Session.session_id)
    cook_user = self.get_secure_cookie("user")
    session_id = self.get_cookie(Session.session_id)
    logging.info(cook_user)
    logging.info(session_id)
    # A session blob in Redis is what makes the cookie trustworthy.
    session = self.application.r.get(session_id)
    logging.info(session)
    return cook_user if session else None
def check_objectId(data):
    """Ensure data['_id'] is a bson ObjectId, converting it in place.

    Returns the (possibly updated) dict, or None when data lacks '_id',
    is not a dict, or carries an invalid id string.
    """
    logging.info("Entering check_objectId...")
    try:
        if type(data['_id']) is not ObjectId:
            data['_id'] = ObjectId(data['_id'])
        return data
    except KeyError:
        logging.error("data['_id'] doesn't exist!")
    except TypeError:
        logging.error("data is not a dict!")
    except InvalidId:
        logging.error("data['_id'] is not a valid ObjectId!")
    # Error paths fall through to a common exit.
    logging.info("Leaving check_objectId...")
    return None
def findone(self, collect_name, condition={}):
    """
    search in collect by a condition dict for the 1st document ONLY
    collect_name - collection name to be searched
    condition - a dict for the search condition
    returns:
        None - failed to enquiry
        Others - the documents
    """
    logging.info('Enter database findone ...')
    # Normalise a string '_id' to ObjectId (in place) before querying.
    check_objectId(condition)
    logging.info("collect_name=%s, condition=%s" % (collect_name, condition))
    try:
        ret = self._database[collect_name].find_one(condition)
        logging.info("Leaving findonet...")
        return ret
    except Exception, e:
        logging.error("findone in collection '%s' error: %s"
                      % (collect_name, e))
        logging.info("Leaving findone_collect...")
        return None
def insert(self, collect_name, data):
    """
    insert
    collect_name - collection name
    data - document to be inserted
    returns:
        None - failed to insert docment
        Other - the object_id in string
    """
    logging.info("Enter database insert....")
    logging.debug("collect_name=%s, data=%s" % (collect_name, data))
    try:
        # str() so callers get a JSON-friendly id rather than an ObjectId.
        _id = self._database[collect_name].insert(data)
        logging.info('Leaving insert...')
        return str(_id)
    except Exception, e:
        logging.error('insert into collect %s, error:%s' % (collect_name, e))
        logging.info('Leaving insert_collect....')
        return None
def __handler_filter(self, handlers, config, optionalConfig):
    """
    Remove handlers which are not to be used when optional
    functions are not configured
    """
    # Work on a copy so the caller's handler list stays intact.
    res = list(handlers)
    logging.debug('Active endpoints: %s' % res)
    for section in optionalConfig.keys():
        logging.info('Checking optional configuration section [%s]' % (section))
        try:
            config.items(section)
            continue  # section configured - keep its endpoints
        except ConfigParser.NoSectionError:
            pass
        logging.info('Section not found. Removing related endpoints')
        # Remove handlers whose url matches the missing optional
        # section names
        for url, handler_cls in handlers:
            if re.search(section, url):
                logging.warning('Removing endpoint [%s]' % url)
                res.remove((url, handler_cls))
    logging.info('Active endpoints: %s' % res)
    return res
) for ips in cursor.fetchall(): self.rc.sadd(uid, ips[0]) if self.rc.scard(first_user_id) > self.rc.scard(second_user_id): self.rc.sadd("Related", sstring) result = True self.write("Users %i and %i are related: %s." % (first_user_id, second_user_id, result)) self.finish() if __name__ == "__main__": try: parse_config_file("config.conf") except: logging.info("Config file ./config.conf not found") application = tornado.web.Application([(r"/user_relations/", MainHandler)]) ioloop = tornado.ioloop.IOLoop.instance() if options.USE_RC: redisConnectionPool = redis.ConnectionPool(host="localhost", port=6379, db=12) application.rc = redis.Redis(connection_pool=redisConnectionPool) logging.info("Using Redis") if options.FLUSH_RC: application.rc.flushdb() if options.USE_DB: application.db = momoko.Pool(
#!/usr/bin/env python # -*- coding:utf-8 -*- # Filename: database.py # ----------------------------------------------------------------- # 2014-10-30 created import sys import redis from tornado.log import logging sys.path.append("../..") import config ### Connct to Redis ### redis_conn = None try: redis_conn = redis.StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.REDIS_DB) logging.info("Redis Connected successfully") except Exception, e: logging.error("Could not connect to Redis:%s\n" % e)