def test_setup_logging(self):
    with patch(u'adsmutils.ConcurrentRotatingFileHandler') as cloghandler:
        adsmutils.setup_logging(u'app')
        f = os.path.abspath(os.path.join(os.path.abspath(__file__), u'../../..'))
        tmp = str(cloghandler.call_args)
        tmp = tmp.replace("=u'", "='")  # remove unicode annotations in python 2
        expected = ("call(backupCount=10, encoding='UTF-8', "
                    "filename='{filename}/logs/app.log', maxBytes=10485760, "
                    "mode='a')").format(filename=f)
        self.assertEqual(expected, tmp)
def test_logging(self):
    logdir = os.path.abspath(os.path.join(os.path.dirname(__file__), u'../../logs'))
    foo_log = logdir + u'/foo.bar.log'
    if os.path.exists(foo_log):
        os.remove(foo_log)
    logger = adsmutils.setup_logging(u'foo.bar')
    logger.warning(u'first')
    logger.handlers[0].stream.flush()
    self.assertTrue(os.path.exists(foo_log))

    c = _read_file(foo_log)
    self.assertTrue('WARNING' in c)
    self.assertTrue('test_init.py' in c)
    self.assertTrue('first' in c)

    # now multiline message
    logger.warning(u'second\nthird')
    logger.warning(u'last')
    c = _read_file(foo_log)
    self.assertTrue(u'second\n third' in c)

    msecs = False
    for x in c.strip().split(u'\n'):
        datestr = x.split(u' ')[0]
        if datestr != u'':
            t = adsmutils.get_date(datestr)
            if t.microsecond > 0:
                msecs = True
    self.assertTrue(msecs)

    # test json formatter: replace the default formatter
    for handler in logger.handlers:
        handler.formatter = adsmutils.get_json_formatter()
    logger.info(u'test json formatter')
    c = _read_file(foo_log)
    self.assertTrue(u'"message": "test json formatter"' in c)
    self.assertTrue(u'"hostname":' in c)
    self.assertTrue(u'"lineno":' in c)

    # verify that there was only one log handler, logging to a file
    self.assertEqual(len(logger.handlers), 1)

    # now create a logger, requesting logs be written to stdout as well,
    # so there will be two log handlers
    logger2 = adsmutils.setup_logging(name_=u'foo.bar.2', attach_stdout=True)
    self.assertEqual(len(logger2.handlers), 2)
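# The tests above rely on a _read_file helper that is not shown in this
# excerpt. A minimal sketch, assuming it simply returns the file's full
# contents as a string:
def _read_file(fpath):
    # read the whole log file back so tests can assert on its contents
    with open(fpath, 'r') as f:
        return f.read()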
def log_request(bibcode, user, link_type, url, referrer, client_id, real_ip, user_agent):
    """
    log the click to aws

    :param bibcode:
    :param user:
    :param link_type:
    :param url:
    :param referrer:
    :param client_id:
    :param real_ip:
    :param user_agent:
    :return:
    """
    # if the logger doesn't exist, initialize it;
    # the logger is stored as a static variable on the function
    if not hasattr(log_request, "logger"):
        log_request.logger = utils.setup_logging(name_='linkout_clicks', attach_stdout=True)
        # replace the default formatter
        for handler in log_request.logger.handlers:
            formatter = u'%(asctime)s, %(process)d, %(linkout_clicks)s, ' \
                        u'%(user)s, %(link)s, %(bibcode)s, %(service)s, %(referer)s, %(client_id)s %(real_ip)s'
            handler.formatter = utils.get_json_formatter(logfmt=formatter)
    message = {
        'linkout_clicks': 'resolver_linkout_click',
        'user': user,
        'link': link_type,
        'bibcode': bibcode,
        'service': url,
        'referer': referrer,
        'client_id': client_id,
        'real_ip': real_ip,
        'user_agent': user_agent,
    }
    log_request.logger.info(message)
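# A hypothetical invocation, illustrating the expected argument order; every
# value below is an illustrative placeholder, none comes from the source:
#
#     log_request(
#         bibcode='2020arXiv200100001A',          # placeholder bibcode
#         user='anonymous-123',                   # placeholder user id
#         link_type='ESOURCE',                    # placeholder link type
#         url='https://example.org/fulltext',
#         referrer='https://ui.adsabs.harvard.edu',
#         client_id='client-abc',
#         real_ip='127.0.0.1',
#         user_agent='Mozilla/5.0',
#     )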
import argparse
import json

from adsmutils import setup_logging
import adsparser

logger = setup_logging('verify_parsing')


def verify_parsing(filename=None):
    with open(filename) as json_file:
        data = json.load(json_file)
    success_user = 0.
    success_query = 0.
    fail_user = 0.
    fail_query = 0.
    keyword_keys = ['daily_t1', 'phy_t1', 'phy_t2', 'pre_t1', 'pre_t2', 'ast_t1', 'ast_t2']
    # step through users
    for k, v in data.items():
        tmp_success = 0.
        tmp_fail = 0.
        # step through each setup
        for kk, vv in v.items():
            if kk in keyword_keys:
                try:
                    newquery = adsparser.parse_classic_keywords(vv)
                    success_query += 1
                    tmp_success += 1
                except Exception:
                    logger.info(u'Query for {0} failed to parse: {1}'.format(k, vv))
                    fail_query += 1
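# The module imports argparse, but the excerpt ends before any command-line
# wiring. A minimal sketch of an entry point, assuming a single --filename
# flag (the flag name is an assumption, not shown in the source):
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Verify parsing of classic keyword queries')
    parser.add_argument('--filename', required=True, help='path to the JSON file of user setups')
    args = parser.parse_args()
    verify_parsing(filename=args.filename)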