def main():
    """Discover and run the project's unit tests under coverage measurement.

    Walks the tree collecting ``*_test.py`` / ``*_tests.py`` files so they can
    be omitted from coverage, runs the discovered suite, prints a coverage
    report to stdout and optionally writes an HTML report.

    Relies on module-level ``args`` (parsed CLI options) and ``logger``.
    """
    test_files = []
    for _dirpath, _dirnames, filenames in os.walk('.'):
        for filename in filenames:
            if re.search(r".*_tests?\.py", filename):
                test_files.append(filename)
    # Bug fix: hand Coverage its own list instead of appending
    # 'config_testing.py' and popping it back off afterwards -- mutating a
    # list already passed to Coverage risks silently changing its omit set.
    coverage = Coverage(omit=test_files + ['config_testing.py'])
    coverage.start()
    # Effectively silence all log output while the tests run.
    logger.setLevel(1000000000000)
    loader = unittest.TestLoader()
    # NOTE(review): discovery matches only '*_test.py' while the omit list
    # also covers '*_tests.py' -- confirm the narrower pattern is intended.
    suite = loader.discover('.', '*_test.py')
    test_runner = unittest.TextTestRunner(verbosity=2)
    test_result = test_runner.run(suite)
    coverage.stop()
    coverage.report(file=sys.stdout)
    if args.html_report:
        coverage.html_report(directory=args.html_report)
    print(test_result)
def load(fn, verbose=0):
    """Load an OBJ file and return a Scene.

    Args:
        fn: path of the .obj file to read.
        verbose: 0 = warnings only, 1 = info, 2 = debug output.

    Returns:
        A Scene with its bounding box computed and at least one usable view.
    """
    verbosity = {
        0: logging.WARNING,  # FIXME: what should we do with logging.ERROR ?
        1: logging.INFO,
        2: logging.DEBUG,
    }
    # Robustness: verbosity levels above 2 now map to DEBUG instead of
    # raising KeyError (levels 0-2 behave exactly as before).
    logger.setLevel(verbosity.get(verbose, logging.DEBUG))
    sc = Scene()
    sc.read(fn)
    sc.bb = sc.compute_bb()
    if not sc.views:
        logger.info('No views in file, creating one dummy view')
        sc.views = [View()]
    v = sc.views[0]
    logger.info(v.__str__())
    if v.empty():
        logger.info('Recompute first view')
        v.reset(sc.bb)
    logger.info(sc.__str__())
    return sc
def load(fn, verbose=0):
    """Read an OBJ file *fn* and return the resulting scene.

    ``verbose`` selects the logging level: 0 -> WARNING, 1 -> INFO,
    2 -> DEBUG.
    """
    level_map = {
        0: logging.WARNING,  # FIXME: what should we do with logging.ERROR ?
        1: logging.INFO,
        2: logging.DEBUG,
    }
    logger.setLevel(level_map[verbose])

    scene = Scene()
    scene.read(fn)
    scene.bb = scene.compute_bb()

    if scene.views == []:
        logger.info('No views in file, creating one dummy view')
        scene.views = [View()]

    first_view = scene.views[0]
    logger.info(str(first_view))
    if first_view.empty():
        logger.info('Recompute first view')
        first_view.reset(scene.bb)

    logger.info(str(scene))
    return scene
def main():
    """Discover every '*_test.py' module under the current directory and run it."""
    # An absurdly high threshold guarantees no log record passes the filter.
    logger.setLevel(1000000000000)
    suite = unittest.TestLoader().discover('.', '*_test.py')
    unittest.TextTestRunner(verbosity=2).run(suite)
def test_run(self, mock_class):
    """Series.run() must store the generator's results keyed by signature."""
    logger.setLevel(10050000)  # keep ordinary log output out of the test run
    sig = Mock()
    mock_class().get_results.return_value = {"1": 123}
    series_under_test = Series([1], sig, [1, 2])
    series_under_test.run()
    expected = {sig: {"1": 123}}
    self.assertEqual(list(series_under_test._results.values()),
                     list(expected.values()))
def set_distribution_defaults(opts):
    """Fill in the distributed-worker count/index entries of *opts*.

    When popdist is in use the values come from the popdist runtime;
    otherwise a single-worker default is written.  Non-root workers have
    their logging raised to ERROR unless 'log_all_workers' is set.
    """
    if not opts['use_popdist']:
        opts['distributed_worker_count'] = 1
        opts['distributed_worker_index'] = 0
    else:
        opts['distributed_worker_count'] = popdist.getNumInstances()
        opts['distributed_worker_index'] = popdist.getInstanceIndex()

    is_root = opts['distributed_worker_index'] == 0
    if not is_root and not opts['log_all_workers']:
        logger.setLevel(logging.ERROR)
def test_run(self):
    """Round.run() should collect the series results into games_results."""
    with patch('config.Generator'):
        series.Series = Mock()
        series.Series().get_results.return_value = {'aba': 'caba'}

        round_under_test = Round(players_list=[[1, 2]], game_info=Mock())
        round_under_test._jurystates_list = [Mock()]
        logger.setLevel(10050000)  # keep log noise out of the test output

        round_under_test.run()
        self.assertEqual(round_under_test.games_results, {'aba': 'caba'})
def set_distribution_defaults(opts):
    """Populate *opts* with distributed-worker defaults.

    Under popdist the worker count/index are queried from the popdist
    runtime and appended to the human-readable summary string; otherwise a
    single local worker is assumed.  Non-root workers only log errors
    unless 'log_all_workers' was requested.
    """
    if opts['use_popdist']:
        opts['distributed_worker_count'] = popdist.getNumInstances()
        opts['distributed_worker_index'] = popdist.getInstanceIndex()
        summary_lines = (
            'Popdist\n',
            ' Process count: {distributed_worker_count}\n',
            ' Process index: {distributed_worker_index}\n',
        )
        for line in summary_lines:
            opts['summary_str'] += line
    else:
        opts['distributed_worker_count'] = 1
        opts['distributed_worker_index'] = 0

    if opts['distributed_worker_index'] != 0 and not opts['log_all_workers']:
        logger.setLevel(logging.ERROR)
def main():
    """Parse command-line options and start the Tornado web server."""
    # Resolve paths relative to this source file's directory.
    rel = lambda *x: os.path.abspath(os.path.join(os.path.dirname(__file__), *x))
    # Options must be defined before parse_command_line() is called.
    define('listen', metavar='IP', default='0.0.0.0', help='listen on IP address (default 0.0.0.0)')
    define('port', metavar='PORT', default=8888, type=int, help='listen on PORT (default 8888)')
    define('debug', metavar='True|False', default=True, type=bool, help='debug mode')
    define("config", default="", help="config file")
    options.parse_command_line()
    settings = dict(
        template_path=rel('templates'),
        static_path=rel('static'),
        debug=options.debug,
        xsrf_cookies=True,
        # NOTE(review): hard-coded cookie secret -- should come from config
        # for any non-development deployment.
        cookie_secret="bZJc2sWbQLKos6GkHn/VB9oXwQtsw2d1QRvJ5/xJ89E=",
        login_url="/",
        # ui_methods=uimodule,
    )
    if options.debug:
        import logging
        logger.setLevel(logging.DEBUG)
    # Route table; more specific patterns are listed before the catch-alls.
    application = Application([
        (r'/', LoginHandler),
        (r'/web[/]*', WebHandler),
        (r'/kcs/resources/image/world/.*(l|s|t)\.png', WorldImageHandler),
        (r'/kcs/(.*)', CacheHandler),
        (r'/kcsapi/(.*)', APIHandler),
        (r'/logout[/]*', APIHandler),
    ], **settings)
    application.listen(address=options.listen, port=options.port)
    logger.info("http server started on %s:%s" % (options.listen, options.port))
    # Blocks until the IOLoop is stopped.
    IOLoop.instance().start()
def main():
    """Query Sentry for issues and render a word cloud of a chosen field.

    Requires the module-level ``token`` and ``sentry_url`` values (read from
    the ``sentrycloudtoken`` / ``sentryurl`` environment variables) to be set;
    returns early with an error log otherwise.
    """
    if token is None:
        logger.error('Missing env variable "sentrycloudtoken"')
        return
    if sentry_url is None:
        logger.error('Missing env variable "sentryurl"')
        return

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--field',
        help='Field name to pull from sentry. [culprit, filename, title, type]',
        default='title',
    )
    parser.add_argument('--file', help='File path to export to. Ie. result.png', default='result.png')
    parser.add_argument(
        '--cache',
        help=
        'Utilize cache result from the last Sentry API query. False will delete the previous cache.',
        default=False,
    )
    parser.add_argument('--verbose', help='Verbose to stdout', default=False)
    args = parser.parse_args()
    logger.info('Running with args: {}'.format(args))

    # Bug fix: DEBUG logging now turns ON when --verbose is given (the
    # original condition was inverted and enabled it only when verbose
    # was left at its default).
    if args.verbose is not False:
        logger.setLevel(logging.DEBUG)

    # Bug fix: the cache flag lives on the parsed namespace (args), not on
    # the parser object -- 'parser.cache' raised AttributeError.  Also guard
    # the removal so a missing cache file doesn't crash the run.
    if args.cache is False and os.path.exists(cache_file_name):
        os.remove(cache_file_name)

    dataMap = get_data()
    counts = count(dataMap, args.field)
    create_word_cloud(counts, args.file)

    if args.cache is False and os.path.exists(cache_file_name):
        os.remove(cache_file_name)
def main(): ''' Main entry point, start the server. ''' # Tell Tornado to parse the command line for us. tornado.options.parse_command_line() # Init logging to file init_file_log(logging.DEBUG) # Set the console logging level if options.debug: logger.setLevel(logging.DEBUG) options.logging = "debug" tornado.log.enable_pretty_logging(options, logging.getLogger("tornado.access")) tornado.log.enable_pretty_logging( options, logging.getLogger("tornado.application")) tornado.log.enable_pretty_logging(options, logging.getLogger("tornado.general")) else: logger.setLevel(logging.INFO) logger.info("Project intro WebSocket server.") # Intital setup of the Raspberry Pi. # GPIO.setwarnings(False) GPIO.setmode(GPIO.BCM) # Create a Tornado HTTP and WebSocket server. http_server = tornado.httpserver.HTTPServer(APP) http_server.listen(options.port) logger.info("Listening on port: " + str(options.port)) # Start the Tornado event loop. tornado.ioloop.IOLoop.instance().start() # Close the log if we're done. close_log()
def open(self):
    """Run the window's event loop, dispatching events until the user exits.

    Each event key is matched with ``self._open_re``; the first capture group
    selects a handler from the dispatch table (a purely numeric group is
    treated as a tab event).
    """
    # Dispatch table: event-key prefix -> handler lambda.
    _map = {
        'tab': lambda event, values: self.handle_tab_event(self.find_tab_by_key(event), event, values),
        'tab_group': lambda event, values: self.handle_tab_group_event(values[event]),
        'sleep': lambda event, values: self.handle_sleep_event(event, values[event]),
        'timer': lambda event, values: self.handle_timer_event(event, values[event]),
        'adb': lambda event, values: self.handle_adb_event(event, values[event]),
        'button': lambda event, values: self.handle_button_event(event),
        # Persist the chosen level in self.data and apply it to the logger
        # in a single tuple expression.
        'log_level': lambda event, values: (self.data.update({'loglevel': values[event]}), logger.setLevel(values[event])),
        'update': lambda event, values: self.handle_update_event(event, values[event])
    }
    while True:
        event, values = self.window.read()
        if event in (sg.WIN_CLOSED, '_button_Exit_'):
            # Save state and stop background workers before leaving the loop.
            self.__save()
            self.stop_all_safe()
            break
        # print(f'event={event}, '
        #       f'(value, type)={(values[event], type(values[event])) if event in values else ("none", "none_type")}')
        matchresult = self._open_re.match(event)
        if matchresult:
            # A numeric first group identifies a tab; otherwise the group
            # names the handler class directly.
            _cls = 'tab' if matchresult[1].isdigit() else matchresult[1]
            _map[_cls](event, values)
    self.window.close()
from modbus_tk import modbus
import logging

# Console logging: timestamp, level, source location, message.
formatter = logging.Formatter(
    '%(asctime)s %(levelname)8s [%(filename)s%(lineno)06s] %(message)s')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)

# Route everything through the root logger at DEBUG.
logger = logging.root
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)

# Bug fix: the identical Formatter was previously constructed twice; the
# first instance was dead and has been removed.

# PORT = '/dev/ttyUSB0'
PORT = '/dev/pts/2'

# Modbus RTU master over a serial line (115200 8N1, no flow control).
master = RtuMaster(
    serial.Serial(port=PORT,
                  baudrate=115200,
                  bytesize=8,
                  parity='N',
                  stopbits=1,
                  xonxoff=0)
)
master.set_timeout(1.0)
master.set_verbose(True)

# centroid = CentroidPacket()
class DBconnection:
    """Thin wrapper around a MySQL connection for the employee_payroll table."""

    # Runs once, when the class is defined: raise the module logger to INFO.
    logger.setLevel(logging.INFO)

    def __init__(self, db_host, db_user, db_pass, db_name):
        """Open a connection and cursor against the given database."""
        self.conn = mysql.connector.connect(host=db_host,
                                            user=db_user,
                                            password=db_pass,
                                            database=db_name)
        self.cursor = self.conn.cursor()

    def close_conn(self):
        """Close the underlying database connection."""
        self.conn.close()

    def display(self):
        """Log every row of employee_payroll at INFO level."""
        sql_query = "SELECT * FROM employee_payroll;"
        try:
            self.cursor.execute(sql_query)
            result = self.cursor.fetchall()
            for i in result:
                emp_id = i[0]
                name = i[1]
                gender = i[2]
                salary = i[3]
                start = i[4]
                logger.info(
                    f"id:{emp_id},name:{name},gender:{gender},salary:{salary},start:{start}"
                )
        except Exception:
            logger.error('Error:Unable to fetch data.')

    def InsertRecord(self):
        """Insert one hard-coded sample record, rolling back on failure."""
        query = "INSERT INTO employee_payroll(id, name, gender, salary, start) VALUES(6, 'vikas', 'M', 400000, '2018-06-05');"
        try:
            self.cursor.execute(query)
            self.conn.commit()
            logger.info('Record Inserted Successfully')
        except Exception:
            logger.error("Error! unable to insert data")
            self.conn.rollback()

    def updatadata(self):
        """Update bill's salary, rolling back on failure."""
        query1 = "UPDATE employee_payroll SET salary=30000 WHERE name='bill'"
        # Removed an unused SELECT query that was built but never executed.
        try:
            self.cursor.execute(query1)
            self.conn.commit()
            logger.info('Record Updated successfully')
        except Exception:
            # Bug fix: failures are now logged at ERROR (was logger.info),
            # consistent with the other methods.
            logger.error("Error! unable to update data")
            self.conn.rollback()

    def deletedata(self):
        """Delete chalie's record, rolling back on failure."""
        query = "DELETE FROM employee_payroll WHERE name='chalie'"
        try:
            self.cursor.execute(query)
            self.conn.commit()
            logger.info('Record Deteted Successfully')
        except Exception:
            # Bug fix: the message now matches the operation (said "update").
            logger.error("Error! unable to delete data")
            self.conn.rollback()
# Command-line options: verbosity flags, JSON pretty-printing, config path.
parser.add_argument('--verbose', action='store_true', help='enable verbose messages')
parser.add_argument('--info', action='store_true', help='enable informational messages')
parser.add_argument('--pretty', action='store_true', help='pretty print json')
parser.add_argument('--config', default='config_forwarder.json',
                    help='configuration file, default config_forwarder.json')
args = parser.parse_args()

# --verbose wins over --info; the default is warnings only.
if args.verbose:
    level = logging.DEBUG
elif args.info:
    level = logging.INFO
else:
    level = logging.WARNING
log.setLevel(level)

cfg = None
if args.config:
    try:
        cfg = config_util.load_config(args.config)
    except FileNotFoundError as e:
        die('config file not found', e, ErrorCode.FILE_NOT_FOUND)
    except json.decoder.JSONDecodeError as e:
        die(f'json error in {args.config}', e, ErrorCode.FILE_INVALID)
def cli(verbose):
    """Signal Deviation Alerts."""
    # Guard clause: leave the logger untouched unless verbosity was asked for.
    if not verbose:
        return
    logger.setLevel('DEBUG')
# The updater does not enable file logging
import logging

from log import fileHandler, logger, new_file_handler

logger.name = "reversi"
logger.removeHandler(fileHandler)
logger.addHandler(new_file_handler())
logger.setLevel(logging.INFO)

import copy
import random
import sys
import time
from collections import Counter
from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, Tuple

from PyQt5.Qt import (QApplication, QBrush, QDialog, QDialogButtonBox, QIcon,
                      QImage, QLabel, QMessageBox, QPalette, QSize)
from PyQt5.QtCore import QThread, pyqtSignal
from PyQt5.QtGui import QPixmap

from log import asciiReset, color
from qt_wrapper import *
from util import range_from_one

board_size = 8
cell_blue = -1