def parse_cli():
    """Parse command-line arguments for the leaf-recognition tool.

    Returns:
        tuple: (leaf_image paths, training directory, thresholding flag,
        resize flag).

    Raises:
        errors.LeadDetectionError: if any input image or the training
            directory does not exist.
    """
    parser = argparse.ArgumentParser(
        description="Detector types of plans by photos of their trees."
    )
    parser.add_argument('leaf_image', type=str, nargs='+',
                        help="Path to leaf image to recognize")
    parser.add_argument("-t", "--training", required=True,
                        help="Path to the directory with training images.")
    parser.add_argument('-T', '--thresholding', default=False,
                        action='store_true',
                        help='Enable images thresholding')
    parser.add_argument('-r', '--resize', default=False, action='store_true',
                        help='Enable images resizing')
    parser.add_argument('-v', '--verbose', default=False, action='store_true',
                        help='Enable debug logging')
    args = parser.parse_args()
    if args.verbose:
        logger.setLevel(DEBUG)
    # Report only the paths that are actually missing; the old message
    # echoed the entire input list, which was misleading when some of
    # the files did exist.
    missing = [leaf for leaf in args.leaf_image if not os.path.isfile(leaf)]
    if missing:
        raise errors.LeadDetectionError('Not found images: {0}!'.format(
            missing))
    if not os.path.isdir(args.training):
        raise errors.LeadDetectionError('Directory {0} doesn\'t exist!'.format(
            args.training))
    return args.leaf_image, args.training, args.thresholding, args.resize
def _setupLogging(logLevel: str, logLevelFramework: str):
    """Configure root, aiohttp and application logger levels.

    Root logging is initialised at ERROR; the aiohttp access/server
    loggers get *logLevelFramework* and the module logger gets
    *logLevel* (both case-insensitive level names).
    """
    logging.basicConfig(level=logging.ERROR)
    framework_level = logLevelFramework.upper()
    log.access_logger.setLevel(framework_level)
    log.server_logger.setLevel(framework_level)
    logger.setLevel(logLevel.upper())
def __init__(self, version, updater):
    """Initialise the QuoteFix plugin state.

    Args:
        version: plugin version, kept for the Mail-version check.
        updater: update-checker object used by check_version().
    """
    # set version
    self.version = version
    # set updater
    self.updater = updater
    # keep state of 'toggle key' (temporary enable/disable via keyboard)
    self.toggle_key_active = False
    # read user defaults (preferences)
    self.prefs = NSUserDefaults.standardUserDefaults()
    # register some default values for the QuoteFix* preference keys
    self.prefs.registerDefaults_(dict(
        QuoteFixFixReply=True,
        QuoteFixFixReplyAll=True,
        QuoteFixFixForward=True,
        QuoteFixFixDraft=False,
        QuoteFixFixNewMessage=False,
    ))
    # set log level; NOTE(review): is_debugging presumably reads a
    # preference — confirm where the property is defined
    logger.setLevel(self.is_debugging and logger.DEBUG or logger.WARNING)
    logger.debug('debug logging active')
    # add menu item for quick enable/disable
    Menu.alloc().initWithApp_(self).inject()
    # check update interval (falls back to 0 when the key is unset)
    self.check_update_interval = self.prefs.int["QuoteFixCheckUpdateInterval"] or 0
    # check if we're running in a different Mail version as before
    self.check_version()
def __init__(self, version, updater):
    """Initialise the MailTrack plugin state.

    Args:
        version: plugin version, kept for the Mail-version check.
        updater: update-checker object used by check_version().
    """
    # set version
    self.version = version
    # set updater
    self.updater = updater
    # keep state of 'toggle key' (temporary enable/disable via keyboard)
    self.toggle_key_active = False
    # read user defaults (preferences)
    self.prefs = NSUserDefaults.standardUserDefaults()
    # register some default values for the MailTrack* preference keys
    self.prefs.registerDefaults_(
        dict(MailTrackOption=True, MailTrackEnableDebugging=False,
             MailTrackDisabled=False))
    # set log level; NOTE(review): is_debugging presumably reads a
    # preference — confirm where the property is defined
    logger.setLevel(self.is_debugging and logger.DEBUG or logger.WARNING)
    logger.debug('debug logging active')
    # add menu item for quick enable/disable
    Menu.alloc().initWithApp_(self).inject()
    # check update interval (falls back to 0 when the key is unset)
    self.check_update_interval = self.prefs.int[
        "MailTrackCheckUpdateInterval"] or 0
    # check if we're running in a different Mail version as before
    self.check_version()
def _set_log_level(self, level):
    """Set the module logger's level from a lowercase level name.

    Args:
        level: one of 'debug', 'info', 'warning', 'error', 'critical'.

    Raises:
        TypeError: if *level* is not a recognised name (TypeError is
            kept for backward compatibility with existing callers).
    """
    # Dispatch table replaces the old if/elif chain; the bare
    # `raise TypeError` now carries a message naming the bad value.
    levels = {
        'debug': DEBUG,
        'info': INFO,
        'warning': WARNING,
        'error': ERROR,
        'critical': CRITICAL,
    }
    try:
        logger.setLevel(levels[level])
    except KeyError:
        raise TypeError('unknown log level: {!r}'.format(level))
def main():
    """Entry point: parse the equation, solve it, print the solution.

    Optionally plots the reduced form when --graphic is given and the
    degree is at most 2.
    """
    args = __get_arguments()
    if args.verbose:
        logger.setLevel('DEBUG')
    parsed = parse(args.equation)
    degree, solution = solve(parsed['values'])
    reduced = __reduced_form(parsed, degree)
    __print_solution(reduced, degree, solution)
    plottable = args.graphic and degree <= 2
    if plottable:
        graph(reduced, parsed['variable'], degree)
def args_parse():
    """Parse master-side command-line options and set up file logging.

    Creates the output directory if needed, rotates any existing master
    log to '<name>.bak', and attaches a FileHandler at the requested
    verbosity.

    Returns:
        tuple: (opts, args) from OptionParser.parse_args().
    """
    p = OptionParser()
    p.add_option('-e', '--expr', dest='expr', action='append', type='string',
                 help='specify expression')  # fixed help typo 'experssion'
    p.add_option('-v', '--verbose', dest='verbose', default='2',
                 help='specify verbose level')
    p.add_option('-o', '--out_dir', dest='out_dir', default='logs',
                 type='string', help='specify output directory')
    p.add_option('-l', '--logfile', dest='logfile', default='dvpy_master.log',
                 type='string', help='specify log filename')
    p.add_option('-m', '--max_agents', dest='max_agents', default=1,
                 type='int', help='specify max agent number')
    p.add_option('-s', '--suite', dest='suite', action='callback',
                 type='string', callback=add_suite, help='specify suite')
    p.add_option('-t', '--test', action='callback', dest='test',
                 type='string', callback=add_test, help='specify test')
    p.add_option('-w', '--where', action='store', dest='where',
                 type='string', help='specify test selector')
    p.add_option('-a', '--action', action='append', dest='action',
                 type='string', help='specify action')
    p.add_option('-f', '--patchfile', action='append', dest='patchfile',
                 type='string', help='specify patch file')
    p.add_option('-g', '--gcf', dest='gcf', type='string', default='local',
                 help='specify gcf')
    (opts, args) = p.parse_args()
    logger.setLevel(get_level(opts.verbose))
    master_log = path.abspath(path.join(opts.out_dir, opts.logfile))
    # exist_ok replaces the old `except Exception: pass`, which also hid
    # real failures such as permission errors.
    makedirs(path.dirname(master_log), exist_ok=True)
    # Rotate a previous run's log out of the way.
    if path.exists(master_log):
        rename(master_log, (master_log + '.bak'))
    fh = logging.FileHandler(master_log)
    fh.setFormatter(logging.Formatter(FORMAT))
    fh.setLevel(get_level(opts.verbose))
    logger.addHandler(fh)
    return (opts, args)
def main():
    """Agent entry point: parse options, set up file logging, run().

    Populates the module-level host/port/agent_id/out_dir globals from
    the command line, attaches a per-agent FileHandler (rotating any
    previous log to '.bak'), then starts the agent loop.
    """
    global host, port, agent_id, out_dir
    p = OptionParser()
    p.add_option('-m', '--host', dest='host', help='specify host name')
    p.add_option('-p', '--port', dest='port', help='specify port')
    p.add_option('-i', '--id', dest='id', help='specify id name')
    p.add_option('-o', '--out_dir', dest='out_dir',
                 help='specify output directory')
    p.add_option('-v', '--verbose', dest='verbose', default='1',
                 help='specify verbose level')
    (options, args) = p.parse_args()
    host = options.host
    port = int(options.port)
    agent_id = options.id
    out_dir = path.abspath(options.out_dir)
    logger.setLevel(get_level(options.verbose))
    agent_log = path.abspath(path.join(out_dir, 'agents', agent_id))
    # exist_ok replaces the old `except Exception: pass`, which also hid
    # real failures such as permission errors.
    os.makedirs(path.dirname(agent_log), exist_ok=True)
    # Rotate a previous run's log out of the way.
    if path.exists(agent_log):
        rename(agent_log, (agent_log + '.bak'))
    fh = logging.FileHandler(agent_log)
    fh.setFormatter(logging.Formatter(FORMAT))
    fh.setLevel(get_level(options.verbose))
    logger.addHandler(fh)
    run()
def start_agent_server(_out_q, _in_q, out_dir, verbose):
    """Start the asyncio agent server and block on its event loop.

    Binds to an OS-assigned port on this host, reports the bound
    (host, port) back through *out_q*, then runs the loop forever.

    Args:
        _out_q: queue used to publish (host, port) to the parent process.
        _in_q: queue for inbound messages; stored in the module global.
        out_dir: directory that receives 'agent_server.log'.
        verbose: verbosity value accepted by get_level().
    """
    global in_q, out_q
    in_q = _in_q
    out_q = _out_q
    logger.setLevel(get_level(verbose))
    server_log = path.abspath(path.join(out_dir, 'agent_server.log'))
    # Best-effort directory creation; all failures are ignored here.
    try:
        os.makedirs(path.dirname(server_log))
    except Exception as e:
        pass
    # Rotate a previous run's log out of the way.
    if path.exists(server_log):
        os.rename(server_log, (server_log+'.bak'))
    fh = logging.FileHandler(server_log)
    fh.setFormatter(logging.Formatter(FORMAT))
    fh.setLevel(get_level(verbose))
    logger.addHandler(fh)
    loop = asyncio.get_event_loop()
    server_host = socket.gethostname()
    server_ip = socket.gethostbyname(server_host)
    # No port is passed to create_server, so the OS picks one; it is
    # read back from the listening socket below.
    coro = loop.create_server(AgentServerProtocal, server_ip)
    server = loop.run_until_complete(coro)
    _, server_port = server.sockets[0].getsockname()
    logger.info('agent server started on {}:{}'.format(server_host, server_port))
    out_q.put((server_host, server_port))
    # Blocks here; the shutdown sequence below only runs after
    # loop.stop() is called from elsewhere.
    loop.run_forever()
    server.close()
    loop.run_until_complete(server.wait_closed())
    loop.close()
def main():
    """Agent entry point: parse options, set up file logging, run().

    Populates the module-level host/port/agent_id/out_dir globals from
    the command line, attaches a per-agent FileHandler (rotating any
    previous log to '.bak'), then starts the agent loop.
    """
    global host, port, agent_id, out_dir
    p = OptionParser()
    p.add_option('-m', '--host', dest='host', help='specify host name')
    p.add_option('-p', '--port', dest='port', help='specify port')
    p.add_option('-i', '--id', dest='id', help='specify id name')
    p.add_option('-o', '--out_dir', dest='out_dir',
                 help='specify output directory')
    p.add_option('-v', '--verbose', dest='verbose', default='1',
                 help='specify verbose level')
    (options, args) = p.parse_args()
    host = options.host
    port = int(options.port)
    agent_id = options.id
    out_dir = path.abspath(options.out_dir)
    logger.setLevel(get_level(options.verbose))
    agent_log = path.abspath(path.join(out_dir, 'agents', agent_id))
    # exist_ok replaces the old `except Exception: pass`, which also hid
    # real failures such as permission errors.
    os.makedirs(path.dirname(agent_log), exist_ok=True)
    # Rotate a previous run's log out of the way.
    if path.exists(agent_log):
        rename(agent_log, (agent_log + '.bak'))
    fh = logging.FileHandler(agent_log)
    fh.setFormatter(logging.Formatter(FORMAT))
    fh.setLevel(get_level(options.verbose))
    logger.addHandler(fh)
    run()
def changeDebugging_(self, sender):
    """Menu action: switch debug logging on/off from the sender's state."""
    debugging = sender.state()
    level = logger.DEBUG if debugging else logger.WARNING
    logger.setLevel(level)
    logger.debug('debug logging active')
def main():
    """labelme entry point: parse CLI args, build the config, launch the GUI.

    May call sys.exit() for --version, --reset-config and configuration
    errors.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', '-V', action='store_true',
                        help='show version')
    parser.add_argument('--reset-config', action='store_true',
                        help='reset qt config')
    parser.add_argument(
        '--logger-level',
        default='info',
        choices=['debug', 'info', 'warning', 'fatal', 'error'],
        help='logger level',
    )
    parser.add_argument('filename', nargs='?', help='image or label filename')
    parser.add_argument(
        '--output', '-O', '-o',
        help='output file or directory (if it ends with .json it is '
        'recognized as file, else as directory)')
    default_config_file = os.path.join(os.path.expanduser('~'),
                                       '.labelme_kairc')
    parser.add_argument(
        '--config',
        dest='config_file',
        help='config file (default: %s)' % default_config_file,
        default=default_config_file,
    )
    # config for the gui
    parser.add_argument(
        '--nodata',
        dest='store_data',
        action='store_false',
        help='stop storing image data to JSON file',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--autosave',
        dest='auto_save',
        action='store_true',
        help='auto save',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--nosortlabels',
        dest='sort_labels',
        action='store_false',
        help='stop sorting labels',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--flags',
        help='comma separated list of flags OR file containing flags',
        default=argparse.SUPPRESS,
    )
    # Raw strings avoid invalid-escape warnings for the \d sequences.
    parser.add_argument(
        '--labelflags',
        dest='label_flags',
        help=r'yaml string of label specific flags OR file containing json '
        r'string of label specific flags (ex. {person-\d+: [male, tall], '
        r'dog-\d+: [black, brown, white], .*: [occluded]})',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--labels',
        help='comma separated list of labels OR file containing labels',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--validatelabel',
        dest='validate_label',
        choices=['exact', 'instance'],
        help='label validation types',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--keep-prev',
        action='store_true',
        help='keep annotation of previous frame',
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--epsilon',
        type=float,
        help='epsilon to find nearest vertex on canvas',
        default=argparse.SUPPRESS,
    )
    args = parser.parse_args()

    if args.version:
        print('{0} {1}'.format(__appname__, __version__))
        sys.exit(0)

    logger.setLevel(getattr(logging, args.logger_level.upper()))

    # --flags/--labels accept a file (one value per line) or an inline
    # comma-separated list.
    if hasattr(args, 'flags'):
        if os.path.isfile(args.flags):
            with codecs.open(args.flags, 'r', encoding='utf-8') as f:
                args.flags = [l.strip() for l in f if l.strip()]
        else:
            args.flags = [l for l in args.flags.split(',') if l]

    if hasattr(args, 'labels'):
        if os.path.isfile(args.labels):
            with codecs.open(args.labels, 'r', encoding='utf-8') as f:
                args.labels = [l.strip() for l in f if l.strip()]
        else:
            args.labels = [l for l in args.labels.split(',') if l]

    if hasattr(args, 'label_flags'):
        # BUG FIX: yaml.load() without an explicit Loader executes
        # arbitrary tags on untrusted input and is deprecated since
        # PyYAML 5.1 — use safe_load as the newer labelme code does.
        if os.path.isfile(args.label_flags):
            with codecs.open(args.label_flags, 'r', encoding='utf-8') as f:
                args.label_flags = yaml.safe_load(f)
        else:
            args.label_flags = yaml.safe_load(args.label_flags)

    # Remaining parsed options (SUPPRESS keeps unset ones out of
    # args.__dict__) are merged with the config file by get_config().
    config_from_args = args.__dict__
    config_from_args.pop('version')
    reset_config = config_from_args.pop('reset_config')
    filename = config_from_args.pop('filename')
    output = config_from_args.pop('output')
    config_file = config_from_args.pop('config_file')
    config = get_config(config_from_args, config_file)

    if not config['labels'] and config['validate_label']:
        logger.error('--labels must be specified with --validatelabel or '
                     'validate_label: true in the config file '
                     '(ex. ~/.labelme_kairc).')
        sys.exit(1)

    output_file = None
    output_dir = None
    if output is not None:
        # A '.json' suffix selects single-file output; anything else is
        # treated as an output directory.
        if output.endswith('.json'):
            output_file = output
        else:
            output_dir = output

    app = QtWidgets.QApplication(sys.argv)
    app.setApplicationName(__appname__)
    app.setWindowIcon(newIcon('icon'))
    win = MainWindow(
        config=config,
        filename=filename,
        output_file=output_file,
        output_dir=output_dir,
    )

    if reset_config:
        logger.info('Resetting Qt config: %s' % win.settings.fileName())
        win.settings.clear()
        sys.exit(0)

    win.show()
    win.raise_()
    sys.exit(app.exec_())
if __name__ == "__main__": parser = argparse.ArgumentParser( description="Break cipher texts encrypted " + "with the affine cipher algorithm") parser.add_argument("path", help="File path") parser.add_argument("-v", action="store_true", help="[v]erbose mode") parser.add_argument("--limit", action="store", type=int, default=10, help="Number of characters to try. 1 <= limit <= 26") args = parser.parse_args() if args.v: logger.setLevel(logging.INFO) else: logger.setLevel(logging.WARNING) limit = args.limit path = args.path if not (0 < limit <= 26): print("error: limit must be an integer between 1 and 26.\n") parser.print_help() exit() try: with open(path, "r") as f: cipher_text = f.read()
def main():
    """labelme entry point: parse CLI args, build the config, launch the GUI.

    May call sys.exit() for --version, --reset-config and configuration
    errors.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--version", "-V", action="store_true",
                        help="show version")
    parser.add_argument("--reset-config", action="store_true",
                        help="reset qt config")
    parser.add_argument(
        "--logger-level",
        default="info",
        choices=["debug", "info", "warning", "fatal", "error"],
        help="logger level",
    )
    parser.add_argument("filename", nargs="?", help="image or label filename")
    parser.add_argument(
        "--output",
        "-O",
        "-o",
        help="output file or directory (if it ends with .json it is "
        "recognized as file, else as directory)",
    )
    default_config_file = os.path.join(os.path.expanduser("~"), ".labelmerc")
    parser.add_argument(
        "--config",
        dest="config",
        help="config file or yaml-format string (default: {})".format(
            default_config_file),
        default=default_config_file,
    )
    # config for the gui
    parser.add_argument(
        "--nodata",
        dest="store_data",
        action="store_false",
        help="stop storing image data to JSON file",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--autosave",
        dest="auto_save",
        action="store_true",
        help="auto save",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--nosortlabels",
        dest="sort_labels",
        action="store_false",
        help="stop sorting labels",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--flags",
        help="comma separated list of flags OR file containing flags",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--labelflags",
        dest="label_flags",
        help=r"yaml string of label specific flags OR file containing json "
        r"string of label specific flags (ex. {person-\d+: [male, tall], "
        r"dog-\d+: [black, brown, white], .*: [occluded]})",  # NOQA
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--labels",
        help="comma separated list of labels OR file containing labels",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--validatelabel",
        dest="validate_label",
        choices=["exact"],
        help="label validation types",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--keep-prev",
        action="store_true",
        help="keep annotation of previous frame",
        default=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--epsilon",
        type=float,
        help="epsilon to find nearest vertex on canvas",
        default=argparse.SUPPRESS,
    )
    args = parser.parse_args()

    if args.version:
        print("{0} {1}".format(__appname__, __version__))
        sys.exit(0)

    logger.setLevel(getattr(logging, args.logger_level.upper()))

    # --flags/--labels accept a file (one value per line) or an inline
    # comma-separated list.
    if hasattr(args, "flags"):
        if os.path.isfile(args.flags):
            with codecs.open(args.flags, "r", encoding="utf-8") as f:
                args.flags = [line.strip() for line in f if line.strip()]
        else:
            args.flags = [line for line in args.flags.split(",") if line]

    if hasattr(args, "labels"):
        if os.path.isfile(args.labels):
            with codecs.open(args.labels, "r", encoding="utf-8") as f:
                args.labels = [line.strip() for line in f if line.strip()]
        else:
            args.labels = [line for line in args.labels.split(",") if line]

    # --labelflags accepts a YAML file or an inline YAML string.
    if hasattr(args, "label_flags"):
        if os.path.isfile(args.label_flags):
            with codecs.open(args.label_flags, "r", encoding="utf-8") as f:
                args.label_flags = yaml.safe_load(f)
        else:
            args.label_flags = yaml.safe_load(args.label_flags)

    # Remaining parsed options (SUPPRESS keeps unset ones out of
    # args.__dict__) are merged with the config by get_config().
    config_from_args = args.__dict__
    config_from_args.pop("version")
    reset_config = config_from_args.pop("reset_config")
    filename = config_from_args.pop("filename")
    output = config_from_args.pop("output")
    config_file_or_yaml = config_from_args.pop("config")
    config = get_config(config_file_or_yaml, config_from_args)

    if not config["labels"] and config["validate_label"]:
        logger.error("--labels must be specified with --validatelabel or "
                     "validate_label: true in the config file "
                     "(ex. ~/.labelmerc).")
        sys.exit(1)

    output_file = None
    output_dir = None
    if output is not None:
        # A '.json' suffix selects single-file output; anything else is
        # treated as an output directory.
        if output.endswith(".json"):
            output_file = output
        else:
            output_dir = output

    # Install a locale-specific translator from the bundled catalogue.
    translator = QtCore.QTranslator()
    translator.load(
        QtCore.QLocale.system().name(),
        osp.dirname(osp.abspath(__file__)) + "/translate",
    )
    app = QtWidgets.QApplication(sys.argv)
    app.setApplicationName(__appname__)
    app.setWindowIcon(newIcon("icon"))
    app.installTranslator(translator)
    win = MainWindow(
        config=config,
        filename=filename,
        output_file=output_file,
        output_dir=output_dir,
    )

    if reset_config:
        logger.info("Resetting Qt config: %s" % win.settings.fileName())
        win.settings.clear()
        sys.exit(0)

    win.show()
    win.raise_()
    sys.exit(app.exec_())
parser.add_argument('-i', '--incremental', required=False, help="Incremental backup.", action="store_true") parser.add_argument('-v', '--verbose', required=False, help="Increase verbosity backup.", action="store_true") args = parser.parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) logger.setLevel(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) logger.setLevel(level=logging.INFO) incremental_backup = False if args.incremental: incremental_backup = True init_params(PARAMS) if str2bool(PARAMS['SEND_MAIL']): init_mail(fromaddr=PARAMS['SMTP_FROM'], password=PARAMS['SMTP_PWD'], toaddrs=PARAMS['SMTP_TO'], subject="DPV Cloud Backup",
Fore.WHITE + "Config created! It is named config.json. Edit it accordingly and start the server again!" ) exit() else: #config check and updater AllGood = True NeedSet = [] for key in list(DefaultConfig.keys()): if key not in list(UserConfig.keys()): AllGood = False NeedSet.append(key) if AllGood: # setup logging logger.setLevel("INFO") logger.info(Fore.GREEN + "Configuration loaded successfully! Loading..." + Fore.RESET) else: #fixes config logger.info(Fore.BLUE + "Updating config..." + Fore.RESET) for Key in NeedSet: UserConfig[Key] = DefaultConfig[Key] logger.info( Fore.BLUE + f"Option {Key} added to config. Set default to '{DefaultConfig[Key]}'." + Fore.RESET) logger.info( Fore.GREEN + "Config updated! Please edit the new values to your liking." +
def __init__(self, workload_settings, target_settings, shutdown_event=None):
    """Set up key and document generators for a workload run.

    Args:
        workload_settings: workload description (sizes, doc_gen, ...).
        target_settings: target bucket/node description.
        shutdown_event: optional event used to stop the worker early.
    """
    self.ws = workload_settings
    self.ts = target_settings
    self.shutdown_event = shutdown_event
    logger.setLevel(logging.INFO)

    self.existing_keys = ExistingKey(self.ws.working_set,
                                     self.ws.working_set_access,
                                     self.ts.prefix)
    self.new_keys = NewKey(self.ts.prefix, self.ws.expiration)
    self.keys_for_removal = KeyForRemoval(self.ts.prefix)

    # Select the document generator according to ws.doc_gen.
    if not hasattr(self.ws, 'doc_gen') or self.ws.doc_gen == 'old':
        extra_fields = False
        # BUG FIX: settings are attribute-based everywhere else in this
        # method (self.ws.size, hasattr(self.ws, ...)); the old subscript
        # self.ws['extra_doc_fields'] raised TypeError at runtime.
        if (hasattr(self.ws, 'extra_doc_fields') and
                self.ws.extra_doc_fields == 'yes'):
            extra_fields = True
        self.docs = NewDocument(self.ws.size, extra_fields)
    elif self.ws.doc_gen == 'new':
        self.docs = NewNestedDocument(self.ws.size)
    elif self.ws.doc_gen == 'merge':
        # n1ql targets use deterministic (non-random) documents.
        isRandom = self.ts.prefix != 'n1ql'
        self.docs = MergeDocument(self.ws.size,
                                  self.ws.doc_partitions,
                                  isRandom)
    elif self.ws.doc_gen == 'reverse_lookup':
        isRandom = self.ts.prefix != 'n1ql'
        self.docs = ReverseLookupDocument(self.ws.size,
                                          self.ws.doc_partitions,
                                          isRandom)
    elif self.ws.doc_gen == 'reverse_lookup_array_indexing':
        isRandom = self.ts.prefix != 'n1ql'
        if self.ws.updates:
            # plus 10 to all values in array when updating doc
            self.docs = ReverseLookupDocumentArrayIndexing(
                self.ws.size, self.ws.doc_partitions, self.ws.items,
                delta=random.randint(0, 10))
        else:
            self.docs = ReverseLookupDocumentArrayIndexing(
                self.ws.size, self.ws.doc_partitions, self.ws.items)
    elif self.ws.doc_gen == 'spatial':
        self.docs = NewDocumentFromSpatialFile(
            self.ws.spatial.data,
            self.ws.spatial.dimensionality)
    elif self.ws.doc_gen == 'large_subdoc':
        self.docs = NewLargeDocument(self.ws.size)

    self.next_report = 0.05  # report after every 5% of completion

    host, port = self.ts.node.split(':')
    # Only FTS uses proxyPort and authless bucket right now.
    # Instead of jumping hoops to specify proxyPort in target
    # iterator/settings, which only passes down very specific attributes,
    # just detect fts instead. The following does not work with
    # authless bucket. FTS's worker does its own Couchbase.connect
    if not (hasattr(self.ws, "fts") and hasattr(
            self.ws.fts, "doc_database_url")):
        # default sasl bucket
        self.init_db({'bucket': self.ts.bucket, 'host': host, 'port': port,
                      'username': self.ts.bucket,
                      'password': self.ts.password})
    self.fallingBehindCount = 0
def visualise_k_hop_graph(target_word: str,
                          checkpoint: Optional[Union[str, Path]] = None,
                          weights_filepath: Optional[Union[str, Path]] = None,
                          vocab_filepath: Optional[Union[str, Path]] = None,
                          k: Optional[int] = 2,
                          alpha: Optional[float] = None,
                          min_node_size: Optional[float] = 20,
                          max_node_size: Optional[float] = 120,
                          min_font_size: Optional[float] = 6,
                          max_font_size: Optional[float] = 24,
                          node_alpha: Optional[float] = 1,
                          edge_alpha: Optional[float] = 0.15,
                          target_word_label_colour: Optional[str] = 'black',
                          colour_map: Optional[str] = 'tab20c',
                          output_path: Optional[Union[str, Path]] = None,
                          figure_width: Optional[int] = 800,
                          figure_height: Optional[int] = 600,
                          figure_dpi: Optional[int] = 96,
                          export_dpi: Optional[int] = 96,
                          verbose: Optional[bool] = False) -> None:
    """Visualise the k-hop graph for the given word embeddings and interest word.

    Requires one of checkpoint / (weights_filepath and vocab_filepath).
    If output_path is specified, then no preview window is drawn.
    """
    # BUG FIX: require either a checkpoint or BOTH weight/vocab files.
    # The old check only caught the all-None case, so supplying exactly
    # one of the two files crashed later in Path(None).
    if checkpoint is None and (weights_filepath is None
                               or vocab_filepath is None):
        logger.error(
            'One of checkpoint / (weights-filepath and vocab-filepath) is required!'
        )
        exit(1)

    # A checkpoint directory implies the standard weight/vocab filenames.
    if checkpoint is not None:
        checkpoint = Path(checkpoint)
        weights_filepath = checkpoint / 'proj_weights.npy'
        vocab_filepath = checkpoint / 'vocab.txt'
    else:
        weights_filepath = Path(weights_filepath)
        vocab_filepath = Path(vocab_filepath)

    # Quiet mode: only errors unless the caller asked for verbosity.
    if not verbose:
        logger.setLevel(logging.ERROR)

    embeddings = WordEmbeddings(weights_filepath, vocab_filepath,
                                name_metadata=weights_filepath.parent.stem)

    # Figure geometry is specified in pixels; convert to inches for mpl.
    figsize = (figure_width / figure_dpi, figure_height / figure_dpi)
    plt.figure(figsize=figsize, dpi=figure_dpi)
    draw_k_hop_graph(embeddings, target_word, k,
                     alpha=alpha,
                     min_node_size=min_node_size,
                     max_node_size=max_node_size,
                     min_font_size=min_font_size,
                     max_font_size=max_font_size,
                     node_alpha=node_alpha,
                     edge_alpha=edge_alpha,
                     target_word_label_colour=target_word_label_colour,
                     community_colour_map=colour_map)

    # Show the plot, or output it, depending on the mode.
    plt.axis('off')
    if not output_path:
        plt.show()
    else:
        output_path = Path(output_path)
        output_format = (output_path.suffix or 'png').replace('.', '')
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if output_format == 'tex' or output_format == 'latex':
            tikzplotlib.save(output_path)
        else:
            plt.savefig(output_path, dpi=export_dpi)
        logger.info('Exported figure to {}'.format(output_path))
def args_parse():
    """Parse master-side command-line options and set up file logging.

    Creates the output directory if needed, rotates any existing master
    log to '<name>.bak', and attaches a FileHandler at the requested
    verbosity.

    Returns:
        tuple: (opts, args) from OptionParser.parse_args().
    """
    p = OptionParser()
    p.add_option('-e', '--expr', dest='expr', action='append', type='string',
                 help='specify expression')  # fixed help typo 'experssion'
    p.add_option('-v', '--verbose', dest='verbose', default='2',
                 help='specify verbose level')
    p.add_option('-o', '--out_dir', dest='out_dir', default='logs',
                 type='string', help='specify output directory')
    p.add_option('-l', '--logfile', dest='logfile', default='dvpy_master.log',
                 type='string', help='specify log filename')
    p.add_option('-m', '--max_agents', dest='max_agents', default=1,
                 type='int', help='specify max agent number')
    p.add_option('-s', '--suite', dest='suite', action='callback',
                 type='string', callback=add_suite, help='specify suite')
    p.add_option('-t', '--test', action='callback', dest='test',
                 type='string', callback=add_test, help='specify test')
    p.add_option('-w', '--where', action='store', dest='where',
                 type='string', help='specify test selector')
    p.add_option('-a', '--action', action='append', dest='action',
                 type='string', help='specify action')
    p.add_option('-f', '--patchfile', action='append', dest='patchfile',
                 type='string', help='specify patch file')
    p.add_option('-g', '--gcf', dest='gcf', type='string', default='local',
                 help='specify gcf')
    (opts, args) = p.parse_args()
    logger.setLevel(get_level(opts.verbose))
    master_log = path.abspath(path.join(opts.out_dir, opts.logfile))
    # exist_ok replaces the old `except Exception: pass`, which also hid
    # real failures such as permission errors.
    makedirs(path.dirname(master_log), exist_ok=True)
    # Rotate a previous run's log out of the way.
    if path.exists(master_log):
        rename(master_log, (master_log + '.bak'))
    fh = logging.FileHandler(master_log)
    fh.setFormatter(logging.Formatter(FORMAT))
    fh.setLevel(get_level(opts.verbose))
    logger.addHandler(fh)
    return (opts, args)
def start_transport_server(handlers=[]):
    """Register this node in the DB and serve the transport port forever.

    NOTE(review): the mutable default `handlers=[]` is shared across
    calls; safe only if callers never mutate it — confirm before reuse.

    Args:
        handlers: iterable of (regex, handler) pairs; plain-string
            patterns are compiled before registration.
    """
    global current_node
    global request_handlers
    global stream_server
    db.init(db_name=config.DB_NAME , user_name=config.DB_USER_NAME, password=config.DB_PASSWORD, host=config.DB_HOST, namespace=config.DB_NAMESPACE)
    import argparse
    parser = argparse.ArgumentParser(description='process arguments')
    parser.add_argument('--host_address', help='host name specifically')
    parser.add_argument('--port', help='host port specifically')
    parser.add_argument('--force', help='force use same node config')
    parser.add_argument('--proxy_80_port', help='proxy server to connect to this server')
    parser.add_argument('--log_level', help='log level , debug or error or info')
    parser.add_argument('--num_max_connections',type=int, help='Maximum connections to accept' , default=1400)
    args = parser.parse_args()
    # Debug level additionally enables a timed rotating file log per port.
    if(args.log_level=='debug'):
        logger.setLevel(logging.DEBUG)
        log_handler.setLevel(logging.DEBUG)
        init_timed_rotating_log("logs/logs_"+args.port+".log", logging.DEBUG)
    if(not args.port or not args.host_address):
        logger.debug("port and host name needed")
        return
    # Refuse to reuse an existing node config unless --force is given.
    node_id = db.node_config_exists(args.host_address, args.port)
    if(node_id and not args.force):
        logger.error("Node config exists in db")
        return
    node_id = node_id or db.create_node(None, args.host_address, None, args.port)
    db.update_node_info(node_id, proxy_80_port= args.proxy_80_port , num_connections=0, num_max_connections=args.num_max_connections)
    current_node = util_funcs.from_kwargs(Node, **db.get_node_by_id(node_id))
    ## clear all connections to the node from db
    db.clear_connections_to_node_from_db(node_id)
    thread = gevent.spawn(current_node.send_heartbeat)  # loop forever and send heartbeat every 10 minutes
    refresh_stats = gevent.spawn(current_node.refresh_stats)  # background loop; presumably refreshes node stats — confirm
    db_periodic_flush = gevent.spawn(db.do_in_background)  # background loop; presumably flushes pending DB work — confirm
    # Register handlers, compiling plain-string patterns; sort so the
    # matching order is deterministic.
    for regex, handler in handlers:
        if(isinstance(regex, str)):
            regex = re.compile(regex)
        request_handlers.append((regex, handler))
    request_handlers.sort(key = lambda x:x[0] , reverse=True)
    # Blocks here serving connections until the process is stopped.
    stream_server = StreamServer( ('', int(args.port)), handle_connection)
    stream_server.serve_forever()
#!/usr/bin/env python from PyQt4.QtCore import * from PyQt4.QtGui import * from PyKDE4.kdecore import * from PyKDE4.kdeui import * from PyKDE4.plasma import Plasma from PyKDE4 import plasmascript from kloud_config import * import logging from logger import logger as log log.setLevel(logging.DEBUG) class KloudConfig(QWidget, Ui_Kloud): def __init__(self, parent): QWidget.__init__(self, parent) self.setupUi(self) # self.connect(self.localTimeZone, SIGNAL("stateChanged(int)"), self, SLOT("slotLocalTimeZoneToggled(int)")) class AWSPlasmoid(plasmascript.Applet): def __init__(self, parent, args=None): plasmascript.Applet.__init__(self, parent) def init(self): self.dialog = None self.setHasConfigurationInterface(True) self.setAspectRatioMode(Plasma.Square)
from logger import logger
from os.path import join
from os.path import exists


class Config:
    """Configuration manager for the TOML config file."""

    # Default config location: ./config.toml
    filename = "config.toml"
    filepath = join(".", filename)

    def read(self, path=None):
        """Read and parse the config file; return {} when it is absent."""
        target = self.filepath if path is None else path
        if not exists(target):
            return {}
        with open(target) as conffile:
            raw = conffile.read()
        parsed = toml.loads(raw)
        logger.debug(parsed)
        return parsed


# Module-level singleton used by importers.
config = Config()

if __name__ == '__main__':
    logger.setLevel(logging.DEBUG)
    config.read()
parser = argparse.ArgumentParser(description='~ Dark Magic 4 Logs ~') # Global arguments group = parser.add_mutually_exclusive_group(required=False) group.add_argument('--logs', type=str, nargs=2, help="""log_path1,log_path2 backend1[,backend2...] The backend should be in %s""" %str(dm4l.get_available_handlers())) group.add_argument('--file', type=str, default='./monitors.conf', help="Reads: log_path<space>backend\\nlog_path... from here") group.add_argument('--path', type=str, default=None, nargs=2, help="Reads all logs in path. Ex. --path ./*.log") parser.add_argument('--safe', action='store_false', help="Ignore erroneous logs") parser.add_argument('--silent', action='store_true', help='Do not show warnings') parser.add_argument('--refresh', type=int, default=0, help="Seconds to refresh data. 0 = run once.") parser.add_argument('plug', type=str, nargs='+', help="list plugins to activate") args = parser.parse_args() if args.silent: logger.setLevel(logging.FATAL) else: logger.setLevel(logging.INFO) with open(os.path.join(os.path.dirname(__file__), 'logo.txt'),'r') as infile: logger.info(infile.read()) [h.flush() for h in logger.handlers] dm4l = DM4L() dm4l.set_safe(True) if args.logs is not None: args.logs[0] = args.logs[0].split(',') args.logs[1] = args.logs[1].split(',') if len(args.logs[1]) == 1 and len(args.logs[1]) < len(args.logs[0]): args.logs[1] *= len(args.logs[0]) assert(len(args.logs[1]) == len(args.logs[0])) dm4l.set_input(DM4L.FROM_LIST, args.logs)