def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
             desired_capabilities=None, browser_profile=None):
    """
    Create a new driver that will issue commands using the wire protocol.

    :Args:
     - command_executor - Either a command.CommandExecutor object or a string
       that specifies the URL of a remote server to send commands to.
     - desired_capabilities - Dictionary holding predefined values for
       starting a browser
     - browser_profile - A selenium.webdriver.firefox.firefox_profile.FirefoxProfile
       object.  Only used if Firefox is requested.

    :Raises:
     - WebDriverException - if desired_capabilities is None or not a dict.
    """
    if desired_capabilities is None:
        raise WebDriverException("Desired Capabilities can't be None")
    if not isinstance(desired_capabilities, dict):
        raise WebDriverException(
            "Desired Capabilities must be a dictionary")
    self.command_executor = command_executor
    # BUG FIX: the original checked `type(...) is str or type(...) is unicode`;
    # `unicode` does not exist on Python 3 and raises NameError there.
    # isinstance covers str (and subclasses) portably.
    if isinstance(self.command_executor, str):
        self.command_executor = RemoteConnection(command_executor)
    self.session_id = None
    self.capabilities = {}
    self.error_handler = ErrorHandler()
    self.start_client()
    self.start_session(desired_capabilities, browser_profile)
def __init__(self, remote_server_addr, keep_alive=False):
    """Initialize the remote connection.

    Resolves the server hostname to an IP address up front (so later
    requests skip repeated DNS lookups) and, when requested, opens a
    persistent (keep-alive) HTTP connection.

    :Args:
     - remote_server_addr - URL of the remote server to send commands to.
     - keep_alive - If True, open one HTTP connection now and reuse it.
    """
    # Attempt to resolve the hostname and get an IP address.
    self.keep_alive = keep_alive
    self.error_handler = ErrorHandler()
    parsed_url = parse.urlparse(remote_server_addr)
    addr = ""
    if parsed_url.hostname:
        try:
            # Rebuild the URL's netloc around the resolved IP address,
            # re-attaching the port and user:password credentials when
            # they were present in the original URL.
            netloc = socket.gethostbyname(parsed_url.hostname)
            addr = netloc
            if parsed_url.port:
                netloc += ':%d' % parsed_url.port
            if parsed_url.username:
                auth = parsed_url.username
                if parsed_url.password:
                    auth += ':%s' % parsed_url.password
                netloc = '%s@%s' % (auth, netloc)
            remote_server_addr = parse.urlunparse(
                (parsed_url.scheme, netloc, parsed_url.path,
                 parsed_url.params, parsed_url.query, parsed_url.fragment))
        except socket.gaierror:
            # DNS failure is non-fatal: keep the original URL as given.
            LOGGER.info('Could not get IP address for host: %s' %
                        parsed_url.hostname)
    self._url = remote_server_addr
    if keep_alive:
        # NOTE(review): `addr` stays "" when resolution failed or the URL had
        # no hostname, which would make this connection unusable — presumably
        # keep_alive is only used with resolvable hosts; confirm.
        # `self._timeout` is presumably a class-level attribute defined
        # elsewhere — confirm before changing this block.
        self._conn = httplib.HTTPConnection(
            str(addr), str(parsed_url.port), timeout=self._timeout)
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
             desired_capabilities=None, browser_profile=None):
    """Create a new driver that will issue commands using the wire protocol.

    Args:
      command_executor - Either a command.CommandExecutor object or a string
        that specifies the URL of a remote server to send commands to.
      desired_capabilities - Dictionary holding predefined values for
        starting a browser
      browser_profile: A browser profile directory as a Base64-encoded zip
        file.  Only used if Firefox is requested.

    Raises:
      WebDriverException: if desired_capabilities is None.
    """
    if desired_capabilities is None:
        # BUG FIX: the original message began with a stray leading space.
        raise WebDriverException("Desired Capabilities can't be None")
    self.command_executor = command_executor
    # isinstance handles str subclasses; the original `type(...) is str`
    # exact-type check does not.
    if isinstance(self.command_executor, str):
        self.command_executor = RemoteConnection(command_executor)
    self.session_id = None
    self.capabilities = {}
    self.error_handler = ErrorHandler()
    self.start_client()
    self.start_session(desired_capabilities, browser_profile)
def __init__(self, command_executor, browser_name, platform, version='',
             javascript_enabled=True):
    """Create a new driver that will issue commands using the wire protocol.

    Args:
      command_executor - Either a command.CommandExecutor object or a string
        that specifies the URL of a remote server to send commands to.
      browser_name - A string indicating which browser to request a new
        session for from the remote server.  Should be one of
        {mobile safari|firefox|internet explorer|htmlunit|chrome}.
      platform - A string indicating the desired platform to request from
        the remote server.  Should be one of
        {WINDOWS|XP|VISTA|MAC|LINUX|UNIX|ANY}.
      version - A string indicating a specific browser version to request,
        or an empty string to use any available browser.  Defaults to the
        empty string.
      javascript_enabled - Whether the requested browser should support
        JavaScript.  Defaults to True.
    """
    self.command_executor = command_executor
    # isinstance handles str subclasses; `type(...) is str` does not.
    if isinstance(self.command_executor, str):
        self.command_executor = RemoteConnection(command_executor)
    self.session_id = None
    self.capabilities = {}
    self.error_handler = ErrorHandler()
    self.start_client()
    self.start_session(browser_name=browser_name,
                       platform=platform,
                       version=version,
                       javascript_enabled=javascript_enabled)
def _main(arguments):
    """Run the job and exit: status 1 if any error-level record was logged,
    0 otherwise."""
    tracker = ErrorHandler()
    run(arguments)
    exit_code = 0
    if tracker.fired:
        print(
            "A fatal error occurred, please review the log output for more information.",
            file=sys.stderr,
        )
        exit_code = 1
    sys.exit(exit_code)
def _configure_logging(log_level: str) -> "tuple[logging.Logger, ErrorHandler]":
    """Configure root logging to stdout and return the module logger plus an
    error tracker.

    Args:
        log_level: A level name or number accepted by ``logging`` (e.g. "INFO").

    Returns:
        A ``(logger, error_tracker)`` pair.  ``error_tracker.fired`` becomes
        True once any record at or above its threshold is logged.
    """
    # BUG FIX: the original annotation declared ``-> None`` even though the
    # function returns a (logger, error_tracker) tuple.
    logger = logging.getLogger(__name__)
    logging.basicConfig(
        handlers=[
            logging.StreamHandler(sys.stdout),
        ],
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        level=log_level,
    )
    # NOTE(review): the tracker is created after basicConfig so the handler
    # setup does not clobber it — presumably intentional; confirm against the
    # errorhandler package's docs.
    error_tracker = ErrorHandler()
    return logger, error_tracker
def main() -> None:
    """Program entry point: load env, parse args, run the loader, and exit
    non-zero if any error-level record was logged."""
    load_dotenv()
    # WARNING! Do not make the following call in a function decorated with
    # @catch_exceptions
    arguments = parse_main_arguments(sys.argv[1:])
    _configure_logging(arguments)
    error_tracker: ErrorHandler = ErrorHandler()
    _run_loader(arguments)
    status = 1 if error_tracker.fired else 0
    if status:
        print(
            "A fatal error occurred, please review the log output for more information.",
            file=sys.stderr,
        )
    sys.exit(status)
async def main() -> None:
    """Async entry point: load env, configure, run, then exit by the error
    tracker's state."""
    load_dotenv()
    configuration = parse_main_arguments()
    _configure_logging(configuration)
    # Important that this comes _after_ the logging configuration
    tracker = ErrorHandler()
    await run(configuration)
    if not tracker.fired:
        sys.exit(0)
    print(
        "A fatal error occurred, please review the log output for more information.",
        file=sys.stderr,
    )
    sys.exit(1)
def main() -> None:
    """CLI entry point: run the job, report any uncaught exception with a full
    stack trace, and exit non-zero if an error-level record was logged."""
    global logger
    load_dotenv()
    arguments = parse_main_arguments(sys.argv[1:])
    _configure_logging(arguments)
    error_tracker: ErrorHandler = ErrorHandler()
    try:
        run(arguments)
    except Exception as error:
        import traceback

        logger.error(error)
        # BUG FIX: the original printed `error.__traceback__`, which is only
        # the repr of a traceback object (e.g. "<traceback object at 0x...>").
        # Render the actual stack trace instead.
        traceback.print_exception(type(error), error, error.__traceback__,
                                  file=sys.stderr)
    if error_tracker.fired:
        print(
            "A fatal error occurred, please review the log output for more information.",
            file=sys.stderr,
        )
        sys.exit(1)
    sys.exit(0)
def __init__(self, bp_name, filename, url_prefix="", template_folder=""):
    """Initialise this module: register its blueprint, its error handler,
    and its role configuration."""
    # Register the blueprint for this module.
    self.set_blueprint(bp_name, filename, url_prefix, template_folder)
    self.error_handle = ErrorHandler()
    # Variables made available to every template of this module.
    self.common = {}
    # Role model — leaving required_roles empty means anyone can access
    # this module:
    #   anon       - default role for a visitor who is not logged in
    #   viewer     - default role of a logged-in user
    #   buyer      - consumer looking for ad spaces; has access to the
    #                search module (has already made a payment)
    #   subscriber - a subscribed user
    #   provider   - ad-space provider who can add media to the system
    #   admin      - has access to everything
    self.admin_role = "admin"
    self.available_roles = [
        "anon",
        "viewer",
        "buyer",
        "subscriber",
        "provider",
        self.admin_role,
    ]
    self.required_roles = []  # set in each module
def __init__(self, level=logging.ERROR,
             message='No output as errors have occurred.'):
    """Install an errorhandler.ErrorHandler that watches for log records at
    or above *level*, remembering *message* for later reporting."""
    # Imported lazily so the errorhandler package is only required when this
    # helper is actually instantiated.
    from errorhandler import ErrorHandler
    self.message = message
    self.handler = ErrorHandler(level)
def __init__(self):
    """Bind the model to the configured database; no collection or content
    is selected yet."""
    # The database name comes from the active Flask app's configuration.
    self.db_name = current_app.config["DB"]["name"]
    self.error_handle = ErrorHandler()
    # Filled in by subclasses / later calls.
    self.collection_name = ""
    self.content = None
def _main():
    """pyznap main function.  Parses arguments and calls snap/clean/send
    functions accordingly.

    Fixes applied in review: ``config == None`` → ``config is None``; the
    pidfile is now written through a context manager (the original leaked the
    file handle); help-string typos corrected ("stat sending", "coma
    separated"); dead commented-out code removed.

    Returns
    -------
    int
        Exit code: 0 on success, 1 if any error-level record was logged or
        the config could not be read.
    """
    settings = {
        "matching": None,
    }

    # ----- argument parsing -------------------------------------------------
    parser = ArgumentParser(
        prog='pyznap',
        description='ZFS snapshot tool written in python (version=' +
                    __version__ + ')')
    parser.add_argument('-q', '--quiet', action="store_true", dest="quiet",
                        help='quiet logging, only errors shown (WARNING)')
    parser.add_argument('-v', '--verbose', action="store_true", dest="verbose",
                        help='print more verbose output (DEBUG)')
    parser.add_argument('-t', '--trace', action="store_true", dest="trace",
                        help='print run tracing output (TRACE)')
    parser.add_argument('-n', '--dry-run', action="store_true", dest="dry_run",
                        help='only test run, no action taken')
    parser.add_argument('--syslog', action="store_true", dest="syslog",
                        help='add logging to syslog (INFO)')
    parser.add_argument('--logconfig', action="store_true", dest="logconfig",
                        help='add config name to log')
    parser.add_argument('--config', action="store", dest="config",
                        help='path to config file')
    parser.add_argument('-m', '--matching', action="store", dest="matching",
                        help='only process matching filesystems')
    parser.add_argument('--pidfile', action="store", dest="pidfile",
                        default=None, help='path to pid file')
    parser.add_argument('-V', '--version', action="store_true", dest="version",
                        help='print version number')

    subparsers = parser.add_subparsers(dest='command')

    parser_setup = subparsers.add_parser('setup', help='initial setup')
    parser_setup.add_argument(
        '-p', '--path', action='store', dest='path',
        help='pyznap config dir. default is {:s}'.format(CONFIG_DIR))

    parser_snap = subparsers.add_parser('snap', help='zfs snapshot tools')
    parser_snap.add_argument('--take', action="store_true",
                             help='take snapshots according to config file')
    parser_snap.add_argument(
        '--clean', action="store_true",
        help='clean old snapshots according to config file')
    parser_snap.add_argument(
        '--full', action="store_true",
        help='take snapshots then clean old according to config file')

    parser_send = subparsers.add_parser('send', help='zfs send/receive tools')
    parser_send.add_argument('-s', '--source', action="store", dest='source',
                             help='source filesystem')
    parser_send.add_argument('-d', '--dest', action="store", dest='dest',
                             help='destination filesystem')
    parser_send.add_argument('-i', '--key', action="store", dest='key',
                             help='ssh key if only source or dest is remote')
    parser_send.add_argument('-j', '--source-key', action="store",
                             dest='source_key',
                             help='ssh key for source if both are remote')
    parser_send.add_argument('-k', '--dest-key', action="store",
                             dest='dest_key',
                             help='ssh key for dest if both are remote')
    parser_send.add_argument(
        '-c', '--compress', action="store", dest='compress',
        help='compression to use for ssh transfer. default is lzop')
    parser_send.add_argument('-e', '--exclude', nargs='+', dest='exclude',
                             help='datasets to exclude')
    parser_send.add_argument('-w', '--raw', action="store_true", dest='raw',
                             help='raw zfs send. default is false')
    parser_send.add_argument('-r', '--resume', action="store_true",
                             dest='resume',
                             help='resumable send. default is false')
    parser_send.add_argument('-l', '--last', action="store_true",
                             dest='send_last_snapshot',
                             help='start sending from last snapshot')
    parser_send.add_argument(
        '--dest-auto-create', action="store_true", dest='dest_auto_create',
        help='create destination if it does not exist. default is false')
    parser_send.add_argument(
        '--retries', action="store", type=int, dest='retries', default=0,
        help='number of retries on error. default is 0')
    parser_send.add_argument(
        '--retry-interval', action="store", type=int, dest='retry_interval',
        default=10, help='interval in seconds between retries. default is 10')
    parser_send.add_argument(
        '--max-depth', action="store", type=int, dest='max_depth',
        help='define max depth for child recursion '
             '(0 no child, default infinite depth)')

    parser_fix = subparsers.add_parser(
        'fix', help='fix zfs snapshot from other format to pyznap')
    parser_fix.add_argument('-t', '--type', action="store", dest='type',
                            help='snapshot type name')
    parser_fix.add_argument(
        '-f', '--format', action="store", required=True, dest='format',
        help='snapshot format specification '
             '(regexp/@predefined[@zfs-auto-snap,@zfsnap])')
    parser_fix.add_argument('-m', '--map', action="store", dest='map',
                            help='optional type mapping (old=new:...)')
    parser_fix.add_argument('-r', '--recurse', action="store_true",
                            dest='recurse',
                            help='recurse in child filesystems')
    # TODO: time shift
    parser_fix.add_argument('filesystem', nargs='+',
                            help='filesystems to fix')

    subparsers.add_parser('full',
                          help='full cycle: snap --take / send / snap --clean')

    parser_status = subparsers.add_parser(
        'status', help='check filesystem snapshots status')
    parser_status.add_argument('--format', action="store", default='log',
                               choices=['log', 'jsonl', 'html'],
                               dest='status_format',
                               help='status output format')
    parser_status.add_argument('--all', action="store_true", dest='status_all',
                               help='show all ZFS filesystems')
    parser_status.add_argument(
        '--print-config', action="store_true", dest='print_config',
        help='only print parsed and processed config')
    parser_status.add_argument('--values', action="store", dest='values',
                               help='comma separated values to print')
    parser_status.add_argument('--filter', action="append",
                               dest='filter_values',
                               help='add filter for col=value')
    parser_status.add_argument('--exclude', action="append",
                               dest='filter_exclude',
                               help='exclude name filesystems (fnmatch)')

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    if args.version:
        print(__version__)
        sys.exit()
    if not args.command:
        print('ERROR: No command specified.\n')
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Fires once any error-level record is logged; drives the exit code.
    e = ErrorHandler()

    # ----- logging setup ----------------------------------------------------
    loglevel = logging.INFO
    if args.quiet:
        loglevel = logging.WARNING
    if args.verbose:
        loglevel = logging.DEBUG
    if args.command == 'status' and args.status_format != 'log':
        # for raw status only error show
        loglevel = logging.ERROR
    if args.trace:
        # trace override all
        logging.addLevelName(8, 'TRACE')
        loglevel = 8
    # Root level must be permissive enough for syslog's fixed INFO level.
    basicloglevel = min(loglevel, logging.INFO) if args.syslog else loglevel
    root_logger = logging.getLogger()
    root_logger.setLevel(basicloglevel)

    config_path = args.config if args.config else os.path.join(
        CONFIG_DIR, 'pyznap.conf')
    logadd = ' #' + config_path if args.logconfig else ''
    console_fmt = logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s' + logadd,
        datefmt='%b %d %H:%M:%S')
    if loglevel < logging.WARNING:
        # Below-WARNING records go to stdout; WARNING+ are handled by the
        # stderr handler added unconditionally below.
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(console_fmt)
        console_handler.addFilter(
            lambda record: record.levelno < 30
        )  # logging.WARNING make exception in destroy
        console_handler.setLevel(loglevel)
        root_logger.addHandler(console_handler)
    console_err_handler = logging.StreamHandler(sys.stderr)
    console_err_handler.setFormatter(console_fmt)
    console_err_handler.setLevel(logging.WARNING)
    root_logger.addHandler(console_err_handler)
    if args.syslog:
        # setup logging to syslog
        syslog_handler = logging.handlers.SysLogHandler(
            address='/dev/log',
            facility=logging.handlers.SysLogHandler.LOG_DAEMON)
        syslog_handler.setFormatter(
            logging.Formatter('pyznap: [%(levelname)s] %(message)s' + logadd))
        # syslog always level INFO
        syslog_handler.setLevel(logging.INFO)
        root_logger.addHandler(syslog_handler)
    logger = logging.getLogger(__name__)

    # ----- global switches --------------------------------------------------
    if args.dry_run:
        set_dry_run()
    if args.matching:
        settings['matching'] = args.matching
    if args.pidfile is not None:
        if not check_pid(args.pidfile):
            logger.info('pidfile {} exists, exiting'.format(args.pidfile))
            sys.exit(1)
        # BUG FIX: the original `open(...).write(...)` leaked the handle.
        with open(args.pidfile, "w") as pidfile:
            pidfile.write("{}\n".format(os.getpid()))

    # ----- command dispatch -------------------------------------------------
    try:
        logger.info('Starting pyznap...')
        if args.command in ('snap', 'send', 'full', 'status'):
            logger.info('Read config={}'.format(config_path))
            config = read_config(config_path)
            if config is None:
                return 1

        if args.command == 'setup':
            path = args.path if args.path else CONFIG_DIR
            create_config(path)

        elif args.command == 'full':
            take_config(config, settings)
            send_config(config, settings)
            clean_config(config, settings)

        elif args.command == 'snap':
            # Default if no args are given
            if not args.take and not args.clean:
                args.full = True
            if args.take or args.full:
                take_config(config, settings)
            if args.clean or args.full:
                clean_config(config, settings)

        elif args.command == 'send':
            if args.source and args.dest:
                # use args.key if either source or dest is remote
                source_key, dest_key = None, None
                if args.dest.startswith('ssh'):
                    dest_key = [args.key] if args.key else None
                elif args.source.startswith('ssh'):
                    source_key = args.key if args.key else None
                # if source_key and dest_key are given, overwrite previous value
                source_key = args.source_key if args.source_key else source_key
                dest_key = [args.dest_key] if args.dest_key else dest_key
                # get exclude rules
                exclude = [args.exclude] if args.exclude else None
                # check if raw send was requested
                raw = [args.raw] if args.raw else None
                # compress ssh zfs send/receive
                compress = [args.compress] if args.compress else None
                # use receive resume token
                resume = [args.resume] if args.resume else None
                # retry zfs send/receive
                retries = [args.retries] if args.retries else None
                # wait interval for retry
                retry_interval = ([args.retry_interval]
                                  if args.retry_interval else None)
                # automatically create dest dataset if it does not exist
                dest_auto_create = ([args.dest_auto_create]
                                    if args.dest_auto_create else None)
                # start send from last snapshot
                send_last_snapshot = ([args.send_last_snapshot]
                                      if args.send_last_snapshot else None)
                send_config([{
                    'name': args.source,
                    'dest': [args.dest],
                    'key': source_key,
                    'dest_keys': dest_key,
                    'compress': compress,
                    'exclude': exclude,
                    'raw_send': raw,
                    'resume': resume,
                    'dest_auto_create': dest_auto_create,
                    'retries': retries,
                    'retry_interval': retry_interval,
                    'max_depth': args.max_depth,
                    'send_last_snapshot': send_last_snapshot
                }])
            elif args.source and not args.dest:
                logger.error('Missing dest...')
            elif args.dest and not args.source:
                logger.error('Missing source...')
            else:
                send_config(config, settings)

        elif args.command == 'fix':
            tmap = args.map
            if tmap:
                tmap = dict(kw.split('=') for kw in args.map.split(':'))
            fix_snapshots(args.filesystem, format=args.format, type=args.type,
                          recurse=args.recurse, type_map=tmap)

        elif args.command == 'status':
            if args.print_config:
                print(str(config))
            else:
                filter_values = None
                if args.filter_values:
                    filter_values = {}
                    for fv in args.filter_values:
                        f, v = fv.split('=')
                        v = {'true': True, 'false': False}.get(v.lower(), v)
                        filter_values[f] = v
                status_config(
                    config,
                    output=args.status_format,
                    show_all=args.status_all,
                    values=(tuple(args.values.split(','))
                            if args.values else None),
                    filter_values=filter_values,
                    filter_exclude=args.filter_exclude,
                    settings=settings)

        zfs.STATS.log()
        logger.info('Finished successfully...')
    finally:
        if args.pidfile is not None:
            os.unlink(args.pidfile)

    return 1 if e.fired else 0
def __init__(self):
    """Set up the error-handling and caching hooks for this object."""
    # NOTE(review): cache_handle is bound to the Cache class itself, not an
    # instance — presumably deliberate (shared cache); confirm before
    # instantiating here.
    self.cache_handle = Cache
    self.error_handle = ErrorHandler()
class Plugin:
    """Base class for plugins.

    Provides settings/data persistence helpers backed by
    PluginSettingsModel / PluginDataModel, role-based access checks, and
    Jinja template rendering.  Subclasses override ``main`` (and usually
    ``plugin_home`` / ``plugin_settings``) and set ``required_roles``.

    Review fixes: mutable default arguments replaced with None sentinels;
    ``dup_fields.update`` no longer crashes when ``dup_fields`` is None;
    ``==True/==False`` and ``type(x)==dict`` replaced with idiomatic forms;
    the role check in ``render_plugin`` now runs outside the try so
    ``abort(403)`` is not swallowed into an error string.
    """

    is_active = False
    url_for = url_for
    error_handle = ErrorHandler()
    template = Environment(
        loader=FileSystemLoader(current_app.config["PLUGINS_PATH"]))
    # Role model — leaving required_roles empty means anyone can access.
    admin_role = "admin"
    available_roles = ["anon", "viewer", "buyer", "subscriber", "provider",
                       admin_role]
    required_roles = []  # set in each module

    def __init__(self):
        # Instance-level copy so plugins don't share the class-level list.
        self.required_roles = []  # set in each module

    def activate(self):
        """Activate this plugin and run its entry point."""
        if not self.is_active:
            self.is_active = True
            self.main()

    def deactivate(self):
        """Deactivate this plugin."""
        if self.is_active:
            self.is_active = False

    def main(self):
        """
        This is the entry point of plugin functions.  All plugin activities
        should be within main function.  It'll be called right after
        "activating" the plugin.  Also, after "save_settings" and
        "save data".
        """
        pass

    def reload(self):
        """
        This function will by default call the main function.  It'll be
        called after saving settings, or inserting new data.
        """
        self.main()

    def save_settings(self, settings, user_specific=True, overwrite=False):
        """Save settings specific to this plugin.

        When *overwrite* is False the new settings are merged into the
        previously stored ones; otherwise they replace them.  Returns None on
        success, or the error handler's result on failure.
        """
        try:
            PSM = PluginSettingsModel(plugin_id=self.id)
            if not overwrite:
                PSM.settings = self.get_settings()
            if PSM.settings:
                PSM.settings.update(settings)
            else:
                PSM.settings = settings
            conditions = {"plugin_id": PSM.plugin_id}
            if user_specific:
                conditions.update({"user_id": current_user.get_userid()})
            PSM.update(conditions=conditions, upsert=True,
                       overwrite=overwrite)
            self.reload()
        except Exception as e:
            return self.error_handle.get_error(
                error=str(e),
                occurred_at="mad.lib.plugin.save_settings()")

    def get_settings(self, user_specific=True):
        """Return the settings stored by the plugin, or None."""
        try:
            PSM = PluginSettingsModel(plugin_id=self.id)
            settings = None
            conditions = {"plugin_id": PSM.plugin_id}
            if user_specific:
                conditions.update({"user_id": current_user.get_userid()})
            result = PSM.get(conditions=conditions, findone=True)
            if result:
                settings = result["settings"]
            return settings
        except Exception as e:
            self.error_handle.get_error(
                error=str(e),
                occurred_at="mad.lib.plugin.get_settings()")
            return None

    def save_data(self, data=None, conditions=None, upsert=False,
                  user_specific=True, update=False, allow_duplicate=False,
                  dup_fields=None):
        """Save/insert/update data specific to this plugin.

        data       : data to be saved
        conditions : conditions, in case data is being updated
        upsert     : if data is to be upserted
        dup_fields : fields checked for duplicates when inserting
        """
        try:
            # BUG FIX: `data` defaulted to None but was unconditionally
            # updated; `conditions={}` was a shared mutable default.
            if data is None:
                data = {}
            if conditions is None:
                conditions = {}
            if user_specific:
                data.update({"user_id": current_user.get_userid()})
            PDM = PluginDataModel(plugin_id=self.id)
            PDM.data = data
            if update:
                status = PDM.update(conditions=conditions, upsert=upsert)
            else:
                # BUG FIX: dup_fields defaulted to None, so the original
                # `dup_fields.update(...)` raised AttributeError (masked by
                # the broad except).  Copy to avoid mutating the caller's dict.
                dup_fields = dict(dup_fields) if dup_fields else {}
                dup_fields.update({"plugin_id": PDM.plugin_id})
                status = PDM.save(allow_duplicate=allow_duplicate,
                                  duplicate_fields_check=dup_fields)
            self.reload()
            return status
        except Exception as e:
            return self.error_handle.get_error(
                error=str(e),
                occurred_at="mad.lib.plugin.save_data()")

    def get_data(self, conditions=None, sort_by=None, distinct=False,
                 distinct_fieldname=None, user_specific=True):
        """Retrieve data stored by the plugin matching *conditions*.

        sort_by defaults to [("_id", 1)] (ascending by id).
        """
        try:
            # BUG FIX: sort_by used a shared mutable default list.
            if sort_by is None:
                sort_by = [("_id", 1)]
            PDM = PluginDataModel(plugin_id=self.id)
            if not conditions:
                conditions = {}
            conditions.update({"plugin_id": PDM.plugin_id})
            if user_specific:
                conditions.update({"user_id": current_user.get_userid()})
            if distinct:
                return PDM.get(conditions=conditions, sort_by=sort_by,
                               distinct=True,
                               distinct_fieldname=distinct_fieldname)
            return PDM.get(conditions=conditions, sort_by=sort_by)
        except Exception as e:
            return self.error_handle.get_error(
                error=str(e),
                occurred_at="mad.lib.plugin.get_data()")

    def plugin_home(self):
        """This function should return HTML for the plugin's home page"""
        html = (
            '<h2 class="title">Plugin Home</h2>'
            '<p>Hello, this is your plugin homepage</p>'
        )
        return html

    def plugin_settings(self):
        """This function should return HTML for the plugin's settings page."""
        html = (
            '<h2 class="title">Plugin Settings</h2>'
            '<p>Display all your plugin settings here.</p>'
        )
        return html

    def public_content(self, form=None, params=None):
        """This function should return HTML for any public facing content
        the plugin wants to push"""
        return None

    def check_role(self):
        """
        Check if the current user has the required role to access this view.
        If the function returns:
        True : User has the required role/permission to access this page.
        False: User does not have required role/permission to access this
        page.

        Available roles:
        anon - not logged in user
        viewer - is the default role of a logged in user.
        provider - Ad space provider, who will have access to add their media
        to the system.
        buyer - Consumer looking for ad spaces, has access to the search
        module.
        subscriber - A subscribed user
        """
        if not self.required_roles or not self.available_roles:
            return True
        elif current_user.is_authenticated():
            current_user_role = current_user.get_role()
            if current_user_role in self.available_roles:
                if (current_user_role == self.admin_role or
                        current_user_role in self.required_roles):
                    return True
        return False

    def render_plugin(self, template_src=None, data=None):
        """Render the plugin template; aborts with 403 when the role check
        fails."""
        # BUG FIX: abort(403) raises an exception; inside the original
        # try/except it was swallowed and returned as an error string instead
        # of producing an HTTP 403 response.
        if not self.check_role():
            abort(403)
        try:
            if not data or not isinstance(data, dict):
                data = {}
            data.update({"current_user": current_user})
            data.update({"url_for": url_for})
            data.update({"plugin_id": self.id})
            args = Struct(**data)
            return self.template.get_template(template_src).render(data=args)
        except Exception as e:
            return self.error_handle.get_error(
                error=str(e),
                occurred_at="mad.lib.plugin.render_plugin()")