def __init__(self, config, verbose=False, debug=False):
    super(James, self).__init__(config, verbose=verbose)
    self.version = VERSION
    self.debug = debug

    # Bot state and logger.
    self.state = ServerState(self)
    self.config = config
    self.style = Styler()
    self.defaultcolor = self.style.gray
    self.hicolor = self.style.pink

    # Event stuff.
    self.state.events.update({list(i.keys())[0]: Event(list(i.values())[0])
                              for i in utils.events.Standard})
    # Use a context manager so the config file handle is not leaked.
    with open("apikeys.conf") as f:
        self.state.apikeys = json.load(f)
    self.state.data = {"autojoin_channels": []}
    self.state.data["autojoin_channels"].extend(self.config["autojoin"])
    for entry in self.config["admins"]:
        self.state.admins.add(entry.lower())
    self.state.nick = self.config["nick"]
    for user in self.config["muted"]:
        self.state.mute(user)

    # Various things.
    self.cmdhandler = CommandHandler(self, plugins.get_plugins())
    self.cmd_thread = HandlerThread(self)
    self.cmd_thread.daemon = True
    self.state.messages[self.state.nick] = deque([], MAX_MESSAGE_STORAGE)
    self.register_callbacks()
def __init__(self, server, port, nick, channels, realname,
             no_message_logging=None):
    # Avoid a mutable default argument; fall back to an empty list.
    no_message_logging = no_message_logging or []
    log.debug("Connecting to %s:%s ...", server, port)
    IRCClient.__init__(self, server, port, no_message_logging)
    self.nickname = nick
    self.channels = channels
    self.trigger_once_commands = []
    self.shutdown_trigger_once = False

    # Set nickname and user after the MOTD.
    self.user(self.nickname, "hostname", "servername", ":" + realname)
    self.nick(self.nickname)

    # Instantiate all enabled plugins.
    self.plugins = []
    for plugin in get_plugins():
        if plugin.ENABLED:
            log.debug("Adding enabled plugin '%s'...", plugin)
            self.plugins.append(plugin(self))
            self.plugins[-1].on_init()
        else:
            log.debug("Skipping disabled plugin '%s'...", plugin)
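# A minimal sketch (not from the original codebase) of the plugin shape the
# loader above expects: an ENABLED class attribute, a constructor taking the
# client, and an on_init() hook. EchoPlugin is a made-up name.
class EchoPlugin(object):
    ENABLED = True

    def __init__(self, client):
        # Keep a reference to the IRC client so hooks can reply later.
        self.client = client

    def on_init(self):
        # Called once, right after the plugin is instantiated.
        pass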
def announce_user_change(self, user, allowme=False, what=None):
    """Report a modified user to plugins and subsystems."""
    if not allowme and user == self.myself:
        return
    if self.community_gui is not None:
        self.community_gui.user_changes(user, what)
    for plugin in get_plugins():
        plugin.user_changes(user, what)
def backup_account(url='', org='', key='', account='', backupdir='', **kwargs):
    # Create the directory structure.
    backup_dir = create_dir(os.getcwd(), backupdir)
    org_dir = create_dir(backup_dir, org)
    account_dir = create_dir(org_dir, account)

    # Back up agents, dropping volatile keys that change on every export.
    agent_dir = create_dir(account_dir, 'agents')
    for agent_json in agents.get_agents(url=url, org=org, account=account, key=key):
        agent_path = os.path.join(agent_dir, str(agent_json['name']) + '.json')
        remove_keys = ['presence_state', 'created', 'modified', 'heartbeat']
        for k in remove_keys:
            if k in agent_json:
                del agent_json[k]
        with open(agent_path, 'w') as f:
            f.write(json.dumps(agent_json, indent=4))

    # Back up dashboards.
    dashboard_dir = create_dir(account_dir, 'dashboards')
    for dashboard in dashboards.get_dashboards(url=url, org=org, account=account, key=key):
        dashboard_path = os.path.join(dashboard_dir, str(dashboard['name']) + '.yaml')
        with open(dashboard_path, 'w') as f:
            f.write(yaml.safe_dump(dashboard, default_flow_style=False,
                                   explicit_start=True))

    # Back up plugins.
    plugin_dir = create_dir(account_dir, 'plugins')
    for plugin in plugins.get_plugins(url=url, org=org, account=account, key=key):
        plugin_path = os.path.join(plugin_dir,
                                   str(plugin['name']) + '.' + str(plugin['extension']))
        with open(plugin_path, 'w') as f:
            f.write(plugins.export_plugin(plugin=plugin['name'], url=url, org=org,
                                          account=account, key=key))

    # Back up rules, stripping the transient action status field.
    rule_dir = create_dir(account_dir, 'rules')
    for rule in rules.get_rules(url=url, org=org, account=account, key=key):
        rule_path = os.path.join(rule_dir, str(rule['name']) + '.yaml')
        with open(rule_path, 'w') as f:
            rule_content = yaml.safe_load(rules.export_rule(rule=rule['id'], url=url,
                                                            org=org, account=account,
                                                            key=key))
            if rule_content['actions']:
                for action in rule_content['actions']:
                    try:
                        del action['details']['status']
                    except KeyError:
                        continue
            f.write(yaml.safe_dump(rule_content, default_flow_style=False,
                                   explicit_start=True))

    # Back up links.
    link_dir = create_dir(account_dir, 'links')
    for link in links.get_links(url=url, org=org, account=account, key=key):
        link_path = os.path.join(link_dir, link['id'] + '.json')
        link_json = links.export_link(link_id=link['id'], url=url, org=org,
                                      account=account, key=key)
        with open(link_path, 'w') as f:
            f.write(json.dumps(link_json, indent=4))
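# create_dir() is used above but not shown. A plausible sketch, assuming it
# joins the two path components, creates the directory if missing, and
# returns the resulting path:
import os


def create_dir(parent, name):
    path = os.path.join(parent, name)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path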
def _install_plugin_actions(self):
    """
    Tests whether plug-ins provide the method 'export_actions' and, if so,
    attaches the functions they provide to this instance (provided the
    exported function's required controller class matches the current
    instance's class).
    """
    for p in plugins.get_plugins().values():
        if callable(getattr(p, 'export_actions', None)):
            exported = p.export_actions()
            if self.__class__ in exported:
                for action in exported[self.__class__]:
                    if not hasattr(self, action.__name__):
                        setattr(self, action.__name__,
                                types.MethodType(action, self))
                    else:
                        raise Exception(
                            'Plugins cannot overwrite existing action methods (%s.%s)' % (
                                self.__class__.__name__, action.__name__))
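# A hedged example (not from the original sources) of a plug-in compatible
# with _install_plugin_actions(): it exposes export_actions() returning a
# mapping from controller class to a list of plain functions, which then get
# bound as methods. The names reload_config and Controller are made up.
def reload_config(self, request):
    # 'self' becomes the controller instance once the function is bound.
    return {'reloaded': True}


def export_actions():
    from controllers import Controller  # hypothetical controller class
    return {Controller: [reload_config]}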
def announce_user(self, user):
    """Report a new user to plugins and subsystems."""
    if user == self.myself:
        self.notify.notify('Announce bug, not announcing myself')
        return
    self.activeusers[user] = None
    if get_debug_mode() or user.get('friend'):
        appearsmsg = 'appears'
        hops = user.get('hops')
        if hops is not None:
            appearsmsg += ' at %d hops distance' % hops
        self.notify.user_notify(user, appearsmsg)
    if self.community_gui is not None:
        self.community_gui.user_appears(user)
    for plugin in get_plugins():
        plugin.user_appears(user)
def denounce_user(self, user):
    try:
        self.activeusers.pop(user)
    except KeyError:
        # We got a false bye-bye message.
        return
    if not user.is_present():
        return
    if get_debug_mode():
        self.notify.user_notify(user, 'disappears')
    user.set('ip', None)
    user.set('port', None)
    self.depart_user(user)
    if self.community_gui is not None:
        self.community_gui.user_disappears(user)
    for plugin in get_plugins():
        plugin.user_disappears(user)
    if user.dirty:
        self.save_user(user)
def __init__(self, config): print("Infobot version %s" % (VERSION)) super().__init__(config, verbose=False) self.config = config self.nick = config["nick"] # Arbitrary bot data self.data = {} self.auth = None self.lock = threading.Lock() self.lock.acquire() self.events = DotDict({list(i.keys())[0]: Event(list(i.values())[0]) for i in StandardEvents}) self.cmd_thread = HandlerThread(self, self.lock) self.cmd_thread.daemon = True self.register_callbacks() self.register_plugins(plugins.get_plugins()) for item in self.config["admins"]: self.auth.addadmin(User(item[0], item[1], item[2]))
def announce_community_change(self, com):
    """Report a modified community to plugins and subsystems."""
    if self.community_gui is not None:
        self.community_gui.community_changes(com)
    for plugin in get_plugins():
        plugin.community_changes(com)
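# The announce/denounce methods above call user_appears(), user_changes(),
# user_disappears() and community_changes() on every plugin. A minimal base
# class sketch (assumed, not taken from the original sources) with no-op
# defaults that concrete plugins can override selectively:
class Plugin(object):

    def user_appears(self, user):
        pass

    def user_changes(self, user, what=None):
        pass

    def user_disappears(self, user):
        pass

    def community_changes(self, com):
        pass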
def getattr(self, path, fh=None):
    "getattr gets run all the time"
    # TODO: refactor, this is too messy.
    # Maybe some of this stuff can be moved into open?
    # Apparently, if something is read several times quickly,
    # getattr may not be called in subsequent calls. Caching?
    try:
        # First check if the path is directly accessible.
        target = self._get_path(path)
    except (KeyError, TypeError):
        try:
            # Attribute access needs special treatment.
            parent, child = path.rsplit("/", 1)
            target = self._get_path(parent)
            if isinstance(target, DeviceAttribute):
                if path in self.tmp:
                    value = self.tmp[path]
                else:
                    # Store the value in tmp so we don't have to read
                    # it again in the read method. Also, otherwise the
                    # size might be wrong.
                    if child in ("value", "w_value"):
                        data = getattr(target, child)
                        plugins = get_plugins(target.info, data)
                        try:
                            value = plugins[0].convert(data)
                        except Exception as e:
                            self.log.error("Decoding failed: %s", e)
                        # TODO: handle the case when more than one plugin
                        # matches. I guess each plugin needs to give a unique
                        # file extension or something.
                    else:
                        value = str(getattr(target, child))
                    # TODO: How about quality?
                    self.tmp[path] = value
                size = len(value)
                mode = stat.S_IFREG
                return self.make_node(mode=mode, size=size)
            # OK, what we're looking for is not in the DB. Let's check
            # if there are any pending creation operations going on.
            elif path in self.tmp:
                if self.tmp[path] == PROPERTY:
                    # This means the user is creating a property.
                    del self.tmp[path]
                elif self.tmp[path] == SERVER:
                    self.log.debug("wheee")
                    return self.make_node(mode=stat.S_IFDIR, size=0)
                # ... insert other types of pending operations ...
                return self.make_node(mode=stat.S_IFREG)
            else:
                # Nothing matched.
                raise FuseOSError(ENOENT)
        except KeyError:
            raise FuseOSError(ENOENT)

    # Properties correspond to files.
    if type(target) == DeviceProperty:
        # Use the last history date as the timestamp.
        # FIXME: potential performance issue, commented out for now.
        timestamp = parser.parse(target.history[-1].get_date())
        value = self.tmp[path] = "\n".join(target.value) + "\n"
        return self.make_node(mode=stat.S_IFREG,
                              timestamp=unix_time(timestamp),
                              size=len(value))
    # Commands are executables.
    elif isinstance(target, DeviceCommand):
        exe = self.tmp[path] = EXE.format(device=target.devicename,
                                          command=target.name)
        # The original used a decimal 755 here; the octal 0o755 is almost
        # certainly what was intended.
        return self.make_node(mode=stat.S_IFREG | 0o755, size=len(exe))
    elif isinstance(target, DeviceDict):
        # These timestamp formats are completely made up, but
        # hopefully the dateutil parser will hold together...
        timestamp = parser.parse(target.info.started_date)
        mode = stat.S_IFDIR
        if target.info and target.info.exported:
            # If the device is exported, mark the node as executable.
            mode |= stat.S_IEXEC
        return self.make_node(mode=mode, timestamp=unix_time(timestamp))
    elif isinstance(target, DeviceAttribute):
        # Set the mode according to whether the attribute is read/writable.
        mode = stat.S_IFDIR | stat.S_IREAD | stat.S_IRGRP | stat.S_IROTH
        if target.writable != PyTango.AttrWriteType.READ:
            mode |= (stat.S_IWRITE | stat.S_IWGRP | stat.S_IWOTH)
        return self.make_node(mode=mode)
    # Otherwise show it as a directory.
    else:
        return self.make_node(mode=stat.S_IFDIR, size=0)
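# The attribute branch above picks a decoder via get_plugins(target.info,
# data) and calls plugins[0].convert(data). A hedged sketch of one such
# converter; everything except the convert() method name is an assumption:
class ScalarDecoder(object):

    def convert(self, data):
        # Render the raw attribute value as file content.
        return str(data) + "\n"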
import os

from jinja2 import Environment, FileSystemLoader

from plugins import get_plugins
from notes import Note, TextNote, Notebook
from clock import ClockEvent

FILENAME = 'jotfile'


def setup_paths():
    import notes
    file = os.path.abspath(notes.__file__)
    while file and not os.path.isdir(file):
        file, ext = os.path.split(file)
    return file


if __name__ == "__main__":
    path = setup_paths()
    env = Environment(loader=FileSystemLoader(os.path.join(path, 'templates')))
    plugins = []
    for plugin in get_plugins(os.path.join(path, 'plugins')):
        print "Loaded plugin %s" % plugin.name
        plugins.append(plugin)
    try:
        notebook = Notebook.load(FILENAME)
        print "Loaded a notebook with %d notes" % len(notebook.notes)
    except Exception, e:
        print e
        import util
        title = util.ask("Enter notebook title:")
        notebook = Notebook(title)
        notebook.save(FILENAME)
    while True:
        try:
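# get_plugins(path) above takes a plugin directory. A minimal sketch of such
# a loader for this Python 2 snippet, assuming each plugin is a *.py module
# in that directory exposing a top-level `name` attribute (an assumption):
import glob
import imp
import os


def get_plugins(plugin_dir):
    for source in glob.glob(os.path.join(plugin_dir, '*.py')):
        module_name = os.path.splitext(os.path.basename(source))[0]
        yield imp.load_source(module_name, source)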
#! /usr/bin/env python
from cmd import Cmd

from mixin import MixIn, makeWithMixins, makeWithMixinsFromString
from plugins import *
from plugins import get_plugins


class Shell(Cmd):

    def do_hello(self, args):
        """Says hello. If you provide a name, it will greet you with it."""
        if len(args) == 0:
            name = 'stranger'
        else:
            name = args
        print "Hello, %s" % name

    def do_quit(self, args):
        """Quits the program."""
        # Returning True tells Cmd.cmdloop() to exit.
        return True


if __name__ == '__main__':
    plugins = get_plugins()
    SuperShell = makeWithMixins(Shell, plugins)
    shell = SuperShell()
    shell.prompt = '> '
    shell.cmdloop('Cloudmesh Shell...')
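# makeWithMixins() comes from the local mixin module and is not shown. One
# plausible implementation, assuming it simply builds a subclass with the
# plugin classes mixed in ahead of the base so their do_* commands win:
def makeWithMixins(base, mixins):
    return type(base.__name__ + 'WithMixins', tuple(mixins) + (base,), {})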
            f.write('\n100 %\n')  # to get proper calculation of total progress
        return {'message': 'arf already compiled'}
    with stderr_redirector(open(logfile, 'a')):
        corp.compile_arf(attr)
    return {'message': 'OK',
            'last_log_record': freq_calc.get_log_last_line(logfile)}


@app.task
def compile_docf(corp_id, subcorp_path, attr, logfile):
    """
    Precalculate document counts data for collocations and wordlists.
    (see freq_calc.build_arf_db)
    """
    corp = _load_corp(corp_id, subcorp_path)
    if is_compiled(corp, attr, 'docf'):
        with open(logfile, 'a') as f:
            f.write('\n100 %\n')  # to get proper calculation of total progress
        return {'message': 'docf already compiled'}
    doc_struct = corp.get_conf('DOCSTRUCTURE')
    try:
        doc = corp.get_struct(doc_struct)
        with stderr_redirector(open(logfile, 'a')):
            corp.compile_docf(attr, doc.name)
        return {'message': 'OK',
                'last_log_record': freq_calc.get_log_last_line(logfile)}
    except manatee.AttrNotFound:
        raise WorkerTaskException('Failed to compile docf: attribute %s.%s not found in %s' % (
            doc_struct, attr, corp_id))


custom_tasks = CustomTasks(plugins.get_plugins())
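# stderr_redirector() is imported elsewhere. A simplified sketch of the idea,
# assuming it only needs to capture Python-level writes to sys.stderr (the
# real implementation may also redirect the C-level file descriptor):
import sys
from contextlib import contextmanager


@contextmanager
def stderr_redirector(target):
    old_stderr = sys.stderr
    sys.stderr = target
    try:
        yield
    finally:
        sys.stderr = old_stderr
        # The call sites above pass open(logfile, 'a') without closing it,
        # so the redirector takes ownership and closes it here.
        target.close()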
def backup_account(url='', org='', key='', account='', backup_dir='', **kwargs):
    # Create the directory structure.
    backup_dir = create_dir(os.getcwd(), backup_dir)
    org_dir = create_dir(backup_dir, org)
    account_dir = create_dir(org_dir, account)

    # Back up agents, dropping volatile keys that change on every export.
    agent_dir = create_dir(account_dir, 'agents')
    for agent in agents.get_agents(url=url, org=org, account=account, key=key):
        logging.debug('Exporting JSON for agent "%s"', agent['name'])
        # Some agents can have a name like 'http://...'; encode the name
        # before using it as a file name.
        agent_path = os.path.join(agent_dir,
                                  str(urllib.quote(agent['name'], safe='')) + '.json')
        remove_keys = ['presence_state', 'created', 'modified', 'heartbeat']
        for k in remove_keys:
            if k in agent:
                del agent[k]
        with open(agent_path, 'w') as f:
            f.write(json.dumps(agent, indent=4))

    # Back up dashboards.
    dashboard_dir = create_dir(account_dir, 'dashboards')
    for dash in dashboards.get_dashboards(url=url, org=org, account=account, key=key):
        logging.debug('Exporting YAML for dashboard "%s"', dash['name'])
        dashboard_path = os.path.join(dashboard_dir, str(dash['name']) + '.yaml')
        with open(dashboard_path, 'w') as f:
            f.write(yaml.safe_dump(dash, default_flow_style=False,
                                   explicit_start=True))

    # Back up plugins.
    plugin_dir = create_dir(account_dir, 'plugins')
    for plugin in plugins.get_plugins(url=url, org=org, account=account, key=key):
        logging.debug('Exporting plugin "%s"', plugin['name'])
        plugin_path = os.path.join(plugin_dir,
                                   str(plugin['name']) + '.' + str(plugin['extension']))
        with open(plugin_path, 'w') as f:
            f.write(plugins.export_plugin(plugin=plugin['name'], url=url, org=org,
                                          account=account, key=key))

    # Back up rules, stripping the transient action status field; fall back
    # to the raw export if the YAML cannot be parsed.
    rule_dir = create_dir(account_dir, 'rules')
    for rule in rules.get_rules(url=url, org=org, account=account, key=key):
        logging.debug('Exporting YAML for rule "%s" with id %s', rule['name'], rule['id'])
        rule_path = os.path.join(rule_dir, str(rule['name']) + '.yaml')
        with open(rule_path, 'w') as f:
            rule_yaml = rules.export_rule(rule=rule['id'], url=url, org=org,
                                          account=account, key=key)
            try:
                rule_content = yaml.safe_load(rule_yaml)
                if rule_content['actions']:
                    for action in rule_content['actions']:
                        try:
                            del action['details']['status']
                        except KeyError:
                            continue
                f.write(yaml.safe_dump(rule_content, default_flow_style=False,
                                       explicit_start=True))
            except yaml.YAMLError as e:
                logging.warn('Unable to parse YAML for rule %s: %s',
                             rule['name'], e.problem)
                f.write(rule_yaml)

    # Back up links.
    link_dir = create_dir(account_dir, 'links')
    for link in links.get_links(url=url, org=org, account=account, key=key):
        logging.debug('Exporting JSON for pack "%s" with id %s',
                      link['plugin'], link['id'])
        link_path = os.path.join(link_dir, link['id'] + '.json')
        link_json = links.export_link(link_id=link['id'], url=url, org=org,
                                      account=account, key=key)
        with open(link_path, 'w') as f:
            f.write(json.dumps(link_json, indent=4))
import gc
import locale
import sys

try:
    import gevent
    import gevent.monkey
    gevent.monkey.patch_all()
except ImportError:
    err('gevent', 'python-gevent')

try:
    import chardet
except ImportError:
    err('chardet', 'python-chardet')

from plugins import *
import plugins

if __name__ == '__main__':
    locale.setlocale(locale.LC_ALL, 'ru_RU.UTF-8')
    try:
        for site in plugins.get_plugins(sys.argv[1:]):
            task = site()
            print 'Handling {0}'.format(task.task_name)
            gc.collect()
            task.handle()
    except ValueError as e:
        print e
        print 'You should run the program with the "all" key or with a subset of the following keys:'
        for name in sorted(plugins.__all__):
            print ' - ' + name
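# A hedged sketch (names inferred from the loop above) of what a site plugin
# class looks like to this runner: a task_name attribute and a handle()
# method doing the actual work. ExampleSite is a made-up name.
class ExampleSite(object):
    task_name = 'example'

    def handle(self):
        # Fetch and process the site; body omitted in this sketch.
        pass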
parser.add_argument("--amend-plugin", nargs='+', help="--amend-plugin PLUGIN_ID JSON_FILENAME SECOND_JSON_FILENAME (optional - ip restriction only)") parser.add_argument("--delete-plugin", nargs='+', help="--delete-plugin PLUGIN_ID") parser.add_argument("--list-ca-certificates", action='store_true', help="--list-ca-certificates") parser.add_argument("--add-ca-certificate", nargs='+', help="--add-ca-certificate JSON_FILENAME") parser.add_argument("--retrieve-ca-certificate", nargs='+', help="--retrieve-ca-certificate CA_CERTIFICATE_ID") args = parser.parse_args() if args.create_service_endpoint: create_service_endpoint(args.create_service_endpoint[0], args.create_service_endpoint[1]) elif args.get_service_endpoint: get_service_endpoint(args.get_service_endpoint[0], True) elif args.get_plugins: get_plugins(args.get_plugins, True) elif args.add_plugins: add_plugins(args.add_plugins[0], args.add_plugins[1], args.add_plugins[2] if len(args.add_plugins) > 2 else None, args.add_plugins[3] if len(args.add_plugins) > 3 else None, args.add_plugins[4] if len(args.add_plugins) > 4 else None) elif args.amend_plugin: amend_plugin(args.amend_plugin[0], args.amend_plugin[1], args.amend_plugin[2] if len(args.amend_plugin) > 2 else None, args.amend_plugin[3] if len(args.amend_plugin) > 3 else None, args.amend_plugin[4] if len(args.amend_plugin) > 4 else None) elif args.delete_plugin: delete_plugin(args.delete_plugin[0]) elif args.add_route_to_service: create_route_on_service(args.add_route_to_service[0], args.add_route_to_service[1]) elif args.amend_route_on_service: amend_route(args.amend_route_on_service[0], args.amend_route_on_service[1]) elif args.get_routes_on_service: