def get_assignment(node, key):
    """Return the value bound to *key* on a scoping tail node, or None.

    Arguments:-
        node: scoping tail node (anything without an ``assignments``
            attribute yields None immediately)
        key: the key to search for, i.e. a list of ast nodes whose tail
            node is the identifier

    The tail node of a key cannot take part in a plain set comparison, so
    a stored key matches when it differs from *key* in exactly two
    elements whose unique ids agree (the two tail nodes compared by
    value).  NOTE: this linear scan is somewhat inefficient; storing the
    tail node by its value in `set_assignment` would allow a direct
    frozenset lookup instead.
    """
    if not hasattr(node, "assignments"):
        return None
    wanted = frozenset(key)
    for stored_key, value in node.assignments.items():
        mismatch = wanted.symmetric_difference(stored_key)
        if len(mismatch) != 2:
            continue
        first, second = mismatch
        if unique_id(first) == unique_id(second):
            return value
    return None
def append(self, value): """ Adds a value to the datastructure. The value is a dependency pair, i.e. (src, dest). e.g. `y = x`, here value would be ('y', 'x'), since y is the src of the dependency (here src should be interpreted as the progenitor, since without y, x would just be and there would be no dependency. We know `y`'s context, since this method is called from y`s context. But what about x. Here, we use the symbol table to resolve `x`'s context. """ #value is a 2-tuple of dependency src (scopes) and dest (node) src, dest = value #find first entry in symbol_table that #(inclusively) contains node.lineno for i, scope in enumerate(self.symbol_table[unique_id(dest)]): #TODO: make sure the following makes sense if dest.lineno >= scope[0] and dest.lineno <= scope[1]: #check if type is a module import if scope.src_module: dest = "{}.{}".format(scope.src_module, unique_id(dest)) else: dest = "{}.{}".format(scope.scopes, unique_id(dest)) src = scopes_to_str(src) super(DTable, self).append((src, dest)) break
async def _run_interval_backups(interval):
    """Run one scheduled backup for the guild in ``interval["guild"]``.

    Closure: reads ``self`` (the cog/bot wrapper), ``semaphore``,
    ``wkr``, ``utils``, ``datetime`` and ``BackupSaver`` from the
    enclosing scope.  Replaces the creator's previous interval backup
    for this guild with a fresh snapshot.
    """
    try:
        try:
            guild = await self.bot.fetch_full_guild(interval["guild"])
        except (wkr.NotFound, wkr.Forbidden):
            #guild is gone or inaccessible: drop all of its scheduled
            #intervals so we stop retrying it
            await self.bot.db.intervals.delete_many(
                {"guild": interval["guild"]})
            return
        backup = BackupSaver(self.bot, guild)
        await backup.save()
        #keep at most one interval backup per (creator, guild):
        #delete the previous snapshot before inserting the new one
        await self.bot.db.backups.delete_one({
            "creator": interval["user"],
            "data.id": guild.id,
            "interval": True
        })
        await self.bot.db.backups.insert_one({
            "_id": utils.unique_id(),
            "creator": interval["user"],
            #naive UTC timestamp (datetime.utcnow is tz-naive)
            "timestamp": datetime.utcnow(),
            "interval": True,
            "data": backup.data
        })
    finally:
        #always release the concurrency slot, even on failure
        semaphore.release()
def process_name_node(node, scopestack, symtable):
    """Return the (src, dst) dependency pair for a Name astnode.

    src is the current scope state; dst is the resolved dependency
    target: an aliased assignment, a module-qualified pair, the bare
    node when it is itself a module, or a scope-qualified chain.
    """
    #there is a dependency from scope -> name; identifiers are
    #non-unique, so gather every symtable candidate for this name
    scope_state = scopestack.get_state()
    matches = symtable[unique_id(node)]
    #an existing assignment on the scope tail short-circuits resolution
    alias = get_assignment(scopestack.get_tail(),
                           concatenated(scopestack.get_state(), node))
    if alias:
        return scope_state, alias
    dep = resolve_scope(scope_state, matches)
    module = get_src(dep.astnode)
    if module and is_src(dep.astnode):
        #the node itself represents the module: no module prefix
        return scope_state, [node]
    if module:
        return scope_state, [module, node]
    return scope_state, concatenated(dep.scope, node)
def __init__(self, lexer):
    """Initialize the token list by exhausting *lexer*.

    Pulls tokens via ``lexer.next_token()`` until it yields None, then
    stamps the instance with a fresh id.
    """
    self._tokens = []
    token = lexer.next_token()
    while token is not None:
        self._tokens.append(token)
        token = lexer.next_token()
    # increasing unique id for each S-expression.
    self.new_id = unique_id()
def _test_new(self):
    """Smoke-test: create a user record via self.new and log the result."""
    created = self.new(ID=unique_id(), name='Donne', sex=2)
    log.info('user inited. instance:{0}'.format(created))
def symbify(nodes):
    """Return a symbolic representation of a (root, children) node tree.

    Each node is replaced by a ``(unique_id, node_type)`` pair; children
    are symbified recursively.  Falsy input (None, empty) yields None.
    """
    if not nodes:
        return None
    root, children = nodes
    label = (unique_id(root), node_type(root))
    subtrees = list(map(symbify, children))
    return [label, subtrees]
async def create(self, ctx):
    """
    Create a backup

    Get more help on the [wiki](https://wiki.xenon.bot/backups#creating-a-backup).

    __Examples__

    ```{b.prefix}backup create```
    """
    #enforce the per-user backup quota before doing any work
    backup_count = await ctx.bot.db.backups.count_documents(
        {"creator": ctx.author.id})
    if backup_count >= MAX_BACKUPS:
        raise ctx.f.ERROR(
            f"You have **exceeded the maximum count** of backups. (`{backup_count}/{MAX_BACKUPS}`)\n"
            f"You need to **delete old backups** with `{ctx.bot.prefix}backup delete <id>` or **buy "
            f"[Xenon Premium](https://www.patreon.com/merlinfuchs)** to create new backups.\n\n"
            f"*You can view your current backups by doing `{ctx.bot.prefix}backup list`.*"
        )
    #placeholder message; edited with the final embed once the backup
    #has been stored
    status_msg = await ctx.f_send("**Creating Backup** ...", f=ctx.f.WORKING)
    guild = await ctx.fetch_full_guild()
    backup = BackupSaver(ctx.client, guild)
    await backup.save()
    backup_id = utils.unique_id()
    try:
        await ctx.bot.db.backups.insert_one({
            "_id": backup_id,
            "creator": ctx.author.id,
            #naive UTC timestamp (datetime.utcnow is tz-naive)
            "timestamp": datetime.utcnow(),
            "data": backup.data
        })
    #MongoDB rejects documents over 16 MB; translate that into a
    #user-facing error instead of an unhandled exception
    except mongoerrors.DocumentTooLarge:
        raise ctx.f.ERROR(
            f"This backups **exceeds** the maximum size of **16 Megabyte**. Your server probably has a lot of "
            f"members and channels containing messages. Try to create a new backup with less messages (chatlog)."
        )
    embed = ctx.f.format(
        f"Successfully **created backup** with the id `{backup_id.upper()}`.",
        f=ctx.f.SUCCESS)["embed"]
    embed.setdefault("fields", []).append({
        "name": "Usage",
        "value": f"```{ctx.bot.prefix}backup load {backup_id.upper()}```\n"
                 f"```{ctx.bot.prefix}backup info {backup_id.upper()}```"
    })
    await ctx.client.edit_message(status_msg, embed=embed)
    #record the action in the audit log
    await ctx.bot.create_audit_log(utils.AuditLogType.BACKUP_CREATE,
                                   [ctx.guild_id], ctx.author.id)
async def _run_interval_backups(interval):
    """Create a fresh interval backup for the guild in ``interval["guild"]``.

    Silently skips guilds that can no longer be fetched.  Replaces the
    creator's previous interval backup for this guild with the new
    snapshot.  Closure over ``self``, ``utils``, ``datetime`` and
    ``BackupSaver`` from the enclosing scope.
    """
    guild = await self.bot.get_full_guild(interval["guild"])
    if guild is None:
        return

    backup = BackupSaver(self.bot, guild)
    await backup.save()

    #at most one interval backup per (creator, guild)
    selector = {
        "creator": interval["user"],
        "data.id": guild.id,
        "interval": True
    }
    await self.bot.db.backups.delete_one(selector)

    document = {
        "_id": utils.unique_id(),
        "creator": interval["user"],
        "timestamp": datetime.utcnow(),
        "interval": True,
        "data": backup.data
    }
    await self.bot.db.backups.insert_one(document)
def process_attribute_node(node, scopestack, symtable): """ Processes Attribute astnode and returns `src` and `dst` dependency pair """ #get the current scope current = scopestack.get_state() #node.value may be nested, e.g. x....z, or x()....z() or some combination thereof #therefore need to resolve it attr_chain = resolve_attr_chain(node) #assignments are stored as: scope + identifier key = concatenated(scopestack.get_state(), attr_chain[0]) assn = get_assignment(scopestack.get_tail(), key) if assn: candidates = symtable[assn] else: #resolve the node based on the current scope #only the head of the attr chain needs to be defined candidates = symtable[unique_id(attr_chain[0])] dependency = resolve_scope(current, candidates) if assn: #need to resolve the alias attr_chain[0] = dependency.astnode srcmodule = get_src(dependency.astnode) if srcmodule and is_src(dependency.astnode): dst = attr_chain elif srcmodule: #dependency originates from another module dst = precatenated(srcmodule, attr_chain) else: #dependency is intra-module dst = dependency.scope + attr_chain return current, dst
async def create(self, ctx):
    """
    Create a backup

    __Examples__

    ```{b.prefix}backup create```
    """
    #enforce the per-user backup quota before doing any work
    backup_count = await ctx.bot.db.backups.count_documents({"creator": ctx.author.id})
    if backup_count >= MAX_BACKUPS:
        raise ctx.f.ERROR(
            f"You have **exceeded the maximum count** of backups. (`{backup_count}/{MAX_BACKUPS}`)\n"
            f"You need to **delete old backups** with `{ctx.bot.prefix}backup delete <id>` or **buy "
            f"[Xenon Premium](https://www.patreon.com/merlinfuchs)** to create new backups.."
        )
    #placeholder message; edited with the final embed once the backup
    #has been stored
    status_msg = await ctx.f_send("**Creating Backup** ...", f=ctx.f.WORKING)
    guild = await ctx.get_full_guild()
    backup = BackupSaver(ctx.client, guild)
    await backup.save()
    backup_id = utils.unique_id()
    await ctx.bot.db.backups.insert_one({
        "_id": backup_id,
        "creator": ctx.author.id,
        #naive UTC timestamp (datetime.utcnow is tz-naive)
        "timestamp": datetime.utcnow(),
        "data": backup.data
    })
    embed = ctx.f.format(f"Successfully **created backup** with the id `{backup_id}`.",
                         f=ctx.f.SUCCESS)["embed"]
    #append a usage hint to the success embed
    embed.setdefault("fields", []).append({
        "name": "Usage",
        "value": f"```{ctx.bot.prefix}backup load {backup_id}```\n"
                 f"```{ctx.bot.prefix}backup info {backup_id}```"
    })
    await ctx.client.edit_message(status_msg, embed=embed)
def create_symbol_table(root):
    """
    Creates a symbol table.

    Arguments:-
        root: root ast node to be analyzed (typically a module node).

    Returns a Multidict mapping identifier -> scopemap(scope, astnode).
    NOTE(review): Python 2 code (`print` statement below).
    """
    #symbol table
    #creates mapping from name to scopes
    symtable = Multidict()
    #stack of nodes
    nodes = Stack()
    set_depth(root, 0)
    nodes.push(root)
    #stack of scopes
    scopestack = Stack()
    #Iterate over all children node (iterative depth-first traversal
    #driven by the `nodes` stack; depth markers delimit scopes)
    for node in nodes:
        ntype = node_type(node)
        #remove any scope nodes that have depth >= node
        scopestack.predpop(lambda scopenode: scopenode.depth >= node.depth)
        children = get_children(node)
        #add children to stack in reverse order
        for child in reversed(children):
            #set depth on children nodes
            set_depth(child, node.depth + 1)
            nodes.push(child)
        #set lineno property of children
        #Not sure if there is a better way to scope objects, since
        #objects can be redefined, i.e. def foo(): pass\n def foo():pass is valid Python
        #set_lineno(node, children)

        #add entries to symbol table
        if ntype == "ClassDef" or ntype == "FunctionDef":
            identifier = unique_id(node)
            symtable[identifier] = scopemap(scope=scopestack.get_state(), astnode=node)
        elif ntype == "Import":
            for name in node.names:
                #honor `import x as y`: the bound identifier is the alias
                identifier = name.asname or name.name
                #Set srcmodule property of ast node `name`
                set_src(name, name.name)
                set_is_src(name)
                #symtable mapping should contain the node itself
                symtable[identifier] = scopemap(scope=scopestack.get_state(), astnode=name)
        elif ntype == "ImportFrom":
            if node.names[0].name == '*':
                try:
                    #NOTE(review): importing the module to enumerate its
                    #'*' names executes it on the analyzing system
                    imported = importlib.import_module(node.module)
                    #add all names in imported module, except those starting with '_'
                    for attr in dir(imported):
                        if attr[0] != '_':
                            symtable[attr] = scopemap(scope=scopestack.get_state(),
                                astnode=ast_name_node(name=attr, srcmodule=node.module))
                except ImportError:
                    print "Error: local system does not have {}. Skipping!".format(node.module)
            else:
                for name in node.names:
                    identifier = name.asname or name.name
                    set_src(name, node.module)
                    symtable[identifier] = scopemap(scope=scopestack.get_state(), astnode=name)
        elif ntype == "arguments":
            #register *args / **kwargs names in the enclosing scope
            if node.vararg:
                symtable[node.vararg] = scopemap(scope=scopestack.get_state(), astnode=node)
            if node.kwarg:
                symtable[node.kwarg] = scopemap(scope=scopestack.get_state(), astnode=node)
        #if a name is being loaded then it must already exist in symtable
        elif ntype == "Name" and not is_load(children) and not has_global(scopestack.get_tail(), node.id):
            symtable[node.id] = scopemap(scope=scopestack.get_state(), astnode=node)
        elif ntype == "Global":
            #add a list global vars on node on the top of scope stack
            #nonlocal could be handled in similar way
            #FIXME: ensure this is correct
            set_globals(scopestack.get_tail(), node.names)

        #add any scoping nodes
        #Need to do this after ntype == '...' blocks otherwise scoping nodes
        #would show up in their own scope mapping.
        if ntype in scoping_types:
            scopestack.push(node)

    return symtable
def create_symbol_table(root):
    """
    Creates a symbol table.

    Arguments:-
        root: root ast node to be analyzed (typically a module node).

    Returns a Multidict mapping identifier -> scopemap(scope, astnode).
    NOTE(review): Python 2 code (`print` statement below); near-duplicate
    of another create_symbol_table definition -- confirm which is current.
    """
    #symbol table
    #creates mapping from name to scopes
    symtable = Multidict()
    #stack of nodes
    nodes = Stack()
    set_depth(root, 0)
    nodes.push(root)
    #stack of scopes
    scopestack = Stack()
    #Iterate over all children node (iterative depth-first traversal;
    #depth markers delimit scope lifetimes)
    for node in nodes:
        ntype = node_type(node)
        #remove any scope nodes that have depth >= node
        scopestack.predpop(lambda scopenode: scopenode.depth >= node.depth)
        children = get_children(node)
        #add children to stack in reverse order
        for child in reversed(children):
            #set depth on children nodes
            set_depth(child, node.depth + 1)
            nodes.push(child)
        #set lineno property of children
        #Not sure if there is a better way to scope objects, since
        #objects can be redefined, i.e. def foo(): pass\n def foo():pass is valid Python
        #set_lineno(node, children)

        #add entries to symbol table
        if ntype == "ClassDef" or ntype == "FunctionDef":
            identifier = unique_id(node)
            symtable[identifier] = scopemap(scope=scopestack.get_state(), astnode=node)
        elif ntype == "Import":
            for name in node.names:
                #honor `import x as y`: the bound identifier is the alias
                identifier = name.asname or name.name
                #Set srcmodule property of ast node `name`
                set_src(name, name.name)
                set_is_src(name)
                #symtable mapping should contain the node itself
                symtable[identifier] = scopemap(scope=scopestack.get_state(), astnode=name)
        elif ntype == "ImportFrom":
            if node.names[0].name == '*':
                try:
                    #NOTE(review): enumerating '*' names requires actually
                    #importing (executing) the module on this system
                    imported = importlib.import_module(node.module)
                    #add all names in imported module, except those starting with '_'
                    for attr in dir(imported):
                        if attr[0] != '_':
                            symtable[attr] = scopemap(
                                scope=scopestack.get_state(),
                                astnode=ast_name_node(name=attr, srcmodule=node.module))
                except ImportError:
                    print "Error: local system does not have {}. Skipping!".format(
                        node.module)
            else:
                for name in node.names:
                    identifier = name.asname or name.name
                    set_src(name, node.module)
                    symtable[identifier] = scopemap(
                        scope=scopestack.get_state(), astnode=name)
        elif ntype == "arguments":
            #register *args / **kwargs names in the enclosing scope
            if node.vararg:
                symtable[node.vararg] = scopemap(scope=scopestack.get_state(), astnode=node)
            if node.kwarg:
                symtable[node.kwarg] = scopemap(scope=scopestack.get_state(), astnode=node)
        #if a name is being loaded then it must already exist in symtable
        elif ntype == "Name" and not is_load(children) and not has_global(
                scopestack.get_tail(), node.id):
            symtable[node.id] = scopemap(scope=scopestack.get_state(), astnode=node)
        elif ntype == "Global":
            #add a list global vars on node on the top of scope stack
            #nonlocal could be handled in similar way
            #FIXME: ensure this is correct
            set_globals(scopestack.get_tail(), node.names)

        #add any scoping nodes
        #Need to do this after ntype == '...' blocks otherwise scoping nodes
        #would show up in their own scope mapping.
        if ntype in scoping_types:
            scopestack.push(node)

    return symtable
def __init__(self, name):
    """Initialize a cache named *name*: empty store, fresh uid, and a
    backing session keyed by the cache name."""
    self.storage = {}
    self.name = name
    self.session = Session("gcache_" + name)
    self.uid = utils.unique_id()