def test_set(self):
    from logilab.common.compat import set
    s1 = set(range(5))
    s2 = set(range(2, 6))
    self.assertEquals(len(s1), 5)
    self.assertEquals(s1 & s2, set([2, 3, 4]))
    self.assertEquals(s1 | s2, set(range(6)))
def add_checker(self, checker):
    hasvdefault = False
    vcids = set()
    hasldefault = False
    lcids = set()
    for member in dir(checker):
        if member.startswith('visit_'):
            cid = member[6:]
            if cid != 'default':
                cbs = self.visit_events.setdefault(cid, [])
                cbs.append(getattr(checker, member))
                vcids.add(cid)
            else:
                hasvdefault = getattr(checker, member)
        elif member.startswith('leave_'):
            cid = member[6:]
            if cid != 'default':
                cbs = self.leave_events.setdefault(cid, [])
                cbs.append(getattr(checker, member))
                lcids.add(cid)
            else:
                hasldefault = getattr(checker, member)
    if hasvdefault:
        for cls in nodes.ALL_NODE_CLASSES:
            cid = cls.__name__.lower()
            if cid not in vcids:
                cbs = self.visit_events.setdefault(cid, [])
                cbs.append(hasvdefault)
    if hasldefault:
        for cls in nodes.ALL_NODE_CLASSES:
            cid = cls.__name__.lower()
            if cid not in lcids:
                cbs = self.leave_events.setdefault(cid, [])
                cbs.append(hasldefault)
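# Minimal sketch (not from the source) of the naming convention add_checker
# relies on: methods named visit_<nodename>/leave_<nodename> are registered as
# callbacks for that node id, while visit_default/leave_default act as
# fallbacks for every node class without an explicit callback. The class and
# node names below are hypothetical.
class MyChecker(object):
    def visit_function(self, node):
        pass   # registered under the 'function' node id

    def leave_function(self, node):
        pass   # called when leaving 'function' nodes

    def visit_default(self, node):
        pass   # registered for every node class lacking its own visit_ method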
def test_set(self):
    if sys.version_info >= (3, 0):
        self.skip("don't test 2.4 compat 'set' on >= 3.0")
    from logilab.common.compat import set
    s1 = set(range(5))
    s2 = set(range(2, 6))
    self.assertEqual(len(s1), 5)
    self.assertEqual(s1 & s2, set([2, 3, 4]))
    self.assertEqual(s1 | s2, set(range(6)))
def test_knownValues_get_modules_1(self): # XXXFIXME: TOWRITE
    """given a directory return a list of all available python modules,
    even in subdirectories
    """
    import data.find_test as data
    mod_path = ("data", 'find_test')
    modules = modutils.get_modules(path.join(*mod_path), data.__path__[0])
    modules.sort()
    self.assertSetEquals(set(modules),
                         set(['.'.join(mod_path + (mod, ))
                              for mod in ('module', 'module2',
                                          'noendingnewline', 'nonregr')]))
def optik_option(self, provider, opt_name, opt_dict):
    """get our personal option definition and return a suitable form for
    use with optik/optparse
    """
    opt_dict = copy(opt_dict)
    if 'action' in opt_dict:
        self._nocallback_options[provider] = opt_name
    else:
        opt_dict['action'] = 'callback'
        opt_dict['callback'] = self.cb_set_provider_option
    # default is handled here and *must not* be given to optik if you
    # want the whole machinery to work
    if 'default' in opt_dict:
        if (opt.OPTPARSE_FORMAT_DEFAULT and 'help' in opt_dict and
                opt_dict.get('default') is not None and
                not opt_dict['action'] in ('store_true', 'store_false')):
            opt_dict['help'] += ' [current: %default]'
        del opt_dict['default']
    args = ['--' + opt_name]
    if 'short' in opt_dict:
        self._short_options[opt_dict['short']] = opt_name
        args.append('-' + opt_dict['short'])
        del opt_dict['short']
    available_keys = set(self._optik_parser.option_class.ATTRS)
    # cleanup option definition dict before giving it to optik
    for key in opt_dict.keys():
        if not key in available_keys:
            opt_dict.pop(key)
    return args, opt_dict
def reset_parsers(self, usage='', version=None):
    # configuration file parser
    self.cfgfile_parser = ConfigParser()
    # command line parser
    self.cmdline_parser = optparse.OptionParser(usage=usage, version=version)
    self.cmdline_parser.options_manager = self
    self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
def _repr_tree(node, result, indent='', _done=None, ids=False):
    """build a tree representation of a node as a list of lines"""
    if _done is None:
        _done = set()
    if not hasattr(node, '_astng_fields'): # not an astng node
        return
    if node in _done:
        result.append(indent + 'loop in tree: %s' % node)
        return
    _done.add(node)
    node_str = str(node)
    if ids:
        node_str += ' . \t%x' % id(node)
    result.append(indent + node_str)
    indent += INDENT
    for field in node._astng_fields:
        value = getattr(node, field)
        if isinstance(value, (list, tuple)):
            result.append(indent + field + " = [")
            for child in value:
                if isinstance(child, (list, tuple)):
                    # special case for Dict # FIXME
                    _repr_tree(child[0], result, indent, _done, ids)
                    _repr_tree(child[1], result, indent, _done, ids)
                    result.append(indent + ',')
                else:
                    _repr_tree(child, result, indent, _done, ids)
            result.append(indent + "]")
        else:
            result.append(indent + field + " = ")
            _repr_tree(value, result, indent, _done, ids)
def native_repr_tree(node, indent='', _done=None):
    """enhanced compiler.ast tree representation"""
    if _done is None:
        _done = set()
    if node in _done:
        print('loop in tree: %r (%s)' % (node, getattr(node, 'lineno', None)))
        return
    _done.add(node)
    print indent + "<%s>" % node.__class__
    indent += '    '
    if not hasattr(node, "__dict__"): # XXX
        return
    for field, attr in node.__dict__.items():
        if attr is None or field == "_proxied":
            continue
        if type(attr) is list:
            if not attr:
                continue
            print indent + field + ' ['
            for elt in attr:
                if type(elt) is tuple:
                    for val in elt:
                        native_repr_tree(val, indent, _done)
                else:
                    native_repr_tree(elt, indent, _done)
            print indent + ']'
            continue
        if isinstance(attr, Node):
            print indent + field
            native_repr_tree(attr, indent, _done)
        else:
            print indent + field, repr(attr)
def check_types(self, node, name, inferednodes, vtype):
    """check types assigned to a name (vtype is a string telling if it's a
    local or an attribute)

    node is the starting node (function or class node usually)
    infered is the infered value for the name
    """
    types = set()
    hasnone = False
    for infered in inferednodes:
        if infered is astng.YES:
            continue
        # skip None
        if isinstance(infered, astng.Const) and infered.value is None:
            hasnone = True
            continue
        types.add(infered.pytype())
    if len(types) > 1:
        self.add_message('E1210', node=node, args=(vtype, name))
    elif hasnone and types:
        ptype = types.pop()
        # XXX long ? they should not be supported but this is not handled
        # do that in visit_const ?
        if ptype in ('__builtin__.int', '__builtin__.float'):
            ptype = ptype.split('.')[1]
        self.add_message('E1211', node=node, args=(ptype, vtype, name))
def _add_imported_module(self, node, importedmodname):
    """notify an imported module, used to analyze dependencies"""
    context_name = node.root().name
    if context_name == importedmodname:
        # module importing itself !
        self.add_message('W0406', node=node)
    elif not is_standard_module(importedmodname):
        # handle dependencies
        importedmodnames = self.stats['dependencies'].setdefault(
            importedmodname, set())
        if not context_name in importedmodnames:
            importedmodnames.add(context_name)
        if is_standard_module(importedmodname, (self.package_dir(), )):
            # update import graph
            mgraph = self.import_graph.setdefault(context_name, set())
            if not importedmodname in mgraph:
                mgraph.add(importedmodname)
def tryOtherProviders(self, error, nodeConfig, filename, words, host, port,
                      docId, qid):
    """starts to explore the list of other providers"""
    providers = self.querier.getProvidersFor(docId, qid)
    self.providerSet = set(providers)
    self.providerSet.remove((host, int(port)))
    return self.retryWithOtherProvider('...', nodeConfig, words, docId, filename)
def __init__(self, path=None):
    if path is None:
        self.path = set()
    else:
        self.path = path
    self.lookupname = None
    self.callcontext = None
    self.boundnode = None
def visit_dict(self, node):
    """check duplicate key in dictionary"""
    keys = set()
    for k, v in node.items:
        if isinstance(k, astng.Const):
            key = k.value
            if key in keys:
                self.add_message('W0109', node=node, args=key)
            keys.add(key)
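# Hedged illustration of what visit_dict reports: a dict literal whose constant
# key 'a' appears twice, so the second value silently overwrites the first; the
# repeated key is the condition flagged as W0109 above. The variable name is
# hypothetical.
DUPLICATED_KEY_DICT = {'a': 1, 'b': 2, 'a': 3}   # 'a' occurs twice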
def runIndexer(self, isPrivate=True):
    existingFiles = set()
    state = docState(isPrivate)
    for filename in self.getFileIterator(isPrivate):
        existingFiles.add(filename)
        try:
            self.indexFile(filename, isPrivate)
        except FileIndexationFailure, fif: # should be catch-all
            print fif
            continue
def test_basic_set(self):
    from logilab.common.compat import set
    s = set('abc')
    self.assertEquals(len(s), 3)
    s.remove('a')
    self.assertEquals(len(s), 2)
    s.add('a')
    self.assertEquals(len(s), 3)
    s.add('a')
    self.assertEquals(len(s), 3)
    self.assertRaises(KeyError, s.remove, 'd')
def decoratornames(self):
    """return a list of decorator qualified names"""
    result = set()
    decoratornodes = []
    if self.decorators is not None:
        decoratornodes += self.decorators.nodes
    decoratornodes += getattr(self, 'extra_decorators', [])
    for decnode in decoratornodes:
        for infnode in decnode.infer():
            result.add(infnode.qname())
    return result
def runIndexer(self, isPrivate=True):
    existingFiles = set()
    state = docState(isPrivate)
    for filename in self.getFileIterator(isPrivate):
        existingFiles.add(filename)
        try:
            self.indexFile(filename, isPrivate)
        # FIXME: a UnicodeError may be raised and is not caught.
        # except FileIndexationFailure, fif: # should be catch-all
        except Exception, fif:
            print fif
            continue
def __init__(self, usage, config_file=None, version=None, quiet=0):
    self.config_file = config_file
    self.reset_parsers(usage, version=version)
    # list of registered options providers
    self.options_providers = []
    # dictionary associating option name to checker
    self._all_options = {}
    self._short_options = {}
    self._nocallback_options = {}
    self._mygroups = set()
    # verbosity
    self.quiet = quiet
def sort_checkers(self, checkers=None):
    if checkers is None:
        checkers = [checker for checkers in self._checkers.values()
                    for checker in checkers]
    graph = {}
    cls_instance = {}
    for checker in checkers:
        graph[checker.__class__] = set(checker.needs_checkers)
        cls_instance[checker.__class__] = checker
    checkers = [cls_instance.get(cls) for cls in ordered_nodes(graph)]
    checkers.remove(self)
    checkers.insert(0, self)
    return checkers
def visit_getattr(self, node):
    """check that the accessed attribute exists

    to avoid too many false positives for now, we'll consider the code as
    correct if at least one of the inferred nodes has the accessed attribute.

    function/method, super call and metaclasses are ignored
    """
    if node.attrname in self.config.generated_members:
        # attribute is marked as generated, stop here
        return
    try:
        infered = list(node.expr.infer())
    except astng.InferenceError:
        return
    # list of (node, nodename) which are missing the attribute
    missingattr = set()
    ignoremim = self.config.ignore_mixin_members
    inference_failure = False
    for owner in infered:
        # skip yes object
        if owner is astng.YES:
            inference_failure = True
            continue
        # skip None anyway
        if isinstance(owner, astng.Const) and owner.value is None:
            continue
        # XXX "super" / metaclass call
        if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
            continue
        name = getattr(owner, 'name', 'None')
        if name in self.config.ignored_classes:
            continue
        if ignoremim and name[-5:].lower() == 'mixin':
            continue
        try:
            owner.getattr(node.attrname)
        except AttributeError:
            # XXX method / function
            continue
        except astng.NotFoundError, ex:
            if isinstance(owner, astng.Instance) \
                   and owner.has_dynamic_getattr():
                continue
            # explicit skipping of optparse's Values class
            if owner.name == 'Values' and \
                   owner.root().name in ('optik', 'optparse'):
                continue
            missingattr.add((owner, name))
            continue
        # stop on the first found
        break
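# Hedged illustration (class and attribute names are hypothetical) of the kind
# of access visit_getattr records in missingattr: the owner of the expression
# can be inferred, but none of the inferred owners defines the attribute.
class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

p = Point(1, 2)
print(p.x)              # an inferred owner (a Point instance) defines 'x'
try:
    print(p.radius)     # no inferred owner defines 'radius': the kind of
except AttributeError:  # access that would end up in missingattr
    pass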
def _get_checkers(self):
    # compute checkers needed according to activated messages and reports
    neededcheckers = set()
    for checkers in self._checkers.values():
        for checker in checkers:
            for msgid in checker.msgs:
                if self._msgs_state.get(msgid, True):
                    neededcheckers.add(checker)
                    break
            else:
                for reportid, _, _ in checker.reports:
                    if self.is_report_enabled(reportid):
                        neededcheckers.add(checker)
                        break
    return self.sort_checkers(neededcheckers)
def wrapped(node, context=None, _func=func, **kwargs):
    """wrapper function handling context"""
    if context is None:
        context = InferenceContext()
    context.push(node)
    yielded = set()
    for res in _func(node, context, **kwargs):
        # unproxy only true instance, not const, tuple, dict...
        if res.__class__ is Instance:
            ares = res._proxied
        else:
            ares = res
        if not ares in yielded:
            yield res
            yielded.add(ares)
def _compute_sims(self):
    """compute similarities in appended files"""
    no_duplicates = {}
    for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
        duplicate = no_duplicates.setdefault(num, [])
        for couples in duplicate:
            if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                couples.add((lineset1, idx1))
                couples.add((lineset2, idx2))
                break
        else:
            duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
    sims = []
    for num, ensembles in no_duplicates.iteritems():
        for couples in ensembles:
            sims.append((num, couples))
    sims.sort()
    sims.reverse()
    return sims
def interfaces(self, herited=True, handler_func=_iface_hdlr):
    """return an iterator on interfaces implemented by the given class node
    """
    # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
    try:
        implements = Instance(self).getattr('__implements__')[0]
    except NotFoundError:
        return
    if not herited and not implements.frame() is self:
        return
    found = set()
    for iface in unpack_infer(implements):
        if iface is YES:
            continue
        if not iface in found and handler_func(iface):
            found.add(iface)
            yield iface
    if not found:
        raise InferenceError()
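# Hedged sketch (names are hypothetical) of the declaration convention that
# interfaces() inspects: implemented interfaces are listed in an __implements__
# class attribute, which is looked up on an Instance of the class node and then
# unpacked and filtered through handler_func.
class IVisitable(object):
    """hypothetical interface class"""

class MyNode(object):
    __implements__ = (IVisitable,)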
def visit_list(self, node):
    """check list contains homogeneous types"""
    if not self._rpython:
        return
    types = set()
    for node in node.nodes:
        try:
            # XXX use ifilter + filter to factorize filtering below
            for infered in node.infer():
                if infered is astng.YES:
                    continue
                # XXX skip None ?
                if isinstance(infered, astng.Const) and \
                       infered.value is None:
                    continue
                types.add(str(infered))
        except astng.InferenceError:
            continue
    if len(types) > 1:
        self.add_message('E1212', node=node)
def walk(self, node, _done=None):
    """walk on the tree from <node>, getting callbacks from handler"""
    if _done is None:
        _done = set()
    if node in _done:
        raise AssertionError((id(node), node, node.parent))
    _done.add(node)
    try:
        self.visit(node)
    except IgnoreChild:
        pass
    else:
        try:
            for child_node in node.get_children():
                self.handler.set_context(node, child_node)
                assert child_node is not node
                self.walk(child_node, _done)
        except AttributeError:
            print node.__class__, id(node.__class__)
            raise
    self.leave(node)
    assert node.parent is not node
def __init__(self, sender, port, query, ttl=5, qid=None, host=None):
    """
    :param sender: really a nodeId
    :type sender: str
    :param port: the originator rpc port
    :type port: int
    :param query: the query to wrap
    :type query: `maay.query.Query`
    :param qid: query identifier
    :type qid: str
    :param host: IP address of sender
    :type host: str
    """
    if qid:
        self.qid = qid
    else:
        self.qid = hashIt(sender)
    self.sender = sender
    self.port = port
    self.ttl = ttl
    self.query = query
    self.documents_ids = set()
    self.host = host
def __init__(self, sender, query, ttl=5, client_host=None, client_port=None):
    """
    :param sender: really a nodeId
    :type sender: str
    :param port: the originator rpc port
    :type port: int
    :param query: the query to wrap
    :type query: `maay.query.Query`
    :param qid: query identifier
    :type qid: str
    """
    self.sender = sender
    #self.port = originator_port
    self.ttl = ttl
    self.query = query
    # explicitly set the 'limit' attribute for P2P queries
    self.query.limit = LIMIT
    self.documents_ids = set()
    # *** client_{host, port} belong to the immediate client
    # *** default args are typically used from webapplication instantiation
    # *** but NOT at rpc level, where we MUST use the transmitted values
    self.client_host = client_host or NODE_HOST
    self.client_port = client_port or NODE_CONFIG.rpcserver_port
def _getIndexedFiles(self):
    return set(self.querier.getIndexedFiles())
def _purgeEverything(self):
    indexedFiles = set(self.querier.getIndexedFiles())
    self.purgeFiles(indexedFiles)
def _getIndexedFiles(self):
    return set(self.serverProxy.getIndexedFiles(self.cnxId))
def _purgeEverything(self):
    indexedFiles = set(self.serverProxy.getIndexedFiles(self.cnxId))
    self.purgeFiles(indexedFiles)
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""checker for use of Python logging
"""

from logilab import astng
from pylint import checkers
from pylint import interfaces
from logilab.common.compat import set


EAGER_STRING_INTERPOLATION = 'W6501'

CHECKED_CONVENIENCE_FUNCTIONS = set([
    'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn',
    'warning'])


class LoggingChecker(checkers.BaseChecker):
    """Checks use of the logging module."""

    __implements__ = interfaces.IASTNGChecker
    name = 'logging'
    msgs = {EAGER_STRING_INTERPOLATION:
            ('Specify string format arguments as logging function parameters',
             'Used when a logging statement has a call form of '
             '"logging.<logging method>(format_string % (format_args...))". '
             'Such calls should leave string interpolation to the logging '
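# Hedged illustration of the W6501 pattern described in the message above
# (logger and variable names are hypothetical): the first call interpolates the
# format string eagerly with '%', while the second passes the arguments along
# so that interpolation is left to the logging method.
import logging

_logger = logging.getLogger(__name__)
_user = 'alice'
_logger.info('user %s logged in' % _user)   # eager interpolation, the form W6501 describes
_logger.info('user %s logged in', _user)    # interpolation deferred to the logging call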
def read_old_config(newconfig, changes, configfile):
    """initialize newconfig from a deprecated configuration file

    possible changes:
    * ('renamed', oldname, newname)
    * ('moved', option, oldgroup, newgroup)
    * ('typechanged', option, oldtype, newvalue)
    """
    # build an index of changes
    changesindex = {}
    for action in changes:
        if action[0] == 'moved':
            option, oldgroup, newgroup = action[1:]
            changesindex.setdefault(option, []).append(
                (action[0], oldgroup, newgroup))
            continue
        if action[0] == 'renamed':
            oldname, newname = action[1:]
            changesindex.setdefault(newname, []).append((action[0], oldname))
            continue
        if action[0] == 'typechanged':
            option, oldtype, newvalue = action[1:]
            changesindex.setdefault(option, []).append(
                (action[0], oldtype, newvalue))
            continue
        if action[1] in ('added', 'removed'):
            continue # nothing to do here
        raise Exception('unknown change %s' % action[0])
    # build a config object able to read the old config
    options = []
    for optname, optdef in newconfig.options:
        for action in changesindex.pop(optname, ()):
            if action[0] == 'moved':
                oldgroup, newgroup = action[1:]
                optdef = optdef.copy()
                optdef['group'] = oldgroup
            elif action[0] == 'renamed':
                optname = action[1]
            elif action[0] == 'typechanged':
                oldtype = action[1]
                optdef = optdef.copy()
                optdef['type'] = oldtype
        options.append((optname, optdef))
    if changesindex:
        raise Exception('unapplied changes: %s' % changesindex)
    oldconfig = Configuration(options=options, name=newconfig.name)
    # read the old config
    oldconfig.load_file_configuration(configfile)
    # apply values reverting changes
    changes.reverse()
    done = set()
    for action in changes:
        if action[0] == 'renamed':
            oldname, newname = action[1:]
            newconfig[newname] = oldconfig[oldname]
            done.add(newname)
        elif action[0] == 'typechanged':
            optname, oldtype, newvalue = action[1:]
            newconfig[optname] = newvalue
            done.add(optname)
    for optname, optdef in newconfig.options:
        if optdef.get('type') and not optname in done:
            newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
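# Hedged example of the `changes` argument accepted by read_old_config, built
# only from the tuple shapes listed in its docstring; the option names, group
# names and configuration file name below are hypothetical.
EXAMPLE_CHANGES = [
    ('renamed', 'disable-msg', 'disable'),        # oldname, newname
    ('moved', 'ignore', 'MASTER', 'BASIC'),       # option, oldgroup, newgroup
    ('typechanged', 'cache-size', 'int', 500),    # option, oldtype, newvalue
]
# usage sketch: read_old_config(newconfig, EXAMPLE_CHANGES, 'old_setup.cfg')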
def __eq__(self, other):
    return isinstance(other, NoOverlap) and \
           set(self._variables) == set(other._variables)
def get_column(self, col_index, distinct=False):
    """get a column by index"""
    col = [row[col_index] for row in self.data]
    if distinct:
        col = list(set(col))
    return col