def test_normal_function(self):
    i = [iter(range(100)) for x in range(3)]
    e = expandable_chain()
    e.extend(i)
    assert list(e) == list(range(100)) * 3
    for x in i + [e]:
        pytest.raises(StopIteration, x.__next__)
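# Illustrative sketch (not from the source): the property the functions below
# rely on is that an expandable_chain may be grown while it is being
# consumed.  The snakeoil.iterables import path is an assumption.
from snakeoil.iterables import expandable_chain

def _flatten_demo(items):
    e = expandable_chain(items)
    flat = []
    for x in e:
        if isinstance(x, list):
            e.appendleft(x)  # splice nested items in ahead of the rest
        else:
            flat.append(x)
    return flat

# _flatten_demo([1, [2, 3], 4]) -> [1, 2, 3, 4]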
def flatten_restricts(self, v):
    i = expandable_chain(v)
    depth = 0
    conditionals = []
    for x in i:
        for t, s in ((boolean.OrRestriction, "||"),
                     (boolean.AndRestriction, "&&")):
            if isinstance(x, t):
                yield s
                yield "("
                i.appendleft(")")
                i.appendleft(x.restrictions)
                depth += 1
                break
        else:
            if isinstance(x, packages.Conditional):
                self.assertTrue(x.attr == "use")
                conditionals.insert(
                    depth, list(self.mangle_cond_payload(x.restriction)))
                yield set(iflatten_instance(conditionals[:depth + 1]))
                yield "("
                i.appendleft(")")
                i.appendleft(x.payload)
                depth += 1
            else:
                if x == ")":
                    self.assertTrue(depth)
                    depth -= 1
                yield x
    self.assertFalse(depth)
def test_normal_function(self):
    i = [iter(xrange(100)) for x in xrange(3)]
    e = expandable_chain()
    e.extend(i)
    self.assertEqual(list(e), range(100)*3)
    for x in i + [e]:
        self.assertRaises(StopIteration, x.next)
def find_cond_nodes(restriction_set, yield_non_conditionals=False):
    conditions_stack = []
    new_set = expandable_chain(restriction_set)
    for cur_node in new_set:
        if isinstance(cur_node, packages.Conditional):
            conditions_stack.append(cur_node.restriction)
            new_set.appendleft(list(cur_node.payload) + [None])
        elif isinstance(cur_node, transitive_use_atom):
            new_set.appendleft(cur_node.convert_to_conditionals())
        elif (isinstance(cur_node, boolean.base) and
              not isinstance(cur_node, atom)):
            new_set.appendleft(cur_node.restrictions)
        elif cur_node is None:
            conditions_stack.pop()
        elif conditions_stack or yield_non_conditionals:
            # leaf
            yield (cur_node, conditions_stack[:])
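# Minimal, self-contained sketch (not from the source) of the sentinel
# technique find_cond_nodes uses above: a None pushed after a group's payload
# marks where that group's condition stops applying.  The ("cond", condition,
# children) tuple layout and the snakeoil.iterables import path are
# assumptions made purely for illustration.
from snakeoil.iterables import expandable_chain

def walk_with_conditions(tree):
    stack = []
    queue = expandable_chain(tree)
    for node in queue:
        if isinstance(node, tuple) and node and node[0] == "cond":
            stack.append(node[1])
            queue.appendleft(list(node[2]) + [None])  # payload, then sentinel
        elif node is None:
            stack.pop()  # condition no longer applies
        else:
            yield node, stack[:]

# list(walk_with_conditions(["a", ("cond", "x?", ["b", "c"]), "d"]))
# -> [('a', []), ('b', ['x?']), ('c', ['x?']), ('d', [])]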
def use_validate(self, klasses, pkg, seq, reporter, attr=None):
    skip_filter = (packages.Conditional,) + klasses
    unstated = set()
    stated = pkg.iuse_stripped
    i = expandable_chain(iflatten_instance(seq, skip_filter))
    for node in i:
        if isinstance(node, packages.Conditional):
            # invert it; get only what's not in pkg.iuse
            unstated.update(ifilterfalse(
                stated.__contains__, node.restriction.vals))
            i.append(iflatten_instance(node.payload, skip_filter))
            continue
        yield node

    # implicit IUSE flags
    unstated.difference_update(self.unstated_iuse)
    if unstated:
        reporter.add_report(UnstatedIUSE(pkg, attr, unstated))
def native_iflatten_func(l, skip_func):
    """collapse [[1],2] into [1,2]

    :param skip_func: a callable that returns True when iflatten_func
        should descend no further
    :return: this generator yields each item that cannot be flattened
        (or is skipped due to a True result from skip_func)
    """
    if skip_func(l):
        yield l
        return
    iters = expandable_chain(l)
    try:
        while True:
            x = iters.next()
            if hasattr(x, '__iter__') and not skip_func(x):
                iters.appendleft(x)
            else:
                yield x
    except StopIteration:
        pass
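# Hedged usage sketch (not from the source); note the generator above uses
# the Python 2 iterator protocol (iters.next()), so this runs under Python 2.
# A skip_func returning True stops descent, so the dict below survives intact
# while the list nesting is collapsed.
nested = [1, [2, [3, {"k": "v"}]]]
assert list(native_iflatten_func(nested, lambda x: isinstance(x, dict))) == \
    [1, 2, 3, {"k": "v"}]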
def native_iflatten_instance(l, skip_flattening=_str_kls):
    """collapse [[1],2] into [1,2]

    :param skip_flattening: list of classes to not descend through
    :return: this generator yields each item that cannot be flattened
        (or is skipped due to being an instance of ``skip_flattening``)
    """
    if isinstance(l, skip_flattening):
        yield l
        return
    iters = expandable_chain(l)
    try:
        while True:
            x = iters.next()
            if (hasattr(x, '__iter__') and
                    not (isinstance(x, skip_flattening) or
                         (isinstance(x, _str_kls) and len(x) == 1))):
                iters.appendleft(x)
            else:
                yield x
    except StopIteration:
        pass
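# Hedged usage sketch (not from the source): with the default skip_flattening
# (string types), strings are treated as leaves rather than exploded into
# characters, so only the list/tuple nesting is collapsed.  As above, the
# iters.next() call ties this implementation to Python 2.
assert list(native_iflatten_instance(["ab", ["cd", ("ef", 1)]])) == \
    ["ab", "cd", "ef", 1]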
def use_validate(self, klasses, pkg, seq, reporter=None, attr=None):
    skip_filter = (packages.Conditional,) + klasses
    unstated = set()
    stated = pkg.iuse_stripped
    i = expandable_chain(iflatten_instance(seq, skip_filter))
    for node in i:
        if isinstance(node, packages.Conditional):
            # invert it; get only what's not in pkg.iuse
            unstated.update(
                filterfalse(stated.__contains__, node.restriction.vals))
            i.append(iflatten_instance(node.payload, skip_filter))
            continue
        elif attr == 'required_use':
            unstated.update(filterfalse(stated.__contains__, node.vals))
        yield node

    # implicit IUSE flags
    if reporter is not None and attr is not None:
        unstated.difference_update(self.unstated_iuse)
        if unstated:
            reporter.add_report(UnstatedIUSE(pkg, attr, sorted(unstated)))
def test_extend(self):
    e = expandable_chain()
    e.extend(xrange(100) for i in (1, 2))
    self.assertEqual(list(e), range(100)*2)
    self.assertRaises(StopIteration, e.extend, [[]])
def test_appendleft(self):
    e = expandable_chain(range(10, 20))
    e.appendleft(range(10))
    assert list(e) == list(range(20))
    with pytest.raises(StopIteration):
        e.appendleft([])
def test_extendleft(self):
    e = expandable_chain(range(20, 30))
    e.extendleft([range(10, 20), range(10)])
    assert list(e) == list(range(30))
    with pytest.raises(StopIteration):
        e.extendleft([[]])
def test_extend(self):
    e = expandable_chain()
    e.extend(range(100) for i in (1, 2))
    assert list(e) == list(range(100)) * 2
    with pytest.raises(StopIteration):
        e.extend([[]])
def parse(cls, dep_str, element_class, operators=None, attr=None,
          element_func=None, transitive_use_atoms=False,
          allow_src_uri_file_renames=False):
    """
    :param dep_str: string abiding by DepSet syntax
    :param operators: mapping of node -> callable for special operators
        in DepSet syntax
    :param element_func: if None, element_class is used for generating
        elements, else it's used to generate elements. Mainly useful for
        when you need to curry a few args for instance generation, since
        element_class _must_ be a class
    :param element_class: class of generated elements
    :param attr: name of the DepSet attribute being parsed
    """
    if element_func is None:
        element_func = element_class

    restrictions = []
    if operators is None:
        operators = {
            "||": boolean.OrRestriction,
            "": boolean.AndRestriction,
        }

    raw_conditionals = []
    depsets = [restrictions]

    node_conds = False
    words = iter(dep_str.split())
    # we specifically do it this way since expandable_chain has a bit of nasty
    # overhead to the tune of 33% slower
    if allow_src_uri_file_renames:
        words = expandable_chain(words)
    k = None
    try:
        for k in words:
            if ")" == k:
                # no elements == error. if closures don't map up,
                # indexerror would be chucked from trying to pop
                # the frame so that is addressed.
                if not depsets[-1] or not raw_conditionals:
                    raise DepsetParseError(dep_str, attr=attr)
                elif raw_conditionals[-1] in operators:
                    if len(depsets[-1]) == 1:
                        depsets[-2].append(depsets[-1][0])
                    else:
                        depsets[-2].append(
                            operators[raw_conditionals[-1]](*depsets[-1]))
                else:
                    node_conds = True
                    c = raw_conditionals[-1]
                    if c[0] == "!":
                        c = values.ContainmentMatch(c[1:-1], negate=True)
                    else:
                        c = values.ContainmentMatch(c[:-1])

                    depsets[-2].append(
                        packages.Conditional("use", c, tuple(depsets[-1])))

                raw_conditionals.pop()
                depsets.pop()

            elif "(" == k:
                k = ''
                # push another frame on
                depsets.append([])
                raw_conditionals.append(k)

            elif k[-1] == '?' or k in operators:
                # use conditional or custom op.
                # no tokens left == bad dep_str.
                k2 = next(words)

                if k2 != "(":
                    raise DepsetParseError(dep_str, k2, attr=attr)

                # push another frame on
                depsets.append([])
                raw_conditionals.append(k)

            elif "|" in k:
                raise DepsetParseError(dep_str, k, attr=attr)
            elif allow_src_uri_file_renames:
                try:
                    k2 = next(words)
                except StopIteration:
                    depsets[-1].append(element_func(k))
                else:
                    if k2 != '->':
                        depsets[-1].append(element_func(k))
                        words.appendleft((k2,))
                    else:
                        k3 = next(words)
                        # file rename
                        depsets[-1].append(element_func(k, k3))
            else:
                # node/element
                depsets[-1].append(element_func(k))

    except IGNORED_EXCEPTIONS:
        raise
    except DepsetParseError:
        # [][-1] for a frame access, which means it was a parse error.
        raise
    except StopIteration:
        if k is None:
            raise
        raise DepsetParseError(dep_str, k, attr=attr)
    except Exception as e:
        raise DepsetParseError(dep_str, e, attr=attr) from e

    # check if any closures required
    if len(depsets) != 1:
        raise DepsetParseError(dep_str, attr=attr)

    if transitive_use_atoms and not node_conds:
        element_class = transitive_use_atom
        # we can't rely on iter(self) here since it doesn't
        # descend through boolean restricts.
        node_conds = cls._has_transitive_use_atoms(restrictions)

    return cls(tuple(restrictions), element_class, node_conds)
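# Minimal, self-contained sketch (not from the source) of the frame-stack
# technique parse() relies on: "(" pushes a new list frame, ")" pops it and
# folds the finished frame into its parent under whichever operator or
# conditional opened it.  The names and the tuple output format are
# illustrative assumptions, not pkgcore API.
def toy_parse(dep_str):
    frames = [[]]
    opened_by = []
    words = iter(dep_str.split())
    for k in words:
        if k == ")":
            frame = frames.pop()
            frames[-1].append((opened_by.pop() or "&&", tuple(frame)))
        elif k == "||" or k.endswith("?"):
            if next(words) != "(":  # an operator must be followed by "("
                raise ValueError(dep_str)
            frames.append([])
            opened_by.append(k)
        elif k == "(":
            frames.append([])
            opened_by.append("")  # bare group == implicit "and" frame
        else:
            frames[-1].append(k)
    if len(frames) != 1:  # unbalanced parentheses
        raise ValueError(dep_str)
    return tuple(frames[0])

# toy_parse("a? ( b c ) || ( d e ) f")
# -> (('a?', ('b', 'c')), ('||', ('d', 'e')), 'f')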
def parse(cls, dep_str, element_class, operators=None,
          element_func=None, transitive_use_atoms=False,
          allow_src_uri_file_renames=False):
    """
    :param dep_str: string abiding by DepSet syntax
    :param operators: mapping of node -> callable for special operators
        in DepSet syntax
    :param element_func: if None, element_class is used for generating
        elements, else it's used to generate elements. Mainly useful for
        when you need to curry a few args for instance generation, since
        element_class _must_ be a class
    :param element_class: class of generated elements
    """
    if not isinstance(element_class, type):
        # yes, this blocks non new style classes. touch cookies.
        raise ValueError("element_class must be a new style class")

    if element_func is None:
        element_func = element_class

    if cls.parse_depset is not None and not allow_src_uri_file_renames:
        restrictions = None
        if operators is None:
            has_conditionals, restrictions = cls.parse_depset(
                dep_str, element_func,
                boolean.AndRestriction, boolean.OrRestriction)
        else:
            for x in operators:
                if x not in ("", "||"):
                    break
            else:
                has_conditionals, restrictions = cls.parse_depset(
                    dep_str, element_func,
                    operators.get(""), operators.get("||"))

        if restrictions is not None:
            if not has_conditionals and transitive_use_atoms:
                has_conditionals = cls._has_transitive_use_atoms(
                    restrictions)
            return cls(restrictions, element_class, has_conditionals)

    restrictions = []
    if operators is None:
        operators = {
            "||": boolean.OrRestriction,
            "": boolean.AndRestriction,
        }

    raw_conditionals = []
    depsets = [restrictions]

    node_conds = False
    words = iter(dep_str.split())
    # we specifically do it this way since expandable_chain has a bit of nasty
    # overhead to the tune of 33% slower
    if allow_src_uri_file_renames:
        words = expandable_chain(words)
    k = None
    try:
        for k in words:
            if ")" == k:
                # no elements == error. if closures don't map up,
                # indexerror would be chucked from trying to pop
                # the frame so that is addressed.
                if not depsets[-1] or not raw_conditionals:
                    raise ParseError(dep_str)
                elif raw_conditionals[-1] in operators:
                    if len(depsets[-1]) == 1:
                        depsets[-2].append(depsets[-1][0])
                    elif raw_conditionals[-1] == '' and (
                            len(raw_conditionals) == 1 or
                            ('' == raw_conditionals[-2])):
                        # if the frame is an and and the parent is an and,
                        # collapse it in.
                        depsets[-2].extend(depsets[-1])
                    else:
                        depsets[-2].append(
                            operators[raw_conditionals[-1]](*depsets[-1]))
                else:
                    node_conds = True
                    c = raw_conditionals[-1]
                    if c[0] == "!":
                        c = values.ContainmentMatch(c[1:-1], negate=True)
                    else:
                        c = values.ContainmentMatch(c[:-1])

                    depsets[-2].append(
                        packages.Conditional("use", c, tuple(depsets[-1])))

                raw_conditionals.pop()
                depsets.pop()

            elif "(" == k:
                k = ''
                # push another frame on
                depsets.append([])
                raw_conditionals.append(k)

            elif k[-1] == '?' or k in operators:
                # use conditional or custom op.
                # no tokens left == bad dep_str.
                k2 = words.next()

                if k2 != "(":
                    raise ParseError(dep_str, k2)

                # push another frame on
                depsets.append([])
                raw_conditionals.append(k)

            elif "|" in k:
                raise ParseError(dep_str, k)
            elif allow_src_uri_file_renames:
                try:
                    k2 = words.next()
                except StopIteration:
                    depsets[-1].append(element_func(k))
                else:
                    if k2 != '->':
                        depsets[-1].append(element_func(k))
                        words.appendleft((k2,))
                    else:
                        k3 = words.next()
                        # file rename.
                        depsets[-1].append(element_func(k, k3))
            else:
                # node/element.
                depsets[-1].append(element_func(k))

    except IGNORED_EXCEPTIONS:
        raise
    except IndexError:
        # [][-1] for a frame access, which means it was a parse error.
        raise
    except StopIteration:
        if k is None:
            raise
        raise ParseError(dep_str, k)
    except Exception as e:
        raise_from(ParseError(dep_str, e))

    # check if any closures required
    if len(depsets) != 1:
        raise ParseError(dep_str)

    if transitive_use_atoms and not node_conds:
        # localize to this scope for speed.
        element_class = transitive_use_atom
        # we can't rely on iter(self) here since it doesn't
        # descend through boolean restricts.
        node_conds = cls._has_transitive_use_atoms(restrictions)

    return cls(tuple(restrictions), element_class, node_conds)
def test_extendleft(self):
    e = expandable_chain(xrange(20, 30))
    e.extendleft([xrange(10, 20), xrange(10)])
    self.assertEqual(list(e), range(30))
    self.assertRaises(StopIteration, e.extendleft, [[]])
def test_append(self):
    e = expandable_chain()
    e.append(xrange(100))
    self.assertEqual(list(e), range(100))
    self.assertRaises(StopIteration, e.append, [])
def test_appendleft(self):
    e = expandable_chain(xrange(10, 20))
    e.appendleft(xrange(10))
    self.assertEqual(list(e), range(20))
    self.assertRaises(StopIteration, e.appendleft, [])