def test_repeat(self):
    """Sequence repetition matches tuple semantics, including non-positive counts."""
    for size in range(4):
        base = tuple(range(size))
        for count in range(-3, 5):
            self.assertEqual(self.type2test(base * count),
                             self.type2test(base) * count)
        # Repetition by a negative count produces the empty sequence.
        self.assertEqual(self.type2test(base) * (-4), self.type2test([]))
        # CPython hands back the identical tuple object for t * 1.
        self.assertEqual(id(base), id(base * 1))
def test_get_filter(): """Test filter resolving. """ # By name - here using one of the builtins. assert isinstance(get_filter('jsmin'), Filter) assert_raises(ValueError, get_filter, 'notafilteractually') # By class. class MyFilter(Filter): pass assert isinstance(get_filter(MyFilter), MyFilter) assert_raises(ValueError, get_filter, object()) # Passing an instance doesn't do anything. f = MyFilter() assert id(get_filter(f)) == id(f) # Passing a lone callable will give us a a filter back as well. assert hasattr(get_filter(lambda: None), 'output') # Arguments passed to get_filter are used for instance creation. assert get_filter('sass', scss=True).use_scss == True # However, this is not allowed when a filter instance is passed directly, # or a callable object. assert_raises(AssertionError, get_filter, f, 'test') assert_raises(AssertionError, get_filter, lambda: None, 'test')
def _add_deferred_event(self, context, added, removed, modified):
    """ Defer this event.

    Merges (added, removed, modified) into the single ItemsModified
    event accumulated for this context, cancelling out operations that
    negate each other (e.g. add followed by remove).

    Parameters
    ----------
    context : IContext
    added : list of str
    removed : list of str
    modified : list of str
    """
    # Events are keyed by id(context) -- presumably because contexts
    # are not reliably hashable; TODO confirm.
    if id(context) not in self._deferred_events:
        self._deferred_events[id(context)] = ItemsModified(context=context)
    event = self._deferred_events[id(context)]
    # Union new additions into the pending 'added' set (order not preserved).
    event.added = list(set(event.added) | set(added))
    for key in removed:
        if key in event.added:
            # If we've already deferred the addition of this key, then
            # removing the key should cancel the addition.
            event.added.remove(key)
        else:
            event.removed.append(key)
        # Don't record prior modifications.
        if key in event.modified:
            event.modified.remove(key)
    event.removed = list(set(event.removed))
    for key in modified:
        # A key that is still pending addition is reported as added, not modified.
        if key not in event.added:
            event.modified.append(key)
    event.modified = list(set(event.modified))
def __and__(self, other_tester):
    """Intersection: a new Tester holding the records (by identity) present in both."""
    mine = {id(rec): rec for rec in self.records}
    theirs = {id(rec) for rec in other_tester.records}
    result = Tester(self.db, self.key)
    result.records = [rec for rec_id, rec in mine.items() if rec_id in theirs]
    return result
def _base_key_constraint(edge, restriction):
    # Helper: the fields shared by every key-allocator constraint entry.
    return {
        'type': "reserve_resource",
        'edge': str(id(edge)),
        'resource': "keys",
        'restriction': restriction,
    }


def _handle_edge_constraint(
        constraint, json_constraints_dictory_rep, edge):
    """Append JSON-ready dict entries describing key-allocator constraints.

    :param constraint: the constraint attached to the edge
    :param json_constraints_dictory_rep: list to append constraint dicts to
    :param edge: the edge the constraint applies to (identified by id())
    :returns: None; mutates json_constraints_dictory_rep in place
    """
    if not isinstance(constraint, AbstractKeyAllocatorConstraint):
        return
    if isinstance(constraint, KeyAllocatorContiguousRangeContraint):
        # NOTE: "continious_range" (sic) is the exact token the downstream
        # consumer expects -- do not correct the spelling unilaterally.
        json_constraints_dictory_rep.append(
            _base_key_constraint(edge, "continious_range"))
    if isinstance(constraint, KeyAllocatorFixedKeyAndMaskConstraint):
        key_constraint = _base_key_constraint(edge, "[key, mask]")
        # Serialise all key/mask pairs as one bracketed string.
        constraint_string = "["
        for key_and_mask in constraint.keys_and_masks:
            constraint_string += "[{}, {}]"\
                .format(key_and_mask.key, key_and_mask.mask)
        constraint_string += "]"
        key_constraint['key'] = constraint_string
        json_constraints_dictory_rep.append(key_constraint)
    if isinstance(constraint, KeyAllocatorFixedMaskConstraint):
        key_constraint = _base_key_constraint(edge, "[mask]")
        key_constraint['mask'] = constraint.mask
        json_constraints_dictory_rep.append(key_constraint)
def check(builders):
    # Callback asserting the state after a reconfig. Closes over `self`
    # and `slavebuilders` from the enclosing test (not visible here).
    # Both expected builders must now exist, with their build directories.
    self.assertEqual(sorted(builders.keys()), sorted(["mybld", "yourbld"]))
    self.assertTrue(os.path.exists(os.path.join(self.basedir, "myblddir")))
    self.assertTrue(os.path.exists(os.path.join(self.basedir, "yourblddir")))
    # 'my' should still be the same slavebuilder object
    self.assertEqual(id(slavebuilders["my"]), id(builders["mybld"]))
    # Remember the new builder for later assertions in the enclosing test.
    slavebuilders["your"] = builders["yourbld"]
def dump_transitions(self, state, file):
    # Python 2 code (dict.iteritems, list returned by .keys()).
    # Writes a human-readable summary of a DFA state's transitions:
    # single chars are grouped into ranges per target state, then the
    # special 'bol'/'eol'/'eof'/'else' transitions are listed.
    chars_leading_to_state = {}
    special_to_state = {}
    for (c, s) in state.iteritems():
        if len(c) == 1:
            # Group ordinary characters by target state identity.
            chars = chars_leading_to_state.get(id(s), None)
            if chars is None:
                chars = []
                chars_leading_to_state[id(s)] = chars
            chars.append(c)
        elif len(c) <= 4:
            # Short keys are the special transition names ('bol', ...).
            special_to_state[c] = s
    ranges_to_state = {}
    # NOTE: the loop variable below shadows the `state` parameter,
    # which is no longer needed at this point.
    for state in self.states:
        char_list = chars_leading_to_state.get(id(state), None)
        if char_list:
            ranges = self.chars_to_ranges(char_list)
            ranges_to_state[ranges] = state
    # Python 2: .keys() returns a list, sorted in place for stable output.
    ranges_list = ranges_to_state.keys()
    ranges_list.sort()
    for ranges in ranges_list:
        key = self.ranges_to_string(ranges)
        state = ranges_to_state[ranges]
        file.write(" %s --> State %d\n" % (key, state['number']))
    for key in ('bol', 'eol', 'eof', 'else'):
        state = special_to_state.get(key, None)
        if state:
            file.write(" %s --> State %d\n" % (key, state['number']))
def test_copy(self):
    """A mutable copy compares equal, is a distinct object, and has the base type."""
    clone = self.s.mutableCopy()
    self.assertEqual(self.s, clone)
    self.assertNotEqual(id(self.s), id(clone))
    self.assertIsInstance(clone, self.basetype)
def test_sort_index_inplace(self):
    """sort_index(inplace=True) sorts rows/columns and rebinds column data.

    Modernised to use .loc/.iloc: the .ix indexer was deprecated in
    pandas 0.20 and removed in pandas 1.0.
    """
    frame = DataFrame(np.random.randn(4, 4), index=[1, 2, 3, 4],
                      columns=['A', 'B', 'C', 'D'])

    # axis=0 (label-based row selection to scramble the index)
    unordered = frame.loc[[3, 2, 4, 1]]
    a_id = id(unordered['A'])
    df = unordered.copy()
    df.sort_index(inplace=True)
    expected = frame
    assert_frame_equal(df, expected)
    # In-place sort must not keep sharing the original column object.
    self.assertNotEqual(a_id, id(df['A']))

    df = unordered.copy()
    df.sort_index(ascending=False, inplace=True)
    expected = frame[::-1]
    assert_frame_equal(df, expected)

    # axis=1 (label-based column selection to scramble the columns)
    unordered = frame.loc[:, ['D', 'B', 'C', 'A']]
    df = unordered.copy()
    df.sort_index(axis=1, inplace=True)
    expected = frame
    assert_frame_equal(df, expected)

    df = unordered.copy()
    df.sort_index(axis=1, ascending=False, inplace=True)
    # Positional reversal of the columns.
    expected = frame.iloc[:, ::-1]
    assert_frame_equal(df, expected)
def test_copy_pickle(self):
    """Copying and pickling preserve type and contents, with or without maxlen."""
    for original in (Deque('abc'), Deque('abcde', maxlen=4)):
        # Both __copy__ and the copy constructor reproduce the deque.
        for duplicate in (original.__copy__(), Deque(original)):
            self.assertEqual(type(original), type(duplicate))
            self.assertEqual(list(original), list(duplicate))
        # A round trip through every pickle protocol yields a fresh,
        # equal object of the same type.
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            restored = pickle.loads(pickle.dumps(original, proto))
            self.assertNotEqual(id(original), id(restored))
            self.assertEqual(type(original), type(restored))
            self.assertEqual(list(original), list(restored))
def addpage(self, page):
    """Queue a PDF page for output, resolving inherited attributes.

    Invalidates the cached trailer, validates the /Type, copies the page
    with its inheritable attributes made explicit, and marks the page's
    ancestor chain so the old page-tree nodes are not written out.
    Returns self to allow chaining.
    """
    self._trailer = None
    if page.Type != PdfName.Page:
        raise PdfOutputError('Bad /Type: Expected %s, found %s'
                             % (PdfName.Page, page.Type))
    inheritable = page.inheritable # searches for resources
    # New indirect dict with inherited values pinned onto the page itself,
    # since the original Parent chain will not be emitted.
    newpage = IndirectPdfDict(
        page,
        Resources=inheritable.Resources,
        MediaBox=inheritable.MediaBox,
        CropBox=inheritable.CropBox,
        Rotate=inheritable.Rotate,
    )
    self.pagearray.append(newpage)
    # Map original page (by identity) to its output copy.
    self.pagemap[id(page)] = newpage
    # Add parents in the hierarchy to objects we
    # don't want to output
    killobj = self.killobj
    obj = page.Parent
    while obj is not None:
        objid = id(obj)
        if objid in killobj:
            # Already walked this part of the ancestor chain.
            break
        killobj[objid] = obj
        obj = obj.Parent
    return self
def register_method(self, method, fn):
    """Register an HTTP method and handler function.

    :param method: string, HTTP verb
    :param fn: python function handling the request
    :raises: RouteAlreadyRegisteredError if the route is already registered
    :returns: n/a
    """
    # Guard clause: reject duplicates up front. Membership is tested on
    # the dict itself -- no need to materialize .keys().
    if method in self.methods:
        raise RouteAlreadyRegisteredError(
            'Service Router ({0} - {1}): Method {2} already registered '
            'on Route {3}'
            .format(id(self), self.service_name, method, self.uri))

    logger.debug('Service Router ({0} - {1}): Adding method {2} on '
                 'route {3}'
                 .format(id(self), self.service_name, method, self.uri))
    self.methods[method] = fn
def set_subservice(self, obj): """Add a sub-service object. :param obj: stackinabox.services.StackInABoxService instance :raises: RouteAlreadyRegisteredError if the route is already registered :returns: n/a """ # ensure there is not already a sub-service if self.obj is not None: raise RouteAlreadyRegisteredError( 'Service Router ({0} - {1}): Route {2} already has a ' 'sub-service handler' .format(id(self), self.service_name, self.uri)) # warn if any methods are already registered if len(self.methods): logger.debug( 'WARNING: Service Router ({0} - {1}): Methods detected ' 'on Route {2}. Sub-Service {3} may be hidden.' .format(id(self), self.service_name, self.uri, obj.name)) # Ensure we do not have any circular references assert(obj != self.parent_obj) # if no errors, save the object and update the URI self.obj = obj self.obj.base_url = '{0}/{1}'.format(self.uri, self.service_name)
def __repr__(self, _track=[]):
    # The mutable default argument is deliberate: it acts as a shared
    # recursion guard so that a deque which (indirectly) contains itself
    # renders as '...' instead of recursing forever.
    if id(self) in _track:
        return '...'
    _track.append(id(self))
    r = 'deque(%r)' % (list(self),)
    # Always unwind the guard so later reprs start clean.
    _track.remove(id(self))
    return r
def simplify(axioms): """Remove duplicate axioms, duplicates within axioms, and dominated axioms.""" # Remove duplicates from axiom conditions. for axiom in axioms: axiom.condition.sort() remove_duplicates(axiom.condition) # Remove dominated axioms. axioms_by_literal = {} for axiom in axioms: for literal in axiom.condition: axioms_by_literal.setdefault(literal, set()).add(id(axiom)) axioms_to_skip = set() for axiom in axioms: if id(axiom) in axioms_to_skip: continue # Required to keep one of multiple identical axioms. if not axiom.condition: # empty condition: dominates everything return [axiom] literals = iter(axiom.condition) dominated_axioms = axioms_by_literal[next(literals)] for literal in literals: dominated_axioms &= axioms_by_literal[literal] for dominated_axiom in dominated_axioms: if dominated_axiom != id(axiom): axioms_to_skip.add(dominated_axiom) return [axiom for axiom in axioms if id(axiom) not in axioms_to_skip]
def test_same_sets_same_hash(self):
    """Equal sets built in different orders must hash identically."""
    first = {1, 3, 2}
    second = {2, 3, 1}
    self.assertEqual(DeepHash(first)[id(first)], DeepHash(second)[id(second)])
def test_same_sets_in_lists_same_hash(self):
    """Lists differing only in element order (with equal sets inside) hash the same."""
    first = ["a", {1, 3, 2}]
    second = [{2, 3, 1}, "a"]
    self.assertEqual(DeepHash(first)[id(first)], DeepHash(second)[id(second)])
def test_nested_lists_same_hash3(self):
    """Dicts in lists hash order-independently, including nested lists."""
    first = [{1: [2, 3], 4: [5, [6, 7]]}]
    second = [{4: [[7, 6], 5], 1: [3, 2]}]
    self.assertEqual(DeepHash(first)[id(first)], DeepHash(second)[id(second)])
def test_nested_lists_in_dictionary_same_hash(self):
    """Reordering equal dicts inside a list must not change the hash."""
    first = [{"c": 4}, {"c": 3}]
    second = [{"c": 3}, {"c": 4}]
    self.assertEqual(DeepHash(first)[id(first)], DeepHash(second)[id(second)])
def test_identity_vs_id_primitives(self):
    # Python 2 / PyPy-only test (long, unicode, sys.maxint): on PyPy,
    # id(a) == id(b) must hold exactly when `a is b`, even for primitives.
    if self.cpython_apptest:
        skip("cpython behaves differently")
    import sys
    # Build a bag of primitives covering ints, floats, longs, complex,
    # str/unicode and singletons.
    l = range(-10, 10)
    for i in range(10):
        l.append(float(i))
        l.append(i + 0.1)
        l.append(long(i))
        l.append(i + sys.maxint)
        l.append(i - sys.maxint)
        l.append(i + 1j)
        l.append(i - 1j)
        l.append(1 + i * 1j)
        l.append(1 - i * 1j)
        s = str(i)
        l.append(s)
        u = unicode(s)
        l.append(u)
    l.append(-0.0)
    l.append(None)
    l.append(True)
    l.append(False)
    s = "s"
    l.append(s)
    s = u"s"
    l.append(s)

    # Pairwise: identity and id-equality must agree; identical objects
    # must also compare equal.
    for i, a in enumerate(l):
        for b in l[i:]:
            assert (a is b) == (id(a) == id(b))
            if a is b:
                assert a == b
def test_nested_lists_same_hash2(self):
    """Deeply nested lists hash order-independently at every level."""
    first = [1, 2, [3, [4, 5]]]
    second = [[[5, 4], 3], 2, 1]
    self.assertEqual(DeepHash(first)[id(first)], DeepHash(second)[id(second)])
def _substitute(self, tree, nodes, node_stack):
    '''
    Let (parent(tree), parent(parent(tree)), ..., parent(...(parent(tree))...))
    the parental hierarchy of tree. It can be denoted as (P1, ..., P(n)) where
    P(i) is the node id of the i-th grandparent of tree.

    The substitution algorithm seeks for P(i) with repl_id == P(i). If found
    it replaces P(i) in P(i+1) with nodes = (N1, ..., N(k)). It is guaranteed
    that the node id of N(j) is repl_id.
    '''
    repl_id = nodes[0][0]
    # Fast path: the tree itself already carries the replacement id.
    if tree[0] == repl_id:
        return (tree, node_stack)
    if is_token(repl_id):
        # replace token: splice the replacement in-place into the tree list.
        if is_token(tree[0]):
            tree[:] = nodes[0]
            return (tree, node_stack)
    else:
        # Walk the ancestor stack from the innermost parent outwards,
        # looking for the node whose id matches repl_id.
        i = len(node_stack)-1
        while i>=0:
            P = node_stack[i]
            i-=1
            if repl_id == P[0]:
                try:
                    # The next stack entry holds P's sibling list; replace P
                    # (located by identity) with the new nodes in-place.
                    nd_list = node_stack[i]
                    i-=1
                    for j, N in enumerate(nd_list):
                        if id(N) == id(P):
                            nd_list[:] = nd_list[:j]+list(nodes)+nd_list[j+1:]
                            return (nd_list, node_stack[i-1:])
                except IndexError:
                    # nothing to pop from node_stack
                    P[:] = nodes[0]
                    return (P, node_stack[i-1:])
    self._create_translation_error(tree, nodes, node_stack)
def _find_reference_cycle(self):
    """Locate and log one reference cycle among gc-unfetchable objects."""
    # Shorthand variables, useful if attached with PDB.
    # g = "unfetchable objects"
    # i2o = "id to object, for objects in `g`"
    # gr = "get referrers of an object in the unfetchable objects"
    # gri = "get the id of the above referrers"
    g = gc.garbage
    i2o = {id(o): o for o in g}
    gr = lambda o: [r for r in gc.get_referrers(o) if id(r) in i2o]
    gri = lambda o: [id(r) for r in gr(o)]

    # Find a loop by walking unfetched objects, stepping to an arbitrary
    # referrer each time. When an object that has already been encountered
    # is encountered again a loop has been found.
    #
    # The loop is described in terms of object ids, to avoid having to
    # invoke objects' __eq__ method.
    def find_loop(start_idx=0):
        path = [id(g[start_idx])]
        while True:
            # Step to the first referrer that is itself unfetchable.
            path.append(gri(i2o[path[-1]])[0])
            # This check could be made more efficient using a set to track
            # elements in `path`.
            if path[-1] in path[:-1]:
                return path[path.index(path[-1]):]

    loop = find_loop()
    logger.error("Reference cycle of size {} found:".format(len(loop) - 1))
    for obj_id in loop:
        logger.error(" {!r} (id: {})".format(i2o[obj_id], obj_id))
    if _DEBUG_REFERENCE_CYCLES:
        # Materialize the objects and drop into the debugger for inspection.
        loop = [i2o[o] for o in loop]
        import pdb
        pdb.set_trace()
def testBasicD(self):
    """Freeze/thaw round trip preserves a Slur's spanned elements and their ids."""
    from music21 import stream, note, converter, spanner
    import copy
    s = stream.Stream()
    n1 = note.Note('d2', quarterLength=2.0)
    n2 = note.Note('e2', quarterLength=2.0)
    sp = spanner.Slur(n1, n2)
    s.append(n1)
    s.append(n2)
    s.append(sp)

    # the deepcopy is what creates the bug in the preservation of a weakref
    #temp = converter.freezeStr(s)
    sCopy = copy.deepcopy(s)
    temp = converter.freezeStr(sCopy)

    post = converter.thawStr(temp)
    self.assertEqual(len(post.notes), 2)
    self.assertEqual(str(post.notes[0].pitch), 'D2')
    spPost = post.spanners[0]
    # The thawed spanner must point at the thawed notes, not the originals.
    self.assertEqual(spPost.getSpannedElements(), [post.notes[0], post.notes[1]])
    self.assertEqual(spPost.getSpannedElementIds(), [id(post.notes[0]), id(post.notes[1])])
def test_constructor(self):
    """Series construction: index sharing, mixed dtypes, and error cases.

    Uses np.nan: the np.NaN alias was removed in NumPy 2.0 (it was the
    same float object as np.nan).
    """
    assert self.ts.index.is_all_dates

    # Pass in Series
    derived = Series(self.ts)
    assert derived.index.is_all_dates
    assert tm.equalContents(derived.index, self.ts.index)
    # Ensure new index is not created
    assert id(self.ts.index) == id(derived.index)

    # Mixed type Series
    mixed = Series(['hello', np.nan], index=[0, 1])
    assert mixed.dtype == np.object_
    assert mixed[1] is np.nan

    assert not self.empty.index.is_all_dates
    assert not Series({}).index.is_all_dates
    # 2-D input is rejected.
    pytest.raises(Exception, Series, np.random.randn(3, 3),
                  index=np.arange(3))

    mixed.name = 'Series'
    rs = Series(mixed).name
    xp = 'Series'
    assert rs == xp

    # raise on MultiIndex GH4187
    m = MultiIndex.from_arrays([[1, 2], [3, 4]])
    pytest.raises(NotImplementedError, Series, m)
def jellyToNode(self, obj):
    """Serialize obj to a DOM node (Python 2 code: types.DictionaryType, has_key).

    Dicts are emitted as <dictionary> nodes; a dict already seen gets a
    <reference> node keyed by a counter so shared/cyclic structures are
    preserved. Unicode strings become <unicode> nodes; everything else
    is delegated to the base DOMJellier.
    """
    objType = type(obj)
    if objType is types.DictionaryType:
        # Already prepared (seen before, tracked by identity)?
        if self.prepared.has_key(id(obj)):
            oldNode = self.prepared[id(obj)][1]
            if oldNode.hasAttribute("reference"):
                # it's been referenced already
                key = oldNode.getAttribute("reference")
            else:
                # it hasn't been referenced yet
                self._ref_id = self._ref_id + 1
                key = str(self._ref_id)
                oldNode.setAttribute("reference", key)
            node = self.document.createElement("reference")
            node.setAttribute("key", key)
            return node
        node = self.document.createElement("UNNAMED")
        self.prepareElement(node, obj)
        node.tagName = "dictionary"
        # Deterministic child order via self.priorize.
        for k, v in sorted(obj.items(), key=self.priorize):
            n = self.jellyToNode(k)
            n.setAttribute("role", "key")
            n2 = self.jellyToNode(v)
            self.setExtendedAttributes(n, n2)
            node.appendChild(n)
            node.appendChild(n2)
        return node
    elif objType is types.UnicodeType:
        node = self.document.createElement("unicode")
        s = obj.encode('utf-8')
        node.setAttribute("value", s)
        return node
    else:
        return DOMJellier.jellyToNode(self, obj)
def updateLEDTable(self, current=None):
    """Rebuild the LED table widget from self.LED_LIST and self.led_outputs.

    :param current: identity key of the row to re-select; compared
        against id(row). NOTE(review): `row` is the enumerate index (an
        int), so id(row) is the id of an int object -- confirm callers
        really pass that and not a stable per-row key.
    """
    self.LEDtable.clear()
    self.LEDtable.setRowCount(len(self.LED_LIST))
    self.LEDtable.setColumnCount(3)
    self.LEDtable.setHorizontalHeaderLabels(["Wavelength (nm)", "Amplitude (mW)", " Duty Cycle "])
    self.LEDtable.setEditTriggers(QTableWidget.NoEditTriggers) # Don't allow edits
    self.LEDtable.setSelectionBehavior(QTableWidget.SelectRows) # Select whole row
    self.LEDtable.setSelectionMode(QTableWidget.SingleSelection) # Select one row at a time
    selected = None
    # NOTE(review): self.points is not used anywhere below -- confirm
    # this sort is still needed here.
    self.points.sort()
    for row, (wavelength, amplitude, dummy1, dummy2) in enumerate(self.LED_LIST):
        item = QTableWidgetItem("%d" % wavelength)
        item.setTextAlignment(Qt.AlignCenter)
        if current is not None and current == id(row):
            selected = item
        # Python 2 `long`; row identity stored as user data for later lookup.
        item.setData(Qt.UserRole, QVariant(long(id(row))))
        self.LEDtable.setItem(row, 0, item) #void setItem ( int row, int column, QTableWidgetItem * item )
        if row < len(self.led_outputs):
            # Measured amplitude and duty cycle (output / nominal amplitude).
            item = QTableWidgetItem("%.2f" % self.led_outputs[row])
            item.setTextAlignment(Qt.AlignCenter)
            self.LEDtable.setItem(row, 1, item)
            item = QTableWidgetItem("%.3f" % (self.led_outputs[row]/amplitude))
            item.setTextAlignment(Qt.AlignCenter)
            self.LEDtable.setItem(row, 2, item)
    self.LEDtable.resizeColumnsToContents()
    if selected is not None:
        selected.setSelected(True)
        self.LEDtable.setCurrentItem(selected)
        self.LEDtable.scrollToItem(selected)
def __repr__(self):
    """Debug representation including the object's address."""
    addr = hex(id(self))
    if self.cdn:
        return '<RH cdn Repo: {0} within distro:{1}, object: {2}>'.format(
            self.data['repository'], self.distro, addr)
    return '<RH custom Repo url: {0} object: {1}>'.format(self.url, addr)
def append(self, client):
    # TODO make this much more clever... we just need to loop to the
    # right point
    # Appends via the list base class, then re-sorts/re-stacks along x.
    list.append(self, client)
    # Python 2 print statement -- debug trace of the container identity.
    print id(self), "append", self
    self.x_sort()
    self.update_x_stack()
def test_id_on_strs(self):
    # Python 2 / PyPy test: short strings round-tripped through
    # wrap/unwrap must keep the same id (interned-like identity).
    if self.appdirect:
        skip("cannot run this test as apptest")
    u = u"a"
    assert id(self.unwrap_wrap_unicode(u)) == id(u)
    s = "a"
    assert id(self.unwrap_wrap_str(s)) == id(s)
def _dotEdge(self, child):
    """Return a Graphviz edge statement from this node to *child*.

    Nodes are named by their object ids; the edge label comes from the
    child's blurb.
    """
    label = child._dotBlurb()
    return '%i -> %i [label="%s"]' % (id(self), id(child), label)
def singleton(cls):
    """Class decorator: every call of the decorated class returns one shared instance.

    The first call creates the instance (using that call's arguments);
    subsequent calls ignore their arguments and return the cached object.
    functools.wraps preserves the class's __name__/__doc__ on the wrapper.
    """
    # Leading underscore marks the cache as module-private by convention.
    # Dictionary mapping class -> its single instance.
    _instance = {}

    @functools.wraps(cls, updated=[])
    def _singleton(*args, **kwargs):
        # Create the instance only on first use, then always return it.
        if cls not in _instance:
            _instance[cls] = cls(*args, **kwargs)
        return _instance[cls]

    return _singleton


@singleton
class A(object):
    a = 1

    def __init__(self, x=0):
        self.x = x
        print('这是A的类的初始化方法')


a1 = A(2)
print(a1.x)
a2 = A(3)
print(a2.x)
print(id(a1), id(a2))
def before_interact(roots):
    """
    Called before each interaction to choose the focused and grabbed
    displayables.
    """

    global override
    global new_grab
    global grab

    # a list of focusable, name, screen tuples.
    fwn = [ ]

    def callback(f, n):
        fwn.append((f, n, renpy.display.screen._current_screen))

    # Collect every focusable displayable under the given roots.
    for root in roots:
        try:
            root.find_focusable(callback, None)
        except renpy.display.layout.IgnoreLayers:
            pass

    # Assign a full name to each focusable.
    namecount = { }
    fwn2 = [ ]

    for fwn_tuple in fwn:
        f, n, screen = fwn_tuple

        # full_focus_name is (name, serial) so repeated names stay unique.
        serial = namecount.get(n, 0)
        namecount[n] = serial + 1

        if f is None:
            continue

        f.full_focus_name = n, serial

        replaced_by[id(f)] = f

        fwn2.append(fwn_tuple)

    fwn = fwn2

    # Is this a default change?
    default = True

    # We assume id(None) is not in replaced_by.
    replaced_by.pop(None, None)

    # If there's something with the same full name as the current widget,
    # it becomes the new current widget.
    current = get_focused()
    current = replaced_by.get(id(current), current)

    # Update the grab.
    grab = replaced_by.get(id(grab), None)

    if override is not None:
        d = renpy.exports.get_displayable(base=True, *override)
        if (d is not None) and (current is not d) and not grab:
            current = d
            default = False
        override = None

    if current is not None:
        current_name = current.full_focus_name
        for f, n, screen in fwn:
            if f.full_focus_name == current_name:
                current = f
                # NOTE(review): `argument` is not defined in this function;
                # presumably a module global -- confirm.
                set_focused(f, argument, screen)
                break
        else:
            current = None

    if grab is not None:
        current = grab

    # Otherwise, focus the default widget.
    if (current is None) and renpy.display.interface.start_interact:
        defaults = [ ]
        for f, n, screen in fwn:
            if f.default:
                defaults.append((f.default, f, screen))
        if defaults:
            if len(defaults) > 1:
                # Highest `default` priority wins.
                defaults.sort(key=operator.itemgetter(0))
            _, f, screen = defaults[-1]
            current = f
            set_focused(f, None, screen)

    if current is None:
        set_focused(None, None, None)

    # Finally, mark the current widget as the focused widget, and
    # all other widgets as unfocused.
    for f, n, screen in fwn:
        if f is not current:
            renpy.display.screen.push_current_screen(screen)
            try:
                f.unfocus(default=default)
            finally:
                renpy.display.screen.pop_current_screen()

    if current:
        # NOTE(review): `screen_of_focused` is not assigned in this
        # function; presumably a module global -- confirm.
        renpy.display.screen.push_current_screen(screen_of_focused)
        try:
            current.focus(default=default)
        finally:
            renpy.display.screen.pop_current_screen()

    # Clear replaced_by.
    replaced_by.clear()
def differentiate(expr, wrt=None, wrt_list=None):
    """Return derivative of expression.

    This function returns an expression or list of expression objects
    corresponding to the derivative of the passed expression 'expr' with
    respect to a variable 'wrt' or list of variables 'wrt_list'

    Args:
        expr (Expression): Pyomo expression
        wrt (Var): Pyomo variable
        wrt_list (list): list of Pyomo variables

    Returns:
        Expression or list of Expression objects
    """
    if not _sympy_available:
        raise RuntimeError(
            "The sympy module is not available. "
            "Cannot perform automatic symbolic differentiation.")
    # Exactly one of wrt / wrt_list must be given.
    if not ((wrt is None) ^ (wrt_list is None)):
        raise ValueError(
            "differentiate(): Must specify exactly one of wrt and wrt_list")
    if wrt is not None:
        wrt_list = [wrt]
    else:
        # Copy the list because we will normalize things in place below
        wrt_list = list(wrt_list)

    # Map Pyomo variables (by identity) to fresh sympy symbols and back.
    pyomo_vars = list(EXPR.identify_variables(expr))
    sympy_vars = [sympy.var('x%s' % i) for i in range(len(pyomo_vars))]
    sympy2pyomo = dict(zip(sympy_vars, pyomo_vars))
    pyomo2sympy = dict((id(pyomo_vars[i]), sympy_vars[i])
                       for i in range(len(pyomo_vars)))

    # Normalize each target to a tuple of sympy symbols; mark targets
    # mentioning a variable not present in expr (derivative is 0).
    ans = []
    for i, target in enumerate(wrt_list):
        if target.__class__ is not tuple:
            wrt_list[i] = target = (target, )
        mismatch_target = False
        for var in target:
            if id(var) not in pyomo2sympy:
                mismatch_target = True
                break
        wrt_list[i] = tuple(pyomo2sympy.get(id(var), None) for var in target)
        ans.append(0 if mismatch_target else None)

    # If there is nothing to do, do nothing
    if all(i is not None for i in ans):
        return ans if wrt is None else ans[0]

    # Round-trip the expression through sympy for differentiation.
    tmp_expr = EXPR.clone_expression(expr, substitute=pyomo2sympy)
    tmp_expr = _map_intrinsic_functions(tmp_expr, sympy2pyomo)
    tmp_expr = str(tmp_expr)

    sympy_expr = sympy.sympify(
        tmp_expr, locals=dict((str(x), x) for x in sympy_vars))

    for i, target in enumerate(wrt_list):
        if ans[i] is None:
            sympy_ans = sympy_expr.diff(*target)
            ans[i] = _map_sympy2pyomo(sympy_ans, sympy2pyomo)

    return ans if wrt is None else ans[0]
def __hash__(self) -> int:
    """Identity-based hash: keyed on the wrapped expression object, not its value."""
    expr_identity = id(self.expr)
    return hash(expr_identity)
def ID(obj): """Get an unique ID from object for dot node names""" return hex(id(obj)).replace('-','_')
def test_using_cache_after_loader(self):
    """The object returned by import is the one the loader put in sys.modules."""
    # [from cache on return]
    with self.create_mock('module') as mock, \
            util.import_state(meta_path=[mock]):
        loaded = import_util.import_('module')
        self.assertEqual(id(loaded), id(sys.modules['module']))
def _braneyBlurb(self, history, avg, cost, poset):
    """Emit tab-separated 'braney' records describing this event's segments.

    Walks the affected genome span, coalescing positions that follow the
    ancestral sequence into single segments, and writes adjacency ('A')
    and segment lines. Column layout is the braney file format --
    NOTE(review): column meanings are not visible here; confirm against
    the braney format spec before editing the record fields.
    """
    nodes = sorted(avg.nodes())
    prevalence = history.weights[self]
    edgeIndex = 0
    lines = []
    rank = poset.depth[self]
    # Flanking node just before the rearranged span (wraps around the genome).
    leftFlank = getNode(nodes, self.parent.genome[(self.start - 1) % len(self.parent.genome)], True)
    previous = leftFlank
    i = 0
    while i < self.length:
        start = getNode(nodes, self.parent.genome[(self.start + i) % len(self.parent.genome)], False)
        # Extend the segment while successive positions follow the
        # ancestral sequence.
        shift = 1;
        while i + shift < self.length and self.followsAncestralSequence(history, self.start + i + shift):
            shift += 1
        finish = getNode(nodes, self.parent.genome[(self.start + i + shift - 1) % len(self.parent.genome)], True)
        # Adjacency record from the previous breakpoint to this segment start.
        lines.append("\t".join(map(str, ['A', previous.chr, previous.pos, orientString(previous), start.chr, start.pos, orientString(start),-prevalence,prevalence, 0,0,rank,edgeIndex,rank,cost,cost,1,1,id(self)])))
        # Segment record covering start..finish.
        lines.append("\t".join(map(str, [start.chr, start.pos, finish.pos, prevalence,prevalence, 0,0,rank,edgeIndex + 1,rank,cost,cost,1,1,id(self)])))
        previous = finish
        edgeIndex += 2
        i += shift
    rightFlank = getNode(nodes, self.parent.genome[(self.start + self.length) % len(self.parent.genome)], False)
    # Closing adjacencies: last segment -> right flank, then right flank
    # back to the left flank.
    lines.append("\t".join(map(str, ['A', previous.chr, previous.pos, orientString(previous), rightFlank.chr, rightFlank.pos, orientString(rightFlank),-prevalence,prevalence, 0,0,rank,edgeIndex,rank,cost,cost,1,1,id(self)])))
    lines.append("\t".join(map(str, ['A', rightFlank.chr, rightFlank.pos, orientString(rightFlank), leftFlank.chr, leftFlank.pos, orientString(leftFlank),prevalence,prevalence, 0,0,rank,edgeIndex+1,rank,cost,cost,1,1,id(self)])))
    return "\n".join(lines)
def __repr__(self):
    """Concise debug string: project name, address, and module count."""
    module_count = len(self.modules)
    return "<Project %r at %s (%s modules)>" % (
        self.name,
        id(self),
        module_count,
    )
def _dotLabel(self, weights):
    """Graphviz node statement: the genome, plus this node's weight if it is a leaf."""
    label = str(self.genome)
    if not self.children:
        # Leaves also display their weight.
        label += " (%f)" % weights[self]
    return '%i [label="%s"]' % (id(self), label)
def simple_dict_to_object(self, doc, cls, validator=None, req_enc=None):
    """Converts a flat dict to a native python object.

    See :func:`spyne.model.complex.ComplexModelBase.get_flat_type_info`.

    :param doc: flat dict mapping dotted keys to lists of string values
        (e.g. a parsed query string) -- presumably; confirm at call sites.
    :param cls: target model class (AnyDict passes through unchanged;
        otherwise must be a ComplexModelBase subclass).
    :param validator: pass self.SOFT_VALIDATION to enable soft validation.
    :param req_enc: request encoding used to decode byte strings.
    """
    if issubclass(cls, AnyDict):
        return doc

    if not issubclass(cls, ComplexModelBase):
        raise NotImplementedError("Interestingly, deserializing non complex"
                                  " types is not yet implemented. You can"
                                  " use a ComplexModel to wrap that field."
                                  " Otherwise, patches are welcome.")

    # this is for validating cls.Attributes.{min,max}_occurs
    frequencies = defaultdict(lambda: defaultdict(int))
    if validator is self.SOFT_VALIDATION:
        _fill(cls, frequencies)

    retval = cls.get_deserialization_instance()
    simple_type_info = cls.get_simple_type_info(cls,
                                                hier_delim=self.hier_delim)
    idxmap = defaultdict(dict)

    # Sorted iteration gives deterministic processing order of keys.
    for orig_k, v in sorted(doc.items(), key=lambda _k: _k[0]):
        # Strip array indexes ("a[0].b" -> "a.b") to look up the member.
        k = RE_HTTP_ARRAY_INDEX.sub("", orig_k)

        member = simple_type_info.get(k, None)
        if member is None:
            logger.debug("discarding field %r" % k)
            continue

        # extract native values from the list of strings in the flat dict
        # entries.
        value = []
        for v2 in v:
            # some wsgi implementations pass unicode strings, some pass str
            # strings. we get unicode here when we can and should.
            # (Python 2 code: `unicode` builtin.)
            if v2 is not None and req_enc is not None \
                    and not issubclass(member.type, String) \
                    and issubclass(member.type, Unicode) \
                    and not isinstance(v2, unicode):
                try:
                    v2 = v2.decode(req_enc)
                except UnicodeDecodeError as e:
                    raise ValidationError(v2, "%r while decoding %%r" % e)

            try:
                if (validator is self.SOFT_VALIDATION and
                        not member.type.validate_string(member.type, v2)):
                    raise ValidationError((orig_k, v2))
            except TypeError:
                raise ValidationError((orig_k, v2))

            # Convert to the native type; File/ByteArray need the
            # configured binary encoding.
            if issubclass(member.type, File):
                if isinstance(v2, File.Value):
                    native_v2 = v2
                else:
                    native_v2 = self.from_unicode(member.type, v2,
                                                  self.binary_encoding)
            elif issubclass(member.type, ByteArray):
                native_v2 = self.from_unicode(member.type, v2,
                                              self.binary_encoding)
            else:
                try:
                    native_v2 = self.from_unicode(member.type, v2)
                except ValidationError as e:
                    raise ValidationError(str(e),
                        "Validation failed for %r.%r: %%r" % (cls, k))

            if (validator is self.SOFT_VALIDATION and
                    not member.type.validate_native(member.type, native_v2)):
                raise ValidationError((orig_k, v2))

            value.append(native_v2)

        # assign the native value to the relevant class in the nested object
        # structure.
        cinst = retval
        ctype_info = cls.get_flat_type_info(cls)
        idx, nidx = 0, 0
        pkey = member.path[0]
        cfreq_key = cls, idx

        # Walk down member.path, creating intermediate instances/arrays;
        # explicit indexes from the key (e.g. "[2]") are consumed in order.
        indexes = deque(RE_HTTP_ARRAY_INDEX.findall(orig_k))
        for pkey in member.path[:-1]:
            nidx = 0
            ncls, ninst = ctype_info[pkey], getattr(cinst, pkey, None)
            if issubclass(ncls, Array):
                # Unwrap the Array to its (single) element type.
                ncls, = ncls._type_info.values()

            mo = ncls.Attributes.max_occurs
            if mo > 1:
                if len(indexes) == 0:
                    nidx = 0
                else:
                    nidx = int(indexes.popleft())

                if ninst is None:
                    ninst = []
                    cinst._safe_set(pkey, ninst, ncls)

                if self.strict_arrays:
                    # Strict mode: indexes must be contiguous; at most one
                    # new slot may be appended per key.
                    if len(ninst) == 0:
                        newval = ncls.get_deserialization_instance()
                        ninst.append(newval)
                        frequencies[cfreq_key][pkey] += 1

                    if nidx > len(ninst):
                        raise ValidationError(orig_k,
                                              "%%r Invalid array index %d." % idx)
                    if nidx == len(ninst):
                        ninst.append(ncls.get_deserialization_instance())
                        frequencies[cfreq_key][pkey] += 1

                    cinst = ninst[nidx]

                else:
                    # Lenient mode: sparse indexes are mapped (per list,
                    # keyed by identity) to compact insert positions.
                    _m = idxmap[id(ninst)]
                    cidx = _m.get(nidx, None)
                    if cidx is None:
                        cidx = _s2cmi(_m, nidx)
                        newval = ncls.get_deserialization_instance()
                        ninst.insert(cidx, newval)
                        frequencies[cfreq_key][pkey] += 1
                    cinst = ninst[cidx]

                    assert cinst is not None, ninst

            else:
                if ninst is None:
                    ninst = ncls.get_deserialization_instance()
                    cinst._safe_set(pkey, ninst, ncls)
                    frequencies[cfreq_key][pkey] += 1

                cinst = ninst

            cfreq_key = cfreq_key + (ncls, nidx)
            idx = nidx
            ctype_info = ncls.get_flat_type_info(ncls)

        # Finally set the leaf value (extend existing list for arrays).
        frequencies[cfreq_key][member.path[-1]] += len(value)
        if member.type.Attributes.max_occurs > 1:
            _v = getattr(cinst, member.path[-1], None)
            if _v is None:
                cinst._safe_set(member.path[-1], value, member.type)
            else:
                _v.extend(value)
            logger.debug("\tset array %r(%r) = %r" %
                         (member.path, pkey, value))
        else:
            cinst._safe_set(member.path[-1], value[0], member.type)
            logger.debug("\tset default %r(%r) = %r" %
                         (member.path, pkey, value))

    # Enforce min/max_occurs constraints collected above.
    if validator is self.SOFT_VALIDATION:
        for k, d in frequencies.items():
            for path_cls in k[:-1:2]:
                if not path_cls.Attributes.validate_freq:
                    break
            else:
                _check_freq_dict(path_cls, d)

    return retval
def _braneyBlurb(self, history, avg, cost, poset):
    """Emit four tab-separated 'braney' adjacency records for this event.

    The records trace the cycle startA -> startB -> finishA -> finishB ->
    startA around the affected genome span (indices wrap modulo the
    genome length). NOTE(review): column meanings follow the braney file
    format, not visible here -- confirm before editing the fields.
    """
    nodes = sorted(avg.nodes())
    rank = poset.depth[self]
    # Breakpoint nodes flanking and bounding the affected span.
    startA = getNode(nodes, self.parent.genome[(self.start - 1) % len(self.parent.genome)], True)
    startB = getNode(nodes, self.parent.genome[(self.start) % len(self.parent.genome)], False)
    finishB = getNode(nodes, self.parent.genome[(self.start + self.length - 1) % len(self.parent.genome)], True)
    finishA = getNode(nodes, self.parent.genome[(self.start + self.length) % len(self.parent.genome)], False)
    prevalence = history.weights[self]
    return "\n".join([
        "\t".join(map(str, ['A', startA.chr, startA.pos, orientString(startA), startB.chr, startB.pos, orientString(startB),-prevalence,prevalence, 0,0,rank,0,rank,cost,cost,1,1,id(self)])),
        "\t".join(map(str, ['A', startB.chr, startB.pos, orientString(startB), finishA.chr, finishA.pos, orientString(finishA),prevalence,prevalence, 0,0,rank,1,rank,cost,cost,1,1,id(self)])),
        "\t".join(map(str, ['A', finishA.chr, finishA.pos, orientString(finishA), finishB.chr, finishB.pos, orientString(finishB),-prevalence,prevalence, 0,0,rank,2,rank,cost,cost,1,1,id(self)])),
        "\t".join(map(str, ['A', finishB.chr, finishB.pos, orientString(finishB), startA.chr, startA.pos, orientString(startA),prevalence,prevalence, 0,0,rank,3,rank,cost,cost,1,1,id(self)])),
    ])
""" 여러 줄 주석 """ # 한줄 주석 v1 = '안녕 파이선' # 전부 객체(instance)이기 때문에 타입 무관 v1 = 5 # 덮어쓰기 print(v1) # 들여쓰기는 { } 와 같음 v2 = 20.5 v3 = v2 # 객체의 주소를 기억 print(v1, v2, v3) print(id(v1), id(v2), id(v3)) # id() : 주소값 반환 print(v1 is v2, v1 == v2) # is 는 주소를 == 는 값을 비교 print(v2 is v3, v2 == v3) print() print(1000 is 10 ** 3) print(1000 == 10 ** 3) print(1000, id(1000)) print(10 ** 3, id(10 ** 3)) print() A = 1; a = 2 # 한 줄에 두 개의 입력이 있으면 ; 으로 구분
def object_to_simple_dict(self, inst_cls, value, retval=None, prefix=None,
                          subvalue_eater=lambda prot, v, t: v, tags=None):
    """Converts a native python object to a flat dict.

    See :func:`spyne.model.complex.ComplexModelBase.get_flat_type_info`.

    :param inst_cls: model class describing `value`.
    :param value: the instance (or simple value) to flatten.
    :param retval: accumulator dict, shared across recursive calls.
    :param prefix: list of key segments joined with self.hier_delim.
    :param subvalue_eater: hook applied to each leaf value.
    :param tags: set of id()s used as a recursion guard for cycles.
    """
    if retval is None:
        retval = {}

    if prefix is None:
        prefix = []

    # Optional missing value: nothing to emit.
    if value is None and inst_cls.Attributes.min_occurs == 0:
        return retval

    if tags is None:
        tags = set([id(value)])
    else:
        if id(value) in tags:
            return retval
    # NOTE(review): ids of nested instances are never added to `tags`
    # here, so only the root object is guarded against revisiting --
    # confirm whether deeper cycles are handled elsewhere.

    if issubclass(inst_cls, ComplexModelBase):
        fti = inst_cls.get_flat_type_info(inst_cls)

        for k, v in fti.items():
            new_prefix = list(prefix)
            new_prefix.append(k)
            subvalue = getattr(value, k, None)
            if (issubclass(v, Array) or v.Attributes.max_occurs > 1) and \
                    subvalue is not None:
                if issubclass(v, Array):
                    # Unwrap the Array to its (single) element type.
                    subtype, = v._type_info.values()
                else:
                    subtype = v

                if issubclass(subtype, SimpleModel):
                    # Simple elements: emit the whole list under one key.
                    key = self.hier_delim.join(new_prefix)
                    l = []
                    for ssv in subvalue:
                        l.append(subvalue_eater(self, ssv, subtype))
                    retval[key] = l

                else:
                    # Complex elements: recurse with an indexed key
                    # segment, e.g. "parent[0]".
                    last_prefix = new_prefix[-1]
                    for i, ssv in enumerate(subvalue):
                        new_prefix[-1] = '%s[%d]' % (last_prefix, i)
                        self.object_to_simple_dict(subtype, ssv,
                                                   retval, new_prefix,
                                                   subvalue_eater=subvalue_eater,
                                                   tags=tags)

            else:
                self.object_to_simple_dict(v, subvalue, retval,
                                           new_prefix,
                                           subvalue_eater=subvalue_eater,
                                           tags=tags)

    else:
        # Leaf: join the accumulated prefix into the flat key.
        key = self.hier_delim.join(prefix)

        if key in retval:
            raise ValueError("%r.%s conflicts with previous value %r" %
                             (inst_cls, key, retval[key]))

        retval[key] = subvalue_eater(self, value, inst_cls)

    return retval
class Frame:
    """A single frame in a traceback.

    Wraps one ``tb_frame`` and exposes the pieces the interactive debugger
    needs: source location, locals/globals, rendered HTML, and an eval
    console bound to the frame's namespaces.
    """

    def __init__(self, exc_type, exc_value, tb, app):
        self._cache = {}
        self._app = app
        self.lineno = tb.tb_lineno
        self.function_name = tb.tb_frame.f_code.co_name
        self.locals = tb.tb_frame.f_locals
        self.globals = tb.tb_frame.f_globals

        fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
        # Map compiled artifacts back to their .py source file.
        if fn[-4:] in ('.pyo', '.pyc'):
            fn = fn[:-1]
        # if it's a file on the file system resolve the real filename.
        if os.path.isfile(fn):
            fn = os.path.realpath(fn)
        self.filename = fn
        self.module = self.globals.get('__name__')
        self.loader = self.globals.get('__loader__')
        self.code = tb.tb_frame.f_code

        # support for paste's traceback extensions
        self.hide = self.locals.get('__traceback_hide__', False)
        info = self.locals.get('__traceback_info__')
        if info is not None:
            try:
                info = str(info)
            except UnicodeError:
                # NOTE(review): str has no .decode() on Python 3 — this
                # fallback is a Python 2 leftover and would raise
                # AttributeError if ever reached. Confirm and remove.
                info = str(info).decode('utf-8', 'replace')
        self.info = info

    def render(self):
        """Render a single frame in a traceback."""
        return FRAME_HTML % {
            'id': self.id,
            'filename': escape(self.filename),
            'lineno': self.lineno,
            'function_name': escape(self.function_name),
            'current_line': escape(self.current_line.strip())
        }

    def get_in_frame_range(self):
        """Return ``(start, end)`` line indices of the enclosing function
        definition in the source, or None when the code object carries no
        line information."""
        # find function definition and mark lines
        if hasattr(self.code, 'co_firstlineno'):
            lineno = self.code.co_firstlineno - 1
            # Walk upwards until a line matching the funcdef regex is found.
            while lineno > 0:
                if _funcdef_re.match(self.sourcelines[lineno]):
                    break
                lineno -= 1
            try:
                offset = len(inspect.getblock(
                    [x + '\n' for x in self.sourcelines[lineno:]]))
            except TokenError:
                offset = 0
            return (lineno, lineno + offset)
        return None

    def eval(self, code, mode='single'):
        """Evaluate code in the context of the frame.

        Strings are compiled first; under ``mode != 'exec'`` the result of
        the expression is returned, otherwise the code is exec'd for its
        side effects on the frame's namespaces.
        """
        if isinstance(code, str):
            # NOTE(review): the doubled isinstance check and the
            # UTF8_COOKIE prepend look like Python 2 leftovers; compile()
            # accepts the resulting bytes because the cookie declares the
            # utf-8 encoding. Confirm before simplifying.
            if isinstance(code, str):
                code = UTF8_COOKIE + code.encode('utf-8')
            code = compile(code, '<interactive>', mode)
        if mode != 'exec':
            return eval(code, self.globals, self.locals)
        exec(code, self.globals, self.locals)

    @reify
    def sourcelines(self):
        """The sourcecode of the file as list of unicode strings."""
        # get sourcecode from loader or file
        source = None
        if self.loader is not None:
            try:
                if hasattr(self.loader, 'get_source'):
                    source = self.loader.get_source(self.module)
                elif hasattr(self.loader, 'get_source_by_code'):
                    source = self.loader.get_source_by_code(self.code)
            except Exception:
                # we munch the exception so that we don't cause troubles
                # if the loader is broken.
                pass

        if source is None:
            try:
                f = open(self.filename)
            except IOError:
                return []
            try:
                source = f.read()
            finally:
                f.close()

        # already unicode? return right away
        if isinstance(source, str):
            return source.splitlines()

        # yes. it should be ascii, but we don't want to reject too many
        # characters in the debugger if something breaks
        charset = 'utf-8'
        if source.startswith(UTF8_COOKIE):
            source = source[3:]
        else:
            # Scan the first couple of lines for a PEP 263 coding cookie.
            for idx, match in enumerate(_line_re.finditer(source)):
                match = _line_re.search(match.group())
                if match is not None:
                    charset = match.group(1)
                    break
                if idx > 1:
                    break

        # on broken cookies we fall back to utf-8 too
        try:
            codecs.lookup(charset)
        except LookupError:
            charset = 'utf-8'

        return source.decode(charset, 'replace').splitlines()

    @property
    def current_line(self):
        """The source line this frame stopped on, or '' if out of range."""
        try:
            return self.sourcelines[self.lineno - 1]
        except IndexError:
            return text_('')

    @reify
    def console(self):
        # Interactive console bound to this frame's globals/locals.
        return Console(self._app, self.globals, self.locals)

    # The frame's identity doubles as its unique debugger id.
    id = property(lambda x: id(x))
def __repr__(self):
    """Debug representation naming both the proxy and its wrapped target,
    each with its memory address."""
    target = self.__wrapped__
    return '<{} at 0x{:x} for {} at 0x{:x}>'.format(
        type(self).__name__, id(self),
        type(target).__name__, id(target))
def _SpawnExplosion(self, warheadIdx):
    """Spawn the explosion effect for one warhead of this missile.

    Counts released warheads, tears down the missile model once every
    warhead has fired, then loads and places the explosion effect, attaches
    an audio emitter, and shakes the camera.

    :param warheadIdx: index into ``self.model.warheads``; out-of-range
        indices fall back to the model's own position.
    """
    if not self.model:
        self.LogWarn('Missile::_SpawnExplosion no model')
        return
    modelPosition = self.model.worldPosition
    if warheadIdx < len(self.model.warheads):
        warheadPosition = self.model.warheads[warheadIdx].explosionPosition
    else:
        warheadPosition = modelPosition
    self.warheadsReleased += 1
    if self.exploded:
        return
    if self.warheadsReleased == self.totalWarheadCount:
        # Last warhead: release the missile model and any delayed ball.
        if self.model:
            self.model.target = None
            self.model.explosionCallback = None
            self.RemoveAndClearModel(self.model, self.globalsGlob.GetScene())
            self.model = None
        if self.delayedBall:
            self.globalsGlob.DestroyClientBall(self.delayedBall)
            self.delayedBall = None
        self.exploded = True
    actualModel = self.explosionManager.GetExplosion(self.explosionPath,
            preloaded=True, callback=self.CleanupExplosion)
    if actualModel is None:
        self.LogError('missile::LoadModel failed to get explosion ' + str(self.explosionPath))
        self.explosionManager.Cancel(self.explosionPath, 1)
        return
    explosionBall = None
    if self.enabled:
        targetBall = self.globalsGlob.GetTargetBall(self.targetId)
        # Prefer the warhead position when we still have a target; otherwise
        # fall back to the model position (normalised to a tuple).
        if targetBall is not None:
            explosionPosition = warheadPosition
        elif type(modelPosition) == tuple:
            explosionPosition = modelPosition
        else:
            explosionPosition = (modelPosition.x, modelPosition.y, modelPosition.z)
        explosionBall = self.globalsGlob.SpawnClientBall(explosionPosition)
        actualModel.translationCurve = explosionBall
    # Randomise the explosion's orientation so repeats don't look identical.
    rndRotation = geo2.QuaternionRotationSetYawPitchRoll(
        random.random() * 2.0 * math.pi,
        random.random() * 2.0 * math.pi,
        random.random() * 2.0 * math.pi)
    actualModel.rotation = rndRotation
    scene = self.globalsGlob.GetScene()
    if scene is not None:
        scene.objects.append(actualModel)
    # Attach a fresh audio emitter; id(self) makes the source name unique
    # per missile instance.
    audio = audio2.AudEmitter('effect_source_%s' % str(id(self)))
    obs = trinity.TriObserverLocal()
    obs.front = (0.0, -1.0, 0.0)
    obs.observer = audio
    del actualModel.observers[:]
    actualModel.observers.append(obs)

    def AudioSetup(*args):
        # Once the model is loaded, fire the first event of each curve set's
        # TriEventCurve on the audio emitter.
        for eachSet in actualModel.active.curveSets:
            for eachCurve in eachSet.curves:
                if eachCurve.__typename__ == 'TriEventCurve':
                    audio.SendEvent(eachCurve.GetKeyValue(0))
                    break

    loadedEventHandler = blue.BlueEventToPython()
    loadedEventHandler.handler = AudioSetup
    actualModel.loadedCallback = loadedEventHandler
    # Clamp camera shake to [50, 250] based on the effect's size.
    shakeMagnitude = min(actualModel.boundingSphereRadius, 250)
    shakeMagnitude = max(shakeMagnitude, 50)
    # NOTE(review): explosionPosition is only assigned inside the
    # `if self.enabled:` branch above — if this line can be reached with
    # self.enabled false, it raises NameError. Confirm against callers.
    sm.GetService('camera').ShakeCamera(shakeMagnitude, explosionPosition)
class Traceback:
    """Wraps a traceback.

    Holds the exception triple, a list of :class:`Frame` objects, and the
    rendering helpers (HTML summary/full page, plaintext, logging) used by
    the interactive debugger.
    """

    def __init__(self, exc_type, exc_value, tb, app):
        self._cache = {}
        self._app = app
        self.exc_type = exc_type
        self.exc_value = exc_value
        # Build a dotted exception-type name, qualifying with the module
        # unless it is a builtin.
        if not isinstance(exc_type, str):
            exception_type = exc_type.__name__
            if exc_type.__module__ not in ('__builtin__', 'exceptions'):
                exception_type = exc_type.__module__ + '.' + exception_type
        else:
            exception_type = exc_type
        self.exception_type = exception_type

        # we only add frames to the list that are not hidden. This follows
        # the the magic variables as defined by paste.exceptions.collector
        self.frames = []
        while tb:
            self.frames.append(Frame(exc_type, exc_value, tb, self._app))
            tb = tb.tb_next

    def filter_hidden_frames(self):
        """Remove the frames according to the paste spec."""
        if not self.frames:
            return

        new_frames = []
        hidden = False
        for frame in self.frames:
            hide = frame.hide
            if hide in ('before', 'before_and_this'):
                # Drop everything collected so far.
                new_frames = []
                hidden = False
                if hide == 'before_and_this':
                    continue
            elif hide in ('reset', 'reset_and_this'):
                hidden = False
                if hide == 'reset_and_this':
                    continue
            elif hide in ('after', 'after_and_this'):
                # Hide all following frames until a 'reset'.
                hidden = True
                if hide == 'after_and_this':
                    continue
            elif hide or hidden:
                continue
            new_frames.append(frame)

        # if we only have one frame and that frame is from the codeop
        # module, remove it.
        if len(new_frames) == 1 and self.frames[0].module == 'codeop':
            del self.frames[:]

        # if the last frame is missing something went terrible wrong :(
        elif self.frames[-1] in new_frames:
            self.frames[:] = new_frames

    def is_syntax_error(self):
        """Is it a syntax error?"""
        return isinstance(self.exc_value, SyntaxError)
    is_syntax_error = property(is_syntax_error)

    def exception(self):
        """String representation of the exception."""
        buf = traceback.format_exception_only(self.exc_type, self.exc_value)
        return ''.join(buf).strip()
    exception = property(exception)

    def log(self, logfile=None):
        """Log the ASCII traceback into a file object."""
        if logfile is None:
            logfile = sys.stderr
        # NOTE(review): .encode() yields bytes, so `+ '\n'` mixes bytes and
        # str and raises TypeError on Python 3 — looks like a Python 2
        # leftover; confirm and fix separately.
        tb = self.plaintext.encode('utf-8', 'replace').rstrip() + '\n'
        logfile.write(tb)

    # TODO: Looks like dead code
    # def paste(self, lodgeit_url):
    #     """Create a paste and return the paste id."""
    #     from xmlrpclib import ServerProxy
    #     srv = ServerProxy('%sxmlrpc/' % lodgeit_url)
    #     return srv.pastes.newPaste('pytb', self.plaintext)

    def render_summary(self, app, include_title=True):
        """Render the traceback for the interactive console."""
        title = ''
        frames = []
        classes = ['traceback']
        if not self.frames:
            classes.append('noframe-traceback')

        if include_title:
            if self.is_syntax_error:
                title = text_('Syntax Error')
            else:
                title = text_('Traceback <small>(most recent call last)'
                              '</small>')

        for frame in self.frames:
            frames.append(
                text_('<li%s>%s') % (
                    frame.info and
                    text_(' title="%s"' % escape(frame.info)) or
                    text_(''),
                    frame.render()
                ))

        if self.is_syntax_error:
            description_wrapper = text_('<pre class=syntaxerror>%s</pre>')
        else:
            description_wrapper = text_('<blockquote>%s</blockquote>')

        vars = {
            'classes': text_(' '.join(classes)),
            'title': title and
                     text_('<h3 class="traceback">%s</h3>' % title) or
                     text_(''),
            'frames': text_('\n'.join(frames)),
            'description': description_wrapper % escape(self.exception),
        }
        return render('exception_summary.jinja2', app, vars)

    def render_full(self, request, lodgeit_url=None):
        """Render the Full HTML page with the traceback info."""
        static_path = request.app.router[STATIC_ROUTE_NAME].url(filename='')
        root_path = request.app.router[ROOT_ROUTE_NAME].url()
        exc = escape(self.exception)
        summary = self.render_summary(request.app, include_title=False)
        token = request.app[APP_KEY]['pdtb_token']
        # self.id (the object's identity) keys this traceback in the
        # exception history; it round-trips through the query string.
        qs = {'token': token, 'tb': str(self.id)}
        url = request.app.router[EXC_ROUTE_NAME].url(query=qs)
        evalex = request.app[APP_KEY]['exc_history'].eval_exc
        vars = {
            'evalex': evalex and 'true' or 'false',
            'console': 'false',
            'lodgeit_url': escape(lodgeit_url),
            'title': exc,
            'exception': exc,
            'exception_type': escape(self.exception_type),
            'summary': summary,
            'plaintext': self.plaintext,
            'plaintext_cs': re.sub('-{2,}', '-', self.plaintext),
            'traceback_id': self.id,
            'static_path': static_path,
            'token': token,
            'root_path': root_path,
            'url': url,
        }
        return render('exception.jinja2', request.app, vars, request=request)

    def generate_plaintext_traceback(self):
        """Like the plaintext attribute but returns a generator"""
        yield text_('Traceback (most recent call last):')
        for frame in self.frames:
            yield text_(' File "%s", line %s, in %s' % (
                frame.filename,
                frame.lineno,
                frame.function_name
            ))
            yield text_(' ' + frame.current_line.strip())
        yield text_(self.exception, 'utf-8')

    @reify
    def plaintext(self):
        # Cached plaintext rendering of the whole traceback.
        return text_('\n'.join(self.generate_plaintext_traceback()))

    # The traceback's identity doubles as its unique debugger id.
    id = property(lambda x: id(x))
def __getattr__(self, *args):
    """Fallback attribute lookup: delegate to the sliced view of the
    underlying source, logging each delegated access."""
    log.debug('__getattr__ %d %s' % (id(self), args))
    window = self.src[self.start:self.end]
    return getattr(window, *args)
def scopefunc():
    """Return the identity of a freshly created empty dict.

    Note the dict is unreferenced once this returns, so CPython may reuse
    the same address on subsequent calls.
    """
    fresh = {}
    return id(fresh)
def make_rcparams_key(self):
    """Build a cache key: the font manager's identity followed by the
    current values of every invalidating rc parameter."""
    key = [id(fontManager)]
    key.extend(rcParams[param] for param in self.invalidating_rcparams)
    return key
def __hash__(self):
    """Hash by object identity (matches default `is`-based equality)."""
    identity = id(self)
    return identity
def find(instance):
    """Return the state registered for *instance*, keyed by its identity.

    Raises KeyError if no state was installed for the instance.
    """
    key = id(instance)
    return self.states[key]
def __getattribute__(self, *args):
    """Intercept every attribute access: 'src' returns the raw underlying
    source object; any other attribute returns the sliced view
    ``src[start:end]``.

    Bug fix: the original used ``getattr(self, 'src')`` and ``self.src`` /
    ``self.start`` / ``self.end`` inside this hook. Every such access
    re-enters ``__getattribute__`` itself, and the 'src' branch then called
    ``getattr(self, 'src')`` again — unbounded recursion on ANY attribute
    access. All internal lookups now go through ``object.__getattribute__``,
    which bypasses this hook.
    """
    log.debug('__getattribute__ %d %s' % (id(self), args))
    # Bypass this hook for the attributes it needs itself, otherwise the
    # lookups would recurse without bound.
    src = object.__getattribute__(self, 'src')
    if len(args) == 1 and args[0] == 'src':
        return src
    start = object.__getattribute__(self, 'start')
    end = object.__getattribute__(self, 'end')
    return src[start:end]
        :param kwargs:
        :return:
        '''
        '''
        *args 表示任何多个无名参数,它是一个tuple;
        **kwargs 表示关键字参数,它是一个dict。
        并且同时使用*args和**kwargs时,必须*args参数列要在**kwargs前
        '''
        # Flyweight lookup: reuse the pooled instance for this tree type if
        # one exists; otherwise create it and register it in the pool.
        obj = cls.pool.get(kwargs['tree_type'], None)
        if not obj:
            obj = object.__new__(cls)
            cls.pool[kwargs['tree_type']] = obj
            obj.tree_type = kwargs['tree_type']
        return obj


# Both calls use the same tree_type, so the pool hands back the same
# object and the two printed ids are identical.
t1 = Tree(tree_type=TreeType.apple_tree)
t2 = Tree(tree_type=TreeType.apple_tree)
print(id(t1))
print(id(t2))


def main():
    pass


if __name__ == '__main__':
    main()
################################ # We are ready to run *onnxruntime*. sess = rt.InferenceSession("pipeline_titanic.onnx") pred_onx = sess.run(None, inputs) print("predict", pred_onx[0][:5]) print("predict_proba", pred_onx[1][:2]) ################################## # The output of onnxruntime is a list of dictionaries. # Let's swith to an array but that requires to convert again with # an additional option zipmap. model_onnx = convert_sklearn(clf, 'pipeline_titanic', initial_inputs, target_opset=12, options={id(clf): {'zipmap': False}}) with open("pipeline_titanic_nozipmap.onnx", "wb") as f: f.write(model_onnx.SerializeToString()) sess = rt.InferenceSession("pipeline_titanic_nozipmap.onnx") pred_onx = sess.run(None, inputs) print("predict", pred_onx[0][:5]) print("predict_proba", pred_onx[1][:2]) ############################################# # Let's check they are the same. assert_almost_equal(clf.predict_proba(X_test), pred_onx[1]) ################################## # .. _l-plot-complex-pipeline-graph: #
def __init__(self, cell, is_floating):
    """Store a handle for *cell* (its identity) and its floating flag."""
    self.is_floating = is_floating
    self.h = id(cell)
def install_state(self, class_, instance, state):
    """Register *state* for *instance*, keyed by the instance's identity.

    ``class_`` is accepted for interface compatibility but unused here.
    """
    key = id(instance)
    self.states[key] = state
def __repr__(self):
    """Debug representation: class name, hex address, and the instance's
    own format info."""
    details = self._formatinfo()
    return '<{} at {} {}>'.format(type(self).__name__, hex(id(self)), details)
def find_cell(h):
    """Return ``(index, cell)`` for the cell whose identity equals *h*.

    Raises KeyError when no cell in the global ``cells`` list matches.
    """
    for index, candidate in enumerate(cells):
        if h == id(candidate):
            return index, candidate
    raise KeyError