def example1():
    "creation of a new file using manual, step-by-step procedure"
    # Build the IFC containment chain by hand:
    # file -> owner/placement/context -> project -> site -> building -> storey.
    f = new()
    own = makeOwner(f,"Yorik van Havre")
    plac = makePlacement(f)
    grep = makeContext(f,plac)
    proj = makeProject(f,grep,own)
    site = makeSite(f,proj,own)
    bldg = makeBuilding(f,site,own)
    stor = makeStorey(f,bldg,own)
    # Four walls forming a rectangle: each is a closed 2D polyline
    # extruded 3500 units upward in Z.
    poly = makePolyline(f,[(0,0,0),(0,200,0),(5000,200,0),(5000,0,0),(0,0,0)])
    solid = makeExtrusion(f,poly,(0,0,3500))
    wall = makeWall(f,stor,own,grep,solid)
    poly2 = makePolyline(f,[(0,200,0),(0,2000,0),(200,2000,0),(200,200,0),(0,200,0)])
    solid2 = makeExtrusion(f,poly2,(0,0,3500))
    wall2 = makeWall(f,stor,own,grep,solid2)
    poly3 = makePolyline(f,[(0,2000,0),(0,2200,0),(5000,2200,0),(5000,2000,0),(0,2000,0)])
    solid3 = makeExtrusion(f,poly3,(0,0,3500))
    wall3 = makeWall(f,stor,own,grep,solid3)
    poly4 = makePolyline(f,[(5000,200,0),(5000,2000,0),(4800,2000,0),(4800,200,0),(5000,200,0)])
    solid4 = makeExtrusion(f,poly4,(0,0,3500))
    wall4 = makeWall(f,stor,own,grep,solid4)
    # Attach the four walls to the storey, then write the file out.
    relate(f,stor,own,[wall,wall2,wall3,wall4])
    f.write("/home/yorik/test1.ifc")  # NOTE(review): hard-coded output path
    # Python 2 debug prints: inspect the result through the file's query API.
    print dir(f)
    print f.by_type("IfcWallStandardCase")
    w = f.by_type("IfcWallStandardCase")[0]
    print w
    print dir(w)
    print w.is_a("IfcWallStandardCase")
def test_frozen(self):
    """Import the frozen __hello__/__phello__ modules and verify their
    attributes, printed output, and package layout.

    Fix: the original performed the identical ``import __phello__.foo``
    failure check twice back to back; the duplicate block is removed.
    """
    # Frozen top-level module.
    with captured_stdout() as stdout:
        try:
            import __hello__
        except ImportError as x:
            self.fail("import __hello__ failed:" + str(x))
        self.assertEqual(__hello__.initialized, True)
        expect = set(self.module_attrs)
        expect.add('initialized')
        self.assertEqual(set(dir(__hello__)), expect)
    self.assertEqual(stdout.getvalue(), 'Hello world!\n')

    # Frozen package.
    with captured_stdout() as stdout:
        try:
            import __phello__
        except ImportError as x:
            self.fail("import __phello__ failed:" + str(x))
        self.assertEqual(__phello__.initialized, True)
        expect = set(self.package_attrs)
        expect.add('initialized')
        # 'spam' only shows up once the submodule has been imported.
        if not "__phello__.spam" in sys.modules:
            self.assertEqual(set(dir(__phello__)), expect)
        else:
            expect.add('spam')
            self.assertEqual(set(dir(__phello__)), expect)
        self.assertEqual(__phello__.__path__, [__phello__.__name__])
    self.assertEqual(stdout.getvalue(), 'Hello world!\n')

    # Frozen submodule of the package.
    with captured_stdout() as stdout:
        try:
            import __phello__.spam
        except ImportError as x:
            self.fail("import __phello__.spam failed:" + str(x))
        self.assertEqual(__phello__.spam.initialized, True)
        spam_expect = set(self.module_attrs)
        spam_expect.add('initialized')
        self.assertEqual(set(dir(__phello__.spam)), spam_expect)
        phello_expect = set(self.package_attrs)
        phello_expect.add('initialized')
        phello_expect.add('spam')
        self.assertEqual(set(dir(__phello__)), phello_expect)
    self.assertEqual(stdout.getvalue(), 'Hello world!\n')

    # A nonexistent frozen submodule must fail to import.
    try:
        import __phello__.foo
    except ImportError:
        pass
    else:
        self.fail("import __phello__.foo should have failed")

    # Clean up so later tests re-import the frozen modules fresh.
    del sys.modules['__hello__']
    del sys.modules['__phello__']
    del sys.modules['__phello__.spam']
def __init__(self, fn="depthFirstSearch", prob="PositionSearchProblem", heuristic="nullHeuristic"):
    # Warning: some advanced Python magic is employed below to find the right functions and problems
    # Resolve the search function named `fn` inside the `search` module.
    if fn not in dir(search):
        raise AttributeError, fn + " is not a search function in search.py."
    func = getattr(search, fn)
    # Functions that accept a `heuristic` keyword get it bound here;
    # plain (uninformed) search functions are used as-is.
    if "heuristic" not in func.func_code.co_varnames:
        print ("[SearchAgent] using function " + fn)
        self.searchFunction = func
    else:
        # Look the heuristic up in searchAgents first, then in search.
        if heuristic in dir(searchAgents):
            heur = getattr(searchAgents, heuristic)
        elif heuristic in dir(search):
            heur = getattr(search, heuristic)
        else:
            raise AttributeError, heuristic + " is not a function in searchAgents.py or search.py."
        print ("[SearchAgent] using function %s and heuristic %s" % (fn, heuristic))
        # Note: this bit of Python trickery combines the search algorithm and the heuristic
        self.searchFunction = lambda x: func(x, heuristic=heur)
    # Get the search problem type from the name
    if prob not in dir(searchAgents) or not prob.endswith("Problem"):
        raise AttributeError, prob + " is not a search problem type in SearchAgents.py."
    self.searchType = getattr(searchAgents, prob)
    print ("[SearchAgent] using problem type " + prob)
def example2():
    "creation of a new file using advanced IfcDocument object"
    # The IfcDocument wrapper manages the file, owner and spatial
    # structure internally; we only add products to it.
    ifc = IfcDocument("/home/yorik/test2.ifc")
    ifc.Name = "Test Project"
    ifc.Owner = "Yorik van Havre"
    ifc.Organization = "FreeCAD"
    # Four extruded-polyline walls (same rectangle layout as example1).
    w1 = ifc.addWall( ifc.addExtrudedPolyline([(0,0,0),(0,200,0),(5000,200,0),(5000,0,0),(0,0,0)], (0,0,3500)) )
    ifc.addWall( ifc.addExtrudedPolyline([(0,200,0),(0,2000,0),(200,2000,0),(200,200,0),(0,200,0)],(0,0,3500)) )
    ifc.addWall( ifc.addExtrudedPolyline([(0,2000,0),(0,2200,0),(5000,2200,0),(5000,2000,0),(0,2000,0)],(0,0,3500)) )
    ifc.addWall( ifc.addExtrudedPolyline([(5000,200,0),(5000,2000,0),(4800,2000,0),(4800,200,0),(5000,200,0)],(0,0,3500)) )
    # A wall built from an explicit faceted brep (six quad faces of a box).
    ifc.addWall( ifc.addFacetedBrep([[[(0,0,0),(100,0,0),(100,-1000,0),(0,-1000,0)]],
                                     [[(0,0,0),(100,0,0),(100,0,1000),(0,0,1000)]],
                                     [[(0,0,0),(0,0,1000),(0,-1000,1000),(0,-1000,0)]],
                                     [[(0,-1000,0),(0,-1000,1000),(100,-1000,1000),(100,-1000,0)]],
                                     [[(100,-1000,0),(100,-1000,1000),(100,0,1000),(100,0,0)]],
                                     [[(0,0,1000),(0,-1000,1000),(100,-1000,1000),(100,0,1000)]]]) )
    # A structural column and a door hosted in wall w1.
    ifc.addStructure( "IfcColumn", ifc.addExtrudedPolyline([(0,0,0),(0,-200,0),(-500,-200,0),(-500,0,0),(0,0,0)], (0,0,3500)) )
    ifc.addWindow( "IfcDoor", 200, 200, ifc.addExtrudedPolyline([(200,200,0),(200,400,0),(400,400,0),(400,200,0),(200,200,0)], (0,0,200)), w1 )
    ifc.write()
    # Python 2 debug prints: inspect the door entity and its arguments.
    print dir(ifc._fileobject)
    print ifc._fileobject.by_type("IfcDoor")
    w = ifc._fileobject.by_type("IfcDoor")[0]
    print w
    print dir(w)
    print w.is_a("IfcDoor")
    for i in range(w.get_argument_count()):
        print i,": ",w.get_argument_name(i)," : ",w.get_argument(i)
def expose(self, name, new_name=None, position=-1):
    """Re-export a filter's Set/Get accessor pair on the pipeline.

    Once called, the pipeline instance gains Set<name>/Get<name>
    methods delegating directly to one of its filters, e.g.::

        p.expose("Radius")
        p.SetRadius(5)
        p.GetRadius()

    By default the exposed attribute keeps the filter's name; pass
    ``new_name`` to rename it. The last filter is used unless another
    ``position`` is given, e.g.::

        p.expose("Radius", "SmoothingNeighborhood", 2)
        p.GetSmoothingNeighborhood()

    Raises RuntimeError when the filter has neither accessor.
    """
    alias = name if new_name is None else new_name
    target = self.filters[position]
    found = False
    # Copy over whichever of the two accessors the filter provides.
    for prefix in ("Set", "Get"):
        accessor = prefix + name
        if accessor in dir(target):
            setattr(self, prefix + alias, getattr(target, accessor))
            found = True
    if not found:
        raise RuntimeError(
            "No attribute %s at position %s." % (name, position))
def obTex(ob, active, context):
    """Copy texture-space settings from the active object's data onto ob's.

    Only attributes present on BOTH data blocks are copied; the values
    are written in place via slice assignment. Returns the standard
    ('INFO', message) status tuple.
    """
    for attr in ('texspace_location', 'texspace_size'):
        # Both sides must expose the attribute for the copy to be valid.
        if attr in dir(ob.data) and attr in dir(active.data):
            getattr(ob.data, attr)[:] = getattr(active.data, attr)[:]
    return ('INFO', "texture space copied")
def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
    # Warning: some advanced Python magic is employed below to find the right functions and problems
    # Resolve the search function named `fn` inside the `search` module.
    if fn not in dir(search):
        raise AttributeError, fn + ' is not a search function in search.py.'
    func = getattr(search, fn)
    # Functions that accept a `heuristic` keyword get it bound here;
    # plain (uninformed) search functions are used as-is.
    if 'heuristic' not in func.func_code.co_varnames:
        print('[SearchAgent] using function ' + fn)
        self.searchFunction = func
    else:
        # This variant resolves the heuristic from this module's globals
        # first, then from the search module.
        if heuristic in globals().keys():
            heur = globals()[heuristic]
        elif heuristic in dir(search):
            heur = getattr(search, heuristic)
        else:
            raise AttributeError, heuristic + ' is not a function in searchAgents.py or search.py.'
        print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
        # Note: this bit of Python trickery combines the search algorithm and the heuristic
        self.searchFunction = lambda x: func(x, heuristic=heur)
    # Get the search problem type from the name
    if prob not in globals().keys() or not prob.endswith('Problem'):
        raise AttributeError, prob + ' is not a search problem type in SearchAgents.py.'
    self.searchType = globals()[prob]
    print('[SearchAgent] using problem type ' + prob)
def test_sftp_client(psftp):
    '''test for access to the underlying, active sftpclient'''
    client = psftp.sftp_client
    # The raw paramiko SFTPClient API must be reachable through the wrapper.
    for expected in ('normalize', 'readlink'):
        assert expected in dir(client)
def sciopero_scraper():
    """
    This function is used to generate the requests for URL containing data.
    It's a generator which returns always a sciopero dict, with at least
    its id.
    """
    min_id, max_id = find_id_range()
    #print max_id
    print 'scraping from %d to %d' % (min_id, max_id)
    for i in xrange(min_id, max_id):
        try:
            resp = requests.get(sciopero_prefix % str(i))
        except Exception as e:
            # Network failure: dump the exception for debugging and abort
            # the whole scrape (Python 2 prints).
            print type(e)
            print e
            print dir(e)
            exit()
        # Throttle between requests to be polite to the server.
        print "Sleeping for %s seconds (requests)" % (SLEEP_TIME or 0)
        time.sleep(SLEEP_TIME or 0)
        # Server-side errors mean no data for this id; skip it.
        if resp.status_code == 500 or 'HTTP Status 500' in resp.text:
            continue
        #print '%s%s' % (sciopero_prefix, str(i))
        yield parse_sciopero(resp.content, i)
def show_depth():
    # Grab a depth frame from the Kinect and display a thresholded band
    # around `current_depth` in two OpenCV windows ("Depth", "Reduced").
    global threshold
    global current_depth
    depth, timestamp = freenect.sync_get_depth()
    # Keep only pixels within +/- threshold of the target depth (binary mask
    # scaled to 0/255 for display).
    depth = 255 * np.logical_and(depth >= current_depth - threshold,
                                 depth <= current_depth + threshold)
    depth = depth.astype(np.uint8)
    image = cv.CreateImageHeader((depth.shape[1], depth.shape[0]),
                                 cv.IPL_DEPTH_8U,
                                 1)
    cv.SetData(image, depth.tostring(),
               depth.dtype.itemsize * depth.shape[1])
    cv.ShowImage("Depth", image)
    # Python 2 debug prints of the intermediate objects.
    print "depth:", type(depth), dir(depth)
    print ""
    print "image:", type(image), dir(image)
    print ""
    x = cv.CreateMat(image.height, image.width, cv.IPL_DEPTH_8U)
    cv.Convert(image, x)
    # 10x downscaled preview window.
    smaller = cv.CreateMat(image.height / 10, image.width / 10, cv.IPL_DEPTH_8U)
    print "smaller:", type(smaller), dir(smaller)
    print ""
    cv.Resize(image, smaller)
    cv.ShowImage("Reduced", smaller)
def test_strategies_names_introspection(self):
    # A StrategyDict registers each strategy under several names: the
    # first name becomes the function's __name__, later names are aliases.
    sd = StrategyDict()
    sd.strategy("first", "abc")(lambda val: "abc" + val)
    sd.strategy("second", "def")(lambda val: "def" + val) # Neglect 2nd name
    sd.strategy("third", "123")(lambda val: "123" + val) # Neglect 2nd name
    # Nothing new here: strategies do what they should...
    assert sd("x") == "abcx"
    assert sd.default("p") == "abcp"
    assert sd.first("w") == "abcw" == sd["first"]("w")
    assert sd.second("zsc") == "defzsc" == sd["second"]("zsc")
    assert sd.third("blah") == "123blah" == sd["third"]("blah")
    assert sd.abc("y") == "abcy" == sd["abc"]("y")
    assert sd["def"]("few") == "deffew"
    assert sd["123"]("lots") == "123lots"
    # Valid names for attributes: every registered name (and the implicit
    # "default") must appear both in dir() and in the instance dict.
    all_names = {"first", "second", "third", "abc", "def", "123"}
    assert all(name in dir(sd) for name in all_names)
    assert all(name in vars(sd) for name in all_names)
    assert "default" in dir(sd)
    assert "default" in vars(sd)
    # keys() yields one tuple of names per strategy; flatten and compare.
    all_keys_tuples = sd.keys()
    all_keys = reduce(operator.concat, all_keys_tuples)
    assert set(all_keys) == all_names # Default not in keys
    assert set(all_keys_tuples) == {("first", "abc"), ("second", "def"),
                                    ("third", "123")}
    # First name is the __name__
    assert sd["abc"].__name__ == "first"
    assert sd["def"].__name__ == "second"
    assert sd["123"].__name__ == "third"
def find_unit(quantity):
    """
    Return a list of matching units names.
    if quantity is a string -- units containing the string `quantity`
    if quantity is a unit -- units having matching base units

    Examples
    ========

    >>> from sympy.physics import units as u
    >>> u.find_unit('charge')
    ['charge']
    >>> u.find_unit(u.charge)
    ['C', 'charge', 'coulomb', 'coulombs']
    >>> u.find_unit('volt')
    ['volt', 'volts', 'voltage']
    >>> u.find_unit(u.inch**3)[:5]
    ['l', 'cl', 'dl', 'ml', 'liter']
    """
    import sympy.physics.units as u
    rv = []
    if isinstance(quantity, str):
        # Substring match against every public name in the units module.
        rv = [i for i in dir(u) if quantity in i]
    else:
        # Compare base units: two quantities match when their non-numeric
        # factors (as_coeff_Mul()[1]) are equal.
        units = quantity.as_coeff_Mul()[1]
        for i in dir(u):
            try:
                # Fix: use getattr instead of eval('u.' + i) -- same
                # attribute lookup without string evaluation.
                if units == getattr(u, i).as_coeff_Mul()[1]:
                    rv.append(str(i))
            except Exception:
                # Names that are not quantities simply don't match.
                pass
    return sorted(rv, key=len)
def convert_dir( fill = None,
                 dirname = '.',
                 stroke = None,
                 strokeWidth = None,
                 dest = None):
    # Batch-recolor every SVG in `dirname`, writing results into `dest`.
    # NOTE(review): the extension check only looks at the last 3 chars.
    files = [ f for f in os.listdir(dirname) if f[-3:] == 'svg']
    print "{0} SVG Files: ".format(len(files))
    print '\n'.join(files)
    print "Converting fill to: {0}{1}{2}"\
        .format(fill,
                ", stroke to {0}".format(stroke) if stroke else '',
                ", strokeWidth to {0}".format(strokeWidth) if strokeWidth else '')
    # Default destination directory name encodes the requested changes.
    if dest == None:
        dest = ("{0}-fill{1}{2}"\
            .format(fill,
                    "-{0}-stroke".format(stroke) if stroke else '',
                    "-{0}-width".format(strokeWidth) if strokeWidth else ''))
    if not os.path.isdir(dest):
        os.mkdir(dest)
    for f in files:
        tree = svgparser.parse(os.path.join(dirname,f))
        elts = svgu.list_elements(tree)
        for e in elts:
            # Only elements exposing the relevant setter are touched.
            if 'set_fill' in dir(e) and fill:
                e.set_fill(fill)
            if 'set_stroke' in dir(e) and stroke:
                e.set_stroke(stroke)
            if 'set_stroke_width' in dir(e) and strokeWidth:
                e.set_stroke_width("{0}px".format(strokeWidth))
        out = os.path.join(dest, f)
        tree.save(out)
def profile(request, next=SNAP_PREFIX):
    '''
    Allow user to edit his/her profile. Requires login.

    There are several newforms bugs that affect this. See
    http://code.google.com/p/snapboard/issues/detail?id=7
    We'll use generic views to get around this for now.
    '''
    if COOKIE_SNAP_PROFILE_KEY in request.session:
        # reset any cookie variables
        request.session[COOKIE_SNAP_PROFILE_KEY] = {}
    # Fetch the user's profile, creating it on first visit.
    try:
        userdata = SnapboardProfile.objects.get(user=request.user)
    except:
        userdata = SnapboardProfile(user=request.user)
        userdata.save()
    print dir(RequestContext(request).dicts)  # NOTE(review): leftover debug print
    from django.views.generic.create_update import update_object
    return update_object(request,
            model=SnapboardProfile,
            object_id=userdata.id,
            template_name='snapboard/profile.html',
            post_save_redirect=next
            )
def get_config(config_path):
    """Load the user's config module and check it against the template.

    Aborts the process (exit(-1)) when the file is missing, cannot be
    imported, or lacks settings defined by the bundled template module.
    Returns the imported config module on success.
    """
    __import__('errbot.config-template')  # - is on purpose, it should not be imported normally ;)
    template = sys.modules['errbot.config-template']
    config_fullpath = config_path
    if not path.exists(config_fullpath):
        log.error(
            'I cannot find the file %s \n'
            '(You can change this path with the -c parameter see --help)' % config_path
        )
        log.info(
            'You can use the template %s as a base and copy it to %s. \nYou can then customize it.' % (
                path.dirname(template.__file__) + sep + 'config-template.py', config_path + sep)
        )
        exit(-1)
    # noinspection PyBroadException
    try:
        config = __import__(path.splitext(path.basename(config_fullpath))[0])
        # Any public name present in the template but absent from the
        # user's config is a missing setting.
        missing = [entry for entry in set(dir(template)) - set(dir(config))
                   if not entry.startswith('_')]
        if missing:
            log.error('You are missing configs defined from the template :')
            for entry in missing:
                log.error('Missing config : %s' % entry)
            exit(-1)
    except Exception as _:
        log.exception('I could not import your config from %s, please check the error below...' % config_fullpath)
        exit(-1)
    log.info('Config check passed...')
    return config
def test_dir():
    """dir() on table expressions only advertises applicable operations."""
    t = TableSymbol('t', '{name: string, amount: int, dt: datetime}')
    # (attribute, expression) pairs that must be present / absent.
    present = [('day', t.dt), ('mean', t.amount)]
    absent = [('mean', t.dt), ('like', t[['amount', 'dt']]), ('any', t.name)]
    for attr, expr in present:
        assert attr in dir(expr)
    for attr, expr in absent:
        assert attr not in dir(expr)
def test_usage(self):
    """LazyKit defers evaluation of its factory until first access."""
    source = (i for i in range(3))
    lazy = lazyutils.LazyKit(lambda: list(source))
    # The generator has not been consumed yet, so we can still pull 0.
    self.assertEqual(next(source), 0)
    # First subscript triggers the factory, which drains the rest (1, 2).
    self.assertEqual(lazy[1], 1)  # pylint: disable=unsubscriptable-object
    self.assertEqual(lazy[2], 2)  # pylint: disable=unsubscriptable-object
    # The proxy exposes the full list API.
    self.assertSetEqual(set(dir([])) - set(dir(lazy)), set())
def main():
    """
    Create a client, parse the arguments received on the command line, and
    call the appropriate method on the client.
    """
    cli = DeisClient()
    args = docopt(__doc__, version='Deis CLI {}'.format(__version__),
                  options_first=True)
    cmd = args['<command>']
    cmd, help_flag = parse_args(cmd)
    # print help if it was asked for
    if help_flag:
        if cmd != 'help':
            if cmd in dir(cli):
                print trim(getattr(cli, cmd).__doc__)
                return
        docopt(__doc__, argv=['--help'])
    # re-parse docopt with the relevant docstring
    if cmd in dir(cli):
        docstring = trim(getattr(cli, cmd).__doc__)
        if 'Usage: ' in docstring:
            args.update(docopt(docstring))
    # find the method for dispatching
    if hasattr(cli, cmd):
        method = getattr(cli, cmd)
    else:
        raise DocoptExit('Found no matching command')
    # dispatch the CLI command
    try:
        method(args)
    except EnvironmentError:
        print 'Could not find git remote for deis'
        raise DocoptExit()
def dir_only_shows_real_keys(self):
    "dir() only shows real keys-as-attrs, not aliases"
    lex = Lexicon({'key1': 'val1', 'key2': 'val2'})
    lex.alias('myalias', 'key1')
    attrs = dir(lex)
    # Real keys surface as attributes; the alias does not.
    assert 'key1' in attrs
    assert 'key2' in attrs
    assert 'myalias' not in attrs
def uniformCostSearch(problem):
    "Search the node of least total cost first. "
    "*** YOUR CODE HERE ***"
    visited = set()
    # Priority = cumulative cost of the actions along the candidate path.
    frontier = util.PriorityQueueWithFunction(
        lambda path: problem.getCostOfActions(getListOfActions(path)))
    # The problem may expose its start state as an attribute or a getter.
    if "startState" in dir(problem):
        start = problem.startState
    elif "getStartState" in dir(problem):
        start = problem.getStartState()
    else:
        raise Exception("No recognizable function for getting the Start State")
    rootPath = ((start, "", 0), )
    frontier.push(rootPath)
    solution = findSolution(problem, rootPath, frontier, visited)
    if solution is None:
        raise Exception("No solution exists!")
    #print "[Final Path] [%s] with length %d" % (str(solution), len(solution))
    return getListOfActions(solution)
def aStarSearch(problem, heuristic=nullHeuristic):
    """Graph search ordered by cost-so-far plus heuristic estimate."""
    visited = set()
    frontier = util.PriorityQueueWithFunction(
        getHeuristicFunction(problem, heuristic))
    # The problem may expose its start state as an attribute or a getter.
    if "startState" in dir(problem):
        start = problem.startState
    elif "getStartState" in dir(problem):
        start = problem.getStartState()
    else:
        raise Exception("No recognizable function for getting the Start State")
    rootPath = ((start, "", 0), )
    frontier.push(rootPath)
    solution = findSolution(problem, rootPath, frontier, visited)
    if solution is None:
        raise Exception("No solution exists!")
    #print "[Final Path] [%s] with length %d" % (str(solution), len(solution))
    return getListOfActions(solution)
def prop(interpeter, stack, scopes, stream):
    """Pop a property name (top) and an object off the stack and push a
    'py-obj' token holding the property's value.

    Bug fix: the original ran its membership checks against ``val2`` (the
    property NAME) instead of ``val1`` (the object being inspected), so
    dict lookups and ``__all__`` checks were effectively never validated
    correctly. All guards now inspect ``val1``.
    """
    val2, val1 = stack.pop().VAL, stack.pop().VAL  # val2 = name, val1 = object
    if isinstance(val1, dict):
        if not val2 in val1.keys():
            interpeter.report_error(
                "OOP_ERROR", "propof", "Property not in object!")
    elif "__all__" in dir(val1):
        # Modules/objects that declare __all__ restrict their public names.
        if not val2 in val1.__all__:
            interpeter.report_error(
                "OOP_ERROR", "propof", "Property not in object!")
    elif not val2 in dir(val1):
        interpeter.report_error(
            "OOP_ERROR", "propof", "Property not in object!")
    if isinstance(val1, types.ModuleType):
        # Module attributes live in __dict__; other objects are indexed.
        value = val1.__dict__[val2]
    else:
        value = val1[val2]
    tok = interpeter.Token(TYPE='py-obj', VAL=value)
    stack.append(tok)
def recommendCompletionsFor(self, word):
    # Suggest name completions for the partial input `word`.
    # Returns (sorted candidate list, 0), or (None, 0) when the dotted
    # prefix cannot be evaluated.
    parts = word.split('.')
    if len(parts) > 1:
        # has a . so it must be a module or class or something
        # using eval, which shouldn't normally have side effects
        # unless there's descriptors/metaclasses doing some nasty
        # get magic
        objname = '.'.join(parts[:-1])
        try:
            obj = eval(objname, self.locals)
        except:
            return None, 0
        wordlower = parts[-1].lower()
        if wordlower == '':
            # they just punched in a dot, so list all attributes
            # that don't look private or special
            prefix = '.'.join(parts[-2:])
            check = [
                (prefix+_method)
                for _method in dir(obj)
                if _method[:1] != '_' and _method.lower().startswith(wordlower)
            ]
        else:
            # they started typing the method name
            check = filter(lambda s:s.lower().startswith(wordlower), dir(obj))
    else:
        # no dots, must be in the normal namespaces.. no eval necessary
        check = sets.Set(dir(__builtins__))
        check.update(keyword.kwlist)
        check.update(self.locals)
        wordlower = parts[-1].lower()
        check = filter(lambda s:s.lower().startswith(wordlower), check)
    check.sort()
    return check, 0
def testRecusiveNesting(self):
    # Archive a self-referential structure: list -> instance -> dict -> list.
    l = []
    d = {1:l}
    i = a_classic_class()
    i.attr = d
    l.append(i)
    buf = self.archiverClass.archivedDataWithRootObject_(l)
    self.assertIsInstance(buf, NSData)
    v = self.unarchiverClass.unarchiveObjectWithData_(buf)
    self.assertEqual(len(v), 1)
    self.assertEqual(dir(v[0]), dir(i))
    self.assertEqual(list(v[0].attr.keys()), [1])
    if self.isKeyed:
        # Keyed archivers preserve object identity through the cycle.
        self.assertIs(v[0].attr[1], v)
    else:
        # See 'TestArchiveNative'
        self.assertIsNot(v[0].attr[1], v)
    # Same round-trip, this time rooted at the dict instead of the list.
    buf = self.archiverClass.archivedDataWithRootObject_(d)
    self.assertIsInstance(buf, NSData)
    v = self.unarchiverClass.unarchiveObjectWithData_(buf)
    if self.isKeyed:
        self.assertIs(v[1][0].attr, v)
    else:
        # See 'TestArchiveNative'
        self.assertIsNot(v[1][0].attr, v)
def test_methods(self): ## class X(Structure): ## _fields_ = [] self.assertIn("in_dll", dir(type(Structure))) self.assertIn("from_address", dir(type(Structure))) self.assertIn("in_dll", dir(type(Structure)))
def __init__(self): self.db = Database() # classe database self.i2c = 0 self.getBusValue() # Setta il corretto device self.A = {} self.P = {} self.mBoard = [] # matrive board self.mBoard_io = [] # matrice IO self.mProg = [] # matrice programma self.area_id = () self.dir_root = os.path.dirname(os.path.abspath(__file__)) self.initialize() """ Example pigpio port """ self.pi = pigpio.pi("localhost", 8888) # Instance host, port print dir(self.pi) print self.pi.write(16, False) print self.pi.read(16) # Read status if IO 16 self.pi.write(16, True) # Write 1 to IO 16 print self.pi.read(16) for n in range(32): # print mode of GPIO: 0 = INPUT 1 = OUTPUT, 2 = ALT5, 3 = ALT4, 4 = ALT0, 5 = ALT1, 6 = ALT2, 7 = ALT3 print "GPIO n:%s, mode: %s" %(n, self.pi.get_mode(n))
def toXML(self,name=None):
    """ Method that creates the XML document for the instance of python class.
    Return a string with the xml document.
    """
    # Root tag defaults to the class name unless an explicit name is given.
    nameroot = None
    if name == None:
        nameroot = self.__class__.__name__
    else:
        nameroot = name
    xml = '<%s>'%nameroot
    # Attributes that a bare object() subclass already has are inherited
    # noise; they are skipped so only user-defined fields are serialized.
    default_attr = dir(type('default',(object,),{}))
    for key in dir(self):
        if default_attr.count(key) > 0:
            continue
        element = findElementFromDict(self.__dict__,key)
        if element == None:
            continue
        if isinstance(element,list):
            # Lists: one child tag per item, recursing into nested types.
            for e in element:
                if isinstance(e,ComplexType):
                    xml += e.toXML(name=key)
                else:
                    xml += '<%s>%s</%s>'%(key,e,key)
        elif isinstance(element,Property):
            xml += '<%s>%s</%s>'%(key,element.value,key)
        elif isinstance(element,ComplexType):
            xml += element.toXML(name=key)
        else:
            # Plain values are converted via the type-name dispatch helper.
            xml += '<%s>%s</%s>'%(key,convert(type(element).__name__,element),key)
    xml += '</%s>'%nameroot
    return str(xml)
def test_methods(self): weaksetmethods = dir(WeakSet) for method in dir(set): if method == 'test_c_api' or method.startswith('_'): continue self.assertIn(method, weaksetmethods, "WeakSet missing method " + method)
def update(self, new_mac, packet, packet_in):
    # Refresh the switch's flow-table entry for `new_mac`, then forward
    # the triggering packet out of the learned port.
    srcaddr = EthAddr(packet.src)
    dstaddr = EthAddr(packet.dst)
    log.debug("---------------------------------")
    log.debug(srcaddr)
    log.debug(dstaddr)
    log.debug("MAC Address Store")
    log.debug(self.macStore)
    log.debug("Done")
    log.debug("---------------------------------")
    # Modify (not add) the existing flow so it outputs on the stored port.
    fm = of.ofp_flow_mod(command=of.OFPFC_MODIFY)
    fm.match = of.ofp_match.from_packet(packet)
    #fm.match.in_port = packet_in.in_port
    #fm.match.dl_dst = new_mac
    fm.actions.append(of.ofp_action_output(port = self.macStore[new_mac]))
    print "dicic"
    print dir(fm)
    #fm.idle_timeout = 10
    #fm.hard_timeout = 30
    log.debug("UPDATING FLOWTABLE ENTRY")
    self.connection.send(fm)
    # We wont loose this package, so we forward it
    # to its destination
    # NOTE(review): send_packet is invoked twice with identical arguments;
    # looks like an accidental duplicate -- confirm before removing.
    self.send_packet(packet_in.buffer_id, packet_in.data,
                     self.macStore[new_mac], packet_in.in_port)
    self.send_packet(packet_in.buffer_id, packet_in.data,
                     self.macStore[new_mac], packet_in.in_port)
    log.debug("-------------Package <"+str(packet_in.buffer_id)+"> forwarded by controller over Port "+str(self.macStore[new_mac]))
def breadthFirstSearch(problem):
    """
    Search the shallowest nodes in the search tree first.
    """
    "*** YOUR CODE HERE ***"
    visited = set()
    # FIFO frontier gives breadth-first expansion order.
    frontier = util.Queue()
    # The problem may expose its start state as an attribute or a getter.
    if "startState" in dir(problem):
        start = problem.startState
    elif "getStartState" in dir(problem):
        start = problem.getStartState()
    else:
        raise Exception("No recognizable function for getting the Start State")
    rootPath = ((start, "", 0), )
    frontier.push(rootPath)
    solution = findSolution(problem, rootPath, frontier, visited)
    if solution is None:
        raise Exception("No solution exists!")
    #print "[Final Path] [%s] with length %d" % (str(solution), len(solution))
    return getListOfActions(solution)
import datetime

# Show everything the datetime.datetime class exposes.
print(dir(datetime.datetime))

# Print the day-of-month of the current local time.
current_moment = datetime.datetime.now()
print(current_moment.day)
def mutate(self): """ Perform mutations on each genotype. """ # some simulations do not require mutation, skip this step if so. if self.mutation_rate == 0: return abundances = [freq * self.population_size for freq in self.population.vs['frequency']] num_mutants = nbinom(n=abundances, p=1-np.exp(-self.mutation_rate)) self.population.vs['frequency'] = self.population.vs['frequency'] - (num_mutants / self.population_size) k = 1 while(any(num_mutants > 0)): new_num_mutants = nbinom(n=num_mutants, p=gamma(k+1, self.mutation_rate)/gamma(k, self.mutation_rate), size=len(num_mutants)) # we want a list of how many mutants will have k mutations for each genotype num_k_mutants = nsubtract(num_mutants, new_num_mutants) # skip over any count of mutants that is 0 for name in nnonzero(num_k_mutants): if not len(name): continue name = name[0] num_mutant = num_k_mutants[name] # if we have a mutational neighborhood function, it will be MUCH faster if k == 1 and '__mutational_neighborhood__' in dir(self.population.vs[0]['genotype']): # get a list of genotypes that are 1 mutation away from the current genotype mutational_neighborhood = self.population.vs[name]['genotype'].__mutational_neighborhood__() # randomly distribute num_mutant amount a list of size=mutational_neighborhood distribution = self.neighborhood_distribution(num_mutant, len(mutational_neighborhood)) for dist_index in nnonzero(distribution): dist_index = dist_index[0] mutant_genotype = mutational_neighborhood[dist_index] mutant_count = distribution[dist_index] if mutant_genotype in self.graph_node_to_genotype: self.graph_node_to_genotype[mutant_genotype]['frequency'] += mutant_count / self.population_size else: current_counter = str(next(self.counter)) self.population.add_vertex(name=current_counter, parent=self.population.vs[name]['name'], frequency=mutant_count/self.population_size, max_frequency=mutant_count/self.population_size, genotype=mutant_genotype, num_mutations=k, dirty_flag=1) 
self.graph_node_to_genotype[mutant_genotype] = self.population.vs.find(name=current_counter) # if no mutational neighborhood function or k > 1, just call num_mutant mutations else: for _ in range(num_mutant): mutant_genotype = self.genotype_deepcopy(self.population.vs[name]['genotype']) # do k mutations on the genotype to get the new mutant for _ in range(k): mutant_genotype.__mutate__() # if this is not a new genotype, update the frequency if mutant_genotype in self.graph_node_to_genotype: self.graph_node_to_genotype[mutant_genotype]['frequency'] += 1 / self.population_size # if it is new, add it to the map and the population else: current_counter = str(next(self.counter)) self.population.add_vertex(name=current_counter, parent=self.population.vs[name]['name'], frequency=1/self.population_size, max_frequency=1/self.population_size, genotype=mutant_genotype, num_mutations=k, dirty_flag=1) self.graph_node_to_genotype[mutant_genotype] = self.population.vs.find(name=current_counter) k += 1 num_mutants = new_num_mutants
# You may not use this work except in compliance with the Licence. # You may obtain a copy of the Licence at: http://ec.europa.eu/idabc/eupl """ Python equivalents of various excel functions. """ import functools import collections import math import numpy as np import schedula as sh from ..errors import FunctionError, FoundError from ..tokens.operand import XlError, Error ufuncs = { item: getattr(np, item) for item in dir(np) if isinstance(getattr(np, item), np.ufunc) } def _replace_empty(x, empty=0): if isinstance(x, np.ndarray): y = x.ravel().tolist() if sh.EMPTY in y: y = [empty if v is sh.EMPTY else v for v in y] return np.asarray(y, object).reshape(*x.shape) return x def xpower(number, power): if number == 0: if power == 0:
def get_Lstar2(pos, date, alpha = 90.,
               params = None,
               coord_system='GSM',
               Bfield = 'Lgm_B_OP77',
               internal_model = 'Lgm_B_IGRF',
               LstarThresh = 10.0, # beyond this Lsimple don't compute Lstar;; not used in get_Lstar2
               extended_out = False,
               LstarQuality = 3,
               FootpointHeight=100.,
               Colorize=False,
               cverbosity=0,
               QinDenton=False):
    # Compute L* (and related drift-shell quantities) for a position/time
    # by calling the LanlGeoMag C library through ctypes.
    ## void Lgm_ComputeLstarVersusPA( long int Date, double UTC, Lgm_Vector *u, int nAlpha, double *Alpha, int Quality, int Colorize, Lgm_MagEphemInfo *MagEphemInfo ) {
    # setup a datamodel object to hold the answer
    ans = Lstar_Data()
    # change datetime to Lgm Datelong and UTC
    try:
        datelong = Lgm_CTrans.dateToDateLong(date)
        utc = Lgm_CTrans.dateToFPHours(date)
    except AttributeError:
        raise(TypeError("Date must be a datetime object"))
    else:
        ans['Epoch'] = datamodel.dmarray([date])
    # pitch angles to calculate: scalars are promoted to 1-element arrays
    if isinstance(alpha, numbers.Real):
        Alpha = numpy.asanyarray([alpha], dtype=float)
    else:
        Alpha = numpy.asanyarray(alpha, dtype=float)
    # required setup
    MagEphemInfo = Lgm_MagEphemInfo.Lgm_MagEphemInfo(len(Alpha), cverbosity)
    # setup a shortcut to MagModelInfo
    mmi = MagEphemInfo.LstarInfo.contents.mInfo.contents
    Lgm_Set_Coord_Transforms( datelong, utc, mmi.c) # dont think mmi.c needs a pointer()
    # setup a shortcut to LstarInfo
    MagEphemInfo.LstarInfo.contents.VerbosityLevel = cverbosity
    MagEphemInfo.LstarQuality = LstarQuality
    MagEphemInfo.LstarInfo.contents.SaveShellLines = False
    MagEphemInfo.LstarInfo.contents.FindShellPmin = extended_out
    MagEphemInfo.LstarInfo.contents.LSimpleMax = 10.0;
    mmi.VerbosityLevel = 0;
    mmi.Lgm_LossConeHeight = FootpointHeight;
    #MagEphemInfo->LstarInfo->mInfo->Bfield = Lgm_B_T89;
    # mmi.Bfield = Lgm_Wrap.__getattribute__(Bfield)
    # Select the external and internal field models by name in the C library.
    Lgm_Wrap.__getattribute__('Lgm_Set_'+Bfield)(MagEphemInfo.LstarInfo.contents.mInfo)
    Lgm_Wrap.__getattribute__('Lgm_Set_'+internal_model+'_InternalModel')(MagEphemInfo.LstarInfo.contents.mInfo)
    MagEphemInfo.nAlpha = len(Alpha)
    #if len(Alpha) > 1 and Bfield == 'Lgm_B_TS04':
    #    raise(NotImplementedError('TS04 is not thread safe!! Can only do 1 PA at a time'))
    for i in range(len(Alpha)):
        MagEphemInfo.Alpha[i] = Alpha[i]
    # convert to **GSM**
    if coord_system == 'GSM':
        try:
            Pgsm = Lgm_Vector.Lgm_Vector(*pos)
        except TypeError:
            raise(TypeError("Position must be listlike" ) )
        ans['position']['GSM'] = datamodel.dmarray(pos, attrs={'units':'Re'})
    elif coord_system == 'SM':
        try:
            Psm = Lgm_Vector.Lgm_Vector(*pos)
        except TypeError:
            raise(TypeError("Position must be listlike" ) )
        Pgsm = Lgm_Vector.Lgm_Vector()
        Lgm_Convert_Coords( pointer(Psm), pointer(Pgsm), SM_TO_GSM, mmi.c )
        ans['position']['SM'] = datamodel.dmarray(pos, attrs={'units':'Re'})
        ans['position']['GSM'] = datamodel.dmarray(Pgsm.tolist(), attrs={'units':'Re'})
    else:
        raise(NotImplementedError("Only GSM or SM input currently supported"))
    ## void Lgm_ComputeLstarVersusPA( long int Date, double UTC, Lgm_Vector *u, int nAlpha, double *Alpha, int Quality, int Colorize, Lgm_MagEphemInfo *MagEphemInfo ) {
    if QinDenton:# and Bfield == 'Lgm_B_TS04':
        # these are the params we will use.
        # Grab the QinDenton data
        # Lgm_get_QinDenton_at_JD( JD, &p, 1 );
        # JD = Lgm_Date_to_JD( Date, UTC, mInfo->c );
        JD = Lgm_Wrap.Lgm_Date_to_JD(datelong, utc, pointer(mmi.c))
        qd_one = Lgm_Wrap.Lgm_QinDentonOne()
        Lgm_Wrap.Lgm_get_QinDenton_at_JD( JD, pointer(qd_one), cverbosity)
        Lgm_Wrap.Lgm_set_QinDenton(pointer(qd_one), pointer(mmi.c))
        # Record the QinDenton values actually used (public fields only).
        ans['params'] = dm.SpaceData()
        for att in dir(qd_one):
            if att[0] != '_':
                ans['params'][att] = getattr(qd_one, att)
    else:
        # save params
        ans['params'] = params
        if params == None:
            params = {}
        # step through the params dict and populate MagEphemInfo
        for key in params:
            if key == 'W':
                # W is a fixed-size 6-element C double array.
                double6 = c_double*6
                W = double6(*params[key])
                MagEphemInfo.LstarInfo.contents.mInfo.contents.__setattr__(key, W)
            else:
                MagEphemInfo.LstarInfo.contents.mInfo.contents.__setattr__(key, params[key])
    # Hand everything to the C routine that does the actual L* computation.
    Lgm_ComputeLstarVersusPA( ctypes.c_long(datelong),
                              ctypes.c_double(utc),
                              ctypes.pointer(Pgsm),
                              ctypes.c_int(len(Alpha)),
                              np.require(Alpha, requirements=['C']).ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
                              ctypes.c_int(LstarQuality),
                              ctypes.c_int(Colorize),
                              ctypes.pointer(MagEphemInfo) )
    # Collect per-pitch-angle outputs into the answer structure.
    for ii, pa in enumerate(Alpha):
        if int(pa) == pa:
            pa = int(pa)
        ans[pa] = dm.SpaceData()
        ans[pa]['LHilton'] = MagEphemInfo.LHilton[ii]
        ans[pa]['LMcIlwain'] = MagEphemInfo.LMcIlwain[ii]
        ans[pa]['Lstar'] = MagEphemInfo.Lstar[ii]
        # think in here, there are not owned by python so there is no easy way to free the memory...
        if extended_out:
            # Copy the per-shell-point vectors out of the C arrays.
            ans[pa]['Bmin'] = dm.dmarray(np.zeros([MagEphemInfo.nShellPoints[ii], 3]))
            ans[pa]['I'] = dm.dmarray(np.zeros([MagEphemInfo.nShellPoints[ii]]))
            ans[pa]['Pmin'] = dm.dmarray(np.zeros([MagEphemInfo.nShellPoints[ii], 3]))
            ans[pa]['Bmin'][:, 0] = [val.x for val in MagEphemInfo.Shell_Bmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['Bmin'][:, 1] = [val.y for val in MagEphemInfo.Shell_Bmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['Bmin'][:, 2] = [val.z for val in MagEphemInfo.Shell_Bmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['I'][:] = [val for val in MagEphemInfo.ShellI[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['Pmin'][:, 0] = [val.x for val in MagEphemInfo.Shell_Pmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['Pmin'][:, 1] = [val.y for val in MagEphemInfo.Shell_Pmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
            ans[pa]['Pmin'][:, 2] = [val.z for val in MagEphemInfo.Shell_Pmin[ii][0:MagEphemInfo.nShellPoints[ii]]]
    return ans
def _get_methods(class_input): """ A rather hacky method to get the methods of a class. """ return [f for f in dir(class_input) if callable(getattr(class_input, f)) and not f.startswith('__')]
im = mapnik.Image(m.width, m.height) mapnik.render(m, im) eq_(get_unique_colors(im), ['rgba(255,255,85,191)']) def test_background_image_with_alpha_and_background_color_against_composited_control( ): m = mapnik.Map(10, 10) m.background = mapnik.Color('rgba(255,255,255,.5)') m.background_image = '../data/images/yellow_half_trans.png' im = mapnik.Image(m.width, m.height) mapnik.render(m, im) # create and composite the expected result im1 = mapnik.Image(10, 10) im1.background = mapnik.Color('rgba(255,255,255,.5)') im1.premultiply() im2 = mapnik.Image(10, 10) im2.background = mapnik.Color('rgba(255,255,0,.5)') im2.premultiply() im1.composite(im2) im1.demultiply() # compare image rendered (compositing in `agg_renderer<T>::setup`) # vs image composited via python bindings #raise Todo("looks like we need to investigate PNG color rounding when saving") #eq_(get_unique_colors(im),get_unique_colors(im1)) if __name__ == "__main__": setup() exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
_NoTextCoercion, RoleImpl, roles.CompoundElementRole ): def _raise_for_expected(self, element, argname=None, resolved=None, **kw): if isinstance(element, roles.FromClauseRole): if element._is_subquery: advice = ( "Use the plain select() object without " "calling .subquery() or .alias()." ) else: advice = ( "To SELECT from any FROM clause, use the .select() method." ) else: advice = None return super(CompoundElementImpl, self)._raise_for_expected( element, argname=argname, resolved=resolved, advice=advice, **kw ) _impl_lookup = {} for name in dir(roles): cls = getattr(roles, name) if name.endswith("Role"): name = name.replace("Role", "Impl") if name in globals(): impl = globals()[name](cls) _impl_lookup[cls] = impl
def print_obj_methods(obj):
    """Print a debug header for *obj* followed by its dir() listing."""
    header = '----------debug %s---------' % obj
    print(header)
    attributes = dir(obj)
    print(attributes)
def dump(obj):
    """Print every readable attribute of *obj* as ``obj.<name> = <value>``."""
    for name in dir(obj):
        # dir() may list names that fail on access (e.g. broken descriptors),
        # so guard each read with hasattr.
        if not hasattr(obj, name):
            continue
        print("obj.%s = %s" % (name, getattr(obj, name)))
'KIND': Fore.CYAN, 'CONT': Style.BRIGHT + Fore.BLACK, 'VARS': Style.BRIGHT + Fore.MAGENTA, 'VARS-NAME': Style.NORMAL + Fore.MAGENTA, 'INTERNAL-FAILURE': Style.BRIGHT + Back.RED + Fore.RED, 'INTERNAL-DETAIL': Fore.WHITE, 'SOURCE-FAILURE': Style.BRIGHT + Back.YELLOW + Fore.YELLOW, 'SOURCE-DETAIL': Fore.WHITE, 'RESET': Style.RESET_ALL, } for name, group in [ ('', Style), ('fore', Fore), ('back', Back), ]: for key in dir(group): OTHER_COLORS['{}({})'.format(name, key) if name else key] = getattr( group, key) CALL_COLORS = { 'call': Style.BRIGHT + Fore.BLUE, 'line': Fore.RESET, 'return': Style.BRIGHT + Fore.GREEN, 'exception': Style.BRIGHT + Fore.RED, } CODE_COLORS = { 'call': Fore.RESET + Style.BRIGHT, 'line': Fore.RESET, 'return': Fore.YELLOW, 'exception': Fore.RED, } MISSING = type('MISSING', (), {'__repr__': lambda _: '?'})()
def test_main():
    """Run every 'Test*'-named object in this module via test_support."""
    current_module = sys.modules[__name__]
    test_cases = []
    for name in dir(current_module):
        if name.startswith('Test'):
            test_cases.append(getattr(current_module, name))
    test_support.run_unittest(*test_cases)
def dump_obj(obj):
    """Log every attribute of *obj* at DEBUG level, then flush the log service."""
    for name in dir(obj):
        value = getattr(obj, name)
        logging.debug("  obj.%s = %s" % (name, value))
    # Flush so entries appear immediately (App Engine log service semantics).
    logservice.flush()
def question3d():
    """Return (discount, noise, living reward) answering analysis question 3d."""
    answerDiscount = 0.95
    answerNoise = 0.3
    answerLivingReward = 0
    return answerDiscount, answerNoise, answerLivingReward
    # If not possible, return 'NOT POSSIBLE'


def question3e():
    """Return (discount, noise, living reward) answering analysis question 3e."""
    answerDiscount = 1
    answerNoise = 0
    answerLivingReward = 10
    return answerDiscount, answerNoise, answerLivingReward
    # If not possible, return 'NOT POSSIBLE'


def question8():
    """Question 8 has no valid (epsilon, learning rate) pair; report that."""
    # No epsilon/learning-rate values solve this question, so the sentinel
    # string is returned directly (the unused placeholder locals were removed).
    return 'NOT POSSIBLE'
    # If not possible, return 'NOT POSSIBLE'


if __name__ == '__main__':
    # Fix: use print() calls instead of Python 2 print statements so this
    # script runs under both Python 2 and Python 3 (single-argument print(...)
    # behaves identically on both).
    print('Answers to analysis questions:')
    import analysis
    for q in [q for q in dir(analysis) if q.startswith('question')]:
        response = getattr(analysis, q)()
        print(' Question %s:\t%s' % (q, str(response)))
print("""iptest is used to run IronPython tests. Notes: - ??? Typical usage would be: ipy harness.py interop.net --plan """) sys.exit(0) #--no_testing if "--no_testing" in sys.argv: options.RUN_TESTS = False sys.argv.remove("--no_testing") #--plan if "--plan" in sys.argv: options.GEN_TEST_PLAN = True sys.argv.remove("--plan") #Do a little post processing if options.GEN_TEST_PLAN: import pydoc #Dump the flags l.debug("sys.argv after processing: %s" % str(sys.argv)) l.debug("Command-line options:") for x in [temp for temp in dir(options) if not temp.startswith("__")]: y = eval("options." + x) l.debug("\t%s = %s" % (x, y)) l.debug("")
def has_form_class(model_name):
    """Return True if idcops.forms defines a '<Name>NewForm' or '<Name>Form'
    class for the capitalized model name."""
    from idcops import forms
    capitalized = model_name.capitalize()
    available = dir(forms)
    add_form_name = "{}NewForm".format(capitalized)
    edit_form_name = "{}Form".format(capitalized)
    return add_form_name in available or edit_form_name in available
# >>> isinstance(d, Dog) and isinstance(d, Animal) # True # 但是,d不是Husky类型: # >>> isinstance(d, Husky) # False # 能用type()判断的基本类型也可以用isinstance()判断: print(isinstance('a', str)) print(isinstance(123, int)) print(isinstance(b'a', bytes)) # 并且还可以判断一个变量是否是某些类型中的一种,比如下面的代码就可以判断是否是list或者tuple: print(isinstance([1, 2, 3], (list, tuple))) print(isinstance((1, 2, 3), (list, tuple))) print('===========================================================') print('3--------------------使用dir()') # 如果要获得一个对象的所有属性和方法,可以使用dir()函数,它返回一个包含字符串的list,比如,获得一个str对象的所有属性和方法: print(dir('ABC')) # 类似__xxx__的属性和方法在Python中都是有特殊用途的,比如__len__方法返回长度。 # 在Python中,如果你调用len()函数试图获取一个对象的长度,实际上,在len()函数内部,它自动去调用该对象的__len__()方法,所以,下面的代码是等价的: print(len('ABC')) print('ABC'.__len__()) # 我们自己写的类,如果也想用len(myObj)的话,就自己写一个__len__()方法: class MyDog(object): def __len__(self): return 100 myDog = MyDog() print(myDog.__len__()) # 剩下的都是普通属性或方法,比如lower()返回小写的字符串:
# Colorizers used for worker log output.
green = make_colorizer('darkgreen')
yellow = make_colorizer('darkyellow')
blue = make_colorizer('darkblue')

logger = logging.getLogger(__name__)


class StopRequested(Exception):
    """Raised internally to request that the worker stop processing."""
    pass


def compact(l):
    """Return *l* with all None entries removed."""
    return [item for item in l if item is not None]


# Map signal numbers to their 'SIG*' names.  Underscored pseudo-names such
# as SIG_IGN / SIG_DFL are excluded by the '_' check.
_signames = {getattr(signal, name): name
             for name in dir(signal)
             if name.startswith('SIG') and '_' not in name}


def signal_name(signum):
    """Return the symbolic name for *signum*, or 'SIG_UNKNOWN'."""
    try:
        # Python 3.5+ provides the Signals enum; older versions fall back to
        # the precomputed lookup table.
        if sys.version_info[:2] >= (3, 5):
            return signal.Signals(signum).name
        return _signames[signum]
    except (KeyError, ValueError):
        # Unknown number (KeyError) or value outside the enum (ValueError).
        return 'SIG_UNKNOWN'
def get_forms_names():
    """Return the attribute names exposed by the idcops.forms module."""
    forms_module = import_string('idcops.forms')
    return dir(forms_module)
tokenize(readline, tokeneater=printtoken) are the same, except instead of generating tokens, tokeneater is a callback function to which the 5 fields described above are passed as 5 arguments, each time a new token is found.""" __author__ = 'Ka-Ping Yee <*****@*****.**>' __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, ' 'Skip Montanaro, Raymond Hettinger') from itertools import chain import string, re from token import * import token x = None __all__ = [x for x in dir(token) if not x.startswith("_")] __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"] del x del token COMMENT = N_TOKENS tok_name[COMMENT] = 'COMMENT' NL = N_TOKENS + 1 tok_name[NL] = 'NL' N_TOKENS += 2 def group(*choices): return '(' + '|'.join(choices) + ')'
def construct_model(self, input_tensors=None, prefix='metatrain_', test_num_updates=0):
  """Build the meta-learning graph.

  a: training data for inner gradient, b: test data for meta gradient.

  Args:
    input_tensors: dict with 'inputa'/'inputb'/'labela'/'labelb' tensors
      (batched per meta-task; exact shapes depend on the data pipeline —
      assumed (meta_batch, n_per_task, dim), TODO confirm against caller).
    prefix: summary/name prefix; 'train' in prefix selects the training
      branch (optimizers, metatrain_op) vs. the metaval_* branch.
    test_num_updates: overrides FLAGS.num_updates when larger.
  """
  self.inputa = input_tensors['inputa']
  self.inputb = input_tensors['inputb']
  self.labela = input_tensors['labela']
  self.labelb = input_tensors['labelb']

  with tf.variable_scope('model', reuse=None) as training_scope:
    # Reuse weights if construct_model was already called on this instance.
    if 'weights' in dir(self):
      training_scope.reuse_variables()
      weights = self.weights
    else:
      # Define the weights
      self.weights = weights = self.construct_weights()

    # outputbs[i] and lossesb[i] is the output and loss after i+1 gradient
    # updates
    num_updates = max(test_num_updates, FLAGS.num_updates)

    def task_metalearn(inp, reuse=True):
      """Run meta learning for a single task of the meta-batch."""
      TRAIN = 'train' in prefix  # pylint: disable=invalid-name
      # Perform gradient descent for one task in the meta-batch.
      inputa, inputb, labela, labelb = inp
      task_outputbs, task_lossesb = [], []
      task_msesb = []

      # support_pred and loss, (n_data_per_task, out_dim)
      task_outputa = self.forward(
          inputa, weights, reuse=reuse)  # only not reuse on the first iter
      # labela is (n_data_per_task, out_dim)
      task_lossa = self.loss_func(task_outputa, labela)

      # INNER LOOP (no change with ib)
      grads = tf.gradients(task_lossa, list(weights.values()))
      if FLAGS.stop_grad:
        # First-order approximation: treat inner-loop gradients as constants.
        grads = [tf.stop_gradient(grad) for grad in grads]
      gradients = dict(zip(weights.keys(), grads))
      # theta_pi = theta - alpha * grads
      fast_weights = dict(
          zip(weights.keys(), [
              weights[key] - self.update_lr * gradients[key]
              for key in weights.keys()
          ]))

      # use theta_pi to forward meta-test
      output = self.forward(inputb, weights, reuse=True)
      task_outputbs.append(output)
      # meta-test loss; KL term comes from the weight-encoder's layer losses.
      task_kl_loss = sum(self.encoder_w.losses)
      task_msesb.append(self.loss_func(output, labelb))
      task_lossesb.append(
          self.loss_func(output, labelb) + self.beta * task_kl_loss)

      def while_body(fast_weights_values):
        """One additional inner-loop SGD update on the fast weights."""
        loss = self.loss_func(
            self.forward(
                inputa,
                dict(zip(fast_weights.keys(), fast_weights_values)),
                reuse=True), labela)
        grads = tf.gradients(loss, fast_weights_values)
        fast_weights_values = [
            v - self.update_lr * g
            for v, g in zip(fast_weights_values, grads)
        ]
        return fast_weights_values

      # Remaining num_updates - 1 inner steps; back_prop only during training.
      fast_weights_values = tf.while_loop(
          lambda _: True,
          while_body,
          loop_vars=[fast_weights.values()],
          maximum_iterations=num_updates - 1,
          back_prop=TRAIN)
      fast_weights = dict(zip(fast_weights.keys(), fast_weights_values))

      output = self.forward(inputb, fast_weights, reuse=True)
      task_outputbs.append(output)
      task_msesb.append(self.loss_func(output, labelb))
      task_lossesb.append(
          self.loss_func(output, labelb) + self.beta * task_kl_loss)
      task_output = [
          task_outputa, task_outputbs, task_lossa, task_lossesb, task_msesb
      ]

      return task_output

    if FLAGS.norm is not None:
      # to initialize the batch norm vars, might want to combine this, and
      # not run idx 0 twice.
      _ = task_metalearn(
          (self.inputa[0], self.inputb[0], self.labela[0], self.labelb[0]),
          False)

    # Output structure of task_metalearn, needed by tf.map_fn.
    out_dtype = [
        tf.float32, [tf.float32] * 2, tf.float32, [tf.float32] * 2,
        [tf.float32] * 2
    ]
    result = tf.map_fn(task_metalearn, elems=(self.inputa, self.inputb, \
        self.labela, self.labelb), dtype=out_dtype, \
        parallel_iterations=FLAGS.meta_batch_size)
    outputas, outputbs, lossesa, lossesb, msesb = result

  ## Performance & Optimization
  if 'train' in prefix:
    # lossesa is length(meta_batch_size)
    self.total_loss1 = tf.reduce_sum(lossesa) / tf.to_float(
        FLAGS.meta_batch_size)
    self.total_losses2 = total_losses2 = [
        tf.reduce_sum(msesb[j]) / tf.to_float(FLAGS.meta_batch_size)
        for j in range(len(msesb))
    ]
    self.total_losses3 = total_losses3 = [
        tf.reduce_sum(lossesb[j]) / tf.to_float(FLAGS.meta_batch_size)
        for j in range(len(lossesb))
    ]
    # after the map_fn
    self.outputas, self.outputbs = outputas, outputbs

    # OUTER LOOP: separate Adam updates for model (theta) and encoder (phi).
    if FLAGS.metatrain_iterations > 0:
      optimizer = tf.train.AdamOptimizer(self.meta_lr)
      THETA = tf.get_collection(  # pylint: disable=invalid-name
          tf.GraphKeys.GLOBAL_VARIABLES, scope='model')
      PHI = tf.get_collection(  # pylint: disable=invalid-name
          tf.GraphKeys.GLOBAL_VARIABLES, scope='encoder')
      self.gvs_theta = gvs_theta = optimizer.compute_gradients(
          self.total_losses2[-1], THETA)
      metatrain_theta_op = optimizer.apply_gradients(gvs_theta)
      self.gvs_phi = gvs_phi = optimizer.compute_gradients(
          self.total_losses3[-1], PHI)
      metatrain_phi_op = optimizer.apply_gradients(gvs_phi)
      # Single op that runs both applies.
      with tf.control_dependencies([metatrain_theta_op, metatrain_phi_op]):
        self.metatrain_op = tf.no_op()

      # Mean of the encoder's 'scale' variables, logged as a summary.
      scale_v = [
          v for v in self.encoder_w.trainable_variables if 'scale' in v.name
      ]
      scale_norm = [tf.reduce_mean(v) for v in scale_v]
      scale_norm = tf.reduce_mean(scale_norm)

      tf.summary.scalar(prefix + 'full_loss', total_losses3[-1])
      tf.summary.scalar(prefix + 'regularizer',
                        total_losses3[-1] - total_losses2[-1])
      tf.summary.scalar(prefix + 'untransformed_scale', scale_norm)
  else:
    # Evaluation branch: same reductions, stored under metaval_* names.
    self.metaval_total_loss1 = tf.reduce_sum(
        lossesa) / tf.to_float(FLAGS.meta_batch_size)
    self.metaval_total_losses2 = total_losses2 = [
        tf.reduce_sum(msesb[j]) / tf.to_float(FLAGS.meta_batch_size)
        for j in range(len(msesb))
    ]
    self.metaval_total_losses3 = total_losses3 = [
        tf.reduce_sum(lossesb[j]) / tf.to_float(FLAGS.meta_batch_size)
        for j in range(len(lossesb))
    ]

  tf.summary.scalar(prefix + 'Pre-mse', total_losses2[0])
  tf.summary.scalar(prefix + 'Post-mse_' + str(num_updates),
                    total_losses2[-1])
# Demonstrate the two import styles for the math module.
from math import sqrt

print(sqrt(13689))

import math

# dir() lists every name the math module exposes.
everything = dir(math)
print(everything)
def test_property_generator():
    """Yield one (check_property, sampler, name) case per public Sampler property."""
    sampler = Sampler(0)
    for name in dir(sampler):
        # Skip 'context' and private names.
        if name == "context" or name.startswith("_"):
            continue
        # Only attributes that are exactly `property` objects on the class.
        if type(getattr(Sampler, name)) == property:
            yield check_property, sampler, name
#data = eval('(' + obj.get() + ')') data = '(' + obj.get() + ')' # Show error message if no values found #if eval(data).count() == 0: # messagebox.showinfo('Error','No data found') # return data return data # Left frame frameLeft = ttk.Frame(root) frameLeft.pack(side=tk.LEFT, anchor=tk.NW, fill=tk.Y, padx=4, pady=2) # Combobox to select a data frame comboboxDataframes = ttk.Combobox(frameLeft) variables= [var for var in dir() if isinstance(eval(var), pd.core.frame.DataFrame)] comboboxDataframes['values'] = variables comboboxDataframes.bind('<Return>',ListColumns) comboboxDataframes.bind('<<ComboboxSelected>>',ListColumns) comboboxDataframes.pack(pady=4) # Properties panel frameProperties = ttk.Frame(frameLeft, relief='ridge', borderwidth=4) frameProperties.pack(side=tk.BOTTOM, anchor=tk.SW, fill=tk.Y, pady=4) # Properties panel Histogram frameHistogram = ttk.Frame(frameProperties) frameHistogram.pack(side=tk.BOTTOM, anchor=tk.SW, fill=tk.Y, pady=4) labelHistogram = ttk.Label(frameHistogram, text='number of bins:') labelHistogram.pack(side=tk.TOP, anchor=tk.NW, padx=4, pady=2)
def load_methods(self, module, user_modules):
    """
    Read the given user-written py3status class file and store its methods.
    Those methods will be executed, so we will deliberately ignore:
        - private methods starting with _
        - decorated methods such as @property or @staticmethod
        - 'on_click' methods as they'll be called upon a click_event
        - 'kill' methods as they'll be called upon this thread's exit
    """
    if not self.module_class:
        # user provided modules take precedence over py3status provided modules
        if self.module_name in user_modules:
            include_path, f_name = user_modules[self.module_name]
            self._py3_wrapper.log('loading module "{}" from {}{}'.format(
                module, include_path, f_name))
            self.module_class = self.load_from_file(include_path + f_name)
        # load from py3status provided modules
        else:
            self._py3_wrapper.log(
                'loading module "{}" from py3status.modules.{}'.format(
                    module, self.module_name))
            self.module_class = self.load_from_namespace(self.module_name)

    class_inst = self.module_class

    if class_inst:
        try:
            # containers have items attribute set to a list of contained
            # module instance names. If there are no contained items then
            # ensure that we have a empty list.
            if class_inst.Meta.container:
                class_inst.items = []
        except AttributeError:
            pass

        # module configuration
        mod_config = self.config['py3_config'].get(module, {})

        # process any deprecated configuration settings
        try:
            deprecated = class_inst.Meta.deprecated
        except AttributeError:
            deprecated = None

        if deprecated:

            def deprecation_log(item):
                # log the deprecation
                # currently this is just done to the log file as the user
                # does not need to take any action.
                if 'msg' in item:
                    msg = item['msg']
                    param = item.get('param')
                    if param:
                        msg = '`{}` {}'.format(param, msg)
                    msg = 'DEPRECATION WARNING: {} {}'.format(
                        self.module_full_name, msg)
                    self._py3_wrapper.log(msg)

            if 'rename' in deprecated:
                # renamed parameters
                for item in deprecated['rename']:
                    param = item['param']
                    new_name = item['new']
                    if param in mod_config:
                        # only copy if the new name is not already configured
                        if new_name not in mod_config:
                            mod_config[new_name] = mod_config[param]
                        # remove from config
                        del mod_config[param]
                        deprecation_log(item)

            if 'format_fix_unnamed_param' in deprecated:
                # format update where {} was previously allowed
                for item in deprecated['format_fix_unnamed_param']:
                    param = item['param']
                    placeholder = item['placeholder']
                    if '{}' in mod_config.get(param, ''):
                        mod_config[param] = mod_config[param].replace(
                            '{}', '{%s}' % placeholder)
                        deprecation_log(item)

            if 'rename_placeholder' in deprecated:
                # rename placeholders
                placeholders = {}
                for item in deprecated['rename_placeholder']:
                    placeholders[item['placeholder']] = item['new']
                    format_strings = item['format_strings']
                    for format_param in format_strings:
                        format_string = mod_config.get(format_param)
                        if not format_string:
                            continue
                        format = Formatter().update_placeholders(
                            format_string, placeholders)
                        mod_config[format_param] = format

            if 'update_placeholder_format' in deprecated:
                # update formats for placeholders if a format is not set
                for item in deprecated['update_placeholder_format']:
                    placeholder_formats = item.get('placeholder_formats', {})
                    if 'function' in item:
                        placeholder_formats.update(
                            item['function'](mod_config))
                    format_strings = item['format_strings']
                    for format_param in format_strings:
                        format_string = mod_config.get(format_param)
                        if not format_string:
                            continue
                        format = Formatter().update_placeholder_formats(
                            format_string, placeholder_formats)
                        mod_config[format_param] = format

            if 'substitute_by_value' in deprecated:
                # one parameter sets the value of another
                for item in deprecated['substitute_by_value']:
                    param = item['param']
                    value = item['value']
                    substitute = item['substitute']
                    substitute_param = substitute['param']
                    substitute_value = substitute['value']
                    if (mod_config.get(param) == value and
                            substitute_param not in mod_config):
                        mod_config[substitute_param] = substitute_value
                        deprecation_log(item)

            if 'function' in deprecated:
                # parameter set by function
                for item in deprecated['function']:
                    updates = item['function'](mod_config)
                    for name, value in updates.items():
                        if name not in mod_config:
                            mod_config[name] = value

            if 'remove' in deprecated:
                # obsolete parameters forcibly removed
                for item in deprecated['remove']:
                    param = item['param']
                    if param in mod_config:
                        del mod_config[param]
                        deprecation_log(item)

        # apply module configuration
        for config, value in mod_config.items():
            # names starting with '.' are private
            if not config.startswith('.'):
                setattr(self.module_class, config, value)

        # process any update_config settings
        try:
            update_config = class_inst.Meta.update_config
        except AttributeError:
            update_config = None

        if update_config:
            if 'update_placeholder_format' in update_config:
                # update formats for placeholders if a format is not set
                for item in update_config['update_placeholder_format']:
                    placeholder_formats = item.get('placeholder_formats', {})
                    format_strings = item['format_strings']
                    for format_param in format_strings:
                        format_string = getattr(class_inst, format_param, None)
                        if not format_string:
                            continue
                        format = Formatter().update_placeholder_formats(
                            format_string, placeholder_formats)
                        setattr(class_inst, format_param, format)

        # Add the py3 module helper if modules self.py3 is not defined
        if not hasattr(self.module_class, 'py3'):
            setattr(self.module_class, 'py3', Py3(self))

        # allow_urgent
        # get the value form the config or use the module default if
        # supplied.
        fn = self._py3_wrapper.get_config_attribute
        param = fn(self.module_full_name, 'allow_urgent')
        # a 'none_setting' marker means the config had no value; default True
        if hasattr(param, 'none_setting'):
            param = True
        self.allow_urgent = param

        # get the available methods for execution
        for method in sorted(dir(class_inst)):
            if method.startswith('_'):
                continue
            else:
                m_type = type(getattr(class_inst, method))
                # crude check: filters out @property/@staticmethod attributes
                if 'method' in str(m_type):
                    params_type = self._params_type(method, class_inst)
                    if method == 'on_click':
                        self.click_events = params_type
                    elif method == 'kill':
                        self.has_kill = params_type
                    elif method == 'post_config_hook':
                        self.has_post_config_hook = True
                    else:
                        # the method_obj stores infos about each method
                        # of this module.
                        method_obj = {
                            'cached_until': time(),
                            'call_type': params_type,
                            'instance': None,
                            'last_output': {
                                'name': method,
                                'full_text': ''
                            },
                            'method': method,
                            'name': None
                        }
                        self.methods[method] = method_obj

    # done, log some debug info
    if self.config['debug']:
        self._py3_wrapper.log(
            'module "{}" click_events={} has_kill={} methods={}'.format(
                module, self.click_events, self.has_kill,
                self.methods.keys()))
def list(self, pattern=None):
    """Return K8sPersistentVolume wrappers for all persistent volumes,
    optionally keeping only those whose name contains *pattern*.
    """
    raw_items = super(K8sPersistentVolume, self).list()
    volumes = [PersistentVolume(item) for item in raw_items]
    if pattern is not None:
        volumes = [vol for vol in volumes if pattern in vol.name]
    wrapped = []
    for vol in volumes:
        # The first recognised volume-source attribute on the spec decides
        # the wrapper's type.
        source_types = [attr for attr in dir(vol.spec)
                        if attr in PersistentVolumeSpec.VOLUME_TYPES_TO_SOURCE_MAP]
        # NOTE(review): each wrapper is created with self.name rather than
        # vol.name — looks deliberate, but worth confirming upstream.
        wrapper = K8sPersistentVolume(config=self.config, name=self.name,
                                      type=source_types[0])
        wrapper.model = vol
        wrapped.append(wrapper)
    return wrapped
def __introspect_validation_rules(self):
    """Collect rule names from '_validate*'-prefixed attributes.

    An attribute named '_validate_foo_bar' yields the rule 'foo_bar'
    (everything after the first two underscore-separated pieces).
    """
    rules = []
    for attr_name in dir(self):
        if attr_name.startswith('_validate'):
            rules.append('_'.join(attr_name.split('_')[2:]))
    return tuple(rules)
class jeans:
    """A pair of jeans with waist/length sizing, a color, and a worn flag."""

    def __init__(self, waist, length, color):
        self.waist = waist
        self.length = length
        self.color = color
        # Not being worn until put_on() is called.
        self.wearing = False

    def put_on(self):
        """Announce and record that the jeans are now worn."""
        message = 'Putting on {}x{} {} jeans'.format(
            self.waist, self.length, self.color)
        print(message)
        self.wearing = True

    def take_off(self):
        """Announce and record that the jeans are no longer worn."""
        message = 'Taking off {}x{} {} jeans'.format(
            self.waist, self.length, self.color)
        print(message)
        self.wearing = False


# create and examine a pair of jeans
my_jeans = jeans(31, 32, 'blue')
print(type(my_jeans))
print(dir(my_jeans))

# don and remove the jeans
my_jeans.put_on()
print(my_jeans.wearing)
my_jeans.take_off()
print(my_jeans.wearing)
def inner(default_project_path):
    """Bootstrap the Django project and dispatch the management command.

    Builds the settings from three layers, each overriding the previous one:
    django's global_settings, base.settings, then an optional project
    'configuration' module (or the module named by --settings).  Side
    effects: mutates sys.path, os.environ and django.conf.settings.
    """
    sys.path.append(SRC_PATH)
    sys_argv = sys.argv
    PROJECT_PATH = False
    # Consume every --pythonpath <path> pair from argv, making each path
    # importable; the last one wins as PROJECT_PATH.
    while "--pythonpath" in sys_argv:
        index = sys_argv.index("--pythonpath")
        PROJECT_PATH = os.path.join(os.getcwd(), sys_argv[index + 1])
        sys.path.insert(0, PROJECT_PATH)
        # We prevent the pythonpath to be handled later on by removing it from
        # sys_argv
        sys_argv = sys_argv[:index] + sys_argv[index + 2 :]
    if not PROJECT_PATH:
        PROJECT_PATH = default_project_path
        sys.path.insert(0, PROJECT_PATH)
    os.environ.setdefault("PROJECT_PATH", PROJECT_PATH)
    # --settings <module> selects the top settings layer; defaults to
    # 'configuration'.
    if "--settings" in sys_argv:
        index = sys_argv.index("--settings")
        SETTINGS_MODULE = sys_argv[index + 1]
    else:
        SETTINGS_MODULE = "configuration"
    mod = False
    # There are three levels of settings, each overriding the previous one:
    # global_settings.py, settings.py and configuration.py
    from django.conf import global_settings as CONFIGURATION
    from base import settings as SETTINGS
    SETTINGS_PATHS = [SETTINGS.__file__]
    # Layer 2: copy every attribute of base.settings onto the configuration.
    for setting in dir(SETTINGS):
        setting_value = getattr(SETTINGS, setting)
        setattr(CONFIGURATION, setting, setting_value)
    try:
        mod = import_module(SETTINGS_MODULE)
    except ModuleNotFoundError:
        # The project-level settings module is optional.
        SETTINGS_MODULE = None
    if mod:
        SETTINGS_PATHS.append(mod.__file__)
        # Layer 3: only UPPERCASE names override, per Django convention.
        for setting in dir(mod):
            if setting.isupper():
                setattr(CONFIGURATION, setting, getattr(mod, setting))
    INSTALLED_APPS = CONFIGURATION.BASE_INSTALLED_APPS + list(
        CONFIGURATION.INSTALLED_APPS
    )
    for app in CONFIGURATION.REMOVED_APPS:
        INSTALLED_APPS.remove(app)
    from django.conf import settings
    settings.configure(
        CONFIGURATION,
        SETTINGS_MODULE=SETTINGS_MODULE,
        SETTINGS_PATHS=SETTINGS_PATHS,
        INSTALLED_APPS=INSTALLED_APPS,
        MIDDLEWARE=(
            CONFIGURATION.BASE_MIDDLEWARE + list(CONFIGURATION.MIDDLEWARE)
        ),
    )
    os.environ["TZ"] = settings.TIME_ZONE
    # Short-circuit for the version command: print and exit without Django.
    if sys_argv[1] in ["version", "--version"]:
        from base import get_version
        sys.stdout.write(get_version() + "\n")
        return
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys_argv)
def __init__(self):
    """Record the public os-module names that this object also provides."""
    shadowed = []
    for name in dir(_os):
        if name.startswith('_'):
            continue
        # Keep only names that exist on this instance as well.
        if hasattr(self, name):
            shadowed.append(name)
    self._attrs = shadowed
import cv2
import numpy as np

# Names of all mouse-event constants exposed by cv2.
events = [name for name in dir(cv2) if 'EVENT' in name]
# print(events)

# Clicked pixel locations: as [x, y] pairs and as "x, y" strings.
refPt = []
Point_Per = []


def click_event(event, x, y, flags, param):
    """Mouse callback: on left click, record and annotate the clicked point."""
    if event != cv2.EVENT_LBUTTONDOWN:
        return
    print(x, ",", y)
    refPt.append([x, y])
    label = str(x) + ", " + str(y)
    Point_Per.append(label)
    cv2.putText(img, label, (x, y), cv2.FONT_HERSHEY_SIMPLEX,
                0.5, (255, 255, 0), 2)
    cv2.imshow("image", img)


# NOTE(review): hard-coded absolute path; change it to match your directory.
img = cv2.imread(r"C:\Users\aminb\Desktop\FIBO\Image\Moduel_image\test_field.jpg")
cv2.imshow("image", img)
# Register the mouse-click handler on the displayed window.
cv2.setMouseCallback("image", click_event)