def openUrl(url, timeout = 10): '''A simple way to open url url: the URI to be opened return:the return status code ''' if None == url: return HTTP_ERR_RETCODE; if (type("") == type(url)) and ("" == url.strip()): return HTTP_ERR_RETCODE; try: from urllib2 import socket; socket.setdefaulttimeout(timeout) from urllib2 import urlopen; import inspect; print inspect.getsource(urlopen); response = urlopen(url,timeout=10); if not response: return HTTP_ERR_RETCODE; try: response.close(); except Exception, closeE: print "closeE:" + str(closeE); pass; return response.getcode();
def getsource(obj, is_binary=False):
    """Wrapper around inspect.getsource.

    This can be modified by other projects to provide customized source
    extraction.

    Inputs:

    - obj: an object whose source code we will attempt to extract.

    Optional inputs:

    - is_binary: whether the object is known to come from a binary source.
      This implementation skips binary objects entirely, but custom
      extractors may know how to meaningfully process them.
    """
    if is_binary:
        return None
    # Follow the @decorator chain so the undecorated source is shown.
    if hasattr(obj, "__wrapped__"):
        obj = obj.__wrapped__
    try:
        src = inspect.getsource(obj)
    except TypeError:
        # Instances carry no retrievable source; fall back to their class.
        if hasattr(obj, "__class__"):
            src = inspect.getsource(obj.__class__)
    return cast_unicode(src, encoding=get_encoding(obj))
def print_p_box():
    # Print the P-Box tutorial section.  The two {} slots are filled with
    # the live source of the example functions, so the prose always matches
    # the code it describes.
    # NOTE(review): Python 2 print statement; p_box,
    # cipher_with_better_diffusion_p_box and inspect come from module scope.
    print """\nP-Box ----------------------------------------------- There are a variety of ways to achieve diffusion. The first today is called a P-Box. The goal of applying the P-Box is to spread input bits around to different bytes of output. Being a bit level shuffle can make it relatively expensive to implement in software, but relatively efficient in hardware. Here's an example P-Box, which simply applies the definition of diffusion: make it so that each input bit influences the output evenly. The P-Box works on eight eight-bit bytes. It takes the first bit from each byte and outputs that as byte 1, the second bit from each byte and outputs that as byte 2, and so on through all 8 bytes. {} If p_box is applied before cipher_with_poor_diffusion, a stronger cipher is created: {} This is visible by repeating our diffusion test from earlier; If you cipher two similar messages as before, more then one byte will have changed. If the cipher is applied multiple times in succession (called applying rounds), the difference becomes more pronounced, as is shown below: """.format(inspect.getsource(p_box), inspect.getsource(cipher_with_better_diffusion_p_box))
def codegen(self, extracted, emitter):
    """Emit the generated module: inlined emitter sources, top-level
    imports, helper definitions, and a main() entry point."""
    # Inline the source of the emitter object graph into the output module.
    for target in (emitter, emitter.env, emitter.config,
                   emitter.config.name_scanner,
                   emitter.config.template_scanner):
        self.m.stmt(self.get_source_code(target))

    # module-level imports
    for mod in ("sys", "os.path", "re"):
        self.toplevel.import_(mod)
    for name in ("defaultdict", "Mapping", "OrderedDict"):
        self.toplevel.from_("collections", name)
    self.toplevel.import_("logging")
    self.toplevel.stmt("logger = logging.getLogger(__name__)")
    self.toplevel.sep()

    # Helpers the generated code depends on, copied in verbatim.
    for helper in (reify, InputWrapper):
        self.toplevel.stmt(inspect.getsource(helper))

    # main
    with self.m.def_("main", "args"):
        self.build_main(extracted, emitter)
    with self.m.if_("__name__ == '__main__'"):
        self.m.stmt("logging.basicConfig(level=logging.INFO)")
        self.m.stmt("main(sys.argv[1:])")
    return self.m
def setup_from_filter_class(self):
    """Cache the source-code hash and version of self.filter_class.

    The hash covers the filter class and every superclass up to and
    including DexyFilter, so a change anywhere in that chain produces a
    different hash.  Both values are memoized on the class itself so they
    are only computed once per class.
    """
    if not hasattr(self.filter_class, 'SOURCE_CODE'):
        pieces = []
        klass = self.filter_class
        # Walk the superclass chain down to (but not including) DexyFilter...
        while klass != dexy.dexy_filter.DexyFilter:
            pieces.append(inspect.getsource(klass))
            klass = klass.__base__
        # ...then add the DexyFilter base itself.
        pieces.append(inspect.getsource(dexy.dexy_filter.DexyFilter))
        self.filter_class.SOURCE_CODE = self.compute_hash("".join(pieces))

    if not hasattr(self.filter_class, 'VERSION'):
        self.filter_class.VERSION = self.filter_class.version(self.log)

    self.filter_name = self.filter_class.__name__
    self.filter_source = self.filter_class.SOURCE_CODE
    self.filter_version = self.filter_class.VERSION
    if self.final is None:
        self.final = self.filter_class.FINAL
def setup_from_filter_class(self):
    # cache filter class source code so it only has to be calculated once
    # The cache attribute name embeds the class name (SOURCE_CODE_<Name>) so
    # a subclass never inherits its parent's cached hash via attribute lookup.
    filter_class_source_const = "SOURCE_CODE_%s" % self.filter_class.__name__
    if not hasattr(self.filter_class, filter_class_source_const):
        # get source code of this filter class + all parent filter classes.
        source = ""
        klass = self.filter_class
        # get source code from filter class and all parent classes
        while klass != dexy.dexy_filter.DexyFilter:
            source += inspect.getsource(klass)
            klass = klass.__base__
        # and then get source code of DexyFilter class
        source += inspect.getsource(dexy.dexy_filter.DexyFilter)
        filter_class_source_hash = self.compute_hash(source)
        setattr(self.filter_class, filter_class_source_const, filter_class_source_hash)
        # Sanity check: the attribute we just set reads back unchanged.
        assert filter_class_source_hash == getattr(self.filter_class, filter_class_source_const)
        self.log.debug("Source code hash for %s is %s" % (self.filter_class.__name__, filter_class_source_hash))
    if not hasattr(self.filter_class, 'VERSION'):
        filter_version = self.filter_class.version(self.log)
        self.filter_class.VERSION = filter_version
    self.filter_name = self.filter_class.__name__
    self.filter_source = getattr(self.filter_class, filter_class_source_const)
    self.filter_version = self.filter_class.VERSION
    if self.final is None:
        self.final = self.filter_class.FINAL
def __init__(self, design, name, map_fun, reduce_fun=None,
             language='javascript', wrapper=None, **defaults):
    """Initialize the view definition.

    The code in `map_fun` and `reduce_fun` is automatically dedented: any
    common leading whitespace is removed from each line.

    :param design: the name of the design document (a leading '_design/'
                   prefix is stripped)
    :param name: the name of the view
    :param map_fun: the map function code, or a Python function whose
                    source is extracted
    :param reduce_fun: the reduce function code (optional), same handling
    :param language: the name of the language used
    :param wrapper: an optional callable used to wrap the result rows
    """
    # Accept either '_design/foo' or bare 'foo'.
    prefix = '_design/'
    if design.startswith(prefix):
        design = design[len(prefix):]
    self.design = design
    self.name = name

    def _as_code(fun):
        # A Python function is converted to its decorator-stripped source.
        if isinstance(fun, FunctionType):
            fun = _strip_decorators(getsource(fun).rstrip())
        return fun

    map_fun = _as_code(map_fun)
    self.map_fun = dedent(map_fun.lstrip('\n'))
    reduce_fun = _as_code(reduce_fun)
    if reduce_fun:
        reduce_fun = dedent(reduce_fun.lstrip('\n'))
    self.reduce_fun = reduce_fun
    self.language = language
    self.wrapper = wrapper
    self.defaults = defaults
def getCode(data):
    """Resolve a 'CODE:...' reference string to raw Python source.

    :param data: string of the form 'CODE:<identifier>[:CLASS]'; anything
        not starting with 'CODE' is ignored.
    :return: the source text, '' when it cannot be retrieved, or None for
        non-CODE input.
    """
    if not data.startswith('CODE'):
        return
    functionID = data.split(':')[1]
    if data.endswith(':CLASS'):
        function = None
        # SECURITY NOTE(review): exec/eval built from `data` -- callers must
        # guarantee this never receives untrusted input.  (Python 2 exec
        # statement syntax.)
        exec "import %s" % functionID.rsplit('.', 2)[0]
        exec "function = eval('%s')" % functionID
        try:
            raw = inspect.getsource(function)
            f = ''
            # Drop everything up to the first space on each line (one level
            # of space-separated indentation); blank/unsplittable lines
            # become empty lines.
            for i in raw.split('\n'):
                try:
                    new = i.split(' ', 1)[1]
                    f += new + '\n'
                except:
                    f += '\n'
            return f
        except TypeError:
            return ''
    else:
        # Plain function reference: look it up in the framework registry.
        function = blur_function.PYTHON_FRAMEWORK[functionID]
        return inspect.getsource(function)
def plug_in(self, out):
    """Write a self-contained logistic-regression predict script to *out*.

    Sections, in order: fixed preamble, helper functions, the class
    definition with this instance's methods, the serialized model dict,
    and a usage example built from one of the input fields.
    """
    write = out.write
    # Fixed preamble blocks.
    for preamble in (COPYRIGHT, IMPORTS, CONSTANTS):
        write(preamble)
    # Free-standing helper functions.
    for function in FUNCTIONS:
        write(inspect.getsource(function))
        write("\n\n")
    write(CLASS_DEFINITION)
    # Methods copied from this instance's class.
    for method in CLASS_METHODS:
        write(inspect.getsource(getattr(self, method)))
        write("\n")
    # Embed the model itself as a pretty-printed dict literal.
    write("logistic_regression_json = ")
    pprint.pprint(self.__dict__, out)
    write("\n")
    # Pick a field other than the objective for the example call.
    example_field_id = self.input_fields[0]
    if example_field_id == self.objective_id:
        example_field_id = self.input_fields[1]
    example_field_name = self.fields[example_field_id]["name"]
    example = summary_example(self.fields[example_field_id])
    write("local_logistic = BasicLR(logistic_regression_json)\n"
          "# place here your input data as a dictionary\n"
          "input_data = {\"%s\": %s}\n" % (example_field_name, example))
    write("local_logistic.predict(input_data)\n")
def assert_function_style(name, member, doc, args):
    """Check that *member*'s docstring documents what its code does.

    Raises ValueError naming the missing '# Returns', '# Raises' or
    '# Arguments' section.  A return/raise that only occurs inside a
    function nested in *member* does not need to be documented on the
    outer function.
    """
    code = inspect.getsource(member)

    def _nested_matches(pattern):
        # Matches of `pattern` within functions nested inside `member`.
        return [m
                for const in member.__code__.co_consts
                if inspect.iscode(const)
                for m in re.findall(pattern, inspect.getsource(const),
                                    re.MULTILINE)]

    has_return = re.findall(r"\s*return \S+", code, re.MULTILINE)
    if has_return and "# Returns" not in doc:
        if len(_nested_matches(r"\s*return \S+")) < len(has_return):
            raise ValueError("{} needs a '# Returns' section".format(name),
                             member.__module__)

    has_raise = re.findall(r"^\s*raise \S+", code, re.MULTILINE)
    if has_raise and "# Raises" not in doc:
        if len(_nested_matches(r"\s*raise \S+")) < len(has_raise):
            raise ValueError("{} needs a '# Raises' section".format(name),
                             member.__module__)

    if len(args) > 0 and "# Arguments" not in doc:
        raise ValueError("{} needs a '# Arguments' section".format(name),
                         member.__module__)
    assert_blank_before(name, member, doc,
                        ['# Arguments', '# Raises', '# Returns'])
def _analyzeGens(top, absnames):
    # Build an analyzed AST ("tree") for every generator block in `top` and
    # return the list of trees, ready for code conversion.
    #
    # top: iterable of generator objects (_UserCode, _Always* wrappers, or
    #      plain @instance generators)
    # absnames: maps id(generator) -> hierarchical name, used to label trees
    #
    # NOTE(review): Python 2 code (print statement, func_globals/func_code,
    # `long`).
    genlist = []
    for g in top:
        if isinstance(g, _UserCode):
            # User-supplied code is passed through untouched.
            tree = g
        elif isinstance(g, (_AlwaysComb, _AlwaysSeq, _Always)):
            f = g.func
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            #print ast.dump(tree)
            tree.sourcefile = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.func_globals.copy()
            tree.callstack = []
            # handle free variables
            tree.nonlocaldict = {}
            if f.func_code.co_freevars:
                for n, c in zip(f.func_code.co_freevars, f.func_closure):
                    obj = _cell_deref(c)
                    if isinstance(g, _AlwaysComb):
                        # Only a restricted set of closed-over types can be
                        # converted inside an always_comb block.
                        if not ( isinstance(obj, (int, long, EnumType,_Signal)) or \
                                 _isMem(obj) or _isTupleOfInts(obj)
                               ):
                            info = "File %s, line %s: " % (tree.sourcefile, tree.lineoffset)
                            print type(obj)
                            raise ConversionError(_error.UnsupportedType, n, info)
                    tree.symdict[n] = obj
                    # currently, only intbv as automatic nonlocals (until Python 3.0)
                    if isinstance(obj, intbv):
                        tree.nonlocaldict[n] = obj
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            # Pick the analysis visitor matching the block flavour.
            if isinstance(g, _AlwaysComb):
                v = _AnalyzeAlwaysCombVisitor(tree, g.senslist)
            elif isinstance(g, _AlwaysSeq):
                v = _AnalyzeAlwaysSeqVisitor(tree, g.senslist, g.reset, g.sigregs, g.varregs)
            else:
                v = _AnalyzeAlwaysDecoVisitor(tree, g.senslist)
            v.visit(tree)
        else: # @instance
            # Generators expose their symbols through the frame, not the
            # function object, so locals are merged in as well.
            f = g.gen.gi_frame
            s = inspect.getsource(f)
            s = _dedent(s)
            tree = ast.parse(s)
            # print ast.dump(tree)
            tree.sourcefile = inspect.getsourcefile(f)
            tree.lineoffset = inspect.getsourcelines(f)[1]-1
            tree.symdict = f.f_globals.copy()
            tree.symdict.update(f.f_locals)
            tree.nonlocaldict = {}
            tree.callstack = []
            tree.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _FirstPassVisitor(tree)
            v.visit(tree)
            v = _AnalyzeBlockVisitor(tree)
            v.visit(tree)
        genlist.append(tree)
    return genlist
def assertDefinitionIndented(self, obj):
    """Assert that preprocessing obj's source yields the source of its
    like-named counterpart in `processed`, and that the result compiles."""
    counterpart = getattr(processed, obj.__name__)
    original_src = inspect.getsource(obj)
    transformed_src = inspect.getsource(counterpart)
    self.assertShowWhitespaceEqual(preproc(original_src), transformed_src)
    self.assertCompiles(transformed_src)
def _default_config(self):
    """Return the source of `default_config` with one
    prefs.add('python_path', ...) line per entry of self.pathlist appended
    at the end of its set_prefs function."""
    module_src = inspect.getsource(default_config)
    set_prefs_src = inspect.getsource(default_config.set_prefs)
    # Split the module source around the set_prefs function so the extra
    # lines can be inserted right after it.
    head, tail = module_src.split(set_prefs_src)
    path_lines = "\n" + "".join(
        " prefs.add('python_path', '%s')\n" % path
        for path in self.pathlist)
    return head + set_prefs_src + path_lines + tail
def output(self):
    # Emit (to stdout) a flattened "<Name>_SUPER" class: the collected
    # imports, a class statement inheriting from all fathers, then the
    # first father's plain attributes followed by its method sources.
    # NOTE(review): Python 2 (print statement, 'instancemethod'); uses
    # eval() on self.first_father -- only safe with trusted input.
    # print import
    for i in sorted(self.imports):
        print i
    # print class definition
    print 'class %s_SUPER(%s):' % (str(self.first_father), reduce(lambda a, b: a + "," + b, self.fathers))
    attrs = []
    functions = []
    # Partition members into plain attributes vs. instance methods.
    for i in inspect.getmembers(eval(self.first_father)):
        if str(type(i[1])).find('instancemethod') == -1:
            attrs.append([i[0], i[1]])
        else:
            functions.append([i[0], i[1]])
    # print attrbute definition
    for i in attrs:
        # str/list/dict values are printed repr'd so they re-parse as
        # literals; everything else is printed as-is.
        if type(i[1]) in [type(''), type([]), type({})]:
            print "    ", i[0], "=", repr(i[1])
        else:
            print "    ", i[0], "=", i[1]
    # print class definition
    for i in functions:
        print inspect.getsource(eval(str(self.first_father) + "." + str(i[0])))
def set_metadata_hash(self):
    """Populate self.metadata with everything that should invalidate this
    artifact's cache entry, then compute and save the hash."""
    md = self.metadata
    md.ext = self.ext
    md.key = self.key
    md.next_filter_name = self.next_filter_name
    md.prior_hash = self.prior.hashstring
    md.pre_method_source = inspect.getsource(self.pre)
    md.post_method_source = inspect.getsource(self.post)

    self.log.debug("args for %s are %s" % (self.key, self.args))
    # Stable, key-sorted rendering of the args dict ('runner' excluded).
    md.argstr = ", ".join(
        "%s: %s" % (k, str(self.args[k]))
        for k in sorted(self.args) if k not in ['runner'])

    # Determines if Dexy itself has been updated or if the filter source
    # code has changed.
    md.dexy_version = dexy.__version__
    sources = []
    klass = self.filter_class
    # Collect recorded sources up the superclass chain, then the Filter
    # base itself.
    while klass != dexy.filter.Filter:
        sources.append(dexy.filter.Filter.source[klass.__name__])
        klass = klass.__base__
    sources.append(dexy.filter.Filter.source[klass.__name__])
    md.filter_source = "\n".join(sources)

    # TODO add filter software version

    self.set_and_save_hash()
def load_config(func_list=None):
    """
    Defines default configuration for Couch DB. We need the following:

        URI    - the Couch DB URI
        DB     - the Couch DB name
        DESIGN - the Couch DB design name
        DEBUG  - debug level, used to setup HTTPConnection debug level

    :param func_list: optional iterable of functions whose source is
        appended to the generated module (default: none).
    :return: the generated configuration module source text.
    """
    # BUG FIX: was `func_list=[]` -- a mutable default argument shared
    # across calls; use None as the sentinel instead.
    if func_list is None:
        func_list = []
    msg = """ import os, re, sys, time, types, traceback, inspect import urllib, urllib2, httplib import json from json import JSONDecoder, JSONEncoder try: from path import path except ImportError: pass try: from ipipe import * except ImportError: pass # global variables URI="http://localhost:5984" DB="das" DESIGN="dasadmin" DEBUG=0 """
    # Bundle the PrintManager helper (and an instance of it) into the
    # generated module, followed by each requested function's source.
    msg += "\n%s\n" % inspect.getsource(PrintManager)
    msg += "PM = PrintManager()"
    for func in func_list:
        msg += "\n%s\n" % inspect.getsource(func)
    return msg
def run(self, func, *args, **kwargs):
    """Execute *func* remotely over SSH and return its result.

    The function's source plus a pickling wrapper are streamed to a remote
    Python interpreter; the wrapper writes the pickled return value to a
    remote temp file, which is then read back and deleted.

    Raises the remote exception locally if the call failed there.
    """
    # Create a remote temp file to receive the pickled return value.
    si, so, sr = self.ssh.exec_command('mktemp')
    tmpfile = so.read().rstrip().decode('utf-8')
    code = '\n'.join([
        inspect.getsource(func),
        inspect.getsource(_wrapper_function).format(
            func.__name__,
            repr(pickle.dumps(args)),
            repr(pickle.dumps(kwargs)),
            repr(tmpfile),
        ),
        "_wrapper_function()"
    ])
    # Match the remote interpreter to the local major version.
    if sys.version_info[0] == 2:
        si, so, sr = self.ssh.exec_command('python')
    else:
        si, so, sr = self.ssh.exec_command('python3')
    si.write(code)
    si.channel.shutdown_write()
    # Mirror remote stdout/stderr locally.
    sys.stdout.write(so.read().decode('utf-8'))
    sys.stderr.write(sr.read().decode('utf-8'))
    # Fetch and clean up the result file in one round trip.
    si, so, sr = self.ssh.exec_command('cat {0};rm {0}'.format(tmpfile))
    # NOTE: unpickling data produced by the remote interpreter -- only use
    # against hosts you trust.
    ret = pickle.loads(so.read())
    if isinstance(ret, Exception):
        raise ret
    return ret
    # BUG FIX: removed an unreachable `return None` that followed the
    # `return ret` above (dead code).
def main():
    # Interactive regression driver: render each demo, wait for a visual
    # sanity check, then save it in every supported image format and report
    # success/failure per file.
    # NOTE(review): Python 2 (print statement, raw_input).
    print inspect.getsource(save)
    # Single-figure demos: one output file per format.
    for demo in ['ParametricSurfaceDemo', 'SimplePlot', 'TestNumPy', ]:
        result = __import__(demo).make()
        raw_input("Is the demo looking HAPPY? ")
        for format in ('png', 'pdf', 'ps', 'eps', 'svg'):
            print 'Saving %s.%s...' % (demo, format),
            if save(result, '%s.%s' % (demo, format), format):
                print 'success'
            else:
                print 'failure'
    # Multi-widget demos: walk each sub-widget and save it separately,
    # numbering the output files.
    for demo in ['AutoSwitch', 'EnrichmentDemo', ]:
        result = __import__(demo).make()
        raw_input("Is the demo looking HAPPY? ")
        for i, w in enumerate(walk(result)):
            for format in ('png', 'pdf', 'ps', 'eps', 'svg'):
                print 'Saving %s%s.%s...' % (demo, i, format),
                if save(w, '%s%s.%s' % (demo, i, format), format):
                    print 'success'
                else:
                    print 'failure'
def test_80_import_project(self):
    """Projects inserted into projectdb are importable as modules under
    the virtual `projects` package, each exposing a Handler class."""
    record = {
        'name': 'test_project',
        'group': 'group',
        'status': 'TODO',
        'script': inspect.getsource(sample_handler),
        'comments': 'test project',
        'rate': 1.0,
        'burst': 10,
    }
    # Insert the same project definition under two names.
    for project_name in ('test_project2', 'test_project3'):
        self.projectdb.insert(project_name, dict(record))

    # from-import of the package attribute.
    from projects import test_project
    self.assertIsNotNone(test_project)
    self.assertIsNotNone(test_project.Handler)
    # from-import of a name inside the module.
    from projects.test_project2 import Handler
    self.assertIsNotNone(Handler)
    # plain dotted import.
    import projects.test_project3
    self.assertIsNotNone(projects.test_project3.Handler)
def __init__(cls, name, bases, attrs):
    """Plugin-mount metaclass initializer.

    The first class created with this metaclass becomes the mount point:
    it gains the `plugins`, `aliases` and `source` registries.  Every
    subsequently created class is treated as a plugin implementation and
    is registered in those (inherited) registries.
    """
    if not hasattr(cls, "plugins"):
        # Mount point: create the registries, record our own source, done.
        cls.plugins = []
        cls.aliases = {}
        cls.source = {}
        cls.source[cls.__name__] = inspect.getsource(cls)
        return
    # Plugin implementation: register it and record its source.
    cls.plugins.append(cls)
    cls.source[cls.__name__] = inspect.getsource(cls)
    if hasattr(cls, "ALIASES"):
        # Explicit aliases win; duplicates across plugins are an error.
        for alias in cls.ALIASES:
            if alias in cls.aliases:
                raise Exception(
                    "duplicate alias %s found in %s, already present in %s"
                    % (alias, cls.__name__, cls.aliases[alias].__name__)
                )
            cls.aliases[alias] = cls
    elif hasattr(cls, "NAMESPACE"):
        # Fall back to registering under the plugin's namespace.
        cls.aliases[cls.NAMESPACE] = cls
def getsource(obj, is_binary=False):
    """Wrapper around inspect.getsource.

    This can be modified by other projects to provide customized source
    extraction.

    Inputs:

    - obj: an object whose source code we will attempt to extract.

    Optional inputs:

    - is_binary: whether the object is known to come from a binary source.
      This implementation skips binary objects; custom extractors may know
      how to meaningfully process them.
    """
    if is_binary:
        return None
    try:
        return inspect.getsource(obj)
    except TypeError:
        # Instances carry no retrievable source; fall back to their class.
        if hasattr(obj, '__class__'):
            return inspect.getsource(obj.__class__)
def do_pycat(self, args): """Print the source to a module we have imported.""" #TODO(ghowland): Pycat functions, classes, etc from source as well. try: print getsource(eval(args)) except IOError, e: print 'Python IOError: %s' % e
def __init__(self, num_subsamples=25, num_bootstraps=100,
             subsample_len_exp=0.5, with_cilk=False, with_openMP=False,
             dimension=1, pure_python=False, with_scala=False):
    """Configure the bootstrap specializer and pre-parse the ASTs of the
    three user-overridable kernels (estimate / reduce / average)."""
    self.dim = dimension
    self.with_cilk = with_cilk
    self.with_openMP = with_openMP
    self.pure_python = pure_python
    self.with_scala = with_scala
    # Grab and parse each kernel's source once, up front.
    for prefix, kernel in (("estimate", self.compute_estimate),
                           ("reduce", self.reduce_bootstraps),
                           ("average", self.average)):
        src = inspect.getsource(kernel)
        setattr(self, prefix + "_src", src)
        setattr(self, prefix + "_ast", ast.parse(src.lstrip()))
    self.num_subsamples = num_subsamples
    self.num_bootstraps = num_bootstraps
    self.subsample_len_exp = subsample_len_exp
    # Cache of compiled specializer modules, keyed per configuration.
    self.cached_mods = {}
def get_interface(self):
    """Return a JSON payload with the (highlighted) source of the
    interface named in the first path segment of the query string.

    Falls back to un-highlighted or empty source when highlighting or
    source retrieval fails; returns '' if the query string is unusable.
    """
    try:
        myinterface_name = self.request['QUERY_STRING'].split('/')[0]
    except Exception:
        # BUG FIX: was a bare `except:` (also swallowed KeyboardInterrupt/
        # SystemExit); keep the best-effort fallback but be explicit.
        return ''
    interfacesdict = {}
    get_interfaces(tuple(providedBy(self.context)), interfacesdict)
    myiclass = interfacesdict[myinterface_name]
    try:
        code = inspect.getsource(myiclass)
        source = highlight(code, PythonLexer(), HtmlFormatter())
    except TypeError:
        # Highlighting/introspection rejected the object; serve raw source.
        source = '<pre>' + inspect.getsource(myiclass) + '</pre>'
    except NameError:
        # pygments names not importable in this deployment.
        source = inspect.getsource(myiclass)
    except Exception:
        # BUG FIX: was a bare `except:`; last-resort fallback stays empty.
        source = ""
    status = 'Reading ' + inspect.getsourcefile(myiclass)
    result = {'status': status, 'bottom': source}
    return json.dumps(result, ensure_ascii=True, indent=4)
def _getContents(self, mod):
    """Return the source text of *mod*, or *mod* itself when its source
    cannot be retrieved (builtins, C extensions, ...)."""
    try:
        # BUG FIX: the source was previously fetched twice (the first
        # result was discarded); fetch it once.
        value = inspect.getsource(mod)
    except Exception:
        value = mod
    return value
def job_file(url, password, request_id, n_args, shell, grid_engine_opts):
    """Format the template for a specific job, ready for deployment.

    *url* is the URL (including port) that the workers should contact to
    fetch job information, including a trailing slash.

    *password* is the HTTP Basic Auth password to use when talking to *url*.

    *request_id* is the request ID workers should use to associate
    themselves with the correct request.

    *n_args* is the number of jobs that will be queued in the array task,
    the same as the number of arguments being mapped by sheepdog.

    *shell* is the path to the Python that will execute the job. Could be a
    system or user Python, so long as it meets the Sheepdog requirements.
    Is used for the -S option to GridEngine as well as the script shebang.

    *grid_engine_opts* is a list of string arguments to Grid Engine to
    specify options such as resource requirements.
    """
    # Copy so the caller's list is not mutated by the appends below.
    grid_engine_opts = list(grid_engine_opts)
    grid_engine_opts.append("-t 1-{0}".format(n_args))
    grid_engine_opts.append("-S \"{0}\"".format(shell))
    geopts = '\n'.join("#$ {0}".format(opt) for opt in grid_engine_opts)
    client_code = inspect.getsource(client)
    serialisation_code = inspect.getsource(serialisation)
    # NOTE(review): template.format(**locals()) consumes these *local
    # variable names* (url, password, request_id, geopts, client_code,
    # serialisation_code, ...); renaming any local in this function would
    # silently break the template substitution.
    return template.format(**locals())
def get_class(self):
    """Return a JSON payload with the (highlighted) source of the ancestor
    class named in the first path segment of the query string.

    XXX: monkey patches applied at runtime are not visible here.
    """
    myclass = self.context.__class__
    ancestors = {}
    get_ancestors(myclass, ancestors)
    try:
        mysupclass_name = self.request['QUERY_STRING'].split('/')[0]
    except Exception:
        # BUG FIX: was a bare `except:` (also swallowed KeyboardInterrupt/
        # SystemExit); keep the best-effort fallback but be explicit.
        return ''
    mysupclass = ancestors[mysupclass_name]
    try:
        # The first assignment is immediately overwritten, but it forces
        # getsourcefile() to run (and raise) before highlighting starts,
        # so it is deliberately kept.
        code = '### Reading' + inspect.getsourcefile(mysupclass)
        code = inspect.getsource(mysupclass)
        source = highlight(code, PythonLexer(), HtmlFormatter())
    except TypeError:
        # Highlighting/introspection rejected the object; serve raw source.
        source = '<pre>' + inspect.getsource(mysupclass) + '</pre>'
    except NameError:
        # pygments names not importable in this deployment.
        source = inspect.getsource(mysupclass)
    except Exception:
        # BUG FIX: was a bare `except:`; last-resort fallback stays empty.
        source = ""
    status = 'Reading ' + inspect.getsourcefile(mysupclass)
    result = {'status': status, 'bottom': source}
    return json.dumps(result, ensure_ascii=True, indent=4)
def queue_get_model_code(server_hwobj):
    """Return [(module_name, module_source), ...] for the queue model.

    The client compiles each pair with imp.new_module/exec and registers
    it in sys.modules; the module it actually needs is the *last* entry,
    earlier entries are dependencies of the queue model that the XML-RPC
    client cannot import directly.
    """
    # Recipe from
    # http://code.activestate.com/recipes/82234-importing-a-dynamically-generated-module/
    # queue_model_objects_v1 currently only imports standard Python modules,
    # so only the queue model code itself (plus its enumerables) is sent.
    modules = (queue_model_enumerables, queue_model_objects)
    return [(module.__name__, inspect.getsource(module))
            for module in modules]
def _extract_source(self, modelname, pml, className):
    """Assemble a deployable source file for the model *pml*.

    Sections, in order: fixed sys imports, user imports extracted from
    pml.require, user-defined functions (pml.udfs), and a subclass of
    pml's class containing all of its methods' source.

    :return: the generated file contents as a string.
    """
    filesource = "#<start sys imports>\n"
    filesource += "from yhat import %s\n" % pml.__name__
    filesource += "import inspect\n"
    filesource += "import re\n"
    filesource += "#<end sys imports>\n"
    filesource += "#<start user imports>\n"
    # Collect import lines from the body of pml.require, dropping
    # commented-out ones.
    import_source = inspect.getsource(pml.require)
    imports = []
    for line in import_source.split('\n'):
        if "import" in line:
            imports.append(line.strip())
    # BUG FIX (idiom): was `i.startswith("#")==False`.
    imports = [i for i in imports if not i.startswith("#")]
    filesource += "\n".join(imports) + "\n"
    filesource += "#<end user imports>\n\n"
    filesource += "#<start user functions>\n"
    if hasattr(pml, "udfs"):
        for udf in pml.udfs:
            if isinstance(udf, types.FunctionType):
                source = inspect.getsource(udf).split("\n")
                # De-indent the whole body by the first line's leading
                # whitespace.  NOTE(review): raises AttributeError when the
                # first line has no leading spaces -- presumably udfs are
                # always nested; confirm with callers.
                padding = re.search('[ ]+', source[0]).group(0)
                for line in source:
                    filesource += line[len(padding)-1:] + "\n"
                filesource += "\n"
    filesource += "#<end user functions>\n"
    filesource += "\n"
    filesource += "class %s(%s):" % (className, pml.__name__) + "\n"
    for name, step in inspect.getmembers(pml, predicate=inspect.ismethod):
        filesource += inspect.getsource(step) + "\n"
    return filesource
def __init__(self):
    # NOTE(review): Python 2 era -- hashlib.md5() is fed a str directly
    # (would need .encode() under Python 3).
    # Class-level caches, computed once per Artifact subclass.
    if not hasattr(self.__class__, 'FILTERS'):
        self.__class__.FILTERS = dexy.introspect.filters(Constants.NULL_LOGGER)
    if not hasattr(self.__class__, 'SOURCE_CODE'):
        # The hash covers this subclass plus the Artifact base, so an edit
        # to either invalidates previously cached artifacts.
        artifact_class_source = inspect.getsource(self.__class__)
        artifact_py_source = inspect.getsource(Artifact)
        self.__class__.SOURCE_CODE = hashlib.md5(artifact_class_source + artifact_py_source).hexdigest()
    # Per-instance state, initialized to empty/unknown defaults.
    self._inputs = {}
    self.additional = None
    self.db = [] # accepts 'append'
    self.args = {}
    self.args['globals'] = {}
    self.is_last = False
    self.artifact_class_source = self.__class__.SOURCE_CODE
    self.artifacts_dir = 'artifacts' # TODO don't hard code
    self.batch_id = None
    self.binary_input = None
    self.binary_output = None
    self.ctime = None
    self.data_dict = OrderedDict()
    self.dexy_version = Version.VERSION
    self.dirty = False
    self.document_key = None
    self.elapsed = 0
    self.final = None
    self.initial = None
    self.inode = None
    self.input_data_dict = OrderedDict()
    self.key = None
    self.log = logging.getLogger()
    self.mtime = None
    self.state = 'new'
def get_source(obj, remove_self=False):
    """Return obj's source text, left-stripped.

    When remove_self is anything other than the literal False (note the
    identity test, preserved from the original: even None or 0 trigger
    removal), every occurrence of 'self,' is dropped from the source.
    """
    source = inspect.getsource(obj).lstrip()
    if remove_self is not False:
        source = source.replace('self,', '')
    return source
# Table of second-derivative boundary-condition functions, grouped as
# ((t-boundaries), (x-boundaries)).
# NOTE(review): d2f0_dt2f / d2f1_dt2f / d2g0_dx2f / d2g1_dx2f are assumed
# to be defined earlier in this module -- confirm.
bcd2f = ((d2f0_dt2f, d2f1_dt2f), (d2g0_dx2f, d2g1_dx2f))


def Yaf(xt):
    """Analytical series solution of the diffusion problem at xt = (x, t).

    The Fourier series is truncated at k = 100.
    NOTE(review): relies on module-level exp, pi, cos, sin and the
    diffusion constant D -- confirm their provenance.
    """
    (x, t) = xt
    Ya = 0
    for k in range(1, 101):
        Ya += 16 * exp(-pi**2 * t * D * k**2) * cos(pi * k / 4)**3 * sin(
            pi * k * x) / (pi**2 * k**2)
    return Ya


if __name__ == '__main__':
    # Smoke check: print each coefficient function's source and its value
    # at the origin with zeroed derivative arguments.
    print(getsource(Gf))
    print('Gf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]) = ',
          Gf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]))
    print()
    print(getsource(dG_dYf))
    print('dG_dYf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]) = ',
          dG_dYf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]))
    print()
    print(getsource(dG_dY_dxf))
    print('dG_dY_dxf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]) = ',
          dG_dY_dxf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]))
    print()
    print(getsource(dG_dY_dtf))
    print('dG_dY_dtf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]) = ',
          dG_dY_dtf([0, 0], 0, [0, 0], [[0, 0], [0, 0]]))
    print()
def default(self, obj):
    """
    This method is used to serialize objects to JSON format.

    If obj is a function, then it will return a dict with two keys:
    'code', for the source code, and 'nonlocals' for all nonlocal values
    (including nonlocal functions, serialized recursively).
    If obj is a np.ndarray, it converts it into a list (truncated arrays
    are rendered as a repr string instead).
    If obj is an object with a __dict__ attribute, it returns its __dict__.
    Else, will let the JSONEncoder do the stuff, and throw an error if the
    type is not suitable for JSONEncoder.

    Parameters
    ----------
    obj : Any
        Arbitrary object to convert

    Returns
    -------
    Any
        Python object that JSON encoder will recognize
    """
    if not (isinstance(obj, ModuleType)) and isinstance(
            obj, (MethodType, FunctionType)):
        # Capture the function's closure (globals + nonlocals) so that a
        # change in any captured value changes the serialization.
        cvars = inspect.getclosurevars(obj)
        cvardict = {
            **copy.copy(cvars.globals),
            **copy.copy(cvars.nonlocals)
        }
        for i in list(cvardict):
            # NOTE : All module types objects are removed, because otherwise it
            # throws ValueError: Circular reference detected if not. TODO
            if isinstance(cvardict[i], ModuleType):
                del cvardict[i]
        try:
            code = inspect.getsource(obj)
        except OSError:
            # This happens when rendering videos included in the documentation
            # within doctests and should be replaced by a solution avoiding
            # hash collision (due to the same, empty, code strings) at some point.
            # See https://github.com/ManimCommunity/manim/pull/402.
            code = ""
        return self._cleaned_iterable({
            "code": code,
            "nonlocals": cvardict
        })
    elif isinstance(obj, np.ndarray):
        # Large arrays are downsampled and rendered as a repr string to
        # keep the serialized form bounded.
        if obj.size > 1000:
            obj = np.resize(obj, (100, 100))
            return f"TRUNCATED ARRAY: {repr(obj)}"
        # We return the repr and not a list to avoid the JsonEncoder to iterate over it.
        return repr(obj)
    elif hasattr(obj, "__dict__"):
        temp = getattr(obj, "__dict__")
        # MappingProxy is scene-caching nightmare. It contains all of the
        # object methods and attributes. We skip it as the mechanism will at
        # some point process the object, but instantiated.
        # Indeed, there is certainly no case where scene-caching will receive
        # only a non instancied object, as this is never used in the library
        # or encouraged to be used user-side.
        if isinstance(temp, MappingProxyType):
            return "MappingProxy"
        return self._cleaned_iterable(temp)
    elif isinstance(obj, np.uint8):
        return int(obj)
    # Serialize it with only the type of the object. You can change this to
    # whatever string when debugging the serialization process.
    return str(type(obj))
def getsource(object):  # pylint: disable=redefined-builtin
    """Return the source of *object* after stripping TFDecorator wrappers,
    as a drop-in replacement for inspect.getsource."""
    innermost = tf_decorator.unwrap(object)[1]
    return _inspect.getsource(innermost)
def get_code(self):
    """Return the source code of the current class (Heap)."""
    from inspect import getsource
    return getsource(Heap)
def help(cls):
    """Print the source code of *cls* (deliberately shadows builtin help)."""
    source_text = inspect.getsource(cls)
    print(source_text)
class Helpful(commands.Cog):
    # Cog with meta/utility commands: latency ping, uptime, and a GitHub
    # source-lookup command for any bot command or cog method.

    def __init__(self, bot):
        # Swap in the custom help command, keeping the default around so it
        # could be restored later.
        self._default_help_command = bot.help_command
        bot.help_command = StellaBotHelp()
        bot.help_command.cog = self
        self.bot = bot

    @commands.command(aliases=["ping", "p"], help="Shows the bot latency from the discord websocket.")
    async def pping(self, ctx):
        # Websocket latency reported in milliseconds.
        await ctx.embed(
            title="PP",
            description=f"Your pp lasted `{self.bot.latency * 1000:.2f}ms`"
        )

    @commands.command(aliases=["up"], help="Shows the bot uptime from when it was started.")
    async def uptime(self, ctx):
        # NOTE(review): assumes self.bot.uptime is a naive UTC datetime set
        # at startup -- confirm against the bot's initialization.
        c_uptime = datetime.datetime.utcnow() - self.bot.uptime
        await ctx.embed(
            title="Uptime",
            description=f"Current uptime: `{humanize.precisedelta(c_uptime)}`"
        )

    @commands.command(aliases=["src", "sources"],
                      brief="Shows the source code link in github.",
                      help="Shows the source code in github given the cog/command name. "
                           "Defaults to the stella_bot source code link if not given any argument. "
                           "It accepts 2 types of content, the command name, or the Cog method name. "
                           "Cog method must specify it's Cog name separate by a period and it's method.",
                      cls=flg.SFlagCommand)
    @flg.add_flag("--code", type=bool, action="store_true", default=False,
                  help="Shows the code block instead of the link. Accepts True or False, defaults to False if not stated.")
    async def source(self, ctx, content=None, **flags):
        source_url = 'https://github.com/InterStella0/stella_bot'
        # No argument: just link the repository.
        if not content:
            return await ctx.embed(title="here's the entire repo", description=source_url)
        src, module = None, None

        def command_check(command):
            # Resolve a bot command name (or 'help') to a code object and
            # its defining module; results are written to the nonlocals.
            nonlocal src, module
            if command == 'help':
                src = type(self.bot.help_command)
                module = src.__module__
            else:
                obj = self.bot.get_command(command.replace('.', ' '))
                if obj and obj.cog_name != "Jishaku":
                    src = obj.callback.__code__
                    module = obj.callback.__module__

        def cog_check(content):
            # Resolve 'CogName.method' to the method's code object.
            nonlocal src, module
            if "." not in content:
                return
            cog, _, method = content.partition(".")
            cog = self.bot.get_cog(cog)
            if method_func := getattr(cog, method, None):
                module = method_func.__module__
                # Commands wrap the real function in .callback.
                target = getattr(method_func, "callback", method_func)
                src = target.__code__

        # Try command resolution first, then cog-method resolution.
        for func in (command_check, cog_check):
            if not src:
                func(content)
        if module is None:
            return await ctx.maybe_reply(f"Method {content} not found.")
        show_code = flags.pop("code", False)
        if show_code:
            # Paginate the raw source as Discord code blocks.
            param = {"text": inspect.getsource(src), "width": 1900, "replace_whitespace": False}
            list_codeblock = [f"```py\n{cb}\n```" for cb in textwrap.wrap(**param)]
            menu = MenuBase(empty_page_format(list_codeblock))
            await menu.start(ctx)
        else:
            # Link to the exact line span on GitHub.
            lines, firstlineno = inspect.getsourcelines(src)
            location = module.replace('.', '/') + '.py'
            url = f'<{source_url}/blob/master/{location}#L{firstlineno}-L{firstlineno + len(lines) - 1}>'
            await ctx.embed(title=f"Here's uh, {content}", description=f"[Click Here]({url})")
@handcalc() def func_1(x, y): a = 2 * x b = 3 * a + y return locals() @handcalc() def error_func(x, y): a = 2 * x b = 3 * a + y return b # Must return locals() cell_1_source = remove_imports_defs_and_globals(inspect.getsource(cell_1)) cell_2_source = remove_imports_defs_and_globals(inspect.getsource(cell_2)) cell_2b_source = remove_imports_defs_and_globals(inspect.getsource(cell_2b)) cell_3_source = remove_imports_defs_and_globals(inspect.getsource(cell_3)) cell_4_source = remove_imports_defs_and_globals(inspect.getsource(cell_4)) cell_5_source = remove_imports_defs_and_globals(inspect.getsource(cell_5)) cell_6_source = remove_imports_defs_and_globals(inspect.getsource(cell_6)) cell_7_source = remove_imports_defs_and_globals(inspect.getsource(cell_7)) cell_7b_source = remove_imports_defs_and_globals(inspect.getsource(cell_7b)) cell_8_source = remove_imports_defs_and_globals(inspect.getsource(cell_8)) error_cell_source = remove_imports_defs_and_globals( inspect.getsource(error_cell)) cell_1_renderer = handcalcs.handcalcs.LatexRenderer(cell_1_source, cell_1.calc_results) cell_2_renderer = handcalcs.handcalcs.LatexRenderer(cell_2_source,
def _src_hash(self, func): _src = inspect.getsource(func) return hashlib.md5(_src.encode()).hexdigest()
def source(func):
    """Return a report of *func*'s defining file followed by its source code."""
    header = 'File: %s\n\n' % inspect.getsourcefile(func)
    body = inspect.getsource(func)
    return header + body
[sg.Col(col_listbox), col_instructions], ] # create the form and show it without the plot window = sg.Window('Demo Application - Embedding Matplotlib In PySimpleGUI', layout, resizable=True, finalize=True) canvas_elem = window['-CANVAS-'] multiline_elem = window['-MULTILINE-'] figure_agg = None while True: event, values = window.read() if event in (sg.WIN_CLOSED, 'Exit'): break if figure_agg: # ** IMPORTANT ** Clean up previous drawing before drawing again delete_figure_agg(figure_agg) # get first listbox item chosen (returned as a list) choice = values['-LISTBOX-'][0] # get function to call from the dictionary func = fig_dict[choice] # show source code to function in multiline window['-MULTILINE-'].update(inspect.getsource(func)) fig = func() # call function to get the figure figure_agg = draw_figure(window['-CANVAS-'].TKCanvas, fig) # draw the figure
for div in divs: div.unwrap() return str(body) except Exception as exc: logger = logging.getLogger(__name__) logger.warning("Failed to process body_text\n" + str(exc)) return body_text # These final lines exist to give sphinx a stable str representation of # these two functions accross runs, and to ensure that the str changes # if the source does. # # Note that this would be better down with a metaclass factory table_fix_src = inspect.getsource(table_fix) table_fix_hash = hashlib.sha512(table_fix_src.encode()).hexdigest() derender_toc_src = inspect.getsource(derender_toc) derender_toc_hash = hashlib.sha512(derender_toc_src.encode()).hexdigest() class TableFixMeta(type): def __repr__(self): return f"table_fix, hash: {table_fix_hash}" def __str__(self): return f"table_fix, hash: {table_fix_hash}" class TableFix(object, metaclass=TableFixMeta): def __new__(cls, *args, **kwargs):
def preparejob(self, jobconfig, master_input_sandbox):
    """Build the batch wrapper script for this (sub)job.

    Loads 'BatchScriptTemplate.py.template' (located next to this module),
    substitutes its ###...### placeholders with job-specific values
    (sandboxes, environment, queue settings, inlined helper module sources)
    and writes the result as '__jobscript__' into the job's input workspace.

    :param jobconfig: job configuration object (sandbox files, exe, args, env)
    :param master_input_sandbox: master job's packed input sandbox file list
    :return: result of writefile (the written '__jobscript__' — presumably its
             path; TODO confirm against InputWorkspace.writefile)
    """
    job = self.getJobObject()
    # NOTE(review): mon is never used below — confirm whether the monitoring
    # service hookup was intended here or this is dead code.
    mon = job.getMonitoringService()
    import Ganga.Core.Sandbox as Sandbox
    from Ganga.GPIDev.Lib.File import File
    from Ganga.Core.Sandbox.WNSandbox import PYTHON_DIR
    import inspect

    # Ship the Ganga.Utility.files module source to the worker node so the
    # wrapper can import it from the sandbox's PYTHON_DIR.
    fileutils = File(inspect.getsourcefile(Ganga.Utility.files), subdir=PYTHON_DIR)

    sharedfiles = jobconfig.getSharedFiles()

    subjob_input_sandbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles() + [fileutils])

    appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
    sharedoutputpath = job.getOutputWorkspace().getPath()
    ## FIXME Check this isn't a GangaList
    outputpatterns = jobconfig.outputbox
    environment = jobconfig.env if not jobconfig.env is None else {}

    import inspect  # NOTE(review): duplicate import (already imported above)
    # The template file lives alongside this source module on disk.
    script_location = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))),
        'BatchScriptTemplate.py.template')

    from Ganga.GPIDev.Lib.File import FileUtils
    text = FileUtils.loadScript(script_location, '')

    import Ganga.Core.Sandbox as Sandbox  # NOTE(review): duplicate import
    import Ganga.Utility as Utility
    from Ganga.Utility.Config import getConfig
    from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles, getWNCodeForInputdataListCreation

    jobidRepr = repr(self.getJobObject().getFQID('.'))

    # Placeholder -> replacement map. Helper modules/functions are inlined
    # verbatim into the wrapper script via inspect.getsource.
    replace_dict = {
        '###OUTPUTSANDBOXPOSTPROCESSING###': getWNCodeForOutputSandbox(job, ['__syslog__'], jobidRepr),
        '###OUTPUTUPLOADSPOSTPROCESSING###': getWNCodeForOutputPostprocessing(job, ''),
        '###DOWNLOADINPUTFILES###': getWNCodeForDownloadingInputFiles(job, ''),
        '###INLINEMODULES###': inspect.getsource(Sandbox.WNSandbox),
        '###INLINEHOSTNAMEFUNCTION###': inspect.getsource(Utility.util.hostname),
        '###APPSCRIPTPATH###': repr(appscriptpath),
        #'###SHAREDINPUTPATH###' : repr(sharedinputpath)),
        '###INPUT_SANDBOX###': repr(subjob_input_sandbox + master_input_sandbox + sharedfiles),
        '###CREATEINPUTDATALIST###': getWNCodeForInputdataListCreation(job, ''),
        '###SHAREDOUTPUTPATH###': repr(sharedoutputpath),
        '###OUTPUTPATTERNS###': repr(outputpatterns),
        '###JOBID###': jobidRepr,
        '###ENVIRONMENT###': repr(environment),
        '###PREEXECUTE###': self.config['preexecute'],
        '###POSTEXECUTE###': self.config['postexecute'],
        '###JOBIDNAME###': self.config['jobid_name'],
        '###QUEUENAME###': self.config['queue_name'],
        '###HEARTBEATFREQUENCE###': self.config['heartbeat_frequency'],
        '###INPUT_DIR###': repr(job.getStringInputDir()),
        '###GANGADIR###': repr(getConfig('System')['GANGA_PYTHONPATH'])
    }

    # Python 2 dict iteration — this is py2-era Ganga code.
    for k, v in replace_dict.iteritems():
        text = text.replace(str(k), str(v))

    logger.debug('subjob input sandbox %s ', subjob_input_sandbox)
    logger.debug('master input sandbox %s ', master_input_sandbox)

    from Ganga.GPIDev.Lib.File import FileBuffer
    return job.getInputWorkspace().writefile(FileBuffer('__jobscript__', text), executable=1)
from apps.ml.income_classifier.extra_trees import ExtraTreesClassifier

# Register both income-classifier algorithms with the ML registry at import
# time; failures are reported but must not prevent the app from starting.
try:
    registry = MLRegistry()

    # Random Forest classifier
    rf = RandomForestClassifier()
    # add to ML registry
    registry.add_algorithm(
        endpoint_name="income_classifier",
        algorithm_object=rf,
        algorithm_name="random forest",
        algorithm_status="production",
        algorithm_version="0.0.1",
        owner="NorthernJay",
        algorithm_description="Random Forest with simple pre and post process",
        algorithm_code=inspect.getsource(RandomForestClassifier))

    # Extra Trees classifier
    et = ExtraTreesClassifier()
    # add to ML registry
    registry.add_algorithm(
        endpoint_name="income_classifier",
        algorithm_object=et,
        algorithm_name="extra trees",
        algorithm_status="testing",
        algorithm_version="0.0.1",
        owner="NorthernJay",
        algorithm_description="Extra Tree with simple pre and post process",
        # BUG FIX: the original registered inspect.getsource(RandomForestClassifier)
        # here — a copy-paste error that stored the wrong algorithm's source.
        algorithm_code=inspect.getsource(ExtraTreesClassifier))
except Exception as e:
    print("Exception while loading the algorithms to the registry", str(e))
def _call_Attribute(self, node):
    """Evaluate an attribute-call AST node (``obj.method(...)``).

    Intrinsic methods are executed directly in the host interpreter with the
    current FSM passed as first argument. Ordinary Python methods are
    "inlined": a call scope is pushed, arguments are bound, the method's
    source is re-parsed and visited, pending return jumps are resolved, and
    the scope is popped.

    :param node: ast.Call node whose func is an ast.Attribute
    :return: the called method's result (intrinsic path) or the value
             produced by visiting the inlined function definition
    :raises TypeError: if the resolved attribute is not a method/function
    """
    value = self.visit(node.func.value)
    method = getattr(value, node.func.attr)
    if not inspect.ismethod(method) and not inspect.isfunction(method):
        raise TypeError("'%s' object is not callable" % str(type(method)))

    # prepare the argument values
    args = []
    kwargs = OrderedDict()
    for arg in node.args:
        args.append(self.visit(arg))
    for key in node.keywords:
        kwargs[key.arg] = self.visit(key.value)

    # check intrinsic method
    name = str(method)
    if self._is_intrinsic_method(value, method) or name in self.intrinsic_methods:
        args.insert(0, self.fsm)

        # pass the current local scope
        from .thread import Thread
        from .pool import ThreadPool
        if isinstance(value, Thread):
            value.start_frame = self.start_frame
        if isinstance(value, ThreadPool):
            for thread in value.threads:
                thread.start_frame = self.start_frame

        # intrinsics run natively, not via AST visiting
        return method(*args, **kwargs)

    # stack a new scope frame
    self.pushScope(ftype='call')

    # bind actual arguments to the callee's parameter names;
    # sorted() gives a deterministic binding order
    resolved_args = inspect.getcallargs(method, *args, **kwargs)
    for arg, value in sorted(resolved_args.items(), key=lambda x: x[0]):
        self.setArgBind(arg, value)

    self.setFsm()
    self.incFsmCount()

    # re-parse the callee's source (dedented) so its body can be visited inline
    text = textwrap.dedent(inspect.getsource(method))
    tree = ast.parse(text).body[0]

    # visit the function definition
    ret = self._visit_next_function(tree)

    # fsm jump by return statement
    end_count = self.getFsmCount()
    unresolved_return = self.getUnresolvedReturn()
    for ret_count, value in unresolved_return:
        self.setFsm(ret_count, end_count)

    # clean-up jump conditions
    self.clearBreak()
    self.clearContinue()
    self.clearReturn()
    self.clearReturnVariable()

    # return to the previous scope frame
    self.popScope()
    return ret
def test_meta_schema_gen_up_to_date(tmp_path: Path) -> None:
    """Regenerate the metaschema module and check it matches the checked-in one."""
    generated = tmp_path / "src.py"
    python_codegen(metaschema_file_uri, generated)
    assert os.path.exists(generated)
    with open(generated) as handle:
        regenerated_text = handle.read()
    assert regenerated_text == inspect.getsource(cg_metaschema)
def marmoset_patch(func, s, r):
    """Monkey-patch *func* in place by textual source substitution (Python 2).

    Replaces substring *s* with *r* in func's source, re-executes the modified
    definition inside func's own globals, then swaps the resulting code object
    into the original function so every existing reference picks up the change.

    NOTE(review): executes rewritten source with exec — only safe for trusted
    s/r values, and requires func's source to be retrievable by inspect.
    """
    source = inspect.getsource(func).replace(s, r)
    exec source in func.func_globals
    func.func_code = func.func_globals[func.__name__].func_code
def test_doctest_issue4197(self):
    """Run doctest's own test suite from inside a zip file (Python 2 stdlib test).

    Rewrites test_doctest's source to use zip-relative module names, packs it
    (plus sample_doctest) into a zip, imports it off sys.path, and runs the
    subset of its doctests that do not need co-located data files.
    """
    # To avoid having to keep two copies of the doctest module's
    # unit tests in sync, this test works by taking the source of
    # test_doctest itself, rewriting it a bit to cope with a new
    # location, and then throwing it in a zip file to make sure
    # everything still works correctly
    test_src = inspect.getsource(test_doctest)
    test_src = test_src.replace(
        "from test import test_doctest",
        "import test_zipped_doctest as test_doctest")
    test_src = test_src.replace("test.test_doctest",
                                "test_zipped_doctest")
    test_src = test_src.replace("test.sample_doctest",
                                "sample_zipped_doctest")
    sample_src = inspect.getsource(sample_doctest)
    sample_src = sample_src.replace("test.test_doctest",
                                    "test_zipped_doctest")
    with temp_dir() as d:
        script_name = make_script(d, 'test_zipped_doctest', test_src)
        zip_name, run_name = make_zip_script(d, 'test_zip', script_name)
        z = zipfile.ZipFile(zip_name, 'a')
        z.writestr("sample_zipped_doctest.py", sample_src)
        z.close()
        if verbose:
            zip_file = zipfile.ZipFile(zip_name, 'r')
            print 'Contents of %r:' % zip_name
            zip_file.printdir()
            zip_file.close()
        # Remove the loose script so the zipped copy is the only importable one.
        os.remove(script_name)
        sys.path.insert(0, zip_name)
        import test_zipped_doctest
        # Some of the doc tests depend on the colocated text files
        # which aren't available to the zipped version (the doctest
        # module currently requires real filenames for non-embedded
        # tests). So we're forced to be selective about which tests
        # to run.
        # doctest could really use some APIs which take a text
        # string or a file object instead of a filename...
        known_good_tests = [
            test_zipped_doctest.SampleClass,
            test_zipped_doctest.SampleClass.NestedClass,
            test_zipped_doctest.SampleClass.NestedClass.__init__,
            test_zipped_doctest.SampleClass.__init__,
            test_zipped_doctest.SampleClass.a_classmethod,
            test_zipped_doctest.SampleClass.a_property,
            test_zipped_doctest.SampleClass.a_staticmethod,
            test_zipped_doctest.SampleClass.double,
            test_zipped_doctest.SampleClass.get,
            test_zipped_doctest.SampleNewStyleClass,
            test_zipped_doctest.SampleNewStyleClass.__init__,
            test_zipped_doctest.SampleNewStyleClass.double,
            test_zipped_doctest.SampleNewStyleClass.get,
            test_zipped_doctest.old_test1,
            test_zipped_doctest.old_test2,
            test_zipped_doctest.old_test3,
            test_zipped_doctest.old_test4,
            test_zipped_doctest.sample_func,
            test_zipped_doctest.test_DocTest,
            test_zipped_doctest.test_DocTestParser,
            test_zipped_doctest.test_DocTestRunner.basics,
            test_zipped_doctest.test_DocTestRunner.exceptions,
            test_zipped_doctest.test_DocTestRunner.option_directives,
            test_zipped_doctest.test_DocTestRunner.optionflags,
            test_zipped_doctest.test_DocTestRunner.verbose_flag,
            test_zipped_doctest.test_Example,
            test_zipped_doctest.test_debug,
            test_zipped_doctest.test_pdb_set_trace,
            test_zipped_doctest.test_pdb_set_trace_nested,
            test_zipped_doctest.test_testsource,
            test_zipped_doctest.test_trailing_space_in_test,
            test_zipped_doctest.test_DocTestSuite,
            test_zipped_doctest.test_DocTestFinder,
        ]
        # These remaining tests are the ones which need access
        # to the data files, so we don't run them
        fail_due_to_missing_data_files = [
            test_zipped_doctest.test_DocFileSuite,
            test_zipped_doctest.test_testfile,
            test_zipped_doctest.test_unittest_reportflags,
        ]
        # Needed for test_DocTestParser and test_debug
        deprecations = [
            # Ignore all warnings about the use of class Tester in this module.
            ("class Tester is deprecated", DeprecationWarning)
        ]
        if sys.py3kwarning:
            deprecations += [("backquote not supported", SyntaxWarning),
                             ("execfile.. not supported", DeprecationWarning)]
        with test.test_support.check_warnings(*deprecations):
            for obj in known_good_tests:
                _run_object_doctest(obj, test_zipped_doctest)
def __getattribute__(self, name): print 'called getattribute' return object.__getattribute__(self, name) a = A() # end_setup_code print a.__class__ print type(a) # <codecell> import sys import inspect import attr_access reload(attr_access) source = inspect.getsource(attr_access) setup_code = source.split('# end_setup_code')[0] from timeit import timeit print setup_code # <codecell> print 'hello, ben' # <codecell>
def test_lambdify_inspect():
    """inspect.getsource must work on lambdified functions.

    Only checks that the generating expression appears in the retrieved
    source — implementation details are deliberately not hard-coded.
    """
    squared = lambdify(x, x**2)
    retrieved = inspect.getsource(squared)
    assert 'x**2' in retrieved
def wrapper(wrapped, instance, *args, **kwargs):
    """Print diagnostics about *wrapped* (using the closure variable
    ``level`` from the enclosing scope), then delegate the call to it."""
    details = (level, instance, wrapped.__name__, wrapped.__doc__,
               inspect.getfullargspec(wrapped), inspect.getsource(wrapped))
    print(*details)
    return wrapped(*args, **kwargs)
def _has_init(cls): try: inspect.getsource(cls.__init__) # Get the source code of the function return True # If no error is raised, source code has been found except TypeError: # Will raise this error if no Python code found (when initialised implicitly by Python and not the programmer) return False
# Create working directories before any pipeline thread starts writing to them.
if not path.exists(TEMP_FOLDER):
    makedirs(TEMP_FOLDER, exist_ok=True)
if not path.exists(SPLEETER_OUT):
    makedirs(SPLEETER_OUT, exist_ok=True)

# Inter-thread communication
spleeter_q = Queue()   # ffmpeg -> spleeter
arbiter_q = Queue()    # spleeter -> arbiter
playback1_q = Queue()  # arbiter -> playback1
playback2_q = Queue()  # arbiter -> playback2
delete_q = Queue()     # spleeter, playbackN -> delete

# Modify spleeter.separator to consume less VRAM
# https://stackoverflow.com/questions/41858147/how-to-modify-imported-source-code-on-the-fly/41863728
# HACK: textually rewrites the module's hard-coded gpu_memory_fraction and
# re-execs the patched source over the already-loaded module's namespace.
source = getsource(spleeter.separator)
mem_fraction = source.split("gpu_memory_fraction = ")[1]
mem_fraction = mem_fraction[:mem_fraction.find("\n")]
new_source = source.replace("gpu_memory_fraction = " + mem_fraction,
                            "gpu_memory_fraction = " + TF_MEMORY_FRACTION)
exec(new_source, spleeter.separator.__dict__)

# One-time warm-up clip (silence) used to prime spleeter so the first real
# request is not slowed by model initialisation.
warmup_path = TEMP_FOLDER + "/warmup.mp3"
if not path.exists(warmup_path):
    print("Creating warm-up file...")
    s = AudioSegment.silent()
    s.export(warmup_path)
    print("Warm-up file created!")
print("Warming up spleeter")
def visualize(self, save: bool = False) -> None:
    """
    Auxiliary function to visualize and save the visualizations of an
    intensity function, exemplary trajectory and location of points

    :param save: to save (True) or not to save (False) the visualizations
    :type save: boolean (default: False)
    """
    import matplotlib.pyplot as plt
    import inspect
    import warnings

    def _label() -> str:
        # File-name tag derived from the mean function's 'return' expression;
        # may raise (e.g. OSError) when the callable has no retrievable source.
        return inspect.getsource(self.mean_function).split('return')[1].strip()

    def _save_fig(prefix: str, warn_msg: str) -> None:
        # Best-effort save of the current figure. BUG FIX: the original used
        # bare `except:` (also swallowing KeyboardInterrupt/SystemExit) and
        # recomputed the label four times per figure.
        try:
            filename = prefix + _label() + '.png'
            plt.savefig(filename)
            print('Saved as ' + filename)
        except Exception:
            warnings.warn(warn_msg)

    plt.style.use('seaborn-whitegrid')
    plt.rcParams['figure.figsize'] = [10, 5]

    grid = np.linspace(self.lower, self.upper, 10000)
    func = self.mean_function(np.linspace(self.lower, self.upper, 10000))
    try:
        plt.plot(grid, func)
    except Exception:
        # mean_function returned a scalar: broadcast it across the whole grid
        plt.plot(grid, np.repeat(func, 10000))
    plt.title('Intensity function')
    plt.xlabel('time')
    plt.ylabel('value')
    if save:
        _save_fig('intensity_function_', "Saving intensity function failed!")
    plt.show()
    plt.clf()

    # Exemplary simulated trajectory (cumulative event count over time).
    t = self.generate()
    plt.step(t, list(range(0, len(t))))
    plt.title('Simulated trajectory')
    plt.xlabel('time')
    plt.ylabel('value')
    if save:
        _save_fig('trajectory_', "Saving trajectory failed!")
    plt.show()
    plt.clf()

    # Raw event locations on the time axis.
    plt.plot(t, list(np.repeat(0, len(t))), '.')
    plt.title('Simulated points')
    plt.xlabel('time')
    if save:
        _save_fig('points_', "Saving points failed!")
    plt.show()
    plt.clf()
def assertSourceEqual(self, obj, top, bottom):
    """Assert that *obj*'s source equals fixture lines *top* through *bottom*."""
    actual = inspect.getsource(obj)
    expected = self.sourcerange(top, bottom)
    self.assertEqual(actual, expected)
def repr_widget_for_editor(self, widget, first_node=False):
    """Recursively build the Python source that re-creates *widget* and its children.

    On the root call (first_node=True) the bookkeeping structures are reset.
    Each widget contributes allocation / attribute / style setup code; event
    listeners found on the widget are queued in pending_listener_registration
    for later wiring. Widgets flagged editor_newclass have their children's
    code wrapped into a generated class stored in self.code_declared_classes
    instead of being appended to the returned string.

    :return: the nested construction code for this widget (str)
    """
    #widgetVarName is the name with which the parent calls this instance
    if first_node:
        # Reset per-export state before walking the tree.
        self.code_declared_classes = {}
        self.pending_listener_registration = list()
        self.known_project_children = [self, ]  #a list containing widgets that have been parsed and that are considered valid listeners
        self.pending_signals_to_connect = list()  #a list containing dicts {listener, emitter, register_function, listener_function}
        self.path_to_this_widget = []
        self.prepare_path_to_this_widget(self.children['root'])
    self.known_project_children.append(widget)
    widget.path_to_this_widget.append(widget.attributes['editor_varname'])
    print(widget.attributes['editor_varname'])
    code_nested = ''  #the code strings to return
    if not hasattr(widget, 'attributes'):
        return ''  #no nested code
    widgetVarName = widget.attributes['editor_varname']
    newClass = widget.attributes['editor_newclass'] == 'True'
    classname = 'CLASS' + widgetVarName if newClass else widget.__class__.__name__

    # Allocation + attribute + style setup from the prototype templates.
    code_nested = prototypes.proto_widget_allocation % {
        'varname': widgetVarName,
        'classname': classname,
        'editor_constructor': widget.attributes['editor_constructor'],
        'editor_instance_id': widget.identifier
    }
    code_nested += prototypes.proto_attribute_setup % {
        'varname': widgetVarName,
        'attr_dict': ','.join('"%s":"%s"' % (key, widget.attributes[key])
                              for key in widget.attributes.keys()
                              if key not in html_helper.htmlInternallyUsedTags)
    }
    code_nested += prototypes.proto_style_setup % {
        'varname': widgetVarName,
        'style_dict': ','.join('"%s":"%s"' % (key, widget.style[key])
                               for key in widget.style.keys())
    }

    #for all the methods of this widget
    for (setOnEventListenerFuncname, setOnEventListenerFunc) in inspect.getmembers(widget):
        #if the member is decorated by decorate_set_on_listener
        if hasattr(setOnEventListenerFunc, '_event_info'):
            #if there is a callback
            if hasattr(getattr(widget, setOnEventListenerFuncname), 'callback_copy'):
                listenerFunction = getattr(widget, setOnEventListenerFuncname).callback_copy
                if type(listenerFunction) == gui.ClassEventConnector:
                    listenerFunction = listenerFunction.event_method_bound
                listenerPrototype = setOnEventListenerFunc._event_info['prototype']
                listener = listenerFunction.__self__
                listenerFunctionName = listenerFunction.__name__  #setOnEventListenerFunc._event_info['name'] + "_" + widget.attributes['editor_varname']
                # Default: generate an empty stub from the prototype template.
                listenerClassFunction = prototypes.proto_code_function % {
                    'funcname': listenerFunctionName,
                    'parameters': listenerPrototype
                }
                #override, if already implemented, we use this code, unless it is a fakeListenerFunction
                if hasattr(listener, listenerFunctionName) and listenerFunction.__name__ != editor_widgets.fakeListenerFunc.__name__:
                    listenerClassFunction = inspect.getsource(listenerFunction)
                self.pending_listener_registration.append({
                    'done': False,
                    'eventsource': widget,
                    'eventlistener': listener,
                    'setoneventfuncname': setOnEventListenerFuncname,
                    'listenerfuncname': listenerFunctionName,
                    'listenerClassFunction': listenerClassFunction
                })

    if newClass:
        # Inside a generated class the widget refers to itself as 'self'.
        widgetVarName = 'self'

    children_code_nested = ''
    for child_key in widget.children.keys():
        child = widget.children[child_key]
        if type(child) == str:
            #children_code_nested += prototypes.proto_layout_append%{'parentname':widgetVarName,'varname':"'%s'"%child}
            continue
        if 'editor_varname' not in child.attributes.keys():
            continue
        child.path_to_this_widget = widget.path_to_this_widget[:]
        children_code_nested += self.repr_widget_for_editor(child)
        children_code_nested += prototypes.proto_layout_append % {
            'parentname': widgetVarName,
            'varname': "%s,'%s'" % (child.attributes['editor_varname'],
                                    child.attributes['editor_varname'])
        }

    children_code_nested += self.check_pending_listeners(widget, widgetVarName)

    if newClass:  # and not (classname in self.code_declared_classes.keys()):
        # Children's code becomes the body of the generated class, prepended
        # to any code already collected for this widget's identifier.
        if not widget.identifier in self.code_declared_classes:
            self.code_declared_classes[widget.identifier] = ''
        self.code_declared_classes[widget.identifier] = prototypes.proto_code_class % {
            'classname': classname,
            'superclassname': widget.attributes['editor_baseclass'],
            'nested_code': children_code_nested
        } + self.code_declared_classes[widget.identifier]
    else:
        code_nested = code_nested + children_code_nested
    return code_nested
def load_raw(name):
    """Return the raw source text of template *name* from ch2.uranus.template."""
    log.debug(f'Loading template {name}')
    template_package = __import__('ch2.uranus.template', fromlist=[name])
    template = getattr(template_package, name)
    return getsource(template)
def parse_nddl_class(python_object):
    """Print the source code of *python_object*'s class (Python 2 module).

    NOTE(review): despite the name, no NDDL parsing is visible here — the
    function only retrieves and prints the class source; confirm intent.
    """
    py_class = python_object.__class__
    source = getsource(py_class)
    print source
__name__, suppress_callback_exceptions=True, external_stylesheets=[dbc.themes.COSMO], ) server = app.server app_subdomain = os.getenv("APP_SUBDOMAIN", "dash-vtk-tutorials") pages = [ p.replace(".py", "") for p in sorted(os.listdir("demos")) if p not in ignored_pages and p.endswith(".py") ] print(pages) modules = {p: import_module(f"demos.{p}") for p in pages} apps = {p: m.app for p, m in modules.items()} source_codes = {p: getsource(m) for p, m in modules.items()} notfound_404 = html.Div([ html.H1("404"), "Webpage not found. Please contact us if a page is supposed to be here.", ]) app.layout = dbc.Container( children=[ dbc.Row( style={ "height": "10%", "align-items": "center" }, children=[ dbc.Col( [
def __init__(self, fieldset, ptype, pyfunc=None, funcname=None,
             funccode=None, py_ast=None, funcvars=None, c_include=""):
    """Build a particle kernel from a Python function (or its source/AST).

    :param fieldset: FieldSet the kernel operates on
    :param ptype: particle type; ptype.uses_jit selects C code generation
    :param pyfunc: kernel function taking (particle, fieldset, time), or None
                   when funccode/py_ast are supplied instead
    :param funcname: kernel name (defaults to pyfunc.__name__)
    :param funccode: kernel source text (defaults to pyfunc's source)
    :param py_ast: pre-parsed AST of the kernel (defaults to parsing funccode)
    :param funcvars: kernel local-variable names (defaults to pyfunc's co_varnames)
    :param c_include: extra C code — inline text or a path to a file
    """
    self.fieldset = fieldset
    self.ptype = ptype
    self._lib = None

    # Derive meta information from pyfunc, if not given
    self.funcname = funcname or pyfunc.__name__

    # Warn about sign conventions when using the built-in 3D RK4 advection.
    if pyfunc is AdvectionRK4_3D:
        warning = False
        if isinstance(fieldset.W, Field) and fieldset.W.creation_log != 'from_nemo' and \
                fieldset.W._scaling_factor is not None and fieldset.W._scaling_factor > 0:
            warning = True
        if type(fieldset.W) in [SummedField, NestedField]:
            for f in fieldset.W:
                if f.creation_log != 'from_nemo' and f._scaling_factor is not None and f._scaling_factor > 0:
                    warning = True
        if warning:
            logger.warning_once(
                'Note that in AdvectionRK4_3D, vertical velocity is assumed positive towards increasing z.\n'
                '         If z increases downward and w is positive upward you can re-orient it downwards by setting fieldset.W.set_scaling_factor(-1.)')

    if funcvars is not None:
        self.funcvars = funcvars
    elif hasattr(pyfunc, '__code__'):
        self.funcvars = list(pyfunc.__code__.co_varnames)
    else:
        self.funcvars = None
    self.funccode = funccode or inspect.getsource(pyfunc.__code__)
    # Parse AST if it is not provided explicitly
    self.py_ast = py_ast or parse(fix_indentation(self.funccode)).body[0]
    if pyfunc is None:
        # Extract user context by inspecting the call stack
        stack = inspect.stack()
        try:
            user_ctx = stack[-1][0].f_globals
            user_ctx['math'] = globals()['math']
            user_ctx['random'] = globals()['random']
            user_ctx['ErrorCode'] = globals()['ErrorCode']
        except:
            # NOTE(review): bare except — also hides unexpected errors here.
            logger.warning("Could not access user context when merging kernels")
            user_ctx = globals()
        finally:
            del stack  # Remove cyclic references
        # Compile and generate Python function from AST
        py_mod = Module(body=[self.py_ast])
        exec(compile(py_mod, "<ast>", "exec"), user_ctx)
        self.pyfunc = user_ctx[self.funcname]
    else:
        self.pyfunc = pyfunc

    # Python 2 has no getfullargspec; both branches just count the arguments.
    if version_info[0] < 3:
        numkernelargs = len(inspect.getargspec(self.pyfunc).args)
    else:
        numkernelargs = len(inspect.getfullargspec(self.pyfunc).args)

    assert numkernelargs == 3, \
        'Since Parcels v2.0, kernels do only take 3 arguments: particle, fieldset, time !! AND !! Argument order in field interpolation is time, depth, lat, lon.'

    self.name = "%s%s" % (ptype.name, self.funcname)

    # Generate the kernel function and add the outer loop
    if self.ptype.uses_jit:
        kernelgen = KernelGenerator(fieldset, ptype)
        kernel_ccode = kernelgen.generate(deepcopy(self.py_ast), self.funcvars)
        self.field_args = kernelgen.field_args
        self.vector_field_args = kernelgen.vector_field_args
        fieldset = self.fieldset
        # Make sure every component field of each vector field is passed
        # to the C kernel, even if not referenced directly.
        for f in self.vector_field_args.values():
            Wname = f.W.ccode_name if f.W else 'not_defined'
            for sF_name, sF_component in zip([f.U.ccode_name, f.V.ccode_name, Wname], ['U', 'V', 'W']):
                if sF_name not in self.field_args:
                    if sF_name != 'not_defined':
                        self.field_args[sF_name] = getattr(f, sF_component)
        self.const_args = kernelgen.const_args
        loopgen = LoopGenerator(fieldset, ptype)
        # c_include may be a file path or literal C code.
        if path.isfile(c_include):
            with open(c_include, 'r') as f:
                c_include_str = f.read()
        else:
            c_include_str = c_include
        self.ccode = loopgen.generate(self.funcname, self.field_args,
                                      self.const_args, kernel_ccode,
                                      c_include_str)

        # File names for the generated C source, shared library and build log.
        basename = path.join(get_cache_dir(), self._cache_key)
        self.src_file = "%s.c" % basename
        self.lib_file = "%s.%s" % (basename, 'dll' if platform == 'win32' else 'so')
        self.log_file = "%s.log" % basename