def assertIO(self, input, output):
    """Run resolve() with *input* fed to stdin and compare captured stdout to *output*."""
    saved_stdout, saved_stdin = sys.stdout, sys.stdin
    sys.stdout, sys.stdin = StringIO(), StringIO(input)
    resolve()
    sys.stdout.seek(0)
    captured = sys.stdout.read()[:-1]  # drop the trailing newline
    sys.stdout, sys.stdin = saved_stdout, saved_stdin
    self.assertEqual(captured, output)
def main():
    """Translate every MATLAB file in options.filelist to Python.

    Output goes to stdout ("-"), to options.output, or to one .py file
    per input when no output was given.  Errors are counted and reported
    once at the end; --strict aborts on the first failure.
    """
    if "M" in options.debug:
        import pdb
        pdb.set_trace()
    if not options.filelist:
        options.parser.print_help()
        return
    if options.output == "-":
        fp = sys.stdout
    elif options.output:
        fp = open(options.output, "w")
    else:
        fp = None  # one output file per input, opened inside the loop
    if fp:
        print_header(fp)
    nerrors = 0
    for i, options.filename in enumerate(options.filelist):
        try:
            if options.verbose:
                print(i, options.filename)
            if not options.filename.endswith(".m"):
                print("\tIgnored: '%s' (unexpected file type)" % options.filename)
                continue
            if basename(options.filename) in options.xfiles:
                if options.verbose:
                    print("\tExcluded: '%s'" % options.filename)
                continue
            # BUG FIX: the original read text and then called buf.decode(...),
            # which raises AttributeError on Python 3 where str has no
            # .decode (the error was then swallowed by the bare except).
            # Decode at open() time instead, keeping ascii/ignore semantics.
            buf = open(options.filename, encoding="ascii",
                       errors="ignore").read()
            buf = buf.replace("\r\n", "\n")  # FIXME
            stmt_list = parse.parse(buf if buf[-1] == '\n' else buf + '\n')
            if not stmt_list:
                continue
            if not options.no_resolve:
                G = resolve.resolve(stmt_list)
            if not options.no_backend:
                s = backend.backend(stmt_list)
            if not options.output:
                f = splitext(basename(options.filename))[0] + ".py"
                with open(f, "w") as fp:
                    print_header(fp)
                    fp.write(s)
            else:
                fp.write(s)
                #fp.close()
        except KeyboardInterrupt:
            break
        except Exception:
            # Narrowed from a bare except so SystemExit/GeneratorExit
            # are no longer swallowed.
            nerrors += 1
            traceback.print_exc(file=sys.stdout)
            if options.strict:
                break
    if nerrors:
        print("Errors:", nerrors)
def sed(request):
    """Render the SED page; on POST, resolve the target and run the SED worker."""
    context = {'sr': 1/3600, 'log': []}

    if request.method == 'POST':
        # Form submission handling
        coords = request.POST.get('coords')
        sr = 1/3600
        name, ra, dec = resolve(coords)

        for key in ['coords', 'preview']:
            context[key] = request.POST.get(key)

        if not name:
            context['message'] = "Can't resolve position: " + coords
        else:
            context['name'] = name
            context['ra'] = ra
            context['dec'] = dec
            context['sr'] = sr

            work = sed_worker(ra, dec, sr)
            context['cats'] = work['cats']
            context['log'] = work['log']

    return TemplateResponse(request, 'sed.html', context=context)
def compile(self, rdql):
    """Parse *rdql* into an AST, resolve it, and generate the queries.

    The final AST is kept on self.ast; returns the (select, count) pair.
    """
    from lex import Lexer
    from yacc import Parser
    self.ast = Parser().parse(rdql, lexer=Lexer())
    self.ast = resolve(self.ast)
    select, count, self.ast = generate(self.ast)
    return select, count
def Init(parameters):
    """Configure the Log class from *parameters* and open its UDP socket."""
    Log.node_name = parameters['NODENAME']
    # Logging endpoint: resolved LOGGER host, fixed port 8083.
    Log.address = (resolve(parameters['LOGGER'])[0], 8083)
    #
    # Open the socket to the logging endpoint
    #
    Log.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def callgraph(func_list):
    """
    Build callgraph of func_list, ignoring built-in functions
    """
    graph = nx.DiGraph()
    for func in func_list:
        graph.add_node(func.head.ident.name)
    for func in func_list:
        assert isinstance(func, node.function)
        caller = func.head.ident.name
        resolve.resolve(func)
        for stmt in node.postorder(func):
            # Only direct calls to functions we know about become edges.
            is_known_call = (stmt.__class__ is node.funcall
                             and stmt.func_expr.__class__ is node.ident
                             and stmt.func_expr.name in graph.nodes())
            if is_known_call:
                graph.add_edge(caller, stmt.func_expr.name)
    return graph
def callgraph(func_list):
    """
    Build callgraph of func_list, ignoring built-in functions
    """
    G = nx.DiGraph()
    names = [f.head.ident.name for f in func_list]
    for name in names:
        G.add_node(name)
    for func, func_name in zip(func_list, names):
        assert isinstance(func, node.function)
        resolve.resolve(func)
        for s in node.postorder(func):
            # Skip anything that is not a direct call to a known function.
            if s.__class__ is not node.funcall:
                continue
            if s.func_expr.__class__ is not node.ident:
                continue
            if s.func_expr.name in G.nodes():
                G.add_edge(func_name, s.func_expr.name)
    return G
def main(): tar = None if not options.filelist: options.parser.print_help() return if (len(options.filelist) == 1 and options.filelist[0].endswith(".tar")): tar = tarfile.open(options.filelist[0]) options.filelist = tar.getnames() if options.output == "-": fp = sys.stdout elif options.output: fp = open(options.output, "w") else: assert not options.output filename, filetype = splitext(basename(options.filelist[0])) options.output = filename + ".py" fp = open(options.output, "w") print >> fp, "# Autogenerated with SMOP " + version.__version__ #print >> fp, "from __future__ import division" print >> fp, "from smop.core import *" if options.link: print >> fp, "from %s import *" % options.link print >> fp, "#", options.filename for i, options.filename in enumerate(options.filelist): try: if not options.filename.endswith((".m")): if options.verbose: print "\tIgnored: '%s' (unexpected file type)" % options.filename continue if os.path.basename(options.filename) in options.xfiles: print "\tExcluded: '%s'" % options.filename continue if options.verbose: print options.filename if tar: buf = tar.extractfile(options.filename).read() else: buf = open(options.filename).read() buf = buf.replace("\r\n", "\n") buf = buf.decode("ascii", errors="ignore") stmt_list = parse.parse(buf if buf[-1] == '\n' else buf + '\n') #assert None not in stmt_list if not stmt_list: return if not options.no_resolve: G = resolve.resolve(stmt_list) if not options.no_backend: s = backend.backend(stmt_list) print >> fp, s except Exception as e: print "\tFailed: ", options.filename if not options.ignore: raise options.ignore -= 1
def main():
    """Resolve a library's content and copy the result to the clipboard.

    Exits with the usage message unless exactly one argument (the library
    path) is given; the process exit code is copy_to_clipboard's result.
    """
    if len(sys.argv) != 2:
        print("Usage: " + os.path.basename(__file__) + " path/to/library")
        exit(1)
    # Run from the script's own directory before resolving the (possibly
    # relative) library path — order matters here.
    script_path = os.path.abspath(__file__)
    os.chdir(os.path.dirname(script_path))
    library_path = os.path.abspath(sys.argv[1])
    content = resolve.resolve(library_path)
    exit(copy_to_clipboard(content))
def secondscale_map(request, ra=0, dec=0, sr=0.01, size=600):
    """Render a PNG scatter map of records around (ra, dec) within sr degrees."""
    if request.method == 'POST':
        coords = request.POST.get('coords')
        sr = float(request.POST.get('sr'))
        name, ra, dec = resolve(coords)
        return redirect('/secondscale/map/coords/%g/%g/%g' % (ra, dec, sr))

    if ra and dec:
        ra = float(ra)
        dec = float(dec)
        sr = float(sr) if sr else 0.01

    records = Records.objects.extra(where=[
        "q3c_radial_query(ra, dec, %s, %s, %s)"
    ], params=(ra, dec, sr)).order_by('time')

    x, y, mag, f = [], [], [], []
    for r in records:
        x.append(r.ra)
        y.append(r.dec)
        mag.append(r.mag)
        f.append(favor2_filters.get(r.filter.id))

    dpi = 72
    fig = Figure(facecolor='white', figsize=(size / dpi, size / dpi))
    ax = fig.add_subplot(111)

    # Plain (non-offset) tick labels on both axes.
    formatter = ScalarFormatter(useOffset=False)
    ax.xaxis.set_major_formatter(formatter)
    ax.yaxis.set_major_formatter(formatter)

    ax.scatter(x, y, c=f)
    ax.set_xlabel("RA")
    ax.set_ylabel("Dec")
    ax.set_title("Map at (%g, %g), radius %g" % (ra, dec, sr))

    # 10% margins on both axes
    ax.margins(0.1, 0.1)

    canvas = FigureCanvas(fig)
    response = HttpResponse(content_type='image/png')
    canvas.print_png(response)

    return response
def secondscale_info(request, ra=0, dec=0, sr=0.01, size=1000):
    """Show the second-scale info page; POST resolves coords and redirects."""
    if request.method == 'POST':
        coords = request.POST.get('coords')
        raw_sr = request.POST.get('sr')
        sr = float(raw_sr) if raw_sr else 0.01
        name, ra, dec = resolve(coords)
        return redirect('/secondscale/coords/%g/%g/%g' % (ra, dec, sr))

    return TemplateResponse(request, 'secondscale_info.html',
                            context={'ra': ra, 'dec': dec, 'sr': sr})
def survey_info(request, ra=0, dec=0, sr=0.1, filter=True, imtype='survey'):
    """Survey info page for a sky position; POST resolves coords and redirects.

    GET parameters: type (image type), channel_id, and filter —
    1 = keyword-based position check, 2 = records-based check.
    """
    if request.method == 'POST':
        coords = request.POST.get('coords')
        sr = request.POST.get('sr')
        sr = float(sr) if sr else 0.01
        name, ra, dec = resolve(coords)
        return redirect('/survey/%g/%g/%g' % (ra, dec, sr))

    # BUG FIX: QueryDict.has_key() is deprecated and gone on Python 3;
    # use the "in" operator instead.
    if 'type' in request.GET:
        imtype = request.GET.get('type', 'survey')

    context = {}
    context['ra'] = ra
    context['dec'] = dec
    context['sr'] = sr

    # TODO: expand the search area a bit and filter images containing the point
    images = Images.objects.extra(
        where=["q3c_radial_query(ra0, dec0, %s, %s, %s)"],
        params=(ra, dec, 7.0),
        select={
            'filter_string': 'get_filter_name(filter)'
        }).filter(type=imtype).order_by('-time')

    if request.method == 'GET' and request.GET.get('channel_id'):
        images = images.filter(channel_id=int(request.GET.get('channel_id')))

    if request.method == 'GET':
        filter = int(request.GET.get('filter', 1))

    if filter and filter == 1:
        images = [
            i for i in images
            if check_pos(i.keywords, float(ra), float(dec))
        ]

    if filter and filter == 2:
        images = images.extra(where=[
            "id IN (SELECT DISTINCT frame FROM records2 WHERE q3c_radial_query(ra, dec, %s, %s, %s))"
        ], params=(ra, dec, 0.01))

    context['images'] = images

    return TemplateResponse(request, 'survey_info.html', context=context)
def lookup(nodes, interfaces):
    """Map node name -> list of IPs, doing DNS lookups for nodes PLCAPI misses."""
    # join nodes to interfaces
    node_to_ips = {}
    missing = []
    for name, n in nodes.iteritems():
        ips = [interfaces[i]['ip']
               for i in n['interface_ids']
               if interfaces[i]['ip']]
        if ips:
            node_to_ips[name] = ips
        else:
            missing.append(name)

    # if IP is not in PLCAPI, do the lookup ourselves
    if len(missing) > 0:
        node_to_ips.update(resolve.resolve(missing))
    return node_to_ips
def __init__(self, node, state, src_port_index, dst_node, dst_port_index,
             dst_uid):
    """Set up a UDP link endpoint from *node* to *dst_node*.

    Links that are not 'Enabled' get PORT_DEAD as their destination port,
    so their traffic goes nowhere.
    """
    if state == 'Enabled':
        dst_port = self.PORT_OFFSET + dst_port_index
    else:
        dst_port = self.PORT_DEAD
    dst_addr, _ = resolve(dst_node)
    src_addr, _ = node.env['profile']['address'].split(':')

    self.node = node
    self.state = state
    self.index = src_port_index
    self.remote_port = 0
    self.remote_uid = 0
    self.training_state = 0
    self.lock = Lock()
    self.remote_addr = (dst_addr, dst_port)
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    self.address = socket.gethostbyname(src_addr)
    self.socket.bind((self.address, src_port_index + self.PORT_OFFSET))
def uv_only(request, size=512):
    """UV-only sources page: resolve coords, run the worker, filter by NUV/FUV."""
    context = {'sr': 0.1, 'log': []}

    if request.method == 'POST':
        # Form submission handling
        coords = request.POST.get('coords')
        sr = float(request.POST.get('sr', 0.1))
        name, ra, dec = resolve(coords)

        for key in ['coords', 'preview', 'nuv', 'fuv']:
            context[key] = request.POST.get(key)

        if not name:
            context['message'] = "Can't resolve position: " + coords
        else:
            context['name'] = name
            context['ra'] = ra
            context['dec'] = dec
            context['sr'] = float(sr) if sr else 0.1

            work = uv_only_worker(ra, dec, sr)
            context['cats'] = work['cats']

            if work['galex']:
                table = work['galex']['table']
                table = table[work['nidx']]
                # Optional magnitude cuts from the form.
                if request.POST.get('nuv'):
                    table = table[table['NUV'] < float(request.POST.get('nuv'))]
                if request.POST.get('fuv'):
                    table = table[table['FUV'] < float(request.POST.get('fuv'))]
                context['table'] = table

            context['has_sdss'] = 'sdss' in [c['name'] for c in work['cats']]
            context['log'] = work['log']

    return TemplateResponse(request, 'uv-only.html', context=context)
def survey_photometry(request, imtype='survey'):
    """Survey photometry page: resolve the target and pick the best image."""
    context = {}
    ra, dec, sr = None, None, 0.01

    if request.method == 'POST':
        coords = request.POST.get('coords')
        sr = request.POST.get('sr')
        sr = float(sr) if sr else 0.01
        context['coords'] = coords

        name, ra, dec = resolve(coords)
        context['name'] = name

        # Optional photometric parameters from the form.
        for key in ('bv', 'vr', 'period'):
            if request.POST.get(key):
                context[key] = request.POST.get(key)

    context['ra'] = ra
    context['dec'] = dec
    context['sr'] = sr

    if ra is not None and dec is not None:
        record = Records.objects.extra(
            where=["q3c_radial_query(ra, dec, %s, %s, %s)"],
            params=(ra, dec, sr)).filter(filter=0).order_by('quality').first()
        if record:
            context['best_image'] = record.frame.id

    return TemplateResponse(request, 'survey_photometry.html', context=context)
try: symtab = {} for func_obj in func_list: try: func_name = func_obj.head.ident.name symtab[func_name] = func_obj print "\t",func_name except AttributeError: if verbose: print "\tJunk ignored" if strict: return continue if options.do_resolve: resolve.resolve(func_obj) if options.do_typeof: for func_obj in func_list: t = func_obj.apply([],symtab) if options.do_rewrite: for func_obj in func_list: rewrite.rewrite(func_obj) for func_obj in func_list: s = backend.backend(func_obj) print >> fp, s except Exception as ex: print repr(ex) if strict:
print "... t=123" print "!exec foo(3)" continue if buf[0] == '!': try: exec buf[1:] except: print "eh?" continue t = parse.parse(buf if buf[-1] == '\n' else buf + '\n') if not t: continue except EOFError: return resolve.resolve(t, symtab) _ = backend.backend(t) print _ if not output: output = "a.py" fp = open(output, "w") if output != "-" else sys.stdout print >> fp, "# Autogenerated with SMOP version " + __version__ print >> fp, "# " + " ".join(sys.argv) print >> fp, "from __future__ import division" print >> fp, "from runtime import *" for pattern in args: for filename in glob.glob(os.path.expanduser(pattern)): if not filename.endswith(".m"): print "\tIngored file: '%s'" % filename
def Resolve(tree, GeneToSpecies):
    """Resolve every node of *tree* in postorder using GeneToSpecies."""
    StoreSpeciesSets(tree, GeneToSpecies)
    for n in tree.traverse("postorder"):
        # resolve.resolve returns the (possibly rebuilt) tree — keep the
        # latest version so the final tree reflects all resolutions.
        tree = resolve.resolve(n, GeneToSpecies)
    return tree
def secondscale_lightcurve(request, ra=0, dec=0, sr=0.01, size=1000, type='mag'):
    """Render a PNG lightcurve (magnitude or flux) of records near (ra, dec)."""
    type = type or 'mag'

    if request.method == 'POST':
        coords = request.POST.get('coords')
        sr = float(request.POST.get('sr'))
        name, ra, dec = resolve(coords)
        return redirect('/secondscale/lightcurve/coords/%g/%g/%g' % (ra, dec, sr))

    if ra and dec:
        ra = float(ra)
        dec = float(dec)
        sr = float(sr) if sr else 0.01

    records = Records.objects.extra(where=[
        "q3c_radial_query(ra, dec, %s, %s, %s)"
    ], params=(ra, dec, sr)).order_by('time')

    x, y, y1, y2, f = [], [], [], [], []
    for r in records:
        x.append(r.time)
        if type == 'mag':
            y.append(r.mag)
            y1.append(r.mag - r.mag_err)
            y2.append(r.mag + r.mag_err)
        else:
            y.append(r.flux)
            y1.append(r.flux - r.flux_err)
            y2.append(r.flux + r.flux_err)
        f.append(favor2_filters.get(r.filter.id))

    dpi = 72
    fig = Figure(facecolor='white', figsize=(size / dpi, 600 / dpi))
    ax = fig.add_subplot(111)
    ax.autoscale()

    #ax.plot(x, y, ".")
    ax.scatter(x, y, c=f, marker='.', edgecolors=f)
    ax.vlines(x, y1, y2, colors=f)

    if records:
        # It is failing if no points are plotted
        ax.xaxis.set_major_formatter(DateFormatter('%Y.%m.%d %H:%M:%S'))

    ax.set_xlabel("Time, UT")
    if type == 'mag':
        ax.set_ylabel("Magnitude")
        ax.invert_yaxis()
    else:
        ax.set_ylabel("Flux, ADU")
    ax.set_title("Lightcurve at (%g, %g), radius %g" % (ra, dec, sr))

    fig.autofmt_xdate()

    # 10% margins on both axes
    ax.margins(0.1, 0.1)

    canvas = FigureCanvas(fig)
    response = HttpResponse(content_type='image/png')
    canvas.print_png(response)

    return response
selecao += 9 elif key == 'a': if (selecao - 1) >= 1: selecao -= 1 elif key == 'd': if (selecao + 1) <= 81: selecao += 1 elif key in ['0','1','2','3','4','5','6','7','8','9','.']: jogo = tratador(jogo,key, lg, lr, cg, cr) elif key == 'n': from novo_jogo import novo_jogo jogo,solucao = novo_jogo(jogo,solucao) elif key == 'z': from salva_jogo import salva_jogo salva_jogo(jogo, solucao) elif key == 'f': from fecha_jogo import fecha_jogo resposta = fecha_jogo() if resposta in ['s', 'S']: break elif key == 'x': from resolve import resolve jogo, solucao = resolve(jogo, solucao) lg, lr, cg, cr = desenha_sudoku(window1, jogo, selecao) key = window1.getch() curses.endwin()
continue print('Instrumentation finished') print('Profiling: %s' % full_path) output = None lib = get_lib(full_path) if not os.path.exists(d8_path): print("Wrong d8_path. Set correct d8_path.") exit() cmd = [d8_path] if len(lib) > 0: cmd.append(lib) cmd.append(ins_path) output = None try: output = subprocess.check_output(cmd, timeout=30) except subprocess.TimeoutExpired as e: output = e.output except subprocess.CalledProcessError as e: output = e.output print('Profiling finished') records = resolve.resolve(output) if len(records) > 0: with open(type_path, 'wb') as f: f.write(json.dumps(records).encode()) else: print("Wrong corpus directory path") FNULL.close()
def getDetails():
    """Flask view: resolve the submitted domain and return the result."""
    if request.method == "POST":
        domain = request.form['domain']
        details = resolve.resolve(domain)
        # print(details)
        return details
def pscheduler():
    """Entry point: dispatch to pyramid_scheduler's console script."""
    entry = resolve('pyramid_scheduler.pscheduler:main')
    return entry()
def proxy(argv=sys.argv):
    """Dispatch 'i-<cmd>' invocations to the matching pyramid.scripts entry point."""
    # Everything after the last 'i-' in argv[0] names the pyramid script.
    pcmd = argv[0][argv[0].rindex('i-') + 2:]
    entry = resolve('pyramid.scripts.' + pcmd + ':main')
    return entry(argv=argv)
def images_list(request, night='', type='all', ra='', dec='', sr='5.0',
                channel=0):
    """List images — for a sky position, for a night, or everything."""
    if request.method == 'POST':
        coords = request.POST.get('coords')
        sr = float(request.POST.get('sr'))
        type = request.POST.get('type')
        name, ra, dec = resolve(coords)
        if not type or type == 'all':
            return redirect('/images/coords/%g/%g/%g' % (ra, dec, sr))
        else:
            return redirect('/images/coords/%g/%g/%g/type/%s' %
                            (ra, dec, sr, type))

    if ra and dec:
        # Positional query
        ra = float(ra)
        dec = float(dec)
        sr = float(sr) if sr else 5.0
        images = Images.objects.extra(
            select={
                'filter_string': 'get_filter_name(filter)'
            },
            where=["q3c_radial_query(ra0, dec0, %s, %s, %s)"],
            params=(ra, dec, sr)).order_by('-time')
        #images = Images.objects.raw("SELECT *, get_filter_name(filter) AS filter_string FROM images WHERE q3c_radial_query(ra0, dec0, %s, %s, %s) ORDER BY time DESC", (ra, dec, sr))
        query_type = 'positional'
    elif night:
        # Images for given night
        images = Images.objects.extra(
            select={
                'filter_string': 'get_filter_name(filter)'
            }).order_by('-time').filter(night=night)
        query_type = 'night'
    else:
        images = Images.objects.extra(
            select={
                'filter_string': 'get_filter_name(filter)'
            }).order_by('-time')
        query_type = 'all'

    if type and type != 'all':
        images = images.filter(type=type)
    else:
        type = 'all'

    if request.method == 'GET':
        if request.GET.get('channel'):
            images = images.filter(channel_id=int(request.GET.get('channel')))
        if request.GET.get('filter'):
            images = images.filter(filter__name=request.GET.get('filter'))

    types = [
        i[0] for i in db_query("SELECT fast_distinct(%s, %s)",
                               ('images', 'type'), simplify=False)
    ]
    types.append('all')

    channels = [str(x) for x in range(1, settings.NCHANNELS + 1)]

    return TemplateResponse(request, 'images.html',
                            context={
                                'images': images,
                                'night': night,
                                'type': type,
                                'types': types,
                                'query_type': query_type,
                                'channels': channels,
                                'ra': ra,
                                'dec': dec,
                                'sr': sr
                            })
def scheduler_target_view(request, id=0):
    """Show one scheduler target; POST handles status changes and edits."""
    target = SchedulerTargets.objects.get(id=id)
    context = {'target': target}

    if request.method == 'POST':
        action = request.POST.get('action')

        if action == 'disable' and target.status.name == 'active':
            target.status = SchedulerTargetStatus.objects.get(name='inactive')
            target.save()
        if action == 'enable' and target.status.name == 'inactive':
            target.status = SchedulerTargetStatus.objects.get(name='active')
            target.save()
        if action == 'complete' and target.status.name != 'archived':
            target.status = SchedulerTargetStatus.objects.get(name='completed')
            target.save()
        if action == 'restart':
            target.status = SchedulerTargetStatus.objects.get(name='active')
            target.save()
        if action == 'delete':
            target.delete()

        if action == 'update':
            name = request.POST.get('new-name')
            coords = request.POST.get('new-coords')
            type = request.POST.get('new-type')
            filter = request.POST.get('new-filter')

            # Parse exposure
            try:
                exposure = float(request.POST.get('new-exposure'))
                exposure = max(0.1, min(exposure, 10000.0))
            except:
                exposure = 10.0

            # Repeat
            try:
                repeat = int(request.POST.get('new-repeat'))
                repeat = max(1, min(repeat, 1000))
            except:
                repeat = 1

            # Remove whitespaces
            name = '_'.join(name.split())

            # Parse coordinates
            sname, ra, dec = resolve(coords)

            if not sname:
                message = "Can't resolve object: %s" % (coords)
            elif not name:
                message = "Empty target name"
            else:
                target.name = name
                target.type = type
                target.filter = filter
                target.exposure = exposure
                target.repeat = repeat
                target.ra = ra
                target.dec = dec
                target.save()

        return redirect('scheduler_target', id)

    uuid = target.uuid
    images = Images.objects.raw(
        "select *,get_filter_name(filter) as filter_string from images where keywords->'TARGET UUID'='%s' and type != 'avg' order by time desc"
        % (uuid))
    nimages = len(list(images))

    context['images'] = images
    context['nimages'] = nimages

    return TemplateResponse(request, 'scheduler_target.html', context=context)
def alembic():
    """Entry point: dispatch to alembic's console script."""
    entry = resolve('alembic.config:main')
    return entry()
string += '\n\t\t],\n' for k in self.set_required_pre: string += '\t\t"%s": [\n' %(dico[k + "Adjust"]) #ici for c in self.alphabet: fmt = '\t\t\t{ "read" : "%s", "to_state": "%s", "write": "%s", "action": "%s"},\n' string += fmt %(c, dico[k], c, 'RIGHT') #ici string = string[:-2] string += '\n\t\t],\n' string = string[:-2] string += '\n\t}\n' string += "}\n" with open("lol.json", "w") as stream: stream.write(string) # print string if __name__ == "__main__": tk = get_tokens() p = Prog(tk) print p.name print p.alphabet print p.blank print p.set_lb for st in p.lst_st: print str(st) resolve(p) p.tojson()
import json
import requests
from resolve import resolve
from jsonfind import find_and_modify

if __name__ == '__main__':
    response = requests.get('http://localhost:5000')
    body = response.json()
    print(
        f'response body: {json.dumps(body["container"]["params"], indent=4)}')

    # Apply each resolver to the container params in place.
    for resolver in body['resolvers']:
        value = resolve(resolver, resolver['target']['type'])
        find_and_modify(resolver['target']['query'],
                        body['container']['params'], value)

    print(
        f'resolved body: {json.dumps(body["container"]["params"], indent=4)}')
# Create the table in the database
def create_mysql():
    """Scrape field names from item page 1 and create the tcmcenter.tcm table."""
    key = [u'中文名称']
    url = item_url + '1'
    soup = BeautifulSoup(spider(url))
    table = soup.html.body.findChildren('table')
    tr = table[2].findChildren('tr')
    for row in tr:
        td = row.findChildren('td')
        key.append(td[0].contents[0].split('\n')[0])

    sql_create = """create table tcmcenter.tcm( tcmid int not null auto_increment,"""
    for name in key[:-1]:
        sql_create = sql_create + name + ' text null,'
    sql_create = sql_create + key[-1] + " text null,primary key(tcmid));"
    #print sql_create
    cur.execute(sql_create)

    # Switch to MyISAM: tables default to InnoDB on creation, and InnoDB
    # rejects rows as large as these (row size limit).
    sql_alter = """alter table tcmcenter.tcm engine=MyISAM"""
    cur.execute(sql_alter)
    conn.commit()

if __name__ == '__main__':
    create_mysql()
    for i in range(100):
        url = item_url + str(i + 1)
        resolve(spider(url))
    cur.close()
    conn.close()
def main(): tar = None if "M" in options.debug: import pdb pdb.set_trace() if not options.filelist: if options.archive: tar = tarfile.open(options.archive) options.filelist = tar.getnames() elif options.code: tmp = tempfile.NamedTemporaryFile(suffix=".m") tmp.file.write(options.code) tmp.file.flush() options.filelist = [tmp.name] if options.output: print "Conflicting options -c and -o" return options.output = "-" else: options.parser.print_help() return if options.output == "-": fp = sys.stdout elif options.output: fp = open(options.output, "w") else: fp = None if fp: print_header(fp) if options.glob_pattern: options.filelist = fnmatch.filter(options.filelist, options.glob_pattern) nerrors = 0 for i, options.filename in enumerate(options.filelist): try: if options.verbose: print i, options.filename if not options.filename.endswith(".m"): if options.verbose: print("\tIgnored: '%s' (unexpected file type)" % options.filename) continue if basename(options.filename) in options.xfiles: if options.verbose: print "\tExcluded: '%s'" % options.filename continue if tar: buf = tar.extractfile(options.filename).read() else: buf = open(options.filename).read() buf = buf.replace("\r\n", "\n") buf = buf.decode("ascii", errors="ignore") stmt_list = parse.parse(buf if buf[-1] == '\n' else buf + '\n') #assert None not in stmt_list if not stmt_list: continue if not options.no_resolve: G = resolve.resolve(stmt_list) if not options.no_backend: s = backend.backend(stmt_list) if not options.output: f = splitext(basename(options.filename))[0] + ".py" with open(f, "w") as fp: print_header(fp) fp.write(s) try: py_compile.compile(f,doraise=True) if options.execfile: execfile(f) except: if options.delete-on-error: os.unlink(f) if options.verbose: print "Removed",f raise else: fp.write(s) except KeyboardInterrupt: break except: nerrors += 1 traceback.print_exc(file=sys.stdout) if options.strict: break finally: print "Errors:", nerrors
buf = buf[:-1] + "\n" + raw_input("... ") if buf[0] == '?': print main.__doc__ continue if buf[0] == "!": try: exec buf[1:] except Exception as ex: print ex continue t = parse.parse(buf if buf[-1]=='\n' else buf+'\n') if not t: continue print "t=", repr(t) print 60*"-" resolve.resolve(t,symtab) print "t=", repr(t) print 60*"-" print "symtab:",symtab s = backend.backend(t) print "python:",s.strip() try: print eval(s) except SyntaxError: exec s except EOFError: return except Exception as ex: print ex if not output:
string = string[:-2] string += '\n\t\t],\n' for k in self.set_required_pre: string += '\t\t"%s": [\n' % (dico[k + "Adjust"]) #ici for c in self.alphabet: fmt = '\t\t\t{ "read" : "%s", "to_state": "%s", "write": "%s", "action": "%s"},\n' string += fmt % (c, dico[k], c, 'RIGHT') #ici string = string[:-2] string += '\n\t\t],\n' string = string[:-2] string += '\n\t}\n' string += "}\n" with open("lol.json", "w") as stream: stream.write(string) # print string if __name__ == "__main__": tk = get_tokens() p = Prog(tk) print p.name print p.alphabet print p.blank print p.set_lb for st in p.lst_st: print str(st) resolve(p) p.tojson()
def start(self): if not os.path.exists("/var/log/cronus"): os.makedirs("/var/log/cronus") logger_error = logging.getLogger("error_log") eh = logging.FileHandler("/var/log/cronus/error.log") eh.setLevel(logging.DEBUG) logger_error.addHandler(eh) logger_activity = logging.getLogger("activity_log") ah = logging.FileHandler("/var/log/cronus/activity.log") ah.setLevel(logging.DEBUG) logger_activity.addHandler(ah) while 1: conn = MySQLdb.connect(host=config.dbhost, user=config.dbuser, passwd=config.dbpasswd, db=config.dbname) cursor = conn.cursor() # retrieve new jobs cursor.execute("SELECT * from job_queue WHERE status = 0") conn.commit() conn.close() job_rows = cursor.fetchall() if len(job_rows) == 0: print "No jobs" else: # seperate jobs that should be executed and discarded job_rows, discarded_rows = resolve.resolve(job_rows) print "No of resolved rows", len(job_rows) print "No if discarded rows", len(discarded_rows) # set the executable job's status to 1 and invokes JobHandler. # updates the job in the db to prevent QueueHandler picking the job again for job_row in job_rows: job_id = job_row[0] status = 1 deployment_sets = self.get_deployment_sets(job_id) job = Job(job_id, status, deployment_sets) conn = MySQLdb.connect( host=config.dbhost, user=config.dbuser, passwd=config.dbpasswd, db=config.dbname ) cursor = conn.cursor() cursor.execute("UPDATE job_queue SET status = 1 WHERE id = " + str(job_id)) conn.commit() conn.close() jobhandler = JobHandler(job) jobhandler.start() # set the discarded job's status to 4 and invokes JobHandler # updates the job in the db to prevent QueueHandler picking the job again for job_row in discarded_rows: job_id = job_row[0] status = 4 deployment_sets = self.get_deployment_sets(job_id) job = Job(job_id, status, deployment_sets) conn = MySQLdb.connect( host=config.dbhost, user=config.dbuser, passwd=config.dbpasswd, db=config.dbname ) cursor = conn.cursor() cursor.execute("UPDATE job_queue SET status = 1 WHERE id = " + str(job_id)) 
conn.commit() conn.close() jobhandler = JobHandler(job) jobhandler.start() time.sleep(60)
def main():
    """Translate MATLAB files to Python, rewriting array references on the way."""
    if "M" in options.debug:
        import pdb
        pdb.set_trace()
    if not options.filelist:
        options.parser.print_help()
        return
    if options.output == "-":
        fp = sys.stdout
    elif options.output:
        fp = open(options.output, "w")
    else:
        fp = None

    nerrors = 0
    for i, options.filename in enumerate(options.filelist):
        try:
            if options.verbose:
                print(i, options.filename)
            if not options.filename.endswith(".m"):
                print("\tIgnored: '%s' (unexpected file type)" % options.filename)
                continue
            if basename(options.filename) in options.xfiles:
                if options.verbose:
                    print("\tExcluded: '%s'" % options.filename)
                continue

            buf = open(options.filename).read()
            buf = buf.replace("\r\n", "\n")
            stmt_list = parse.parse(buf if buf[-1] == '\n' else buf + '\n')
            if not stmt_list:
                continue

            if not options.no_resolve:
                G = resolve.resolve(stmt_list)
                graph_list = []
                for n in G.nodes():
                    # Re-join leading underscored parts of the node name so
                    # exactly three fields remain (names themselves may
                    # contain underscores).
                    parts = str.split(n, '_')
                    while len(parts) > 3:
                        parts[0] += '_' + parts.pop(1)
                    graph_list.append(parts + [G.node[n]["ident"].props])
                resolve_array_refs(stmt_list, graph_list)
                #print_list(stmt_list)

            if not options.no_backend:
                s = backend.backend(stmt_list).strip()

            if not options.output:
                f = splitext(basename(options.filename))[0] + ".py"
                with open(f, "w") as fp:
                    print_header(fp, s)
                    fp.write(s)
            else:
                print_header(fp, s)
                fp.write(s)
        except KeyboardInterrupt:
            break
        except:
            nerrors += 1
            traceback.print_exc(file=sys.stdout)
            if options.strict:
                break

    if nerrors:
        print("Errors:", nerrors)
def main(): #args = options.parser.parse_args() #for key in dir(args): # if not key.startswith("_"): # setattr(options,key,getattr(args,key)) if not options.filelist: options.parser.print_help() return #assert options.filelist #xfiles = options.exclude.split(",") if options.exclude else [] if not options.output: options.output = "a.py" fp = open(options.output,"w") if options.output != "-" else sys.stdout print >> fp, "# Autogenerated with SMOP "# + options.version # for key in dir(options.args): # if not key.startswith("_"): # value = getattr(options,key) # print >> fp, '# %s=%s' % (key,value) print >> fp, "from __future__ import division" print >> fp, "from core import *" #if options.callgraph: # C = nx.DiGraph() for options.filename in options.filelist: try: #for options.filename in glob.glob(os.path.expanduser(pattern)): print >> fp, "#", options.filename if not options.filename.endswith((".m",".tst")): print "\tIgnored file: '%s'" % options.filename continue if os.path.basename(options.filename) in options.xfiles: print "\tExcluded file: '%s'" % options.filename continue if options.verbose: print options.filename buf = open(options.filename).read().replace("\r\n","\n") stmt_list=parse.parse(buf if buf[-1]=='\n' else buf+'\n') #assert None not in stmt_list if not stmt_list and options.strict: return if options.enumerate: for i,stmt_obj in enumerate(stmt_list): #stmt_class = stmt_obj.__class__.__name__ print i, stmt_obj # if i == options.debug_index: # import pdb ; pdb.set_trace() # if stmt_class == "func_stmt": # func_name = stmt_obj.ident.name # if options.verbose: # print "\t",func_name # else: # func_name = "" if not options.no_resolve: G = resolve.resolve(stmt_list) if not options.no_backend: s = backend.backend(stmt_list) print >> fp, s except Exception as e: print e if options.strict: raise
def scheduler_targets_list(request, type=None):
    """List scheduler targets with status filters; POST creates or edits targets."""
    message = None

    # Default status-filter flags.
    inactive = False
    active = True
    completed = True
    archived = False
    failed = False

    if request.method == 'POST':
        message = ""
        ids = [int(id) for id in request.POST.getlist('target_ids[]')]
        action = request.POST.get('action')

        if action == 'new':
            name = request.POST.get('new-name')
            coords = request.POST.get('new-coords')
            type = request.POST.get('new-type')
            filter = request.POST.get('new-filter')

            # Parse exposure
            try:
                exposure = float(request.POST.get('new-exposure'))
                exposure = max(0.1, min(exposure, 10000.0))
            except:
                exposure = 10.0

            # Repeat
            try:
                repeat = int(request.POST.get('new-repeat'))
                repeat = max(1, min(repeat, 1000))
            except:
                repeat = 1

            # Remove whitespaces
            name = '_'.join(name.split())

            # Parse coordinates
            sname, ra, dec = resolve(coords)

            if not sname:
                message = "Can't resolve object: %s" % (coords)
            elif not name:
                message = "Empty target name"
            else:
                id = db_query(
                    "INSERT INTO scheduler_targets (name, type, ra, dec, exposure, repeat, filter, status, time_created)"
                    + " VALUES (%s, %s, %s, %s, %s, %s, %s, get_scheduler_target_status_id('active'), favor2_timestamp()) RETURNING id",
                    (name, type, ra, dec, exposure, repeat, filter))
                message = "Target %d created: Name: %s RA: %g Dec: %g Type: %s Filter: %s Exp: %g Repeat: %d" % (
                    id, name, ra, dec, type, filter, exposure, repeat)
        elif len(ids):
            for id in ids:
                target = SchedulerTargets.objects.get(id=id)
                if action == 'disable' and target.status.name == 'active':
                    target.status = SchedulerTargetStatus.objects.get(
                        name='inactive')
                    target.save()
                    message += "Target %d disabled<br>" % id
                if action == 'enable' and target.status.name == 'inactive':
                    target.status = SchedulerTargetStatus.objects.get(
                        name='active')
                    target.save()
                    message += "Target %d enabled<br>" % id
                if action == 'complete' and target.status.name != 'archived':
                    target.status = SchedulerTargetStatus.objects.get(
                        name='completed')
                    target.save()
                    message += "Target %d completed<br>" % id
                if action == 'restart':
                    target.status = SchedulerTargetStatus.objects.get(
                        name='active')
                    target.save()
                    message += "Target %d restarted<br>" % id
                if action == 'delete':
                    target.delete()
                    message += "Target %d deleted<br>" % id
        else:
            message = "No targets selected"

        # Redirect to the same view, but with no POST args. We can't display messages with it!
        return redirect('scheduler_targets')

    elif request.method == 'GET':
        active = int(request.GET.get('active', active))
        inactive = int(request.GET.get('inactive', inactive))
        completed = int(request.GET.get('completed', completed))
        failed = int(request.GET.get('failed', failed))
        type = request.GET.get('type', type)

    targets = SchedulerTargets.objects.order_by('-time_created')

    if not active:
        targets = targets.exclude(status__name='active')
    if not inactive:
        targets = targets.exclude(status__name='inactive')
    if not completed:
        targets = targets.exclude(status__name='completed')
    if not archived:
        targets = targets.exclude(status__name='archived')
    if not failed:
        targets = targets.exclude(status__name='failed')

    if type and type != 'all':
        targets = targets.filter(type=type)
    else:
        type = 'all'

    context = {
        'targets': targets,
        'message': message,
        'type': type,
        'active': active,
        'inactive': inactive,
        'completed': completed,
        'archived': archived,
        'failed': failed
    }
    context['types'] = [
        _['type']
        for _ in SchedulerTargets.objects.distinct('type').values('type')
    ]
    context['types'].append('all')

    return TemplateResponse(request, 'scheduler_targets.html', context=context)
args = parser.parse_args()
#print("The arguments given are: ")
#print("Genome is",args.genome_file)
#print("VCF File is", args.vcf_file)
#print("MEI Annotation file is", args.annot_file)
#print("Output File is", args.output_file)

vcf_new = ''
if args.vcf_folder is None:
    # Single-file mode: open the output VCF (default <input>_v2).
    if args.output_file is None:
        output = args.vcf_file + "_v2"
    else:
        output = args.output_file
    vcf_new = open(output, "w")

if args.resolve:
    if args.annot_file is None or args.genome_file is None:
        sys.exit('Please run with correct genome and annotation file')
    else:
        mei_annot_file = args.annot_file
        genome = args.genome_file
        rslv.resolve(genome, args.vcf_file, vcf_new, mei_annot_file,
                     args.precise, args.vcf_folder, args.verbose)
elif args.mendelian:
    print(
        "Running mendelian filter (Make sure that your VCF has the required columns for the trio and son must be at the last column)"
    )
    mf.flt(args.vcf_file, vcf_new)