async def _ping(self, ctx):
    """Measure and report bot latency: websocket, database, typing, and round trip.

    Sends a placeholder message while measuring, then edits it into an embed
    with one field per latency figure.
    """
    await ctx.cd()

    # Websocket ping (pretty easy since discord.py has it built-in).
    websocket = util.prec_duration_strf(_ws := ctx.bot.latency)

    with util.Timer() as big_timer:
        # Database ping (SELECT 1) — round trip to the DB with a trivial query.
        with util.Timer() as timer:
            await ctx.bot.db.fetch("select 1")
        database_s = util.prec_duration_strf(timer.time)

        # Database ping (SELECT *) — a heavier, table-scanning query.
        with util.Timer() as timer:
            await ctx.bot.db.fetch("select * from users")
        database = util.prec_duration_strf(timer.time)

        # Typing ping — how long a message send takes.
        with util.Timer() as timer:
            original = await ctx.send(f"{core.LOADING} Pinging...")
        _typing = util.prec_duration_strf(timer.time)

    # Round trip: everything measured above plus the websocket latency.
    round_trip = util.prec_duration_strf(big_timer.time + _ws)

    # Format embed. Discord rejects empty field names/values, so blank
    # spacer fields use a zero-width space instead of "".
    embed = discord.Embed(color=core.COLOR)
    embed.description = "Don't worry about what's below unless you really care."
    embed.set_author(name="Latency", icon_url=ctx.bot.avatar)
    embed.timestamp = ctx.now
    embed.add_field(name="Websocket", value=websocket)
    embed.add_field(name="\u200b", value="\u200b")  # blank spacer field
    embed.add_field(name="Typing", value=_typing)
    embed.add_field(name="Database (SELECT 1)", value=database_s)
    embed.add_field(name="\u200b", value="\u200b")  # blank spacer field
    embed.add_field(name="Database (SELECT *)", value=database)
    embed.add_field(name="Round Trip", value=round_trip)
    await ctx.maybe_edit(original, "", embed=embed,
                         allowed_mentions=discord.AllowedMentions.none())
async def _reload(self, ctx, *extensions):
    """Reload the given extensions (all loaded extensions when none are given).

    Edits a progress message into a timed summary; any extension that raised
    during reload is listed with its error (previously errors were silently
    dropped unless *every* extension failed).
    """
    if not extensions:
        extensions = list(ctx.bot.extensions.keys())

    _pointer = extensions[0] if len(extensions) == 1 else f"{len(extensions)} cogs"
    _ = await ctx.send(f"{core.LOADING} Reloading {_pointer}...")

    _successful = []
    _errors = {}
    with util.Timer() as timer:
        for extension in extensions:
            try:
                ctx.bot.reload_extension(extension)
            except Exception as e:
                _errors[extension] = e
            else:
                _successful.append(extension)
    _time = util.prec_duration_strf(timer.time)

    _content: str
    _pointer = _successful[0] if len(_successful) == 1 else f"{len(_successful)} cogs"
    if _successful:
        _content = f"{core.CHECK} `[{_time}]` Successfully reloaded {_pointer}."
    else:
        _content = f"⚠ `[{_time}]` All cogs raised errors:"
    # Report failures even on a partial success, not just when everything failed.
    for ext, error in _errors.items():
        _content += f"\n**{ext}:** {error}"

    await ctx.maybe_edit(_, content=_content,
                         allowed_mentions=discord.AllowedMentions.none())
async def _database_exec(self, ctx, *, query):
    """Execute a raw SQL statement and report its status string and timing."""
    statement = util.strip_codeblocks(query)
    with util.Timer() as timer:
        status = await ctx.bot.db.pool.execute(statement)
    elapsed = util.prec_duration_strf(timer.time)
    await ctx.send(f"Query time: {elapsed}\n```sql\n{status}```")
def exec_lemul(args):
    """Simulate every compiled .adp circuit for a program and emit plots.

    Walks the lgraph (unscaled) or lscale (scaled) ADP directory depending on
    args.unscaled, loads each .adp file, and runs lsim.simulate_adp on it.
    """
    from compiler import lsim
    path_handler = paths.PathHandler(args.subset, args.program)
    program = DSProgDB.get_prog(args.program)
    # NOTE(review): this timer is constructed but never started/saved here —
    # possibly dead, or Timer has constructor side effects; confirm.
    timer = util.Timer('emul', path_handler)
    # Unscaled circuits live in the lgraph output dir; scaled in lscale's.
    if args.unscaled:
        direc = path_handler.lgraph_adp_dir()
    else:
        direc = path_handler.lscale_adp_dir()
    board = get_device(None)
    for dirname, subdirlist, filelist in \
        os.walk(direc):
        for adp_file in filelist:
            if adp_file.endswith('.adp'):
                with open(dirname + "/" + adp_file, 'r') as fh:
                    print("===== %s =====" % (adp_file))
                    adp = ADP.from_json(board, \
                                        json.loads(fh.read()))
                    if args.unscaled:
                        # Unscaled ADPs may carry multiple candidate modes per
                        # config; pin each to its first mode for simulation.
                        for cfg in adp.configs:
                            cfg.modes = [cfg.modes[0]]
                        # No lscale metadata exists yet, so the scale-related
                        # path components are placeholders ('na').
                        plot_file = path_handler.adp_sim_plot(
                            paths.PlotType.SIMULATION, \
                            adp.metadata[ADPMetadata.Keys.DSNAME],
                            adp.metadata[ADPMetadata.Keys.LGRAPH_ID],
                            'na', 'na', 'na', \
                            per_variable=args.separate_figures)
                    else:
                        plot_file = path_handler.adp_sim_plot(
                            paths.PlotType.SIMULATION, \
                            adp.metadata[ADPMetadata.Keys.DSNAME],
                            adp.metadata[ADPMetadata.Keys.LGRAPH_ID],
                            adp.metadata[ADPMetadata.Keys.LSCALE_ID],
                            adp.metadata[ADPMetadata.Keys.LSCALE_SCALE_METHOD],
                            adp.metadata[ADPMetadata.Keys.LSCALE_OBJECTIVE], \
                            per_variable=args.separate_figures)
                    print(plot_file)
                    # Re-resolve the board against the physical-model database
                    # recorded in this ADP's metadata before simulating.
                    board = get_device(
                        adp.metadata[ADPMetadata.Keys.RUNTIME_PHYS_DB])
                    # Each enable_* flag is the negation of a --no-* CLI switch.
                    lsim.simulate_adp(board, adp, plot_file, \
                                      enable_quantization=not args.no_quantize, \
                                      enable_intervals=not args.no_operating_range, \
                                      enable_physical_model=not args.no_physdb, \
                                      enable_model_error=not args.no_model_error, \
                                      separate_figures=args.separate_figures)
def exec_lgraph(args):
    """Compile a program into circuit ADPs and write each one to disk.

    Runs lgraph.compile, stamps each resulting ADP with dataset/subset/index
    metadata, writes the JSON circuit and a rendered diagram, and records
    per-circuit compile time via the timer (timed span = one compile step).
    """
    from compiler import lgraph
    board = get_device(args.model_number)
    path_handler = paths.PathHandler(args.subset, args.program)
    program = DSProgDB.get_prog(args.program)
    timer = util.Timer('lgraph', path_handler)
    # start/end bracket each yielded circuit: end fires when a circuit is
    # produced, start is re-armed at the bottom of the loop for the next one.
    timer.start()
    count = 0
    for index, adp in \
        enumerate(lgraph.compile(board, program,
                                 vadp_fragments=args.vadp_fragments,
                                 asm_frags=args.asm_fragments,
                                 synth_depth=args.synth_depth,
                                 vadps=args.vadps,
                                 adps=args.adps, \
                                 routes=args.routes)):
        timer.end()
        adp.metadata.set(ADPMetadata.Keys.DSNAME, \
                         args.program)
        adp.metadata.set(ADPMetadata.Keys.FEATURE_SUBSET, \
                         args.subset)
        adp.metadata.set(ADPMetadata.Keys.LGRAPH_ID, \
                         int(index))
        print("<<< writing circuit>>>")
        filename = path_handler.lgraph_adp_file(index)
        with open(filename, 'w') as fh:
            jsondata = adp.to_json()
            fh.write(json.dumps(jsondata, indent=4))
        print("<<< writing graph >>>")
        filename = path_handler.lgraph_adp_diagram_file(index)
        adprender.render(board, adp, filename)
        count += 1
        # Stop once the requested number of circuits has been written.
        if count >= args.adps:
            break
        timer.start()
    print("<<< done >>>")
    timer.kill()
    print(timer)
    timer.save()
def exec_lexec(args):
    """Execute every scaled .adp circuit on hardware via the grendel runner.

    Walks the lscale ADP directory and shells out to ``grendel.py exec`` for
    each circuit that has not already run (unless args.force). Execution time
    for each run is recorded in the timer and saved at the end.

    Raises:
        Exception: if the spawned grendel process exits with a nonzero status.
    """
    EXEC_CMD = "python3 grendel.py exec {adp_path} --model-number {model_number}"
    if args.scope:
        EXEC_CMD += " --osc"
    board = get_device(None)
    path_handler = paths.PathHandler(args.subset, args.program)
    program = DSProgDB.get_prog(args.program)
    timer = util.Timer('lexec', path_handler)
    for dirname, subdirlist, filelist in os.walk(path_handler.lscale_adp_dir()):
        for adp_file in filelist:
            if adp_file.endswith('.adp'):
                adp_path = dirname + "/" + adp_file
                print(adp_path)
                # Load the ADP just to read its metadata; close the file
                # before spawning the subprocess.
                with open(adp_path, 'r') as fh:
                    print("===== %s =====" % (adp_file))
                    adp = ADP.from_json(board,
                                        json.loads(fh.read()))
                kwargs = {
                    'adp_path': adp_path,
                    'model_number': adp.metadata[ADPMetadata.Keys.RUNTIME_PHYS_DB]
                }
                if not _lexec_already_ran(path_handler, board, adp, trial=0,
                                          scope=args.scope) or \
                   args.force:
                    timer.start()
                    cmd = EXEC_CMD.format(**kwargs)
                    code = os.system(cmd)
                    timer.end()
                    # os.system returns the wait status; the old extra check
                    # `code == signal.SIGINT` was redundant (SIGINT == 2 != 0,
                    # so any nonzero status already covers it). Abort on any
                    # failure so we don't keep hammering broken hardware runs.
                    if code != 0:
                        raise Exception("User terminated process")
    print(timer)
    timer.save()
async def _database_fetch(self, ctx, *, query):
    """Run a SELECT query and send the result as a table (or a mystbin link).

    Short, narrow tables are sent inline in a code block; anything too long
    (>= 1950 chars) or too wide (first line > 138 chars) is uploaded to
    mystbin instead. SQL errors are reported back to the invoker.
    """
    with util.Timer() as timer:
        # noinspection PyBroadException
        try:
            fetched = await ctx.bot.db.pool.fetch(
                util.strip_codeblocks(query))
        except Exception as e:
            # Surface the SQL error to the invoker instead of raising.
            return await ctx.send(f"⚠\n```sql\n{e}```")
    query_time = util.prec_duration_strf(timer.time)

    if not fetched:
        return await ctx.send("Query returned nothing.")

    table_raw = tabulate.tabulate(fetched, headers="keys",
                                  tablefmt="fancy_grid")
    # Inline only if it fits in a Discord message and isn't too wide to read.
    fits_height = len(table_raw) < 1950
    fits_width = len(table_raw.split("\n")[0]) <= 138
    if fits_height and fits_width:
        return await ctx.send(
            f"Query time: {query_time}\n```py\n{table_raw}```")

    _myst = await ctx.bot.mystbin.post(table_raw)
    await ctx.send(f"Query time: {query_time}\n\n<{_myst}>")
async def _refresh(self, ctx):
    """Hot-reload the bot: re-import util/ and core/ modules, reload all cogs.

    Edits a progress message into a timed completion message; cogs that fail
    to reload have their error sent to the channel (best effort — reloading
    continues past failures).
    """
    _ = await ctx.send(f"{core.LOADING} Refreshing...")
    with util.Timer() as timer:
        # util/ and core/ are plain module packages, not extensions —
        # re-import + reload them directly.
        self._reload_py_package("util")
        self._reload_py_package("core")
        for extension in os.listdir("./cogs"):
            if extension.endswith(".py"):
                try:
                    ctx.bot.reload_extension(f"cogs.{extension[:-3]}")
                except Exception as e:
                    await ctx.send(e)
    await ctx.maybe_edit(
        _, content=f"Done. {util.prec_duration_strf(timer.time)}",
        allowed_mentions=discord.AllowedMentions.none())

def _reload_py_package(self, package):
    """Reload every .py module under ./<package> via importlib."""
    for module_file in os.listdir(f"./{package}"):
        if module_file.endswith(".py"):
            module = importlib.import_module(f"{package}.{module_file[:-3]}")
            importlib.reload(module)
def exec_lscale(args):
    """Scale every compiled lgraph circuit and write the scaled ADPs to disk.

    Walks the lgraph ADP directory, runs lscale.scale on each circuit with the
    requested objective/scale-method/calibration settings, and writes each
    scaled ADP as JSON plus a rendered diagram. Per-circuit scaling time is
    bracketed by timer.start()/timer.end() and saved at the end.
    """
    from compiler import lscale
    import compiler.lscale_pass.lscale_ops as scalelib
    board = get_device(args.model_number)
    path_handler = paths.PathHandler(args.subset, args.program)
    program = DSProgDB.get_prog(args.program)
    timer = util.Timer('lscale', path_handler)
    for dirname, subdirlist, filelist in \
        os.walk(path_handler.lgraph_adp_dir()):
        for lgraph_adp_file in filelist:
            if lgraph_adp_file.endswith('.adp'):
                with open(dirname + "/" + lgraph_adp_file, 'r') as fh:
                    print("===== %s =====" % (lgraph_adp_file))
                    adp = ADP.from_json(board, \
                                        json.loads(fh.read()))
                    obj = scalelib.ObjectiveFun(args.objective)
                    scale_method = scalelib.ScaleMethod(args.scale_method)
                    calib_obj = get_calibrate_objective(args.calib_obj)
                    # --no-scale only makes sense with the IDEAL scale method;
                    # any other method consults the delta model database.
                    if args.no_scale and not scale_method is scalelib.ScaleMethod.IDEAL:
                        raise Exception(
                            "cannot disable scaling transform if you're using the delta model database"
                        )
                    # start/end bracket each yielded scaled circuit; start is
                    # re-armed at the bottom of the loop for the next one.
                    timer.start()
                    for idx, scale_adp in enumerate(lscale.scale(board, \
                                                                 program, \
                                                                 adp, \
                                                                 objective=obj, \
                                                                 scale_method=scale_method, \
                                                                 calib_obj=calib_obj, \
                                                                 no_scale=args.no_scale, \
                                                                 one_mode=args.one_mode)):
                        timer.end()
                        print("<<< writing scaled circuit %d/%d>>>" % (idx, args.scale_adps))
                        scale_adp.metadata.set(ADPMetadata.Keys.LSCALE_ID, idx)
                        # NOTE(review): rebinds the loop-level calib_obj from the
                        # scaled ADP's own metadata — shadows the CLI-derived
                        # value for all later path construction; confirm intended.
                        calib_obj = llenums.CalibrateObjective(scale_adp \
                                                               .metadata[ADPMetadata.Keys.RUNTIME_CALIB_OBJ])
                        filename = path_handler.lscale_adp_file(
                            scale_adp.metadata[ADPMetadata.Keys.LGRAPH_ID],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_ID],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_SCALE_METHOD],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_OBJECTIVE],
                            calib_obj,
                            scale_adp.metadata[ADPMetadata.Keys.RUNTIME_PHYS_DB], \
                            no_scale=scale_adp.metadata[ADPMetadata.Keys.LSCALE_NO_SCALE], \
                            one_mode=scale_adp.metadata[ADPMetadata.Keys.LSCALE_ONE_MODE] \
                        )
                        with open(filename, 'w') as fh:
                            jsondata = scale_adp.to_json()
                            fh.write(json.dumps(jsondata, indent=4))
                        print("<<< writing graph >>>")
                        filename = path_handler.lscale_adp_diagram_file(
                            scale_adp.metadata[ADPMetadata.Keys.LGRAPH_ID],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_ID],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_SCALE_METHOD],
                            scale_adp.metadata[ADPMetadata.Keys.LSCALE_OBJECTIVE],
                            calib_obj,
                            scale_adp.metadata[ADPMetadata.Keys.RUNTIME_PHYS_DB], \
                            no_scale=scale_adp.metadata[ADPMetadata.Keys.LSCALE_NO_SCALE], \
                            one_mode=scale_adp.metadata[ADPMetadata.Keys.LSCALE_ONE_MODE] \
                        )
                        adprender.render(board, scale_adp, filename)
                        # Stop once the requested number of scaled circuits exists.
                        if idx >= args.scale_adps:
                            break
                        timer.start()
    print("<<< done >>>")
    timer.kill()
    print(timer)
    timer.save()