def test_it_returns_true_if_url_is_on_blacklist(self, mock_open):
    mock_open.return_value = io.StringIO("www.google.com\nwww.amazon.com")
    assert on_blacklist("www.amazon.com") is True
def setUp(self):
    logger = getLogger()
    logger.setLevel('DEBUG')
    self.output = io.StringIO()
    logger.addHandler(StreamHandlerRaiseException(self.output))
    self.circuit = QuantumCircuit(QuantumRegister(1))
def __enter__(self):
    self._stdout = sys.stdout
    sys.stdout = self._stringio = io.StringIO()
    return self
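# A stdout capturer like the __enter__ above usually pairs with an __exit__
# that restores the stream; a minimal sketch (hypothetical, the rest of the
# class is not shown here):
def __exit__(self, exc_type, exc_value, tb):
    self.captured = self._stringio.getvalue()  # keep the captured text
    sys.stdout = self._stdout                  # restore the real stdout
    return False                               # don't swallow exceptions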
if __name__ == "__main__":
    solver = FrequencyTableSolver()
    solver.read_csv_data("degree by family income_6x12.csv")
    solver.initialize_starting_point()
    solver.show_state('STARTING POINT SOLUTION')

    profile = False
    if profile:
        import cProfile, io, pstats
        pr = cProfile.Profile()
        pr.enable()
        solver.solve(iterations=1000)
        pr.disable()
        s = io.StringIO()
        ps = pstats.Stats(pr, stream=s).sort_stats(pstats.SortKey.CUMULATIVE)
        ps.print_stats()
        print(s.getvalue())
    else:
        solver.solve(iterations=100 * 150)

    solver.show_state('ENDING POINT SOLUTION')
    print(f'error_list: {solver.error_list}')
    print(f'data: {solver.data}')
    print(f'fitted_frequencies: {solver.fitted_frequencies}')
    print(f'iterations/second: '
          f'{(solver.iteration+1)/(solver.t_solve_end-solver.t_solve_start):.1f}')
def HMFOR_plots(HMFOR_inputs, cd_lower, cd_upper, cv_lower, cv_upper,
                FE_lower, FE_upper, yield_lower, yield_upper):
    # Generates plots for HMFOR reaction
    [NPV_base, payback_time_base, product_income, op_costs,
     cap_costs] = HMFOR_TEA(*HMFOR_inputs)

    # ________Pie Charts__________
    # Operating Costs
    op_cost_pie = pygal.Pie()
    op_cost_pie.title = 'Annual Operating Cost Breakdown ($)'
    op_cost_pie.add('Electricity', round(op_costs[0], 2))
    op_cost_pie.add('Maintenance', round(op_costs[1], 2))
    op_cost_pie.add('Crystallization', round(op_costs[2], 2))
    op_cost_pie.add('Water', round(op_costs[3], 2))
    op_cost_pie.add('HMF input', round(op_costs[4], 2))
    op_cost_pie_data = op_cost_pie.render_data_uri()

    # Operating Costs without HMF
    op_cost_pie_no_hmf = pygal.Pie()
    op_cost_pie_no_hmf.title = 'Annual Operating Cost Breakdown Excluding HMF ($)'
    op_cost_pie_no_hmf.add('Electricity', round(op_costs[0], 2))
    op_cost_pie_no_hmf.add('Maintenance', round(op_costs[1], 2))
    op_cost_pie_no_hmf.add('Crystallization', round(op_costs[2], 2))
    op_cost_pie_no_hmf.add('Water', round(op_costs[3], 2))
    op_cost_pie_no_hmf_data = op_cost_pie_no_hmf.render_data_uri()

    # Capital Costs
    cap_cost_pie = pygal.Pie()
    cap_cost_pie.title = 'Capital Cost Breakdown ($)'
    cap_cost_pie.add('Electrolyzer', round(cap_costs[0], 2))
    cap_cost_pie.add('Crystallizer', round(cap_costs[1], 2))
    cap_cost_pie.add('Balance', round(cap_costs[2], 2))
    cap_cost_pie_data = cap_cost_pie.render_data_uri()

    # ________Sensitivity Analysis Charts__________
    # Set up scenarios (+/- 10%). Each scenario perturbs exactly one of the
    # variables below while holding the others at their base values.
    sa_vars = ['Electrolyzer Cost', 'Faradaic Efficiency', 'FDCA Yield',
               'Cell Voltage', 'Current Density', 'HMF Price',
               'Electricity Price']
    # Positions of the corresponding entries in HMFOR_inputs, in the same
    # order as sa_vars
    sa_idx = [7, 13, 14, 12, 11, 6, 3]
    sa_lower_vars = []
    sa_upper_vars = []
    for i in range(len(sa_idx)):
        base = [HMFOR_inputs[k] for k in sa_idx]
        lower = list(base)
        upper = list(base)
        lower[i] = 0.9 * lower[i]
        upper[i] = 1.1 * upper[i]
        sa_lower_vars.append(lower)
        sa_upper_vars.append(upper)

    sa_lower = []
    for i in range(len(sa_lower_vars)):
        results = HMFOR_TEA(*HMFOR_inputs[:3], sa_lower_vars[i][6],
                            *HMFOR_inputs[4:6], sa_lower_vars[i][5],
                            sa_lower_vars[i][0], *HMFOR_inputs[8:11],
                            sa_lower_vars[i][4], sa_lower_vars[i][3],
                            sa_lower_vars[i][1], sa_lower_vars[i][2],
                            HMFOR_inputs[-1])
        sa_lower.append(results[0] / NPV_base - 1)

    sa_upper = []
    for i in range(len(sa_upper_vars)):
        results = HMFOR_TEA(*HMFOR_inputs[:3], sa_upper_vars[i][6],
                            *HMFOR_inputs[4:6], sa_upper_vars[i][5],
                            sa_upper_vars[i][0], *HMFOR_inputs[8:11],
                            sa_upper_vars[i][4], sa_upper_vars[i][3],
                            sa_upper_vars[i][1], sa_upper_vars[i][2],
                            HMFOR_inputs[-1])
        sa_upper.append(results[0] / NPV_base - 1)

    num_vars = len(sa_vars)
    # bars centered on the y axis
    pos = np.arange(num_vars) + .5

    # make the left and right axes
    fig = Figure()
    ax_lower = fig.add_axes([0.05, 0.1, 0.35, 0.8])
    ax_upper = fig.add_axes([0.6, 0.1, 0.35, 0.8])

    # just tick on the top
    ax_lower.xaxis.set_ticks_position('top')
    ax_upper.xaxis.set_ticks_position('top')

    # Set figure title
    fig.suptitle('Sensitivity Analysis')

    # set bar colors
    c_lower = []
    c_upper = []
    for i in range(num_vars):
        c_lower.append('red' if sa_lower[i] < 0 else 'green')
        c_upper.append('red' if sa_upper[i] < 0 else 'green')

    # make the lower graph
    ax_lower.barh(pos, [abs(ele) for ele in sa_lower], align='center',
                  color=c_lower, height=0.5, edgecolor='None')
    ax_lower.set_yticks([])
    ax_lower.invert_xaxis()

    # make the upper graph
    ax_upper.barh(pos, [abs(ele) for ele in sa_upper], align='center',
                  color=c_upper, height=0.5, edgecolor='None')
    ax_upper.set_yticks([])

    # we want the labels to be centered in the fig coord system and
    # centered w/ respect to the bars so we use a custom transform
    transform = transforms.blended_transform_factory(
        fig.transFigure, ax_upper.transData)
    for i, label in enumerate(sa_vars):
        ax_upper.text(0.5, i + 0.5, label, ha='center', va='center',
                      transform=transform)

    # the axes titles are in axes coords, so x=0, y=1.025 is on the left
    # side of the axes, just above, x=1.0, y=1.025 is the right side of the
    # axes, just above
    ax_upper.set_title('+10%', x=-0.15, y=0.97, fontsize=12)
    ax_lower.set_title('-10%', x=1.15, y=0.97, fontsize=12)

    # adding the annotations
    for i in range(num_vars):
        ax_upper.annotate(str(round(abs(sa_upper[i]) * 100, 2)) + '%',
                          xy=(0.00001, 0.5 + i), xycoords='data',
                          xytext=(16, 0), textcoords='offset points',
                          size=10, va='center')
        ax_lower.annotate(str(round(abs(sa_lower[i]) * 100, 2)) + '%',
                          xy=(max([abs(ele) for ele in sa_lower]) / 2, 0.5 + i),
                          xycoords='data', xytext=(16, 0),
                          textcoords='offset points', size=10, va='center')

    SA_output = io.StringIO()
    FigureCanvasSVG(fig).print_svg(SA_output)

    # ________Color Scatter Charts__________
    scatter_step = 75

    # Current Density (x) vs Voltage (y)
    x = []
    y = []
    cd_cv_npv = []
    cd = cd_lower
    cv = cv_lower
    cd_step = (cd_upper - cd_lower) / scatter_step
    cv_step = (cv_upper - cv_lower) / scatter_step
    for i in range(scatter_step):
        for j in range(scatter_step):
            x.append(cd)
            y.append(cv)
            results = HMFOR_TEA(*HMFOR_inputs[:11], cd, cv,
                                *HMFOR_inputs[13:])
            cd_cv_npv.append(results[0])
            cv += cv_step
        cd += cd_step
        cv = cv_lower

    fig = Figure()
    ax = fig.add_subplot(111)
    im = ax.scatter(x, y, s=3, c=cd_cv_npv)
    ax.scatter(HMFOR_inputs[11], HMFOR_inputs[12], edgecolors='black',
               s=8, c='b')
    ax.set_xlabel('Current Density $[A/cm^2]$')
    ax.set_ylabel('Cell Voltage $[V]$')
    ax.set_title('Current Density vs Cell Voltage')
    ax.set_xlim(cd_lower, cd_upper)
    ax.set_ylim(cv_lower, cv_upper)
    ax.set_xticks(np.arange(cd_lower, cd_upper + 10 ** -8,
                            (cd_upper - cd_lower) / 4))
    ax.set_yticks(np.arange(cv_lower, cv_upper + 10 ** -8,
                            (cv_upper - cv_lower) / 4))
    fig.colorbar(im, ax=ax, label='Net Present Value [$]')
    cd_cv_output = io.StringIO()
    FigureCanvasSVG(fig).print_svg(cd_cv_output)

    # FE (x) vs Voltage (y)
    x = []
    y = []
    fe_cv_npv = []
    FE = FE_lower
    cv = cv_lower
    FE_step = (FE_upper - FE_lower) / scatter_step
    cv_step = (cv_upper - cv_lower) / scatter_step
    for i in range(scatter_step):
        for j in range(scatter_step):
            x.append(FE)
            y.append(cv)
            results = HMFOR_TEA(*HMFOR_inputs[:12], cv, FE,
                                *HMFOR_inputs[14:])
            fe_cv_npv.append(results[0])
            cv += cv_step
        FE += FE_step
        cv = cv_lower

    fig = Figure()
    ax = fig.add_subplot(111)
    im = ax.scatter(x, y, s=3, c=fe_cv_npv)
    ax.scatter(HMFOR_inputs[13], HMFOR_inputs[12], edgecolors='black',
               s=8, c='b')
    ax.set_xlabel('Faradaic Efficiency')
    ax.set_ylabel('Cell Voltage $[V]$')
    ax.set_title('Faradaic Efficiency vs Cell Voltage')
    ax.set_xlim(FE_lower, FE_upper)
    ax.set_ylim(cv_lower, cv_upper)
    ax.set_xticks(np.arange(FE_lower, FE_upper + 10 ** -8,
                            (FE_upper - FE_lower) / 4))
    ax.set_yticks(np.arange(cv_lower, cv_upper + 10 ** -8,
                            (cv_upper - cv_lower) / 4))
    fig.colorbar(im, ax=ax, label='Net Present Value [$]')
    fe_cv_output = io.StringIO()
    FigureCanvasSVG(fig).print_svg(fe_cv_output)

    # Yield (x) vs Voltage (y)
    x = []
    y = []
    yld_cv_npv = []
    yld = yield_lower
    cv = cv_lower
    yld_step = (yield_upper - yield_lower) / scatter_step
    cv_step = (cv_upper - cv_lower) / scatter_step
    for i in range(scatter_step):
        for j in range(scatter_step):
            x.append(yld)
            y.append(cv)
            results = HMFOR_TEA(*HMFOR_inputs[:12], cv, HMFOR_inputs[13],
                                yld, HMFOR_inputs[-1])
            yld_cv_npv.append(results[0])
            cv += cv_step
        yld += yld_step
        cv = cv_lower

    fig = Figure()
    ax = fig.add_subplot(111)
    im = ax.scatter(x, y, s=3, c=yld_cv_npv)
    ax.scatter(HMFOR_inputs[14], HMFOR_inputs[12], edgecolors='black',
               s=8, c='b')
    ax.set_xlabel('FDCA Yield')
    ax.set_ylabel('Cell Voltage $[V]$')
    ax.set_title('FDCA Yield vs Cell Voltage')
    ax.set_xlim(yield_lower, yield_upper)
    ax.set_ylim(cv_lower, cv_upper)
    ax.set_xticks(np.arange(yield_lower, yield_upper + 10 ** -8,
                            (yield_upper - yield_lower) / 4))
    ax.set_yticks(np.arange(cv_lower, cv_upper + 10 ** -8,
                            (cv_upper - cv_lower) / 4))
    fig.colorbar(im, ax=ax, label='Net Present Value [$]')
    yld_cv_output = io.StringIO()
    FigureCanvasSVG(fig).print_svg(yld_cv_output)

    # Current Density vs NPV
    x = []
    y = []
    cd = cd_lower
    cd_step = (cd_upper - cd_lower) / scatter_step
    for i in range(scatter_step):
        x.append(cd)
        results = HMFOR_TEA(*HMFOR_inputs[:11], cd, cv, *HMFOR_inputs[13:])
        y.append(results[0])
        cd += cd_step

    # Using Pygal
    # xy_chart = pygal.XY(stroke=False)
    # xy_chart.title = 'Current Density vs NPV ($)'
    # xy_chart.add('Current Density $[A/cm^2]$',
    #              [(x[i], y[i]) for i in range(0, len(x))])
    # cd_npv = xy_chart.render_data_uri()
    fig = Figure()
    ax = fig.add_subplot(111)
    im = ax.scatter(x, y)
    ax.set_title('Current Density vs NPV')
    ax.set_xlabel('Current Density $[A/cm^2]$')
    ax.set_ylabel('Net Present Value [$]')
    cd_npv_output = io.StringIO()
    FigureCanvasSVG(fig).print_svg(cd_npv_output)

    return [op_cost_pie_data, op_cost_pie_no_hmf_data, cap_cost_pie_data,
            SA_output, cd_cv_output, fe_cv_output, yld_cv_output,
            cd_npv_output]
def _run(self):  # pylint: disable=method-hidden
    # Remove handlers that log to stderr
    root = logging.getLogger()
    for handler in root.handlers[:]:
        if isinstance(handler, logging.StreamHandler) and handler.stream == sys.stderr:
            root.removeHandler(handler)

    stream = io.StringIO()
    handler = logging.StreamHandler(stream=stream)
    handler.formatter = logging.Formatter(u'%(levelname)s:%(name)s %(message)s')
    root.addHandler(handler)

    err = io.StringIO()
    sys.stderr = err

    def lastlog(n=10, prefix=None, level=None):
        """ Print the last `n` log lines to stdout.
        Use `prefix='p2p'` to filter for a specific logger.
        Use `level=INFO` to filter for a specific level.
        Level- and prefix-filtering are applied before tailing the log.
        """
        lines = (stream.getvalue().strip().split('\n') or [])
        if prefix:
            lines = [
                line
                for line in lines
                if line.split(':')[1].startswith(prefix)
            ]
        if level:
            lines = [
                line
                for line in lines
                if line.split(':')[0] == level
            ]
        for line in lines[-n:]:
            print(line)

    def lasterr(n=1):
        """ Print the last `n` entries of stderr to stdout. """
        for line in (err.getvalue().strip().split('\n') or [])[-n:]:
            print(line)

    tools = ConsoleTools(
        self.app.raiden,
        self.app.discovery,
        self.app.config['settle_timeout'],
    )

    self.console_locals = {
        'app': self.app,
        'raiden': self.app.raiden,
        'chain': self.app.raiden.chain,
        'discovery': self.app.discovery,
        'tools': tools,
        'lasterr': lasterr,
        'lastlog': lastlog,
        'usage': print_usage,
    }

    print('\n' * 2)
    print('Entering Console' + OKGREEN)
    print('Tip:' + OKBLUE)
    print_usage()
    IPython.start_ipython(argv=['--gui', 'gevent'], user_ns=self.console_locals)
    sys.exit(0)
import io
import os
import sys

# Redirect stdout before importing app so its print output is captured
sys.stdout = buffer = io.StringIO()

import app
import pytest


@pytest.mark.it("Odd and even numbers order by values")
def test_odd_even():
    captured = buffer.getvalue()
    assert "[85, 59, 37, 25, 5, 81, 41, 55, 4, 80, 64, 66, 20, 64, 22, 76, 76, 96, 2, 68]\n" in captured


@pytest.mark.it("Looping the list")
def test_for():
    f = open(os.path.dirname(os.path.abspath(__file__)) + '/app.py')
    content = f.read()
    assert content.find("for") > 0


@pytest.mark.it("Conditional if/else")
def test_if_else():
    f = open(os.path.dirname(os.path.abspath(__file__)) + '/app.py')
    content = f.read()
    assert content.find("if") > 0
    assert content.find("else") > 0
def write_report(output_file, STYLE, SCRIPT, table_rgi, bubble_plot,
                 samtools_median_depth, samtools_mean_depth):
    import io
    from docutils.core import publish_file, publish_parts
    from docutils.parsers.rst import directives

    if samtools_median_depth < 50:
        warning_type = 'danger'
    else:
        warning_type = 'info'

    report_str = f"""

.. raw:: html

    {SCRIPT}

    {STYLE}

=============================================================
RGI report
=============================================================

.. contents::
    :backlinks: none
    :depth: 2

Bubble plot
-----------

.. raw:: html

    {bubble_plot}

Table
------

.. raw:: html

    <div class="alert alert-{warning_type}" role="alert">
        Median depth: <strong> {samtools_median_depth} </strong> </br>
        Mean depth: <strong> {samtools_mean_depth} </strong>
    </div>
    <div class="alert alert-warning" role="alert">
        Genes with a hit <span class="label label-default">coverage < 90%</span>
        are highlighted in <span class="label label-success">green</span> (if any) </br>
        Genes with an <span class="label label-default">identity < 90%</span>
        are highlighted in <span class="label label-danger">red</span> (if any)
    </div>

    {table_rgi}

"""

    with open(output_file, "w") as fh:
        publish_file(
            source=io.StringIO(report_str),
            destination=fh,
            writer_name="html",
            settings_overrides={"stylesheet_path": ""},
        )
def verify(db, args):
    label = args['label']

    token = db.gettoken(label)
    if token is None:
        sys.exit('No token labeled "%s"' % label)

    sopin = args['sopin']
    userpin = args['userpin']
    hierarchyauth = args['hierarchy_auth']

    verify_output = {}
    verify_output['label'] = label

    pobj = db.getprimary(token['pid'])
    sealobj = db.getsealobject(token['id'])

    wrappingkeyauth = None

    verify_output['config'] = yaml.safe_load(io.StringIO(token['config']))
    verify_output['pin'] = {}

    with TemporaryDirectory() as d:
        tpm2 = Tpm2(d)

        pobjauth = pobj['objauth']
        pobj_handle = get_pobject(pobj, tpm2, hierarchyauth, d)

        if sopin is not None:
            sosealctx = tpm2.load(pobj_handle, pobjauth,
                                  sealobj['sopriv'], sealobj['sopub'])

            # Unseal the wrapping key auth
            sosealauthsalt = sealobj['soauthsalt']
            sosealauth = hash_pass(sopin, salt=sosealauthsalt)
            wrappingkeyauth = tpm2.unseal(sosealctx, sosealauth['hash'])
            verify_output['pin']['so'] = {'seal-auth': sosealauth['hash']}

        if userpin is not None:
            usersealctx = tpm2.load(pobj_handle, pobjauth,
                                    sealobj['userpriv'], sealobj['userpub'])

            # Unseal the wrapping key auth
            usersealauthsalt = sealobj['userauthsalt']
            usersealauth = hash_pass(userpin, salt=usersealauthsalt)
            wrappingkeyauth = tpm2.unseal(usersealctx, usersealauth['hash'])
            verify_output['pin']['user'] = {'seal-auth': usersealauth['hash']}

        verify_output['wrappingkey'] = {
            'hex': bytes.hex(wrappingkeyauth),
        }
        if userpin is not None:
            verify_output['wrappingkey']['auth'] = usersealauth['hash']
        if sopin is not None:
            verify_output['wrappingkey']['soauth'] = sosealauth['hash']

        wrapper = AESAuthUnwrapper(wrappingkeyauth)

        tobjs = db.gettertiary(token['id'])

        verify_output['objects'] = []
        for tobj in tobjs:
            attrs = yaml.safe_load(tobj['attrs'])

            priv = None
            if CKA_TPM2_PRIV_BLOB in attrs:
                priv = binascii.unhexlify(attrs[CKA_TPM2_PRIV_BLOB])

            pub = None
            if CKA_TPM2_PUB_BLOB in attrs:
                pub = binascii.unhexlify(attrs[CKA_TPM2_PUB_BLOB])

            encauth = None
            if CKA_TPM2_OBJAUTH_ENC in attrs:
                encauth = binascii.unhexlify(attrs[CKA_TPM2_OBJAUTH_ENC])

            tobjauth = None
            if encauth:
                encauth = encauth.decode()
                tpm2.load(pobj_handle, pobjauth, priv, pub)
                tobjauth = wrapper.unwrap(encauth).decode()

            verify_output['objects'].append({
                'id': tobj['id'],
                'auth': tobjauth,
                'encauth': encauth
            })

    yaml_dump = yaml.safe_dump(verify_output, default_flow_style=False)
    print(yaml_dump)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
from scipy import stats
from statsmodels.graphics.api import qqplot
import io
import requests

# IPython magic (notebook context)
%matplotlib inline

##############
# Monthly data
url = "http://www.sidc.be/silso/DATA/SN_m_tot_V2.0.txt"
s = requests.get(url).content
colnames = ['YEAR', 'MONTH', 'YM', 'ACTIVITY', 'C1', 'C2', 'C3']
df = pd.read_table(io.StringIO(s.decode('utf-8')),
                   names=colnames,
                   header=None,
                   delim_whitespace=True)
df.head(5)
df.shape

df['DAY'] = 1
df = df[['YEAR', 'MONTH', 'DAY', 'ACTIVITY']]
df.index = pd.to_datetime(df[['YEAR', 'MONTH', 'DAY']])
df = df[['ACTIVITY']]
df.head(5)
df.info()
df.describe()

pd.to_datetime(df[['YEAR', 'MONTH', 'DAY']])

# You can select by datetime with partial match
df.loc['1830']
def connect(self):
    self._stderr, self._buf, sys.stderr = sys.stderr, io.StringIO(), self
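# connect() above swaps sys.stderr for the object itself, so the class must
# also provide write() and some way to undo the redirect. A minimal sketch
# with assumed names (write/disconnect are not shown in the original):
def write(self, data):
    self._buf.write(data)          # collect everything sent to stderr

def disconnect(self):
    sys.stderr = self._stderr      # restore the original stream
    return self._buf.getvalue()    # hand back what was captured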
def setUp(self):
    super().setUp()
    self.initial_stdout = log_manager.stdout
    self.initial_stderr = log_manager.stderr
    self.stringbuf = io.StringIO()
    log_manager.set_output_stream(self.stringbuf)
def test_it_ignores_empty_lines(self, mock_open):
    mock_open.return_value = io.StringIO("\nwww.amazon.com")
    assert on_blacklist("www.zalando.de") is False
def test_it_returns_true_if_a_partial_match_is_found(self, mock_open):
    mock_open.return_value = io.StringIO("www.amazon.com")
    assert on_blacklist("http://www.amazon.com/") is True
async def repl(self, ctx):
    """Launches an interactive REPL session."""
    if not self.dev_check(ctx.author.id):
        return await ctx.send(
            "HALT! This command is for the devs only. Sorry. :x:")

    variables = {
        'ctx': ctx,
        'bot': self.bot,
        'message': ctx.message,
        'guild': ctx.guild,
        'channel': ctx.channel,
        'author': ctx.author,
        '_': None,
    }

    if ctx.channel.id in self.sessions:
        await ctx.send(
            'Already running a REPL session in this channel. Exit it with `quit`.')
        return

    self.sessions.add(ctx.channel.id)
    await ctx.send(
        'Enter code to execute or evaluate. `exit()` or `quit` to exit.')

    def check(m):
        return m.author.id == ctx.author.id and \
            m.channel.id == ctx.channel.id and \
            m.content.startswith('`')

    while True:
        try:
            response = await self.bot.wait_for('message', check=check,
                                               timeout=10.0 * 60.0)
        except asyncio.TimeoutError:
            await ctx.send('Exiting REPL session.')
            self.sessions.remove(ctx.channel.id)
            break

        cleaned = self.cleanup_code(response.content)

        if cleaned in ('quit', 'exit', 'exit()'):
            await ctx.send('Exiting.')
            self.sessions.remove(ctx.channel.id)
            return

        executor = exec
        if cleaned.count('\n') == 0:
            # single statement, potentially 'eval'
            try:
                code = compile(cleaned, '<repl session>', 'eval')
            except SyntaxError:
                pass
            else:
                executor = eval

        if executor is exec:
            try:
                code = compile(cleaned, '<repl session>', 'exec')
            except SyntaxError as e:
                await ctx.send(self.get_syntax_error(e))
                continue

        variables['message'] = response

        fmt = None
        stdout = io.StringIO()

        try:
            with redirect_stdout(stdout):
                result = executor(code, variables)
                if inspect.isawaitable(result):
                    result = await result
        except Exception as e:
            value = stdout.getvalue()
            fmt = f'```py\n{value}{traceback.format_exc()}\n```'
        else:
            value = stdout.getvalue()
            if result is not None:
                fmt = f'```py\n{value}{result}\n```'
                variables['_'] = result
            elif value:
                fmt = f'```py\n{value}\n```'

        try:
            if fmt is not None:
                if len(fmt) > 2000:
                    await ctx.send('Content too big to be printed.')
                else:
                    await ctx.send(fmt)
        except discord.Forbidden:
            pass
        except discord.HTTPException as e:
            await ctx.send(f'Unexpected error: `{e}`')
def __init__(self, string):
    self._size = len(string.encode('latin-1'))
    self._reader = io.StringIO(string)
def setUp(self):
    self.capturedOutput = io.StringIO()
    sys.stdout = self.capturedOutput
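# A setUp that hijacks sys.stdout needs a matching tearDown, otherwise later
# tests inherit the redirected stream; a minimal sketch of the counterpart:
def tearDown(self):
    sys.stdout = sys.__stdout__    # the interpreter's original stdout
    self.capturedOutput.close()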
env = environ.Env(
    CORS_ALLOW_ALL_ORIGINS=(bool, True),
    MODE=(str, "production"),
    REDIS_HOST=(str, "localhost"))

if env("MODE") == "production":
    import io
    import google.auth
    from google.cloud import secretmanager as sm

    GCP_SECRETS_NAME = "application_settings"
    _, project = google.auth.default()
    client = sm.SecretManagerServiceClient()
    name = f"projects/{project}/secrets/{GCP_SECRETS_NAME}/versions/latest"
    payload = client.access_secret_version(
        name=name).payload.data.decode("UTF-8")
    env.read_env(io.StringIO(payload))

# Django Settings
SECRET_KEY = env("SECRET_KEY")
DEBUG = env("DEBUG")

ALLOWED_HOSTS = []
for host in env("ALLOWED_HOSTS").split(","):
    ALLOWED_HOSTS += [host]

# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "corsheaders",
def write_csv(data):
    output = io.StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerows(data)
    return {'data': output.getvalue(), 'encoding': 'text/csv', 'ext': 'csv'}
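# Illustrative call (made-up rows): csv.QUOTE_NONNUMERIC quotes every
# non-numeric field, so strings come back quoted and numbers bare.
#   write_csv([["name", "score"], ["ada", 95]])['data']
#   -> '"name","score"\r\n"ada",95\r\n'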
def process(precsv, processingFile=None):
    ordering = ['name', 'filename', 'nuked', 'category', 'pretime',
                'source', 'requestid', 'requestgroup', 'searchname']

    # Clean up the file a bit.
    precsv.replace("'", "", inplace=True, regex=True)
    precsv["nuked"].replace("2", "0", inplace=True)
    precsv["nuked"].replace("3", "1", inplace=True)
    precsv["nuked"].replace("4", "1", inplace=True)
    precsv["nuked"].replace("5", "1", inplace=True)
    precsv["nuked"].replace("69", "0", inplace=True)
    precsv.replace(".\\n$", '', inplace=True, regex=True)

    # Sometimes there are duplicates within the table itself, remove them
    precsv.drop_duplicates(subset='name', keep="last", inplace=True)

    # Add clean searchname column
    precsv['searchname'] = precsv['name'].map(
        lambda name: releases.clean_release_name(name))

    # Drop the pres without requestid's
    precsv = precsv[precsv.requestid != '0']

    # Create a list of names to check if they exist
    names = list(precsv.name)

    # Query to find any existing pres, we need to delete them so COPY doesn't fail
    prenamelist = []
    with db_session() as db:
        if names:
            pres = db.query(Pre).filter(Pre.name.in_(names)).all()
            for pre in pres:
                prenamelist.append(pre.name)

        data = io.StringIO()
        precsv.to_csv(data, index=False, header=False)

        # Delete any pres found as we are essentially going to update them
        if prenamelist:
            for pre in pres:
                db.delete(pre)
            db.commit()
            print("pre-import: Deleted {} pres that will be re-inserted".format(
                len(prenamelist)))
        else:
            print("pre-import: File clean, no pres need to be deleted before re-insert")

        try:
            if processingFile is not None:
                print("pre-import: Attempting to add {} to the database".format(
                    processingFile['lastfile']))

                data.seek(0)
                copy_file(engine, data, ordering, Pre)

                # Write out the last pre csv name so it can be restarted later
                # without downloading all the pres.
                with open('lastfile.json', 'w') as outfile:
                    json.dump({'lastfile': int(processingFile['lastfile'])}, outfile)
            else:
                data.seek(0)
                copy_file(engine, data, ordering, Pre)

            data.close()
            print("pre-import: Chunk import successful")
        except Exception as e:
            print("pre-import: Error inserting into database - {}".format(e))
            if processingFile is not None:
                INSERTFAILS.append(processingFile['lastfile'])
            else:
                print("pre-import: Error processing chunk")
def piket_banjir_csv():
    waduk = Bendungan.query.order_by(Bendungan.wil_sungai, Bendungan.id).all()
    sampling, end = day_range(request.values.get('sampling'))

    pre_csv = []
    pre_csv.append(['REKAPITULASI LAPORAN PIKET'])
    pre_csv.append(['PETUGAS UNIT PENGELOLA BENDUNGAN'])
    pre_csv.append(['BALAI BESAR WILAYAH SUNGAI BENGAWAN SOLO'])
    pre_csv.append(["Hari/Tanggal", get_hari_tanggal(sampling)])
    pre_csv.append(['Waktu', '20.35 WIB'])
    pre_csv.append([
        'No', 'Nama Bendungan', 'Cuaca Terkini', 'Curah Hujan Terkini (mm)',
        'Durasi Hujan', 'Elevasi Normal (meter)',
        'Volume Waduk Normal (Juta m3)', 'TMA Terkini (meter)',
        'Volume Waduk Terkini (Juta m3)', 'Tinggi Limpasan Spillway (cm)',
        'Debit Limpasan Spillway (m3/detik)', 'Tampungan Waduk Saat Ini (%)',
        'Kondisi Visual Bendungan', 'Nama Petugas Piket'
    ])

    data = {
        '1': [],
        '2': [],
        '3': []
    }
    count = 1
    for w in waduk:
        piket_banjir = PiketBanjir.query.filter(
            and_(
                PiketBanjir.sampling >= sampling,
                PiketBanjir.sampling <= end),
            PiketBanjir.obj_type == 'bendungan',
            PiketBanjir.obj_id == w.id
        ).first()

        data[w.wil_sungai].append({
            'no': count,
            'bendungan': w,
            'piket_banjir': piket_banjir or {}
        })
        count += 1

    for wil, da in data.items():
        pre_csv.append([wil_sungai[wil]])
        for d in da:
            pre_csv.append([
                d['no'],
                d['bendungan'].name,
                None if not d['piket_banjir'] else d['piket_banjir'].cuaca.title(),
                None if not d['piket_banjir'] else d['piket_banjir'].ch,
                None if not d['piket_banjir'] else d['piket_banjir'].durasi,
                d['bendungan'].muka_air_normal,
                round(d['bendungan'].volume / 1000000, 2),
                None if not d['piket_banjir'] else d['piket_banjir'].tma,
                None if not d['piket_banjir'] else round(d['piket_banjir'].volume / 1000000, 2),
                None if not d['piket_banjir'] else d['piket_banjir'].spillway_tma,
                None if not d['piket_banjir'] else d['piket_banjir'].spillway_deb,
                None if not d['piket_banjir'] else d['piket_banjir'].volume_percent,
                None if not d['piket_banjir'] else d['piket_banjir'].kondisi,
                None if not d['piket_banjir'] else d['piket_banjir'].petugas
            ])

    output = io.StringIO()
    writer = csv.writer(output, delimiter='\t')
    for l in pre_csv:
        writer.writerow(l)
    output.seek(0)

    return Response(
        output,
        mimetype="text/csv",
        headers={
            "Content-Disposition":
                f"attachment;filename=rekap_laporan_piket-{sampling.strftime('%d %B %Y')}.csv"
        })
def aws_credential_file(self):
    try:
        aws_credential_file = os.path.expanduser('~/.aws/credentials')
        credential_file_from_env = os.environ.get('AWS_CREDENTIAL_FILE')
        if credential_file_from_env and \
           os.path.isfile(credential_file_from_env):
            aws_credential_file = config_unicodise(credential_file_from_env)
        elif not os.path.isfile(aws_credential_file):
            return

        config = PyConfigParser()

        debug("Reading AWS credentials from %s" % (aws_credential_file))
        with io.open(aws_credential_file, "r",
                     encoding=getattr(self, 'encoding', 'UTF-8')) as fp:
            config_string = fp.read()
        try:
            try:
                # readfp is replaced by read_file in python3,
                # but so far readfp is still available.
                config.readfp(io.StringIO(config_string))
            except MissingSectionHeaderError:
                # if the header is missing, this could be the deprecated
                # credentials file format as described here:
                # https://blog.csanchez.org/2011/05/
                # then do the hacky-hack and add a default header
                # to be able to read the file with PyConfigParser()
                config_string = u'[default]\n' + config_string
                config.readfp(io.StringIO(config_string))
        except ParsingError as exc:
            raise ValueError(
                "Error reading aws_credential_file "
                "(%s): %s" % (aws_credential_file, str(exc)))

        profile = config_unicodise(os.environ.get('AWS_PROFILE', "default"))
        debug("Using AWS profile '%s'" % (profile))

        # get_key - helper function to read the aws profile credentials
        # including the legacy ones as described here:
        # https://blog.csanchez.org/2011/05/
        def get_key(profile, key, legacy_key, print_warning=True):
            result = None
            try:
                result = config.get(profile, key)
            except NoOptionError as e:
                # we may want to skip the warning message for optional keys
                if print_warning:
                    warning("Couldn't find key '%s' for the AWS Profile "
                            "'%s' in the credentials file '%s'",
                            e.option, e.section, aws_credential_file)
                # if legacy_key is defined and the original one wasn't found,
                # try to read the legacy_key
                if legacy_key:
                    try:
                        key = legacy_key
                        profile = "default"
                        result = config.get(profile, key)
                        warning(
                            "Legacy configuration key '%s' used, please use"
                            " the standardized config format as described "
                            "here: https://aws.amazon.com/blogs/security/a-new-and-standardized-way-to-manage-credentials-in-the-aws-sdks/",
                            key)
                    except NoOptionError as e:
                        pass

            if result:
                debug("Found the configuration option '%s' for the AWS "
                      "Profile '%s' in the credentials file %s",
                      key, profile, aws_credential_file)
            return result

        profile_access_key = get_key(profile, "aws_access_key_id",
                                     "AWSAccessKeyId")
        if profile_access_key:
            Config().update_option('access_key',
                                   config_unicodise(profile_access_key))

        profile_secret_key = get_key(profile, "aws_secret_access_key",
                                     "AWSSecretKey")
        if profile_secret_key:
            Config().update_option('secret_key',
                                   config_unicodise(profile_secret_key))

        profile_access_token = get_key(profile, "aws_session_token", None,
                                       False)
        if profile_access_token:
            Config().update_option('access_token',
                                   config_unicodise(profile_access_token))

    except IOError as e:
        warning("Errno %d accessing credentials file %s",
                e.errno, aws_credential_file)
    except NoSectionError as e:
        warning("Couldn't find AWS Profile '%s' in the credentials file "
                "'%s'", profile, aws_credential_file)
def run_code(code, code_path, ns=None, function_name=None):
    """
    Import a Python module from a path, and run the function given by
    name, if function_name is not None.
    """

    # Change the working directory to the directory of the example, so
    # it can get at its data files, if any.  Add its path to sys.path
    # so it can import any helper modules sitting beside it.
    if six.PY2:
        pwd = os.getcwdu()
    else:
        pwd = os.getcwd()
    old_sys_path = list(sys.path)
    if setup.config.plot_working_directory is not None:
        try:
            os.chdir(setup.config.plot_working_directory)
        except OSError as err:
            raise OSError(str(err) + '\n`plot_working_directory` option in '
                          'Sphinx configuration file must be a valid '
                          'directory path')
        except TypeError as err:
            raise TypeError(str(err) + '\n`plot_working_directory` option in '
                            'Sphinx configuration file must be a string or '
                            'None')
        sys.path.insert(0, setup.config.plot_working_directory)
    elif code_path is not None:
        dirname = os.path.abspath(os.path.dirname(code_path))
        os.chdir(dirname)
        sys.path.insert(0, dirname)

    # Reset sys.argv
    old_sys_argv = sys.argv
    sys.argv = [code_path]

    # Redirect stdout
    stdout = sys.stdout
    if six.PY3:
        sys.stdout = io.StringIO()
    else:
        sys.stdout = cStringIO.StringIO()

    # Assign a do-nothing print function to the namespace.  There
    # doesn't seem to be any other way to provide a way to (not) print
    # that works correctly across Python 2 and 3.
    def _dummy_print(*arg, **kwarg):
        pass

    try:
        try:
            code = unescape_doctest(code)
            if ns is None:
                ns = {}
            if not ns:
                if setup.config.plot_pre_code is None:
                    six.exec_(six.text_type(
                        "import numpy as np\n"
                        "from matplotlib import pyplot as plt\n"), ns)
                else:
                    six.exec_(six.text_type(setup.config.plot_pre_code), ns)
            ns['print'] = _dummy_print
            if "__main__" in code:
                six.exec_("__name__ = '__main__'", ns)
            code = remove_coding(code)
            six.exec_(code, ns)
            if function_name is not None:
                six.exec_(function_name + "()", ns)
        except (Exception, SystemExit) as err:
            raise PlotError(traceback.format_exc())
    finally:
        os.chdir(pwd)
        sys.argv = old_sys_argv
        sys.path[:] = old_sys_path
        sys.stdout = stdout
    return ns
def test_widetoy():
    class Toy(Component):
        def construct(s):
            # Interfaces
            s.i = InPort(Bits128)
            s.inlong = InPort(Bits128)
            s.out = OutPort(Bits128)
            s.state = Wire(Bits1)

            @s.update
            def add_upblk():
                # This update block models the behavior of a 128-bit adder
                s.out = s.i + s.inlong
                if s.out[3] == "1":
                    s.state = s.state + b1(1)
                else:
                    s.state = b1(0)

    # Create a toy component and elaborate it
    dut = Toy()
    dut.config_tracing = TracingConfigs(tracing='text_fancy')
    dut.elaborate()

    # Setup the simulation
    dut.apply(SimulationPass())
    dut.sim_reset()

    # Test vector
    vector = [
        #       i                   inlong            out
        b128(0x1000000000000 + 1), b128(2),   b128(0x1000000000000 + 3),
        b128(0x1000000000000 + 0), b128(2),   b128(0x1000000000000 + 2),
        b128(0x1000000000000 + 0), b128(2),   b128(0x1000000000000 + 2),
        b128(0x1000000000000 + 1), b128(-2),  b128(0x1000000000000 + -1),
        b128(0x1000000000000 + 1), b128(-42), b128(0x1000000000000 + -41),
        b128(0x1000000000000 + 1), b128(-4),  b128(0x1000000000000 + -3),
        b128(0x1000000000000 + 1), b128(2),   b128(0x1000000000000 + 3),
        b128(0x1000000000000 + 0), b128(2),   b128(0x1000000000000 + 2),
        b128(0x1000000000000 + 1), b128(2),   b128(0x1000000000000 + 3),
        b128(0x1000000000000 + 0), b128(-5),  b128(0x1000000000000 + -5),
    ]

    # Begin simulation
    for i, inlong, out in zip(vector[0::3], vector[1::3], vector[2::3]):
        dut.i = i
        dut.inlong = inlong
        dut.eval_combinational()
        dut.tick()
        assert dut.out == out

    # print
    f = io.StringIO()
    dut.print_textwave()
    with redirect_stdout(f):
        dut.print_textwave()
    out = f.getvalue()
    for i in dut._tracing.text_sigs:
        dot = i.find(".")
        sliced = i[dot + 1:]
        if sliced != "reset" and sliced != "clk":
            assert i[dot + 1:] in out
def latex(self, aliases=None):
    """Return LaTeX string representation of circuit.

    This method uses the LaTeX qcircuit package to create a graphical
    representation of the circuit.

    Returns:
        string: for writing to a LaTeX file.
    """
    self._initialize_latex_array(aliases)
    self._build_latex_array(aliases)
    header_1 = r"""% \documentclass[preview]{standalone}
% If the image is too large to fit on this documentclass use
\documentclass[draft]{beamer}
"""
    beamer_line = "\\usepackage[size=custom,height=%d,width=%d,scale=%.1f]{beamerposter}\n"
    header_2 = r"""% instead and customize the height and width (in cm) to fit.
% Large images may run out of memory quickly.
% To fix this use the LuaLaTeX compiler, which dynamically
% allocates memory.
\usepackage[braket, qm]{qcircuit}
\usepackage{amsmath}
\pdfmapfile{+sansmathaccent.map}
% \usepackage[landscape]{geometry}
% Comment out the above line if using the beamer documentclass.
\begin{document}
\begin{equation*}"""
    qcircuit_line = r"""
\Qcircuit @C=%.1fem @R=%.1fem @!R {
"""
    output = io.StringIO()
    output.write(header_1)
    output.write('%% img_width = %d, img_depth = %d\n' %
                 (self.img_width, self.img_depth))
    output.write(beamer_line % self._get_beamer_page())
    output.write(header_2)
    output.write(qcircuit_line %
                 (self.column_separation, self.row_separation))
    for i in range(self.img_width):
        output.write("\t \t")
        for j in range(self.img_depth + 1):
            cell_str = self._latex[i][j]
            # Don't truncate offset float if drawing a barrier
            if 'barrier' in cell_str:
                output.write(cell_str)
            else:
                # floats can cause "Dimension too large" latex error in
                # xymatrix; this truncates floats to avoid the issue.
                cell_str = re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}',
                                  _truncate_float, cell_str)
                output.write(cell_str)
            if j != self.img_depth:
                output.write(" & ")
            else:
                output.write(r'\\' + '\n')
    output.write('\t }\n')
    output.write('\\end{equation*}\n\n')
    output.write('\\end{document}')
    contents = output.getvalue()
    output.close()
    return contents
import sys
import io
import time
import pprint

input_txt = """
999999792
"""

sys.stdin = io.StringIO(input_txt)
tmp = input()
#sys.stdin = open("CGL_2_B_in17.test")
#sys.stdout = open("out.dat","w")

start = time.time()

# copy the below part and paste to the submission form.
# ---------function------------
import math
from typing import List


def is_prime(n: int) -> bool:
    if n == 2:
        return True
    if n < 2 or n % 2 == 0:
        return False
    # when n is a prime number
    # x^(n-1) ≡ 1 (mod n)
    return pow(2, (n - 1), n) == 1


def prime_factorize(n: int) -> List[int]:
    prime_factors = []
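# Caveat worth noting: is_prime above is a single Fermat test in base 2, so
# composite base-2 pseudoprimes slip through, e.g. 341 = 11 * 31:
#   pow(2, 340, 341) == 1   # True, yet 341 is not prime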
def __init__(self, queue):
    super().__init__(io.StringIO(), False, 1)
    self._queue = queue
def dumps(obj, sort_keys=False):
    fout = io.StringIO()
    dump(obj, fout, sort_keys=sort_keys)
    return fout.getvalue()
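# Typical use, assuming dump() writes the serialized form of obj to the
# file-like object (illustrative only; dump is defined elsewhere):
#   text = dumps({"b": 2, "a": 1}, sort_keys=True)
#   # keys come out in sorted order because sort_keys is forwarded to dump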
def _encode(val):
    val = sorted(val)
    output = io.StringIO()
    csv.writer(output, quoting=csv.QUOTE_MINIMAL).writerow(val)
    return output.getvalue().strip()
def test_it_returns_false_if_url_is_not_on_blacklist(self, mock_open):
    mock_open.return_value = io.StringIO("www.amazon.com")
    assert on_blacklist("www.zalando.de") is False
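# For context, a plausible shape of the on_blacklist helper these tests
# exercise (hypothetical sketch; the real implementation and BLACKLIST_PATH
# are not shown). Substring matching is consistent with the partial-match
# test, and the strip()/truthiness check skips empty lines.
def on_blacklist(url):
    with open(BLACKLIST_PATH) as fh:
        for line in fh:
            entry = line.strip()
            if entry and entry in url:
                return True
    return False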