def _gen_response(level):
    """Return a JSON response containing the log lines of the given level.

    Query args:
        file: path of the log file to read (required).
        nlines: optional int; restrict output to the last n matching lines.

    Returns a 200 JSON Response on success, otherwise the not_found() response.
    """
    if 'file' not in request.args:
        return not_found(not_found_msg)
    filename = request.args['file']
    n_lines = request.args.get('nlines', type=int)
    if not os.path.isfile(filename):
        return not_found(not_such_file)
    # full content, filtered to the requested level tag, e.g. "[ERROR]"
    lines = [
        line for line in read_file(filename).splitlines()
        if f'[{level.upper()}]' in line
    ]
    if n_lines:
        # only n last lines.
        # Bug fix: the old slice `lines[len(lines) - n_lines:len(lines)]`
        # produced a NEGATIVE start when n_lines > len(lines) and silently
        # dropped lines; a plain tail slice clamps correctly.
        lines = lines[-n_lines:]
    if level == 'info':
        body = gen_file_body(filename, content=lines)
    elif level == 'error':
        body = gen_error_body(filename, errors=lines)
    elif level == 'warning':
        body = gen_warn_body(filename, warnings=lines)
    else:
        # typo fix in user-facing message: "exits" -> "exist"
        return not_found(f'Not Found: resource does not exist: {request.url}')
    response = Response(json.dumps(body, indent=4), status=200,
                        mimetype='application/json')
    application.logger.info(f"{response.status}")
    return response
def test_compile_encryption(self):
    """The encryption example compiles into a verifier named 'DEF'."""
    compiled_name, _ = compile_zokrates(example_encryption, output_dir, 'DEF')
    verifier_path = os.path.join(output_dir, compiled_name)
    self.assertIn('contract DEF', read_file(verifier_path))
def get_train_list(self):
    """Build the training schedule: the sample list repeated once per epoch.

    When self.mode == 1 the list is reshuffled in place before each epoch,
    so every epoch sees a different ordering. Also records the number of
    samples in self.data_len.
    """
    samples = read_file(self.list_path)
    self.data_len = len(samples)
    schedule = []
    for _ in range(self.epochs):
        if self.mode == 1:
            np.random.shuffle(samples)
        schedule.extend(samples)
    return schedule
def test_timer_decorator(self):
    """A timer-decorated sleep of 0.5s is logged as ~0.5 under 'value'."""
    log_file = base_log_file + '_decorator'
    my_logging.prepare_logger(log_file)
    sleep(0.5)
    my_logging.shutdown()
    record = json.loads(read_file(log_file + '_data.log'))
    self.assertAlmostEqual(0.5, record['value'], 1)
def test_timer_context_manager(self):
    """time_measure('mykey2') around a 0.5s sleep logs ~0.5 under 'value'.

    Bug fix: shutdown() used to be called BEFORE the measured block, so the
    timer datum could never be flushed to the data log before it was read.
    Moved after the measurement, consistent with test_timer_decorator.
    """
    log_file = base_log_file + '_context_manager'
    my_logging.prepare_logger(log_file)
    with time_measure('mykey2'):
        time.sleep(0.5)
    my_logging.shutdown()
    record = json.loads(read_file(log_file + '_data.log'))
    self.assertAlmostEqual(0.5, record['value'], 1)
def __init__(self, directory: str, filename: str, keys: Dict[str, int]): add_log_context('inputfileTx', filename) # locations self.directory = directory self.filename = filename self.output_directory = os.path.join(directory, 'compiled') self.code_file = os.path.join(directory, filename) # copy template to current directory self.scenario_directory = os.path.join(self.directory, 'scenario') dir_util.copy_tree(template, self.scenario_directory) self.scenario_js_file = os.path.join(self.scenario_directory, 'scenario.js') self.scenario_js = read_file(self.scenario_js_file) self.deploy_js_file = os.path.join(self.scenario_directory, 'migrations', '2_deploy_contracts.js') self.deploy_js = read_file(self.deploy_js_file) # copy contracts for filename in os.listdir(self.output_directory): if filename.endswith('.sol'): source = os.path.join(self.output_directory, filename) target = os.path.join(self.scenario_directory, 'contracts', filename) copyfile(source, target) # prepare logging log_file = my_logging.get_log_file(None, self.scenario_directory, 'transactions', False) my_logging.prepare_logger(log_file) # prepare runner self.r = get_runner(self.output_directory, self.code(), self.name(), keys) # others self.transactions = [] self.set_contract_name() self.set_accounts(keys) self.set_pk_announce(keys) self.set_contract_fetch() self.set_verifiers() self.n_calls = 0
def test_data(self):
    """data() entries land in the *_data.log file; info() entries do not."""
    log_file = default_log_file + '_data_test'
    my_logging.prepare_logger(log_file)
    my_logging.data('key', 2)
    my_logging.info('ABCD')
    my_logging.shutdown()
    # check
    raw = read_file(log_file + '_data.log')
    record = json.loads(raw)
    self.assertEqual(record['key'], 'key')
    self.assertEqual(record['value'], 2)
    self.assertTrue('ABCD' not in raw)
def test_logger(self):
    """An info() message ends up in the *_info.log file."""
    # ignore warnings
    warnings.simplefilter("ignore")
    # log something
    log_file = default_log_file + '_basic_test'
    my_logging.prepare_logger(log_file)
    my_logging.info("ABCD")
    my_logging.shutdown()
    # check logfile
    logged = read_file(log_file + '_info.log')
    self.assertTrue('ABCD' in logged)
def logs():
    """Serve the full content of a log file as a JSON response.

    Requires a 'file' query argument naming an existing file; otherwise
    the not_found() response is returned.
    """
    if 'file' not in request.args:
        return not_found(not_found_msg)
    filename = request.args['file']
    if not os.path.isfile(filename):
        return not_found(not_such_file)
    content = list(read_file(filename).splitlines())
    payload = json.dumps(gen_file_body(filename, content), indent=4)
    response = Response(payload, status=200, mimetype='application/json')
    application.logger.info(f"{response.status}")
    return response
def _is_time(self) -> bool:
    """Return True if more than INTERVAL hours passed since the last run.

    The last run date is read from the '.stamp' file as 'YYYY-MM-DD'
    and interpreted as midnight of that day.
    """
    date_str = read_file(get_tmp_path('.stamp'))
    year, month, day = map(int, date_str.split('-'))
    last_run = datetime(year, month, day)
    diff = self.now - last_run
    logger.info("Time Interval: {}".format(diff))
    # idiom: return the comparison directly instead of if/True/False
    return diff > timedelta(hours=self.INTERVAL)
def compile(file_location, d, count, get_binaries=False):
    """Compile the source at file_location into directory d, logging metrics.

    NOTE: shadows the builtin `compile`; name kept for caller compatibility.
    """
    code = read_file(file_location)
    # log specific features of compiled program
    my_logging.data('originalLoc', lines_of_code(code))
    description = re.search(r'\/\/ Description: (.*)', code)
    if description:
        my_logging.data('description', description.group(1))
    domain = re.search(r'\/\/ Domain: (.*)', code)
    if domain:
        my_logging.data('domain', domain.group(1))
    _, filename = os.path.split(file_location)
    # compile
    with time_measure('compileFull'):
        ast = get_processed_ast(code)
        code_file = compile_ast(ast, d, filename)
        if get_binaries:
            # compilation of the solidity code is not required
            compile_solidity(d, code_file)
    if count:
        my_logging.data('nStatements', count_statements(ast))
def code(self):
    """Return the source code stored in self.code_file."""
    contents = read_file(self.code_file)
    return contents
def test_compile_1(self):
    """The basic example compiles into a verifier named 'ABC'."""
    compiled_name, _ = compile_zokrates(example, output_dir, 'ABC')
    verifier_path = os.path.join(output_dir, compiled_name)
    self.assertIn('contract ABC', read_file(verifier_path))
# NOTE(review): the next two `if` statements duplicate the tail of compile()
# above and reference names (get_binaries, d, code_file, count, ast) that do
# not exist at module scope — this looks like a copy/chunking artifact of the
# file extraction rather than intentional top-level code. Verify against the
# original file before running.
if get_binaries:
    # compilation of the solidity code is not required
    compile_solidity(d, code_file)
if count:
    my_logging.data('nStatements', count_statements(ast))


# Script entry point: parse CLI args, prepare output/log directories, then
# either type-check only (a.type_check) or run the full compile pipeline.
if __name__ == '__main__':
    # parse arguments
    a = parse_arguments()
    # create output directory
    ensure_directory(a.output)
    # create log directory
    log_file = my_logging.get_log_file(filename='compile', parent_dir=a.output, include_timestamp=False, label=None)
    my_logging.prepare_logger(log_file)
    # only type-check
    if a.type_check:
        code = read_file(a.input)
        ast = get_processed_ast(code)
    else:
        # compile (log entries are tagged with the input file's basename)
        with log_context('inputfile', os.path.basename(a.input)):
            compile(a.input, a.output, a.count)