async def admin():
    """Create the default admin account and confirm it is readable."""
    admin_data = dict(
        email='*****@*****.**',
        password='******',
        img='0000000001.jpg',
        description='Przykladowy opis',
        motivation='Motivation opis',
        what_can_you_bring='wiedze',
        experience='spore',
        mentor=True,
        active=True,
        organiser=True,
        admin=True,
        name='Piotr',
        pyfunction='Chief Education Officer',
        surname='Dyba',
        linkedin='https://www.linkedin.com/in/pdyba',
        twitter='https://twitter.com/dybacompl',
        facebook='https://www.facebook.com/piotr.dyba.photo',
    )
    admin_user = models.Users(**admin_data)
    await admin_user.create()
    # sanity read-back of the freshly created row
    await models.Users.get_by_id(1)
    color_print('Admin Created', color='green')
async def make_a_query(query, retry=False):
    """Run *query* against the shared asyncpg connection.

    Connects lazily on first use.  On an unexpected failure the cached
    connection is dropped and the whole call is retried exactly once.

    Returns the fetched records on success, None when a data-type
    mismatch was merely logged, or False when the query could not be
    executed at all.  Postgres errors that callers handle themselves
    (unique violation, syntax error, unknown column, bad text value)
    are re-raised.
    """
    global db
    # naive guard against statement chaining; strips every semicolon
    if ';' in query:
        query = query.replace(';', '')
    try:
        if not db:
            db = await asyncpg.connect(**psql_cfg)
        try:
            return await db.fetch(query)
        except DatatypeMismatchError:
            # logged with traceback, deliberately not re-raised
            logging.exception('querying db: %s', query)
        except (UniqueViolationError, PostgresSyntaxError,
                UndefinedColumnError, InvalidTextRepresentationError):
            logging.warning('querying db: %s', query)
            raise
    except (UniqueViolationError, PostgresSyntaxError,
            UndefinedColumnError, InvalidTextRepresentationError):
        # caller-facing errors pass straight through the retry machinery
        raise
    except ConnectionRefusedError:
        logging.error('DataBase is not UP!')
        color_print('DataBase is not UP!', color='red')
    except Exception:
        # was a bare `except:`; drop the (possibly broken) connection so
        # the retry below reconnects from scratch
        if retry:
            logging.exception('connecting to db')
        db = None
    if not retry:
        return await make_a_query(query, retry=True)
    return False
def __init__(self, parse_tree):
    """Analyze *parse_tree*, seeding the function table with built-ins.

    Populates self.functions (name -> metadata: argument types, output
    type, body lines, and a 'cname' that presumably maps to a generated
    C identifier -- TODO confirm) and self.switch_data, then walks the
    tree starting from the implicit 'main' context.
    """
    # Built-in functions available to every program: the entry point plus
    # two random-sampling primitives whose bodies call C-side helpers.
    self.functions = {
        'main': {
            'argtypes_by_name': [],
            'outtype': 'int',
            'lines': [],
            'cname': '_main',
        },
        'uniform': {
            'argtypes_by_name': [],
            'outtype': 'float',
            'lines': ['return uniform();'],
            'cname': '_uniform',
        },
        'laplace': {
            'argtypes_by_name': [],
            'outtype': 'float',
            'lines': ['return laplace();'],
            'cname': '_laplace',
        },
    }
    # Per-switch dimensions, filled in during analysis; expected shape:
    #   0: {'nb_inputs': 3, 'nb_hidden': 4, 'nb_outputs': 2}
    self.switch_data = {
    }
    self.analyze_block(parse_tree, context='main')
    color_print('successful analysis!', color='WHITE')
def get_all_views(add_views=True, names=False, urls=False):
    """Collect view classes exported by the views module.

    Each returned item is the view class, its name, and/or its urls,
    depending on which flags are set; a single requested field is
    returned bare, several as a tuple.
    """
    view_list = []
    for member in getmembers(views):
        try:
            name, view = member
            # skip the base class itself and anything that is not a
            # concrete HTTPModelClassView subclass
            if not issubclass(
                    view,
                    HTTPModelClassView) or view == HTTPModelClassView:
                if 'View' in name:
                    color_print(name, 'skipping', color='yellow')
                    continue
        except TypeError:
            # was a bare `except:`; issubclass() raises TypeError for
            # non-class members, everything else should surface
            continue
        try:
            assert view._urls
            temp = []
            if names:
                temp.append(name)
            if add_views:
                temp.append(view)
            if urls:
                temp.append(view._urls)
            assert len(temp) != 0
            if len(temp) == 1:
                temp = temp[0]
            else:
                temp = tuple(temp)
            view_list.append(temp)
        except (AttributeError, AssertionError):
            # AssertionError (empty _urls) previously escaped uncaught
            color_print(view, 'no URLS provided', color='red')
    return view_list
def test_color_print(capsys):
    """color_print defaults to red, honours an explicit color, returns None."""
    resp = color_print('xxx')
    out, err = capsys.readouterr()
    # identity comparison is the idiomatic None check (was `== None`)
    assert resp is None
    assert out == '\x1b[1;31m xxx \x1b[0;0m\n'
    color_print('xxx', color='blue')
    out, err = capsys.readouterr()
    assert out == '\033[1;34m xxx \x1b[0;0m\n'
def run_help():
    """
    Print the script documentation (``__doc__``) via color_print.

    Note: contrary to the previous docstring, this function does not
    exit the program -- it only prints.

    :return: None
    """
    color_print(__doc__)
async def create_table(cls):
    """Create this model's table in the database unless it already exists.

    Honours an optional ``_unique`` attribute by adding a UNIQUE
    constraint over the listed columns.
    """
    if await cls._table_exists():
        print('{} table already exists'.format(cls._name))
        return
    if hasattr(cls, '_unique'):
        unique = ", UNIQUE ({})".format(", ".join(cls._unique))
    else:
        unique = ''
    statement = """CREATE TABLE {} ( {} {})""".format(
        cls._name, cls._gen_schema(), unique)
    await make_a_querry(statement)
    color_print('{} table created'.format(cls._name), color='green')
def validate(model, dataset, batch_size=1):
    """Run *model* on one batch from *dataset* and color-print predictions.

    Puts the model in eval mode and disables gradients for inference.
    Assumes dataset.get_data returns (keys, text, truth) tensors already
    on args.device -- TODO confirm; `truth` is fetched but unused here.
    """
    model.eval()
    with torch.no_grad():
        keys, text, truth = dataset.get_data(batch_size=batch_size, device=args.device)
        pred = model(text)
        for i, key in enumerate(keys):
            print_text, _ = dataset.data_dict[key]
            # NOTE(review): indexing pred[:, i] implies a (seq, batch)
            # layout -- confirm against the model; trimmed to text length
            print_text_class = pred[:, i][: len(print_text)].cpu().numpy()
            color_print(print_text, print_text_class)
async def inner_process(a_lesson):
    """Process one lesson, reporting its name in green on success or red
    on failure; returns the processing result, or False on error."""
    try:
        result = await process(a_lesson)
        color_print(a_lesson, color='green')
        if verbose:
            print(result)
    except Exception as err:
        print(err)
        color_print(a_lesson, color='red')
        return False
    return result
def run(s):
    """Execute one command line.

    Blank lines are ignored, '#'-prefixed lines are echoed as comments,
    and anything else is dispatched to a plugin keyed by its first token
    (the rest of the line is passed through as the plugin's argument).
    """
    s = s.strip(" \t\n\r")
    if not s:
        return
    if s.startswith("#"):
        color_print(s, 0, 31, 44)
        return
    lexer = shlex.shlex(s)
    op = lexer.get_token()
    return plugins[op](lexer.instream.read())
def get_all_views_and_routes(afilter=None, with_routes=True):
    """Print view names (optionally filtered) and, when *with_routes*,
    each view's route(s); finishes with the total view count.

    Bug fixed: the original's `if`/`else` branches were identical, so
    *afilter* never actually filtered anything out.
    """
    v = get_all_views(add_views=False, names=True, urls=True)
    for view in sorted(v, key=lambda a: a[0]):
        # with a filter, show only views whose name contains it
        if afilter and afilter not in view[0].lower():
            continue
        print(view[0])
        if with_routes:
            print('\t', view[1])
    color_print('Views: ', len(v), color='blue')
def print_link(self, link, indent=4):
    """Print *link* indented, truncated to fit the terminal width.

    Links can get very long, so anything beyond the available width is
    cut and suffixed with '...'.  Bug fixed: the original appended
    '...' even to links that were not truncated, and copied characters
    one at a time instead of slicing.
    """
    max_len = self.tSize[1] - 25
    # slice replaces the char-by-char copy loop; max() guards a tiny
    # terminal where max_len - 1 would go negative
    nlink = link[:max(0, max_len - 1)]
    if len(nlink) < len(link):
        nlink += '...'
    ind = ' ' * indent
    utils.color_print(ind + nlink, self.colors.AQUA)
def get_all_views(add_views=True, names=False, urls=False):
    """Collect MCV view classes exported by the views module.

    Each returned item is the view class, its name, and/or its urls,
    depending on which flags are set; a single requested field is
    returned bare, several as a tuple.
    """
    view_list = []
    for member in getmembers(views):
        try:
            name, view = member
            # skip the MCV base itself and anything not a concrete subclass
            if not issubclass(view, MCV) or view == MCV:
                if 'View' in name:
                    color_print(name, 'skipping', color='yellow')
                    continue
                if name.endswith('MCV'):
                    color_print(name, 'skipping', color='white')
                    continue
        except TypeError:
            # was a bare `except:`; issubclass() raises TypeError for
            # non-class members, everything else should surface
            continue
        try:
            assert view._urls
            temp = []
            if names:
                temp.append(name)
            if add_views:
                temp.append(view)
            if urls:
                temp.append(view._urls)
            assert len(temp) != 0
            if len(temp) == 1:
                temp = temp[0]
            else:
                temp = tuple(temp)
            view_list.append(temp)
        except (AttributeError, AssertionError):
            if 'Base' in name and 'Common' in name:
                color_print(name, 'skipping', color='white')
            else:
                color_print(view, 'no URLS provided', color='red')
    return view_list
async def add_exam(e_path, verbose=False):
    """Load an exam definition from a YAML file and persist it together
    with its questions; prints a summary line or an error description.
    """
    try:
        with open(e_path) as file:
            # NOTE(review): yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input -- consider
            # yaml.safe_load if the exam files allow it
            meta = yaml.load(file.read())
        questions = meta['questions']
        del meta['questions']
    except (FileNotFoundError, FileExistsError):
        color_print('No exam found')
        return
    except Exception as err:
        print(err)
        color_print('Issue with reading exam data')
        return
    exam = models.Exam(title=meta['title'],
                       users=DEFAULT_USER,
                       description=meta['description'])
    exam, _ = await exam.update_or_create('title', get_insta=True)
    question_order = 1
    for _, val in questions.items():
        try:
            question = models.Question(**val)
            qid = await question.update_or_create('question')
            await exam.add_question(question_id=qid, order=question_order)
            question_order += 1
        except Exception as err:
            # Bug fixed: `if UniqueViolationError:` is always truthy, so
            # every error used to be mis-reported as a duplicate; check
            # the actual exception instance instead.
            if isinstance(err, UniqueViolationError):
                color_print('question already existed', color='blue')
                continue
            print(err)
            return
    color_print('Exam id: {} added with {} questions'.format(
        exam.id, len(questions)), color='green')
def get_all_routes():
    """Print every route, highlighting non-/api ones in red, and return
    the flattened route list."""
    routes = []
    for _, url in get_all_views(add_views=False, names=True, urls=True):
        if isinstance(url, list):
            routes.extend(url)
        else:
            routes.append(url)
    for route in sorted(routes):
        if route.startswith('/api'):
            print(route)
        else:
            color_print(route, color='red')
    color_print('routes: ', len(routes), color='blue')
    return routes
def display(self, depth=0, delim=' :', collapse=False):
    '''
    Pretty-print this node and its children, one indented line each.

    `collapse` indicates whether to collapse unary vines down to
    leafmost node of vine.
    '''
    if self.ignore:
        return
    elif not self.unroll and (not collapse or self.width() != 1):
        source = self.get_source()
        # elide the middle of long sources so each line stays readable
        if len(source) > 64 + 3:
            source = source[:32] + '(WHITE)...(YELLOW)' + source[-32:]
        source = '(YELLOW)' + source + '(WHITE)'
        color_print(delim * depth + '(BLUE)' + self.label + '(WHITE)[' + source + ']')
        depth += 1
    for k in self.kids:
        # was `type(k) == type('')`; isinstance is the idiomatic check
        if isinstance(k, str):
            color_print(delim * depth + '(RED)' + k + '(WHITE)')
        else:
            k.display(depth, delim, collapse)
def add_urls(app):
    """Register every discovered view's route(s) on *app*.

    Accepts either a list of urls or a single url string per view;
    anything else is reported in yellow.  Returns the app.
    """
    count_cls = 0
    count_urls = 0
    for view in get_all_views():
        urls = view._urls
        if isinstance(urls, list):
            count_cls += 1
            for url in urls:
                app.add_route(view.as_view(), url)
                count_urls += 1
        elif isinstance(urls, str):
            app.add_route(view.as_view(), urls)
            count_cls += 1
            count_urls += 1
        else:
            color_print("Something is missing: ",
                        view._get_name(),
                        view._urls,
                        color='yellow')
    color_print('Using {} classes with {} urls'.format(count_cls, count_urls),
                color='blue')
    return app
def cut_instances(new_n_proc, regions=None, restricted=None):
    ''' Cuts current number of processes to new_n_proc.

    *regions* defaults to all available regions, *restricted* to no
    restrictions.  Bug fixed: the defaults used to be evaluated at
    import time -- `available_regions()` ran on module load, and `{}`
    was a shared mutable default.
    '''
    if regions is None:
        regions = available_regions()
    if restricted is None:
        restricted = {}
    color_print('collecting running instances')
    ihpr = {region: all_instances_in_region(region) for region in regions}
    nhpr = {region: len(ihpr[region]) for region in regions}
    n_proc = sum(nhpr.values())
    if n_proc <= new_n_proc:
        return
    new_nhpr = n_processes_per_regions(new_n_proc, regions, restricted)
    color_print('terminating spare instances')
    for region in regions:
        # terminate exactly the surplus in this region
        diff = nhpr[region] - new_nhpr[region]
        color_print(region, diff)
        for instance in ihpr[region]:
            if diff <= 0:
                continue
            instance.terminate()
            diff -= 1
    assert sum(new_nhpr.values()) == new_n_proc
def run():
    """Entry point: parse command-line arguments and start clustering.

    Usage: ``main.py <input_file> <similarity_function> [--all-solutions]``
    or ``main.py --help``.

    Raises CustomError on missing or invalid arguments.  Fixes: removed
    the unnecessary ``global`` (the name is only read), guarded against
    an IndexError when no arguments are given, and replaced the
    try/except probe for the optional flag with a length check.
    """
    if len(sys.argv) > 1 and sys.argv[1] == "--help":
        color_print(__doc__)
        return
    if len(sys.argv) < 3:
        raise CustomError(
            "Script does not have enough arguments. Run `python main.py --help` for help"
        )
    input_file = sys.argv[1]
    similarity_function = sys.argv[2]
    if similarity_function not in similarity_functions:
        raise CustomError(
            f"Second argument should be one of the following: {', '.join(similarity_functions)}"
        )
    all_solutions = len(sys.argv) > 3 and sys.argv[3] == "--all-solutions"
    prepare_clustering(input_file, similarity_function, all_solutions)
async def bootstrap_db():
    """Create a table for every Table subclass exported by models,
    skipping functions, modules, and explicitly excluded names."""
    for cls_name in dir(models):
        if cls_name.startswith('_'):
            continue
        candidate = getattr(models, cls_name, None)
        try:
            if isinstance(candidate, types.FunctionType) or cls_name in cls_to_skip:
                color_print('skipping: ' + cls_name, color='yellow')
            elif isinstance(candidate, ModuleType):
                color_print('skipping module: ' + cls_name, color='yellow')
            elif issubclass(candidate, Table) and candidate != Table:
                await candidate.create_table()
        except TypeError:
            # issubclass() raises TypeError for non-class attributes
            color_print(cls_name, color='red')
    color_print('DB bootstrap done', color='green')
async def add_question(qpath="../bootstrap_data/questions.question", verbose=False):
    """Load questions from a YAML file and persist each one; prints a
    summary line or an error description."""
    try:
        with open(qpath) as file:
            # NOTE(review): yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input -- consider
            # yaml.safe_load if the question files allow it
            questions = yaml.load(file.read())
    except (FileNotFoundError, FileExistsError):
        color_print('No questions found')
        return
    except Exception as err:
        print(err)
        color_print('Issue with reading questions data')
        return
    for _, val in questions.items():
        try:
            question = models.Question(**val)
            await question.update_or_create('question')
        except Exception as err:
            # Bug fixed: `if UniqueViolationError:` is always truthy, so
            # every error used to be mis-reported as a duplicate; check
            # the actual exception instance instead.
            if isinstance(err, UniqueViolationError):
                color_print('question already existed', color='blue')
                continue
            print(err)
            return
    color_print('Created {} questions'.format(len(questions)), color='green')
def print_post(self, index):
    """Print a post by its index in the feed: header, message body,
    link, and description, each in its own color, separated by blank
    lines.

    Bug fixed: the bare ``print`` statements were Python-2 leftovers
    that evaluate to a no-op in Python 3; ``print()`` restores the
    intended blank lines.
    """
    # FIXME needs more work
    post = self.sanitize_post(index)
    width = self.tSize[1] - 3
    utils.color_print('\n{0} '.format(index + 1) + post['name'] + ' [' +
                      post['type'] + ']\n', self.colors.PURPLE)
    if not post['message'] == '':
        message = self.to_lines(post['message'], width)
        for line in message:
            utils.color_print(' ' + line, self.colors.BLUE)
        print()
    if not post['link'] == '':
        self.print_link(post['link'], indent=4)
        print()
    if not post['des'] == '':
        desc = self.to_lines(post['des'], width)
        for line in desc:
            utils.color_print(' | ' + line, self.colors.GREEN)
        print()
# Quieten sanic's access logging and route everything through one
# stdout handler with a compact format.
LOGGING['loggers']['sanic.access']['level'] = 'WARNING'
LOGGING['loggers']['root']['level'] = 'WARNING'
LOGGING['loggers']['sanic.access']['handlers'] = ['error_console']
LOGGING['handlers']['error_console']['stream'] = stdout
LOGGING['formatters']['generic'][
    'format'] = '%(asctime)s - %(levelname).4s - %(name)11.11s : %(message)s'

app = Sanic(log_config=LOGGING)

try:
    # prefer HTTPS when a certificate is configured
    context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
    context.load_cert_chain(SERVER.CERT, keyfile=SERVER.PRIVKEY)
    port = SERVER.PORT_HTTPS
except Exception:
    # was a bare `except:`; any failure to load the cert chain means we
    # fall back to plain HTTP
    port = SERVER.PORT_HTTP
    context = None

app = add_static(app)
app = add_urls(app)
app = add_exception_handlers(app)

if __name__ == "__main__":
    color_print('http://{}:{}'.format(SERVER.IP, port), color='green')
    app.run(host=SERVER.IP,
            port=port,
            debug=SERVER.DEBUG,
            ssl=context,
            workers=SERVER.WORKERS)
from eval import evaluate
from utils import *
from utils.torch_utils import create_summary_writer, write_meters_loss, write_image
import misc_utils as utils

######################
#       Paths
######################
save_root = os.path.join(opt.checkpoint_dir, opt.tag)
log_root = os.path.join(opt.log_dir, opt.tag)

# If this tag's log directory already exists, ask (prompt is in Chinese:
# "this tag has been used before and earlier results may be overwritten;
# continue? (y/n)") before proceeding; anything but a leading 'y' aborts.
if os.path.isdir(log_root):
    utils.color_print('该tag已使用过,可能会覆盖之前的结果,是否继续? (y/n) ', 3, end='')
    confirm = input('')
    if not confirm or confirm[0] != 'y':
        utils.color_print('Aborted.', 1)
        exit(1)

utils.try_make_dir(save_root)
utils.try_make_dir(log_root)

train_dataloader = dl.train_dataloader
val_dataloader = dl.val_dataloader

# init log
logger = init_log(training=True)

######################
#     Init model
######################
def cd(src):
    """Resolve *src* via gen_src and change the working directory to it,
    announcing the move in color."""
    target = gen_src(src)
    color_print("chdir to {}".format(target), 0, 34, 47)
    os.chdir(target)
async def create_html_lessons(lang='pl', lesson=None, verbose=False):
    """Import lesson content from ../lesson_source into the database.

    For each lesson directory: renders the markdown source, loads the
    .meta/.quiz/.exercises YAML side files, upserts the Lesson, Quiz,
    Question, and Exercise rows, and copies lesson images into
    static/images.  Processes a single *lesson* when given, otherwise
    every directory; prints per-lesson status and final counters.
    """
    counter = GlobalCounter()

    async def process(a_dir, lang=lang):
        # Handle one lesson directory; returns its LessonCounter, or
        # None when the directory is hidden/private or has no markdown.
        less_counter = LessonCounter()
        if a_dir.startswith('.') or a_dir.startswith('_'):
            return
        path = os.path.abspath('../lesson_source/{}'.format(a_dir)) + '/'
        images = path + 'images'
        path += lang
        l_path = path + '.md'
        e_path = path + '.exercises'
        m_path = path + '.meta'
        q_path = path + '.quiz'
        try:
            # lesson generation will be deprecated in future
            # NOTE(review): `html` is rendered but never used below
            with open(l_path) as file:
                html = markdown.markdown(file.read(), extensions=[
                    'markdown.extensions.codehilite',
                    'markdown.extensions.tables'
                ])
        except FileNotFoundError:
            return
        # NOTE(review): yaml.load without a Loader is deprecated/unsafe
        with open(m_path) as file:
            meta = yaml.load(file.read())
        meta['author'] = DEFAULT_USER
        meta['file'] = '{}.html'.format(a_dir)
        # directory names are assumed to be numeric lesson numbers
        meta['lesson_no'] = int(a_dir)
        try:
            with open(q_path) as file:
                questions = yaml.load(file.read())
            less_counter.quiz_outcome = 'found'
        except Exception as err:
            questions = False
            less_counter.quiz_outcome = 'none'
        if questions:
            # upsert the quiz and link each question to it in order
            quiz = models.Quiz(title=meta['title'],
                               users=DEFAULT_USER,
                               description=meta['description'])
            quiz_id = await quiz.update_or_create('title')
            meta['quiz'] = quiz_id
            question_order = 1
            for _, val in questions.items():
                try:
                    question = models.Question(**val)
                    qid = await question.update_or_create(*val.keys())
                    qq = models.QuizQuestions(quiz=quiz_id,
                                              question=qid,
                                              question_order=question_order)
                    question_order += 1
                    await qq.update_or_create('question', 'quiz')
                    less_counter.quiz_details_done += 1
                except Exception as err:
                    print(err)
                    less_counter.quiz_details_error += 1
        try:
            # NOTE(review): this rebinding shadows the outer `lesson`
            # parameter for the rest of this call
            lesson = models.Lesson(**meta)
            lid, updated = await lesson.update_or_create('lesson_no',
                                                         verbose=True)
            less_counter.lesson_outcome = 'found'
            if updated:
                less_counter.lesson_outcome = 'updated'
                counter.updated_lessons += 1
            else:
                less_counter.lesson_outcome = 'created'
                counter.added_lessons += 1
        except Exception as err:
            print(err)
            less_counter.lesson_outcome += 'error'
            counter.error_lessons += 1
        try:
            with open(e_path) as file:
                exe = yaml.load(file)
            less_counter.exercise_outcome = 'found'
        except Exception as err:
            exe = False
            less_counter.exercise_outcome = 'not found'
            print(err)
        if exe:
            try:
                # NOTE(review): `lid` is unbound here if the Lesson
                # upsert above failed -- this raises NameError, caught
                # by the caller's broad handler; confirm intended
                for val in exe.values():
                    exercise = models.Exercise(lesson=lid, **val)
                    id, updated = await exercise.update_or_create('title',
                                                                  verbose=True)
                    if updated:
                        less_counter.exercise_details_updated += 1
                    else:
                        less_counter.exercise_details_created += 1
            except Exception as err:
                print('error creating exercise')
                less_counter.exercise_details_error += 1
                print(exe)
                print(err)
        # copy any lesson images into the served static directory
        dest = os.path.abspath('static/images/')
        if os.path.exists(images):
            for file in os.listdir(images):
                src = os.path.join(images, file)
                if os.path.isfile(src):
                    dst = dest + '/' + file
                    shutil.copy(src, dst)
                    less_counter.lesson_imgs_done += 1
                else:
                    less_counter.lesson_imgs_errors += 1
        return less_counter

    async def inner_process(a_lesson):
        # Wrapper: green on success, red + False on any failure.
        try:
            resp = await process(a_lesson)
            color_print(a_lesson, color='green')
            if verbose:
                print(resp)
            return resp
        except Exception as err:
            print(err)
            color_print(a_lesson, color='red')
            return False

    color_print('Processing lessons', color='blue')
    if lesson:
        await inner_process(lesson)
    else:
        for a_dir in os.listdir("../lesson_source/"):
            await inner_process(a_dir)
    color_print('Processing lessons ---> Done', color='blue')
    color_print('ADDED: ', counter.added_lessons, color='green')
    color_print('UPDATED: ', counter.updated_lessons, color='yellow')
    color_print('ERRORS: ', counter.error_lessons, color='red')
    # tail of get_parser(), whose definition starts above this chunk
    return a_parser


if __name__ == '__main__':
    # Command-line driver: each flag runs one bootstrap coroutine on a
    # shared event loop.
    loop = asyncio.get_event_loop()
    parser = get_parser()
    args = parser.parse_args()
    if args.devusers:
        loop.run_until_complete(gen_users())
    if args.bootstrap:
        loop.run_until_complete(bootstrap_db())
        # offer a second pass as a manual retry if anything printed red
        ans = input('Have You seen any errors (red) - y/n ')
        if ans.lower() == 'y':
            loop.run_until_complete(bootstrap_db())
    if args.lesson:
        loop.run_until_complete(
            create_html_lessons(lesson=args.lesson, verbose=args.verbose))
    if args.exam:
        loop.run_until_complete(add_exam(args.exam, verbose=args.verbose))
    if args.questions:
        loop.run_until_complete(
            add_question(args.questions, verbose=args.verbose))
    if args.alllessons:
        loop.run_until_complete(create_html_lessons(verbose=args.verbose))
    if args.addquestion:
        loop.run_until_complete(add_question(verbose=args.verbose))
    if args.admin:
        loop.run_until_complete(admin())
    loop.close()
    color_print('ALL Done', color='green')
def get_logs(n_processes, regions, n_parents, adaptive, create_delay,
             sync_init_delay, txpu):
    '''Retrieves all logs from instances.

    Downloads one log archive per process into ../results, sorts the
    local key/address files consistently by public key, renames the
    archives by process id, unzips and rezips everything into a single
    archive named after the experiment parameters, and finally fetches
    the DAG from the first process.  Aborts when ../results is not
    empty.
    '''
    if not os.path.exists('../results'):
        os.makedirs('../results')
    l = len(os.listdir('../results'))
    if l:
        color_print('sth is in dir ../results; aborting')
        return
    for rn in regions:
        color_print(f'collecting logs in {rn}')
        for ip in instances_ip_in_region(rn):
            run_task_for_ip('get-logs', [ip], parallel=0)
            # stop after the first ip in this region that produced a new
            # file, then move on to the next region
            if len(os.listdir('../results')) > l:
                l = len(os.listdir('../results'))
                break
    color_print(f'{len(os.listdir("../results"))} files in ../results')

    color_print('reading addresses')
    with open('ip_addresses', 'r') as f:
        # line[:-1] drops the trailing newline
        ip_addresses = [line[:-1] for line in f]

    color_print('reading signing keys')
    with open('signing_keys', 'r') as f:
        hexes = [line[:-1].encode() for line in f]
        signing_keys = [SigningKey(hexed) for hexed in hexes]

    # order keys and addresses identically: by hex of the verify key
    pk_hexes = [VerifyKey.from_SigningKey(sk).to_hex() for sk in signing_keys]
    arg_sort = [i for i, _ in sorted(enumerate(pk_hexes), key=lambda x: x[1])]
    signing_keys = [signing_keys[i] for i in arg_sort]
    ip_addresses = [ip_addresses[i] for i in arg_sort]

    color_print('writing addresses')
    with open('ip_addresses_sorted', 'w') as f:
        for ip in ip_addresses:
            f.write(ip + '\n')

    color_print('writing signing keys')
    with open('signing_keys_sorted', 'w') as f:
        for sk in signing_keys:
            f.write(sk.to_hex().decode() + '\n')

    color_print('generating pid->region mapping')
    with open('host_locations', 'w') as f:
        for rn in regions:
            f.write(rn + ' ')
            for ip in instances_ip_in_region(rn):
                f.write(str(ip_addresses.index(ip)) + ' ')
            f.write('\n')

    color_print('renaming logs')
    for fp in os.listdir('../results'):
        # filenames are assumed to end '<ip-with-dashes>-<name>.log.zip'
        name = fp[-13:-8]  # other | aleph
        pid = ip_addresses.index(fp.split(f'-{name}.log')[0].replace('-', '.'))
        os.rename(f'../results/{fp}', f'../results/{pid}.{name}.log.zip')

    # directory name encodes the experiment parameters
    result_path = f'../{n_processes}_{n_parents}_{adaptive}_{create_delay}_{sync_init_delay}_{txpu}'
    color_print('renaming dir')
    os.rename('../results', result_path)

    color_print('unzipping downloaded logs')
    for path in os.listdir(result_path):
        index = path.split('.')[0]
        path = os.path.join(result_path, path)
        with zipfile.ZipFile(path, 'r') as zf:
            zf.extractall(result_path)
        # every archive is expected to contain a single 'aleph.log'
        os.rename(f'{result_path}/aleph.log', f'{result_path}/{index}.aleph.log')
        os.remove(path)

    color_print('zipping logs')
    with zipfile.ZipFile(result_path + '.zip', 'w') as zf:
        for path in os.listdir(result_path):
            path = os.path.join(result_path, path)
            zf.write(path)
            os.remove(path)

    color_print('removing empty dir')
    os.rmdir(result_path)

    color_print('getting dag')
    run_task_for_ip('get-dag', [ip_addresses[0]])

    color_print('done')
def run_protocol(n_processes, regions, restricted, instance_type):
    '''Runs the protocol.

    End-to-end experiment launcher: spins up instances across regions,
    generates and distributes keys and address files, installs
    dependencies, ships the testing repo, and starts the protocol on
    every host.  `regions` may also be the sentinel strings
    'badger_regions' or 'all'.
    '''
    start = time()
    # fabric tasks run in parallel only when there is more than one process
    parallel = n_processes > 1
    if regions == 'badger_regions':
        regions = badger_regions()
    if regions == 'all':
        regions = available_regions()
    # note: there are only 5 t2.micro machines in 'sa-east-1', 'ap-southeast-2' each
    color_print('launching machines')
    nhpr = n_processes_per_regions(n_processes, regions, restricted)
    launch_new_instances(nhpr, instance_type)

    color_print('waiting for transition from pending to running')
    wait('running', regions)

    color_print('generating keys')
    # generate signing and keys
    generate_keys(n_processes)

    color_print('generating addresses file')
    # prepare address file
    ip_list = instances_ip(regions)
    with open('ip_addresses', 'w') as f:
        f.writelines([ip + '\n' for ip in ip_list])

    color_print('waiting till ports are open on machines')
    wait('open 22', regions)

    color_print('installing dependencies')
    # install dependencies on hosts
    run_task('inst-dep', regions, parallel)

    color_print('packing local repo')
    # pack testing repo
    with Connection('localhost') as c:
        zip_repo(c)

    color_print('wait till installation finishes')
    # wait till installing finishes
    wait_install(regions)

    color_print('sending testing repo')
    # send testing repo
    run_task('send-testing-repo', regions, parallel)

    color_print('syncing files')
    # send files: addresses, signing_keys, light_nodes_public_keys
    run_task('sync-files', regions, parallel)

    color_print('sending parameters')
    # send parameters
    run_task('send-params', regions, parallel)

    color_print(f'establishing the environment took {round(time()-start, 2)}s')

    color_print('running the experiment')
    # run the experiment
    run_task('run-protocol', regions, parallel)
s = sentence.split() for i in s: if i in pattern: print('\033[1;32m', end='') print(i, end='') print('\033[0m', end='') else: print('\033[4;33m', end='') print('\033[1;33m', end='') print(i, end='') print('\033[0m', end='') print(' ', end='') color_print('Find %d Matching Results in %d Files:' % (len(results.keys()), len(all_files)), 7, end='\n') results = sorted(results.items(), key=lambda kv: (kv[1], kv[0]), reverse=True) a = 0 idx = 0 with open('result.txt', 'w') as f: for i in results: if i[1] >= min_times and a < max_display: print_with_color(i[0]) print('(%d times)' % i[1]) a += 1 if show_context: for j in context[i[0]]: