def main(cli_params):
    if len(cli_params) == 0:
        print_err('Not implemented yet.')
        return False
    elif len(cli_params) == 3:
        srcdir = os.path.abspath(cli_params[0])
        destdir = os.path.abspath(cli_params[1])
        arch = "win" + cli_params[2]
    else:
        print_err('Wrong number of arguments. Must be none or three.')
        return False

    if not os.path.isdir(srcdir):
        print_err('ERROR: Source base directory "%s" missing.' % srcdir)
        return False

    os.makedirs(destdir, exist_ok=True)
    if not os.path.isdir(destdir):
        print_err('ERROR: Destination base "%s" is not a directory.' % destdir)
        return False

    if arch not in ['win32', 'win64']:
        print_err('ERROR: Unknown architecture. Must be "32" or "64".')
        return False

    if not kill_old_libs(destdir):
        return False

    src_mingw = os.path.join(srcdir, arch, 'bin')
    src_qt = os.path.join(srcdir, arch, 'dev', 'qt', 'bin')
    src_dev = os.path.join(srcdir, arch, 'dev', 'bin')

    file_dict = FILE_LIST[arch]
    file_list = []

    for file in file_dict['mingw']:
        file_list.append([os.path.join(src_mingw, file), destdir])

    for file in file_dict['qt']:
        destfile = file
        if '\\' in file:
            file_list.append([os.path.join(os.path.dirname(src_qt), file),
                              os.path.join(destdir, destfile)])
            os.makedirs(os.path.dirname(os.path.join(destdir, destfile)), exist_ok=True)
        else:
            file_list.append([os.path.join(src_qt, file),
                              os.path.join(destdir, destfile)])

    for file in file_dict['dev']:
        file_list.append([os.path.join(src_dev, file), destdir])

    if not copy_libs(file_list):
        return False

    print_ok('Libraries successfully updated.')
    return True
def _create_installers(self, arch):
    print_ok("Creating installer (%s) ..." % (ArchNames.names[arch]))

    ptversion = get_cmd_output(
        [CMD[HG], "log", "-b", self._hgbranch, "-l", "1", "--style", self._paths[VERSTYFILE]]
    )

    with open(self._paths[ISSFILE][arch]) as issfile:
        iss_script = issfile.readlines()

    i = 0
    while i < len(iss_script):
        line = iss_script[i].replace("{{versionstring}}", ptversion)
        line = line.replace("{{changelogfile}}", self._paths[CHLOGFILE])
        line = line.replace(
            "{{outputbasename}}",
            self._INST_NAME_PATTERN % (self._release_date, ArchNames.names[arch])
        )
        iss_script[i] = line.replace("{{bindir}}", self._paths[BINDIR][arch])
        i += 1

    iscc_proc = Popen([CMD[ISCC], "/O" + self._paths[PKGBASEDIR], "-"], stdin=PIPE)
    iscc_proc.communicate(input=bytes("\n".join(iss_script), "latin_1"))

    if iscc_proc.returncode != 0:
        print_err("ERROR: Creating installer (%s) failed." % (ArchNames.names[arch]))
        return False

    return True
async def _init_schemas():
    """
    Adds the defined schemas to the ledger and writes the schema ids to
    server_json/server_schemas.json.
    """
    pool_handle, wallet_handle = await start_indy(indy_config)

    schemas_str = get_schema_defs()
    schemas = json.loads(schemas_str)
    schema_ids = []

    print('schema defs')
    print(schemas)

    for s in schemas:
        schema_id = await _save_schema_to_ledger(pool_handle, wallet_handle, s)
        if schema_id is not None:
            schema_ids.append(schema_id)

    # print_warn(schema_ids)
    if len(schema_ids) > 0:
        print_ok('writing schema ids to json')
        with open('server_json/server_schemas.json', 'w') as f:
            f.write(json.dumps(schema_ids))

    await stop_indy(pool_handle, wallet_handle)
async def handle_auth_challenge(request):
    '''
    The challenge is the data encoded in the QR code:
    {
        "nonce": "deneme_nonce",
        "sdid": "server_did",
    }
    No callback is needed; the destination of the challenge is already known.

    GET /auth/challenge - returns the challenge JSON and prints it to the
    terminal as a QR code.
    '''
    print_warn("handle auth challenge starting")

    challenge = {}
    print_ok(indy_config['steward_did'])
    challenge['sdid'] = indy_config['steward_did']

    # 1. generate nonce
    nonce = secrets.token_hex(16)
    print_ok(f"nonce: {nonce}, len: {len(nonce)}")
    challenge['nonce'] = nonce
    app['nonces'][nonce] = False

    qr = pyqrcode.create(json.dumps(challenge), version=8)
    print(qr.terminal(quiet_zone=1))

    return json_response(challenge)
async def did_auth():
    wallet_config = json.dumps({"id": "client_wallet"})
    wallet_credentials = json.dumps({"key": "very_secret_key"})
    wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)
    pool_handle = await pool.open_pool_ledger(config_name='sandbox', config=None)

    dids = await did.list_my_dids_with_meta(wallet_handle)
    # print(type(dids))
    dids = json.loads(dids)
    my_did_verkey = dids[0]['verkey']
    my_did = dids[0]['did']
    print(f"dids: {dids}")
    print(f"my did verkey: {my_did_verkey}")

    req = requests.get('http://localhost:3000/auth/challenge')
    challenge = json.loads(req.text)
    print_ok(f"challenge: {challenge}")

    # 1. get server verkey from ledger
    server_verkey = await did.key_for_did(pool_handle, wallet_handle, str(challenge['sdid']))
    print(f"server_verkey: {server_verkey}")
    print(f"challenge nonce: {challenge['nonce']}")

    # 2. create response
    response = {}
    response['sender_did'] = my_did
    nonce = challenge['nonce']
    msg = await crypto.auth_crypt(wallet_handle, my_did_verkey, server_verkey,
                                  nonce.encode('utf-8'))
    msg_b64 = base64.b64encode(msg).decode('ascii')
    response['response_msg'] = msg_b64
    print_ok(f"response {response}")

    # 3. send response
    req = requests.post(url='http://localhost:3000/auth/response', json=response)
    jwe_resp = json.loads(req.text)
    print_ok(f"jwt_resp {jwe_resp}")
    jwe_resp = json.loads(jwe_resp)

    # 4. retrieve the JWE
    jwe = jwe_resp['jwe']
    print_warn(jwe)

    # 5. decrypt the JWE to obtain the JWT
    unpacked_msg = await crypto.unpack_message(wallet_handle, str.encode(jwe))
    print_ok(f"unpacked msg: {unpacked_msg}")
    unpacked_msg = json.loads(unpacked_msg)
    jwt_token = unpacked_msg['message']
    print_ok(f"jwt token: {jwt_token}")
def present_quick_feedback(self, error):
    if error:
        answer = error[u'answer']
        expected = error[u'expected']
        msg_or = _(u' or ')
        expected_options = msg_or.join(expected)
        msg_error = _(u'Error: %(ans)s -> %(exp)s') % {u'ans': answer,
                                                       u'exp': expected_options}
        print_error(msg_error)
    else:
        print_ok(u'OK')
def main():
    # Instantiate a FileJSONPFAValidator or a PostgreSQLJSONPFAValidator depending
    # on which input method is requested by the user
    input_method = os.getenv('INPUT_METHOD', 'FILE')

    validator = None
    if input_method == 'FILE':
        pfa_path = os.environ.get('PFA_PATH')
        validator = FileJSONPFAValidator(pfa_path)
        validator.load_document()
    elif input_method == 'POSTGRESQL':
        db_host = os.environ.get('DB_HOST')
        db_port = os.environ.get('DB_PORT')
        db_name = os.environ.get('DB_NAME')
        db_user = os.environ.get('DB_USER')
        db_password = os.environ.get('DB_PASSWORD')
        db_table = os.environ.get('DB_TABLE')
        db_column = os.environ.get('DB_COLUMN')
        db_where_lvalue = os.environ.get('DB_WHERE_LVALUE')
        db_where_rvalue = os.environ.get('DB_WHERE_RVALUE')
        job_id = os.environ.get('JOB_ID')
        if job_id:
            db_where_rvalue = job_id
        validator = PostgreSQLJSONPFAValidator(db_host, db_port, db_name, db_user,
                                               db_password, db_table, db_column,
                                               db_where_lvalue, db_where_rvalue)
        validator.load_document()

    (valid, reason) = validator.validate()
    if not valid:
        print_error(reason)
        sys.exit(1)

    features_db_host = os.environ.get('FEATURES_DB_HOST')
    features_db_port = os.environ.get('FEATURES_DB_PORT')
    features_db_name = os.environ.get('FEATURES_DB_NAME')
    features_db_user = os.environ.get('FEATURES_DB_USER')
    features_db_password = os.environ.get('FEATURES_DB_PASSWORD')
    features_db_table = os.environ.get('FEATURES_DB_TABLE')

    # Validate that the model uses existing variable names
    (valid, reason) = validator.validate_io(features_db_host, features_db_port,
                                            features_db_name, features_db_user,
                                            features_db_password, features_db_table)
    if not valid:
        print_error(reason)
        sys.exit(1)

    print_ok("This is a valid PFA document!")
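# A minimal invocation sketch for the FILE input method, assuming this module's
# main() is importable and the variable-name check can reach a FEATURES_DB_*
# database; every path and credential below is hypothetical.
import os

os.environ.update({
    'INPUT_METHOD': 'FILE',             # read the PFA document from disk
    'PFA_PATH': '/data/model.pfa',      # hypothetical path to the document
    'FEATURES_DB_HOST': 'db', 'FEATURES_DB_PORT': '5432',
    'FEATURES_DB_NAME': 'features', 'FEATURES_DB_USER': 'features',
    'FEATURES_DB_PASSWORD': 'secret', 'FEATURES_DB_TABLE': 'features',
})

main()  # prints the failure reason and exits with status 1 if validation fails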
async def list_credentials():
    '''
    List the credentials stored in the client wallet.
    '''
    wallet_config = json.dumps({"id": "client_wallet"})
    wallet_credentials = json.dumps({"key": "very_secret_key"})
    wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)
    credentials = await anoncreds.prover_get_credentials(wallet_handle, '{}')
    print_ok(credentials)
async def handle_cred_request(request):
    """
    POST /credrequest
    The body must contain `cred_req_json`.

    returns: the issued credential

    Creates the credential from cred_req_json and returns it wrapped in a
    JSON structure.
    """
    post_body = await request.read()
    post_body = json.loads(post_body)
    cred_req_json = post_body['cred_req_json']
    cred_offer_json = post_body['cred_offer_json']
    print_ok(f'cred_request_json {cred_req_json}')

    print_ok('issuing credential')
    demo_cred_values = create_gvt_cred_values_json('ahmet', '23', 'male', '172')
    print_ok(f'cred values: {demo_cred_values}')

    cred_json, _, _ = await anoncreds.issuer_create_credential(
        app['wallet_handle'], cred_offer_json, cred_req_json, demo_cred_values,
        None, None)
    print_ok(f'cred_json: {cred_json}')

    cred_req_ret = {}
    cred_req_ret['cred_json'] = cred_json

    return web.json_response(cred_req_ret)
def build(self, arch):
    """
    Build Photivo for the given architecture.
    arch can be either Arch.win32 or Arch.win64
    <return> bool: True if build succeeded, False otherwise
    """
    if not self._change_tc_arch(ArchNames.bits[arch]):
        return False

    try:
        os.chdir(self._paths[BUILDDIR][arch])
    except OSError as err:
        print_err('ERROR: Changing directory to "%s" failed.' % self._paths[BUILDDIR][arch])
        print_err(str(err))
        return False

    print_ok('Building Photivo and ptClear (%s) ...' % ArchNames.names[arch])

    # Build production Photivo
    build_result = run_cmd([CMD[QMAKE],
                            os.path.join('..', '..', 'photivo.pro'),
                            'CONFIG+=WithoutGimp',
                            'CONFIG-=debug'], env=self._env) \
                   and run_cmd([CMD[MAKE]], env=self._env)

    if not build_result \
            or not os.path.isfile(os.path.join(self._paths[BUILDDIR][arch], 'photivo.exe')) \
            or not os.path.isfile(os.path.join(self._paths[BUILDDIR][arch], 'ptClear.exe')):
        print_err('ERROR: Building Photivo failed.')
        return False

    # Move fresh binaries to the bin dir
    try:
        shutil.move(os.path.join(self._paths[BUILDDIR][arch], 'photivo.exe'),
                    self._paths[BINDIR][arch])
        shutil.copy(os.path.join(self._paths[BUILDDIR][arch], 'ptClear.exe'),
                    self._paths[BINDIR][arch])
    except OSError as err:
        print_err('ERROR: Copying binaries to "%s" failed.' % self._paths[BINDIR][arch])
        print_err(str(err))
        return False

    return True
def main(cli_params):
    if len(cli_params) == 0:
        print_err('Not implemented yet.')
        return False
    elif len(cli_params) == 2:
        srcdir = os.path.abspath(cli_params[0])
        destdir = os.path.abspath(cli_params[1])
    else:
        print_err('Wrong number of arguments. Must be none or two.')
        return False

    if not os.path.isdir(srcdir):
        print_err('ERROR: Source base directory "%s" missing.' % srcdir)
        return False

    os.makedirs(destdir, exist_ok=True)
    if not os.path.isdir(destdir):
        print_err('ERROR: Destination base "%s" is not a directory.' % destdir)
        return False

    for direntry in DIR_LIST:
        try:
            dest_subdir = os.path.join(destdir, direntry[0])
            if os.path.exists(dest_subdir):
                shutil.rmtree(dest_subdir)
        except Exception as err:
            print_err('ERROR removing existing destination: ' + direntry[0])
            print_err(str(err))
            return False

    for direntry in DIR_LIST:
        print('Updating:', direntry[0])
        try:
            if direntry[1] is None:
                ignorer = None
            else:
                ignorer = shutil.ignore_patterns(direntry[1])
            shutil.copytree(os.path.join(srcdir, direntry[0]),
                            os.path.join(destdir, direntry[0]),
                            ignore=ignorer)
        except Exception as err:
            print_err('ERROR copying data directory: ' + direntry[0])
            print_err(str(err))
            return False

    print_ok('Data files successfully updated.')
    return True
def _copy_data_dlls(self, arch):
    """ Updates libs and data files in the bin dir. """
    print_ok("Packaging files (%s)..." % (ArchNames.names[arch]))

    # Changelog: make sure it is up to date (i.e. edited today)
    while True:
        chlog_moddate = datetime.fromtimestamp(os.path.getmtime(self._paths[CHLOGFILE])).date()
        if chlog_moddate >= datetime.today().date():
            break
        else:
            print_warn("Changelog not edited today, but on " + str(chlog_moddate) +
                       ". It is probably outdated.")
            print("Note that any changes you make after this point will probably not be present")
            print("in the installers.")
            cont = wait_for_key("(R)etry, (c)ontinue or (a)bort?", ["r", "c", "a"])
            if cont == "r":
                continue
            elif cont == "c":
                break
            elif cont == "a":
                raise KeyboardInterrupt

    shutil.copy(self._paths[CHLOGFILE], self._paths[BINDIR][arch])

    # copy licence files
    shutil.copy(self._paths[LICFILE],
                os.path.join(self._paths[BINDIR][arch], "License.txt"))
    shutil.copy(self._paths[LIC3FILE],
                os.path.join(self._paths[BINDIR][arch], "License 3rd party.txt"))

    # Call util scripts to update data files and DLLs
    if not ptupdata.main([self._paths[PTBASEDIR], self._paths[BINDIR][arch]]):
        return False

    try:
        if not ptuplibs.main([os.path.dirname(os.environ["tcpath"]),
                              self._paths[BINDIR][arch],
                              ArchNames.bits[arch]]):
            return False
    except KeyError:
        print_err("Environment variable tcpath not set.")
        return False

    # strip unnecessary symbols from binaries
    for files in ["*.exe", "*.dll"]:
        if not run_cmd([CMD[STRIP], os.path.join(self._paths[BINDIR][arch], files)]):
            print_warn("WARNING: Failed to strip " +
                       os.path.join(self._paths[BINDIR][arch], files))

    return True
def present_results(self):
    errors_count = len(self.wrong_answers)
    if errors_count == 0:
        print_ok(_(u'Perfect! No errors!!!'))
    else:
        print_error(_(u'Unfortunately, there were errors'))
        msg_summary = (_(u'Number of errors is %(errors)d for %(questions)d questions.')
                       % {u'errors': errors_count, u'questions': self.tests_num})
        print_error(msg_summary)
        for (error_num, wrong_answer) in enumerate(self.wrong_answers, 1):
            expected = wrong_answer[u'expected']
            expected_options = _(u' or ').join(expected)
            answer = wrong_answer[u'answer']
            question = wrong_answer[u'question']
            explanation_msg = (_(u'%(num)d. In question "%(question)s": "%(ans)s" -> "%(expected)s"')
                               % {u'num': error_num, u'question': question,
                                  u'ans': answer, u'expected': expected_options})
            print_error(explanation_msg)
async def _save_schema_to_ledger(pool_handle, wallet_handle, schema):
    '''
    Adds the schema to the ledger according to the indy config and returns
    the schema_id.

    returns: schema_id, or None on rejection
    '''
    steward_did = indy_config['steward_did']
    schema_id, schema_json = await anoncreds.issuer_create_schema(
        steward_did, schema['name'], schema['version'], schema['attributes'])
    pprint.pprint(schema_json)
    pprint.pprint(schema_id)
    logging.info("schema {}\nschema id: {}".format(schema_json, schema_id))

    try:
        schema_request = await ledger.build_schema_request(steward_did, schema_json)
        logging.info("schema request: {}".format(schema_request))
        schema_response = \
            await ledger.sign_and_submit_request(pool_handle, wallet_handle,
                                                 steward_did, schema_request)
        logging.info("schema response: {}".format(schema_response))
    except Exception as e:
        print("Exception happened")
        logging.error(e)

    # print_warn(type(schema_response))
    schema_response = json.loads(schema_response)
    if schema_response['op'] == 'REJECT':
        print_fail('schema tx rejected !')
        return None

    print_ok('schema {} added to ledger !'.format(schema_id))
    # append schema id to server_schemas.json
    return schema_id
async def _init_creds():
    '''
    Adds the schemas to the ledger as cred defs and writes the cred def ids
    to server_json/server_cred_defs.json.
    '''
    schema_ids = get_added_schemas()
    schema_ids = json.loads(schema_ids)
    print_fail(f"schema ids: {schema_ids}")

    cred_def_list = []
    for schema_id in schema_ids:
        cred_def_id = await _save_cred_def_to_ledger(schema_id)
        if cred_def_id is not None:
            cred_def_list.append(cred_def_id)

    print_warn(cred_def_list)
    if len(cred_def_list) > 0:
        print_ok('writing cred def ids to json')
        with open('server_json/server_cred_defs.json', 'w') as f:
            f.write(json.dumps(cred_def_list))
async def on_startup(app):
    print("Starting Indy")
    pool_handle, wallet_handle = await start_indy(indy_config)
    app["pool_handle"] = pool_handle
    app["wallet_handle"] = wallet_handle
    print_ok(f"dids: {await did.list_my_dids_with_meta(wallet_handle)}")
    print_ok(f"schemas: {get_added_schemas()}")
    print_ok(f"cred defs: {get_cred_defs()}")
async def _save_cred_def_to_ledger(schema_id, cred_def_tag='TAG1', cred_def_type='CL'):
    """
    # TODO logify
    # TODO indy ledger error handling
    # TODO decide what gets printed and what gets logged

    Designed to be used from the CLI; other modules should not call it.
    Fetches the schema for schema_id from the ledger and adds it as a cred def;
    the same schema can be added as more than one cred def this way.

    return: cred_def_id, or None on rejection
    """
    steward_did = indy_config["steward_did"]

    schema_req = await ledger.build_get_schema_request(None, schema_id)
    schema_response = await ledger.sign_and_submit_request(
        pool_handle, wallet_handle, steward_did, schema_req)
    schema_id, schema_json = await ledger.parse_get_schema_response(schema_response)
    print_warn(f'res schema id: {schema_id} schema json: {schema_json}')

    # TODO only add the cred def if the schema was fetched successfully
    # if True:
    #     print_ok(schema_id)
    cred_def_config = json.dumps({"support_revocation": False})
    print_fail(schema_json)
    (cred_def_id, cred_def_json) = \
        await anoncreds.issuer_create_and_store_credential_def(
            wallet_handle, steward_did, schema_json, cred_def_tag,
            cred_def_type, cred_def_config)
    print_ok(f"cred def id: {cred_def_id}")
    print_ok(f"cred def json: {cred_def_json}")

    cred_def_request = await ledger.build_cred_def_request(steward_did, cred_def_json)
    cred_def_resp = await ledger.sign_and_submit_request(
        pool_handle, wallet_handle, steward_did, cred_def_request)
    print_fail(cred_def_resp)

    cred_def_resp = json.loads(cred_def_resp)
    # TODO not sure this is the right place to check the response
    if cred_def_resp['op'] == 'REJECT':
        print_fail('cred def tx rejected !')
        return None

    print_ok(f'cred def {cred_def_id} added to ledger !')
    return cred_def_id
async def init_client():
    '''
    Initialize the Indy client.
    TODO: The DID was added to the ledger using the seed below; add a function
    to the init script that registers the DID on the ledger with a transaction.
    '''
    try:
        await pool.set_protocol_version(PROTOCOL_VERSION)

        wallet_config = json.dumps({"id": "client_wallet"})
        wallet_credentials = json.dumps({"key": "very_secret_key"})
        print_warn('creating wallet for client')
        try:
            await wallet.create_wallet(wallet_config, wallet_credentials)
        except IndyError as ex:
            if ex.error_code == ErrorCode.WalletAlreadyExistsError:
                print_fail("wallet already exists {}".format(wallet_config))
        wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)

        client_seed = '000000000000000000000000Client11'
        did_json = json.dumps({'seed': client_seed})
        client_did, client_verkey = await did.create_and_store_my_did(wallet_handle, did_json)
        print_ok(f"client_did: {client_did}, client_verkey: {client_verkey}")

        link_secret_name = 'link_secret'
        link_secret_id = await anoncreds.prover_create_master_secret(
            wallet_handle, link_secret_name)
    except Exception as e:
        print_fail(e)
        raise e
async def handle_cred_offer(request):
    """
    GET /credoffer/<cred_def_id>

    returns: `cred_offer_json`

    Creates a cred offer for the given cred def id and returns it.
    """
    # TODO check if valid cred ref
    cred_def_id = request.match_info['cred_def_id']
    print_ok(cred_def_id)

    cred_offer_ret = {}
    cred_offer = await anoncreds.issuer_create_credential_offer(
        app['wallet_handle'], cred_def_id)
    print_ok(cred_offer)

    cred_offer_ret['cred_offer_json'] = cred_offer
    print_ok(cred_offer_ret)

    return web.json_response(cred_offer_ret)
def main(cli_params):
    print("\nPhotivo for Windows package builder", SCRIPT_VERSION)
    print(DIVIDER, end="\n\n")

    if not os.path.isfile(os.path.join(os.getcwd(), "photivo.pro")):
        print_err("ERROR: Photivo repository not found. Please run this script from the folder")
        print_err('where "photivo.pro" is located.')
        return False

    # setup, config and pre-build checks
    if not load_ini_file():
        return False

    paths = build_paths(os.getcwd())
    if not check_build_env(paths):
        return False
    if not prepare_dirs(paths):
        return False

    archlist = Arch.archs
    fullrelease = True
    if len(cli_params) > 0:
        if cli_params[0] == "32":
            print_warn("Only building 32bit package!")
            archlist = [Arch.win32]
            fullrelease = False
        elif cli_params[0] == "64":
            print_warn("Only building 64bit package!")
            archlist = [Arch.win64]
            fullrelease = False

    # build and package everything
    builder = PhotivoBuilder(paths)
    for arch in archlist:
        if not builder.build(arch):
            return False
        if not builder.package(arch):
            return False

    # final summary and option to clean up
    if not builder.show_summary():
        print_err("Something went wrong along the way.")
        return False

    if fullrelease:
        print_ok("Everything looks fine.")
        print("You can test and upload the release now.")
        print("\nAfterwards I can clean up automatically, i.e.:")
        if ARCHIVE_DIR == "":
            print("* delete everything created during the build process.")
        else:
            print("* move installers to", ARCHIVE_DIR)
            print("* delete everything else created during the build process")

        if wait_for_yesno("\nShall I clean up now?"):
            if not builder.cleanup():
                return False
        else:
            print("OK. The mess stays.")
    else:
        print_warn("Remember: Only the " + ArchNames.names[archlist[0]] + " installer was built.")

    print_ok("All done.")
    return True
async def list_dids():
    wallet_config = json.dumps({"id": "client_wallet"})
    wallet_credentials = json.dumps({"key": "very_secret_key"})
    wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)
    dids = await did.list_my_dids_with_meta(wallet_handle)
    print_ok(dids)
test_suites = parse_args()

for dirname, name in test_suites.items():
    dirpath = os.path.join(base_path, dirname)
    dirpath = os.path.abspath(dirpath)
    print(name)
    for f in sorted(os.listdir(dirpath)):
        if f.endswith(".c"):
            path = os.path.join(dirpath, f)
            run_test(path, using_eva=True)
            run_test(path, using_eva=False)

print("=====================================")
print("Using EVA")
print_ok(" OK: " + str(results_eva["ok"]))
print_err(" ERR: " + str(results_eva["err"]))
print_todo_works(" TODO_OK: " + str(results_eva["todo_ok"]))
print_todo(" TODO_ERR: " + str(results_eva["todo_err"]))
print("")
print("Without EVA")
print_ok(" OK: " + str(results_cil["ok"]))
print_err(" ERR: " + str(results_cil["err"]))
print_todo_works(" TODO_OK: " + str(results_cil["todo_ok"]))
print_todo(" TODO_ERR: " + str(results_cil["todo_err"]))

if results_eva["err"] > 0 or results_cil["err"] > 0:
    exit(1)
exit(0)
async def get_credential():
    '''
    # TODO No error handling whatsoever!
    '''
    print_ok('get credential started')

    # 0. open wallet and create master link
    wallet_config = json.dumps({"id": "client_wallet"})
    wallet_credentials = json.dumps({"key": "very_secret_key"})
    wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)
    pool_handle = await pool.open_pool_ledger(config_name='sandbox', config=None)
    link_secret_name = 'link_secret'

    # 1. get available creds
    req = requests.get('http://localhost:3000/availablecreds')
    cred_def_ids = req.text
    cred_def_ids = json.loads(cred_def_ids)
    print_ok(cred_def_ids)
    # TODO the credential's attributes could be fetched from the ledger.

    # 2. get cred offer
    cred_offer_url = f'http://localhost:3000/credoffer/{cred_def_ids[0]}'
    # print(cred_offer_url)
    req = requests.get(cred_offer_url)
    cred_offer_ret = req.text
    cred_offer_ret = json.loads(cred_offer_ret)
    cred_offer_json = cred_offer_ret['cred_offer_json']
    cred_offer_json = json.loads(cred_offer_json)
    cred_offer_json = json.dumps(cred_offer_json)
    print_ok(cred_offer_json)

    # 3. create and send cred req; the request body looks like:
    #    {
    #        cred_offer_json: cred_offer_json,
    #        cred_req_json: cred_req_json
    #    }
    # 3.1 fetch cred def
    # TODO the pool has to be open for this
    cred_def_req = await ledger.build_get_cred_def_request(None, cred_def_ids[0])
    cred_def_resp = await ledger.submit_request(pool_handle, cred_def_req)
    _, cred_def_json = await ledger.parse_get_cred_def_response(cred_def_resp)
    print_ok(cred_def_json)

    cred_req_body = {}
    client_dids = await did.list_my_dids_with_meta(wallet_handle)
    client_dids = json.loads(client_dids)
    client_did = client_dids[0]['did']
    print_warn(f'client did: {client_did}')
    # print_fail(f'{cred_defs[0]}, {type(cred_defs[0])}')
    (cred_req_json, cred_req_metadata_json) = \
        await anoncreds.prover_create_credential_req(
            wallet_handle, client_did, cred_offer_json, cred_def_json, link_secret_name)
    print_ok('credential request created!')

    cred_req_body['cred_offer_json'] = cred_offer_json
    cred_req_body['cred_req_json'] = cred_req_json
    cred_req_url = 'http://localhost:3000/credrequest'
    cred_req_resp = requests.post(url=cred_req_url, json=cred_req_body)
    cred_req_resp = cred_req_resp.text
    cred_req_resp = json.loads(cred_req_resp)
    print_ok(cred_req_resp)

    # 4. get and store cred
    cred_id = await anoncreds.prover_store_credential(
        wallet_handle, None, cred_req_metadata_json, cred_req_resp['cred_json'],
        cred_def_json, None)
    print_ok(cred_id)

    # 5. list cred
port_name = serial
if options.verbose >= VERBOSE:
    print('Open serial port: ' + port_name + ' (timeout: ' + str(options.timeout) + 's)')
bl_scp = BootloaderScp(port_name, options.timeout)
try:
    packets_list = parse_scpcmd_file(filename, bl_scp, options)
    # if sum(1 for item in iter(list_ports.grep(serial))) == 0:
    #     print('Waiting for device ' + serial + ' to appear')
    #     while sum(1 for item in iter(list_ports.grep(serial))) == 0:
    #         pass
    process_packet(packets_list, options)
    print_ok("SCP session OK")
except RuntimeError:
    print_err("Restricted Data")
    return_code = 1
except ValueError:
    print_err("Connection Failed")
    return_code = -2
except Exception as inst:
    print(inst)
    print_err("error: SCP session FAILED")
    return_code = -1
finally:
    bl_scp.close()
sys.exit(return_code)
                    help='did authentication')
args = parser.parse_args()

loop = asyncio.get_event_loop()
if args.init:
    loop.run_until_complete(init_client())
elif args.get_cred:
    loop.run_until_complete(get_credential())
elif args.list_creds:
    loop.run_until_complete(list_credentials())
elif args.list_dids:
    loop.run_until_complete(list_dids())
elif args.did_auth:
    loop.run_until_complete(did_auth())
else:
    print_ok("main loop running")
    loop.run_until_complete(main())
loop.close()
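# The argparse setup that produces the attributes dispatched on above is not
# shown here; a hypothetical reconstruction follows (flag spellings are
# inferred from the args.* names and are therefore assumptions).
import argparse

parser = argparse.ArgumentParser(description='Indy demo client')
parser.add_argument('--init', action='store_true',
                    help='initialize the client wallet and DID')
parser.add_argument('--get-cred', dest='get_cred', action='store_true',
                    help='request a credential from the server')
parser.add_argument('--list-creds', dest='list_creds', action='store_true',
                    help='list credentials stored in the wallet')
parser.add_argument('--list-dids', dest='list_dids', action='store_true',
                    help='list DIDs stored in the wallet')
parser.add_argument('--did-auth', dest='did_auth', action='store_true',
                    help='did authentication')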
#!/usr/bin/env python3
"""Hyperion system tests."""

from sys import exit

from score import Evaluator
from utils import print_ok, print_warn, print_error, print_critical

print_ok("Started Hyperion")
print_ok("Hyperion exited successfully")
exit(0)
async def handle_auth_response(request):
    '''
    The response looks like:
    {
        sender_did: "sender did"
        response_msg: "authcrypted, base64 encoded message"
    }

    POST /auth/response
    - the ozhan agent sends the response it prepared to this endpoint
    - and receives a JWT token to use later.
    '''
    print_warn("handle auth response starting")
    wallet_handle = app['wallet_handle']
    pool_handle = app['pool_handle']

    # 1. parse request
    post_body = await request.read()
    response = json.loads(post_body)
    print_warn(post_body)
    client_did = response['sender_did']
    # client_fetched_verkey = await did.key_for_did(pool_handle, wallet_handle, client_did)
    response_msg_b64 = response['response_msg']
    response_msg = base64.b64decode(response_msg_b64)

    # 2. validate/decrypt the auth encrypted message
    steward_did = indy_config['steward_did']
    steward_verkey = await did.key_for_local_did(wallet_handle, steward_did)
    print_warn(f"steward verkey: {steward_verkey}")
    client_fetched_verkey, msg = await crypto.auth_decrypt(wallet_handle, steward_verkey,
                                                           response_msg)
    print_ok(f"client_fetched_verkey: {client_fetched_verkey}")
    print_ok(f"msg: {msg}")

    nonce = msg.decode('utf-8')
    if nonce in app['nonces']:
        print_ok('nonce is valid, returning a JWT')
        # 4. generate jwt with hs256
        payload = {'iss': 'did:sov:' + client_did}
        print_ok(f"payload: {payload}")
        encoded_jwt = jwt.encode(payload, 'secret', algorithm="HS256")
        print_ok(f"jwt: {encoded_jwt}")

        # 5. return jwt with jwe
        jwt_jwe = await crypto.pack_message(wallet_handle, encoded_jwt,
                                            [client_fetched_verkey], steward_verkey)
        print(jwt_jwe)

        # 6. create and return response
        jwt_resp = {}
        jwt_resp['jwe'] = jwt_jwe.decode('utf-8')
        print_ok(f"jwt \n{json.dumps(jwt_resp)}")
        return Response(text=jwt_jwe.decode('utf-8'))
    else:
        print_fail('unknown nonce, returning unauthorized')
        return web.Response(status=401)
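# The handlers above are presumably wired into an aiohttp application roughly
# as sketched below. This is an assumption: the route table is inferred from
# the handler docstrings and the client URLs (localhost:3000), and the
# app['nonces'] bookkeeping is placed here for illustration only.
from aiohttp import web

app = web.Application()
app['nonces'] = {}                      # nonces handed out by handle_auth_challenge
app.on_startup.append(on_startup)       # opens the pool and wallet handles

app.add_routes([
    web.get('/auth/challenge', handle_auth_challenge),
    web.post('/auth/response', handle_auth_response),
    web.get('/credoffer/{cred_def_id}', handle_cred_offer),
    web.post('/credrequest', handle_cred_request),
])

web.run_app(app, port=3000)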