def create_admin_kubeconfig(ca, ha_admin_token=None):
    """
    Create the admin kubeconfig under credentials/client.config.

    The current config is first preserved as a ``.backup`` copy, then the
    config is regenerated from the client.config.template shipped in the
    snap, substituting the CA data and the admin bearer token.

    :param ca: the cluster CA certificate (PEM text)
    :param ha_admin_token: optional admin token handed over by the HA
        cluster; when absent the local basic_auth.csv is consulted.
    """
    if not ha_admin_token:
        token = get_token("admin", "basic_auth.csv")
        if not token:
            print("Error, could not locate admin token. Joining cluster failed.")
            exit(2)
    else:
        token = ha_admin_token
    assert token is not None
    config_template = "{}/microk8s-resources/{}".format(snap_path, "client.config.template")
    config = "{}/credentials/client.config".format(snapdata_path)
    # Keep a restorable copy of the previous config before overwriting it.
    shutil.copyfile(config, "{}.backup".format(config))
    try_set_file_permissions("{}.backup".format(config))
    ca_line = ca_one_line(ca)
    with open(config_template, "r") as tfp:
        with open(config, "w+") as fp:
            for config_txt in tfp:
                # Drop any basic-auth "username:" line from the template;
                # the admin authenticates with a bearer token instead.
                if config_txt.strip().startswith("username:"):
                    continue
                config_txt = config_txt.replace("CADATA", ca_line)
                config_txt = config_txt.replace("NAME", "admin")
                config_txt = config_txt.replace("AUTHTYPE", "token")
                config_txt = config_txt.replace("PASSWORD", token)
                fp.write(config_txt)
    try_set_file_permissions(config)
def rebuild_client_config():
    """
    Recreate the client config from the snap template.

    Reads the admin token from known_tokens, backs up the existing
    credentials/client.config, and rewrites it with the current CA
    (base64-encoded from certs/ca.crt) and the admin bearer token.
    """
    token = get_token("admin")
    if not token:
        print("Error, could not locate admin token. Resetting the node failed.")
        exit(2)
    config_template = "{}/microk8s-resources/{}".format(snap_path, "client.config.template")
    config = "{}/credentials/client.config".format(snapdata_path)
    # Keep a restorable copy of the previous config before overwriting it.
    shutil.copyfile(config, "{}.backup".format(config))
    try_set_file_permissions("{}.backup".format(config))
    cert_file = "{}/certs/{}".format(snapdata_path, "ca.crt")
    with open(cert_file) as fp:
        ca = fp.read()
    # kubeconfig expects the CA as a single base64 line.
    ca_line = base64.b64encode(ca.encode("utf-8")).decode("utf-8")
    with open(config_template, "r") as tfp:
        with open(config, "w+") as fp:
            for config_txt in tfp:
                # Drop any basic-auth "username:" line from the template;
                # the admin authenticates with a bearer token instead.
                if config_txt.strip().startswith("username:"):
                    continue
                config_txt = config_txt.replace("CADATA", ca_line)
                config_txt = config_txt.replace("NAME", "admin")
                config_txt = config_txt.replace("AUTHTYPE", "token")
                config_txt = config_txt.replace("PASSWORD", token)
                fp.write(config_txt)
    try_set_file_permissions(config)
def setUp(self):
    """Prepare an authenticated client and a valid board-scan payload."""
    init_test_db()
    self.endpoint = 'add_scan/'
    self.view = BoardScanAPIView.as_view()
    self.api = TestAPI(token=get_token())
    # Fixture objects the scan payload is built from.
    self.worker = Worker.objects.get(barcode=111111111111)
    self.barcode = Board.objects.get(id=3)
    station_name = Station.objects.get(id=2).name
    self.valid_message = {
        "barcode": self.barcode.barcode,
        "worker": self.worker.username,
        "station": station_name,
    }
def refresh_access_token():
    """
    Run once per hour to refresh the cached access_token of every
    configured media platform.
    """
    logger.info('RUN | refresh_access_token')
    for platform in MediaPlatforms:
        logger.info('GetToken | {}'.format(platform['name']))
        # Fetch a fresh token and record when it was obtained.
        entry = {
            'token': get_token(platform['appid'], platform['appsecret']),
            'refresh_time': datetime.datetime.now().strftime("%F %T"),
            'desc': platform.get('desc', ''),
        }
        BotStatus.tokens.update({platform['name']: entry})
def join_dqlite_master_node(info, master_ip, token):
    """
    Join this node to a cluster running dqlite.

    :param info: dictionary with the connection information
    :param master_ip: the IP of the master node we contacted to connect to the cluster
    :param token: the token to pass to the master in order to authenticate with it
    """
    hostname_override = info["hostname_override"]
    store_cert("ca.crt", info["ca"])
    store_cert("ca.key", info["ca_key"])
    store_cert("serviceaccount.key", info["service_account_key"])
    # triplets of [username in known_tokens.csv, username in kubeconfig, kubeconfig filename name]
    for component in [
        ("kube-proxy", "kubeproxy", "proxy.config"),
        ("kubelet", "kubelet", "kubelet.config"),
        ("kube-controller-manager", "controller", "controller.config"),
        ("kube-scheduler", "scheduler", "scheduler.config"),
    ]:
        component_token = get_token(component[0])
        if not component_token:
            print("Error, could not locate {} token. Joining cluster failed.".format(component[0]))
            exit(3)
        # TODO make this configurable
        create_kubeconfig(
            component_token, info["ca"], "127.0.0.1", "16443", component[2], component[1]
        )
    if "admin_token" in info:
        replace_admin_token(info["admin_token"])
    if "api_authz_mode" in info:
        update_apiserver(info["api_authz_mode"])
    # admin_token is optional (see guard above); create_admin_kubeconfig
    # falls back to the local basic_auth.csv token when passed None, so
    # use .get() rather than risking a KeyError here.
    create_admin_kubeconfig(info["ca"], info.get("admin_token"))
    store_base_kubelet_args(info["kubelet_args"])
    update_kubelet_node_ip(info["kubelet_args"], hostname_override)
    store_callback_token(info["callback_token"])
    update_dqlite(info["cluster_cert"], info["cluster_key"], info["voters"], hostname_override)
    # We want to update the local CNI yaml but we do not want to apply it.
    # The cni is applied already in the cluster we join
    try_initialise_cni_autodetect_for_clustering(master_ip, apply_cni=False)
    mark_no_cert_reissue()
def setUp(self):
    """Point the test at the stock endpoint with an authenticated client."""
    init_test_db()
    self.view = StockDetailAPIView.as_view()
    self.endpoint = 'stock/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)
def setUp(self):
    """Point the test at the production endpoint with an authenticated client."""
    init_test_db()
    self.view = ProductionDetailAPIView.as_view()
    self.endpoint = 'production/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)
def get_user(request):
    """Return the User identified by the request's token, or AnonymousUser."""
    token = get_token(request)
    if not token:
        return AnonymousUser()
    return User.objects.get(pk=token['user_id'])
def get(self, bank, identification):
    """
    Search the open-banking API collection for the accounts of a given
    user at a given institution, and synchronise their transactions into
    the local database.

    :param bank: requested institution (NOTE(review): currently unused in
        the body — every registered institution is synchronised; confirm
        intent with the route definition)
    :param identification: the user's identification number to match
    """
    url = request_handler(request)
    args = url.get_args()  # NOTE(review): unused in this method
    try:
        db_connection = database()
        conn, cursor = db_connection.open_connection()
        for institution in REGISTERED_INSTITUTIONS:
            token = get_token(institution)
            '''
            Nesse ponto eu vou utilizar duas premissas, devido a
            ausencia dessa rota na API disponibilizada.
            Premissa 1: A api disponibilizada pelos bancos contem uma
            rota que lista os usuários daquela instituicao, e responde
            aproximadamente no seguinte formato:
            'Data' : [
                {'AccountId' : '00711234511', 'Identification' : '12345678901211'},
                {'AccountId' : '00711234522', 'Identification' : '12345678901222'},
                {'AccountId' : '00711234533', 'Identification' : '12345678901233'}
            ]
            Abaixo sera possivel ver o mock dessa resposta pra cada
            instituicao cadastrada
            '''
            # Mocked per-institution client listings (see note above).
            if institution == 'SAFRA':
                list_clients = {
                    'Data': [{
                        'AccountId': '00711234511',
                        'Identification': '12345678901211'
                    }, {
                        'AccountId': '00711234522',
                        'Identification': '12345678901222'
                    }, {
                        'AccountId': '00711234533',
                        'Identification': '12345678901233'
                    }]
                }
            if institution == 'PLAYER_I':
                list_clients = {
                    'Data': [{
                        'AccountId': '0034145611',
                        'Identification': '12345678901211'
                    }, {
                        'AccountId': '0034145622',
                        'Identification': '12345678901222'
                    }, {
                        'AccountId': '0034145633',
                        'Identification': '12345678901233'
                    }]
                }
            if institution == 'PLAYER_S':
                list_clients = {
                    'Data': [{
                        'AccountId': '00335789311',
                        'Identification': '12345678901211'
                    }, {
                        'AccountId': '00335789322',
                        'Identification': '12345678901222'
                    }, {
                        'AccountId': '00335789333',
                        'Identification': '12345678901233'
                    }]
                }
            for row in list_clients['Data']:
                if (row['Identification'] == identification):
                    # Pull this account's transactions and upsert each one.
                    data = get_account_data(institution, row['AccountId'], token)
                    for transaction in data['data']['transaction']:
                        sql = ''' INSERT INTO `open`.`aux_transactions`
                        (`account_id`, `transaction_id`, `amount`, `currency`,
                        `operation`, `booking_date`, `value_date`, `information`,
                        `aux_banks_code`, `users_identification`)
                        VALUES(
                        %(account_id)s, %(transaction_id)s, %(amount)s,
                        %(currency)s, %(operation)s, %(booking_date)s,
                        %(value_date)s, %(information)s, %(aux_banks_code)s,
                        %(users_identification)s
                        )
                        ON DUPLICATE KEY UPDATE
                        amount = VALUES(amount),
                        currency = VALUES(currency),
                        operation = VALUES(operation),
                        booking_date = VALUES(booking_date),
                        value_date = VALUES(value_date),
                        information = VALUES(information)
                        '''
                        val = {
                            'account_id': transaction['accountId'],
                            'transaction_id': transaction['transactionId'],
                            'amount': transaction['amount']['amount'],
                            'currency': transaction['amount']['currency'],
                            'operation': transaction['creditDebitIndicator'],
                            'booking_date': transaction['bookingDateTime'],
                            'value_date': transaction['valueDateTime'],
                            'information': transaction['transactionInformation'],
                            'aux_banks_code': transaction['proprietaryBankTransactionCode']['issuer'],
                            'users_identification': row['Identification']
                        }
                        cursor.execute(sql, val)
        # Commit once after all institutions were processed.
        # NOTE(review): statement placement reconstructed from a
        # whitespace-mangled source — confirm commit scope against the
        # original file.
        conn.commit()
        resp = jsonify({
            'StatusId': 'banks_synchronization_successful',
            'StatusMessage': 'Synchronization successful.',
            'Links': {
                'Self': url.self_url()
            }
        })
        resp.status_code = 200
    except db_driver.Error as e:
        resp = jsonify({
            'StatusId': 'banks_database_error',
            'StatusMessage': 'Database error.',
            'DescriptionError': str(e),
            'Links': {
                'Self': url.self_url()
            }
        })
        resp.status_code = 500
    except Exception as e:
        resp = jsonify({
            'StatusId': 'banks_internal_error',
            'StatusMessage': 'Synchronization error occurred.',
            'DescriptionError': str(e),
            'Links': {
                'Self': url.self_url()
            }
        })
        resp.status_code = 500
    finally:
        # NOTE(review): `return` inside `finally` suppresses any exception
        # raised in the handlers themselves — consider returning after the
        # try block instead.
        return resp
def setUpTestData(cls):
    """Seed the test database once and cache a token and access checker."""
    init_test_db()
    cls.permission = BaseAccess()
    cls.token = get_token()
def setUp(self):
    """Point the test at the order-info endpoint with an authenticated client."""
    init_test_db()
    self.view = CompanyOrderInfoAPIView.as_view()
    self.endpoint = 'order_info/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)
def setUp(self):
    """Point the test at the sended-board endpoint with an authenticated client."""
    init_test_db()
    self.view = SendedBoardRecordAPIView.as_view()
    self.endpoint = 'add_sended_board/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)
def setUp(self):
    """Point the test at the logout endpoint with an authenticated client."""
    init_test_db()
    self.view = WorkerLogoutAPIView.as_view()
    self.endpoint = 'logout/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)
def setUp(self):
    """Point the test at the worker-scan endpoint with an authenticated client."""
    init_test_db()
    self.view = WorkerWorkHistoryAPIView.as_view()
    self.endpoint = 'add_worker_scan/'
    auth_token = get_token()
    self.api = TestAPI(token=auth_token)