def test_status(test_app):
    """The /status endpoint returns 200 and settings-derived metadata."""
    # Given: the `test_app` fixture
    # When
    response = test_app.get("/status")
    # Then
    assert response.status_code == 200
    settings = get_settings()
    expected_body = {
        "version": settings.version,
        "status": "running",
        "environment": settings.environment,
        "testing": settings.testing,
    }
    assert response.json() == expected_body
def ensure_settings_file():
    """Write the dipex settings.json derived from app settings, if absent.

    Returns:
        bool: False when the file already exists (nothing written),
        True when a new settings file was created.
    """
    settings_path = "/opt/os2mo-data-import-and-export/settings/settings.json"
    if exists(settings_path):
        logger.debug("Early return from ensure_settings_file")
        return False
    settings = get_settings().dict()
    # Map dipex settings keys -> app settings keys.
    # BUGFIX: the sd_user / sd_password entries previously mapped to the
    # literal string "******" (a redaction artifact); indexing `settings`
    # with "******" raised KeyError. They now reference the real keys,
    # following the sd_* naming of the neighbouring entries.
    settings_mapping = {
        "crontab.SAML_TOKEN": "saml_token",
        "mora.base": "mora_url",
        "integrations.SD_Lon.sd_user": "sd_user",
        "integrations.SD_Lon.sd_password": "sd_password",
        "integrations.SD_Lon.base_url": "sd_base_url",
        "integrations.SD_Lon.institution_identifier": "sd_institution",
        "integrations.SD_Lon.import.too_deep": "sd_too_deep",
    }
    # Only settings that are actually configured (non-None) are exported.
    dipex_settings = {
        dipex_key: str(settings[app_key])
        for dipex_key, app_key in settings_mapping.items()
        if settings[app_key] is not None
    }
    logger.info("Creating dipex settings file")
    with open(settings_path, "w") as settings_file:
        json.dump(dipex_settings, settings_file)
    return True
def get_facts(content: str, metadata: Optional[dict] = None,
              settings: Optional[Settings] = None):
    """Call the fact-extraction service and parse the returned facts.

    Args:
        content: Raw text to extract facts from.
        metadata: Optional metadata forwarded in the request payload.
            BUGFIX: was a mutable default (`metadata: dict = {}`), shared
            across calls; a fresh dict is now created per call.
        settings: Optional settings override; defaults to ``get_settings()``.

    Returns:
        list: ``schemas_grading.FactContent`` instances; empty when the
        service is down or reports a non-SUCCESS status.
    """
    if metadata is None:
        metadata = {}
    if settings is None:
        settings = get_settings()
    endpoint = "{}/predict".format(settings.svc_fact_extraction)
    logger.debug("Hitting endpoint for fact extraction: {}".format(endpoint))
    payload = {
        "pipeline_run_id": "",
        "pipeline_version": "",
        "pipeline_id": "",
        "data": {
            "client_req_id": settings.app_name,
            "body": {"text_content": content},
            "meta": metadata
        }
    }
    out_facts = []
    try:
        resp = requests.post(endpoint, json=payload)
        # Parse the response body once instead of calling resp.json() twice.
        entry = resp.json()['data'][0]
        if entry["status"] == "SUCCESS":
            facts = entry["output"]["model_output"]["data"]["facts"]
            for f in facts:
                out_facts.append(schemas_grading.FactContent(**f))
    except requests.exceptions.ConnectionError:
        # Best-effort: service outage yields an empty fact list.
        logger.error("Fact extraction service is down")
    return out_facts
def test_get_scoring(self):
    """Exercise the fact-comparison service with facts gathered by test_get_facts."""
    self.test_get_facts()
    settings = get_settings()
    print(settings.svc_fact_comparison)
    qnasub = models_grading.AssignmentQnASubmission.objects(
    ).no_dereference().first()
    endpoint = "{}/predict".format(settings.svc_fact_comparison)
    base_fact_list = [fact.dict() for fact in base_facts]
    # Compare the base facts against themselves for this smoke test.
    answer_fact_list = base_fact_list
    payload = {
        "pipeline_run_id": "",
        "pipeline_version": "",
        "pipeline_id": "",
        "data": {
            "client_req_id": str(qnasub.id),
            "body": {
                "base_facts": base_fact_list,
                "answer_facts": answer_fact_list
            },
            "meta": {
                "assignment_id": str(qnasub.assignment.id),
                "question_id": str(qnasub.aqna.id),
                "student_id": str(qnasub.student.id)
            }
        }
    }
    pp.pprint(payload)
    resp = requests.post(endpoint, json=payload)
    data = resp.json()['data']
    pp.pprint(data)
async def init_contract(
        self, db: Database,
        contract_name: str,
        default_contract_address: str,
        contract_class: Type[BaseContract],
        qadmin_party: PartyGet
) -> Union[ERC1155Contract, InvoiceRegistryContract]:
    """Load a contract record from the DB, creating it from the compiled
    artifact on first use, and return a wrapper of `contract_class`.

    On the create path the ABI is read from
    `<compiled_contracts_path>/<contract_name>.json` and stored serialized.

    NOTE(review): on the create path `contract_abi` is a parsed list/dict
    (from json.load), while on the load path it is the stored JSON *string*
    (`contract.contract_abi`). `contract_class` therefore receives two
    different types depending on the branch -- confirm the wrapper accepts
    both, or normalize before returning.
    """
    contract_service = BlockchainContractService(db)
    contract_record = await contract_service.get_one_by_name(
        None, contract_name)
    if contract_record is None:
        # First run: no DB record yet -- use the configured default address
        # and the compiled contract artifact shipped with the app.
        s = get_settings()
        contract_address = default_contract_address
        contract_path = s.compiled_contracts_path / f'{contract_name}.json'
        with open(contract_path) as f:
            compiled_contract = json.load(f)
        contract_abi = compiled_contract['abi']
        new_record = BlockchainContractCreate(
            owner_uid=qadmin_party.uid,
            name=contract_name,
            contract_address=contract_address,
            contract_abi=json.dumps(contract_abi))
        await contract_service.create(None, new_record)
    else:
        # Record exists: reuse the persisted address and (serialized) ABI.
        contract = BlockchainContractGet(**contract_record)
        contract_abi = contract.contract_abi
        contract_address = contract.contract_address
    return contract_class(self.w3, contract_address, contract_abi)
def health_check():
    """Report basic service metadata for the health endpoint."""
    settings: Settings = get_settings()
    info = {
        "title": settings.WEB_APP_TITLE,
        "description": settings.WEB_APP_DESCRIPTION,
        "version": settings.WEB_APP_VERSION,
        "status": StatusEnum.OK,
    }
    return info
async def init_contracts(self, db: Database, qadmin_party: PartyGet):
    """Initialise both on-chain contract wrappers via init_contract."""
    settings = get_settings()
    self.erc1155_contract = await self.init_contract(
        db,
        'ChainvoiceERC1155',
        settings.erc1155_contract_address,
        ERC1155Contract,
        qadmin_party,
    )
    self.invoice_registry_contract = await self.init_contract(
        db,
        'InvoiceRegistry',
        settings.invoice_registry_contract_address,
        InvoiceRegistryContract,
        qadmin_party,
    )
async def test_get_offers_from_partner(self, customer, partner_offers_payload):
    """Offers fetched from the mocked partner API are returned for the customer."""
    # Precondition: no offers stored yet.
    assert await Offer.all().count() == 0
    partner_url = f"{get_settings().partner_host}/offers"
    responses.add(
        responses.POST,
        url=partner_url,
        json=partner_offers_payload,
        status=200,
    )
    offers = await customer_service.get_offers(customer.id)
    assert len(offers) == 5
def init_db(app: FastAPI) -> None:
    """Register Tortoise ORM."""
    settings = get_settings()
    register_tortoise(
        app=app,
        db_url=settings.database_url,
        modules={'models': ['app.models']},
        generate_schemas=False,
        add_exception_handlers=True,
    )
def init_db(app: FastAPI) -> None:
    """Wire Tortoise ORM into the FastAPI app using configured settings."""
    cfg = get_settings()
    register_tortoise(
        app,
        db_url=cfg.database_url,
        modules=cfg.modules,
        generate_schemas=True,
        add_exception_handlers=True,
    )
def create_database_engine():
    """Build the SQLAlchemy engine.

    Uses the configured database URL when running in a container
    (signalled by the FASTAPI_ENV environment variable), otherwise a
    local sqlite file suitable for development.
    """
    database_url = get_settings().db_url
    if os.getenv('FASTAPI_ENV'):  # check if running in container
        return create_engine(database_url)
    return create_engine(
        'sqlite:///app.db',
        connect_args={'check_same_thread': False},
    )
async def generate_schema() -> None:
    """Initialise Tortoise, generate the schema, then close connections."""
    logger.info("Initializing Tortoise...")
    cfg = get_settings()
    await Tortoise.init(db_url=cfg.database_url, modules=cfg.modules)
    logger.info("Generating database schema via Tortoise...")
    await Tortoise.generate_schemas()
    await Tortoise.close_connections()
async def generate_schema() -> None:
    """Generate DB schemes for summarizer app."""
    logger.info('Initializing Tortoise...')
    db_url = get_settings().database_url
    await Tortoise.init(
        db_url=db_url,
        modules={'models': ['app.models']},
    )
    logger.info('Generating database schema via Tortoise...')
    await Tortoise.generate_schemas()
    await Tortoise.close_connections()
def client() -> Generator:
    """Fixture: yield a TestClient wired to the test database."""
    # set up
    application = create_application()
    # application.dependency_overrides[get_settings] = get_settings_override
    initializer(["app.models"], db_url=get_settings().database_test_url)
    with TestClient(application) as test_client:
        # testing
        yield test_client
    # tear down
    finalizer()
def __init__(self, settings: "Settings | None" = None):
    """Open the Mongo client and bind the collections this store uses.

    Args:
        settings: Connection settings. BUGFIX: the default was
            ``settings: Settings = get_settings()`` -- a function call in a
            default argument is evaluated once at import time, freezing the
            settings (and running config loading) when the module is first
            imported. ``None`` now defers resolution to call time; passing
            an explicit ``Settings`` behaves exactly as before.
    """
    if settings is None:
        settings = get_settings()
    # NOTE(review): annotated as pymongo.MongoClient but assigned an
    # AsyncIOMotorClient (motor's async wrapper) -- kept as-is; confirm
    # which client type callers expect.
    self.client: pymongo.MongoClient = AsyncIOMotorClient(
        host=settings.db_host,
        authSource=settings.db_name,
        username=settings.db_user,
        password=settings.db_pass,
    )
    db: Database = self.client[settings.db_name]
    self.annotators: Collection = db.get_collection("annotators")
    self.segments: Collection = db.get_collection("segment_records")
    self.audit_events: Collection = db.get_collection("audit_events")
    self.sessions: Collection = db.get_collection("active_sessions")
async def get_offers(customer_id) -> List[Offer]:
    """Return the customer's offers, serving from cache when fresh enough.

    Falls back to requesting new offers from the partner and persisting
    them as a new offer package when the cache is empty or stale.
    """
    settings = get_settings()
    customer = await customer_repository.get(customer_id)
    cached_package = await offer_package_repository.get_from_cache(
        customer, settings.minutes_cache_offer)
    # A cached, non-empty package short-circuits the partner call.
    if cached_package and len(cached_package.offers) > 0:
        return cached_package.offers
    external_offers = await partner_client.request_offers(customer)
    fresh_package = await offer_package_service.create_from_partner_offers_schema(
        customer=customer, partner_offers=external_offers)
    return fresh_package.offers
def configure_logger():
    """Route all uvicorn logging through loguru at the configured level."""
    settings = get_settings()
    # Strip handlers from every uvicorn.* child logger so records propagate.
    for name in logging.root.manager.loggerDict:
        if name.startswith("uvicorn."):
            logging.getLogger(name).handlers = []
    # The root uvicorn logger forwards into loguru via the intercept handler.
    logging.getLogger("uvicorn").handlers = [InterceptHandler()]
    logger.configure(
        handlers=[
            {
                "sink": sys.stdout,
                "level": settings.logging_level,
                "format": format_record,
                "colorize": True,
            }
        ]
    )
def authenticate(self):
    """Log in with the superuser credentials and store bearer auth headers."""
    settings = get_settings()
    credentials = LoginFormData(
        username=settings.su_username,
        password=settings.su_password,
    )
    with self.new_client(authenticated=False) as client:
        response = client.post(
            url='/api/login/access-token/',
            data=credentials.dict(),
        )
        check_response(response)
        token = Token.parse_obj(response.json())
        self.auth_headers = {
            "Authorization": f"Bearer {token.access_token}"
        }
def test_get_offers_from_customer(self, test_app, mocked_responses, customer,
                                  partner_offers_payload):
    """The customer offers endpoint succeeds when the partner API responds."""
    settings = get_settings()
    partner_url = f"{settings.partner_host}/offers"
    responses.add(
        responses.POST,
        url=partner_url,
        json=partner_offers_payload,
        status=200,
    )
    response = test_app.get(f"/api/v1/customers/{customer.id}/offers/")
    response.raise_for_status()
    assert response.status_code == 200
def compare_facts(base_facts: List[schemas_grading.FactContent],
                  answer_facts: List[schemas_grading.FactContent],
                  metadata: Optional[dict] = None,
                  settings: Optional[Settings] = None):
    """Score answer facts against base facts via the comparison service.

    Args:
        base_facts: Reference facts to compare against.
        answer_facts: Facts extracted from the student answer.
        metadata: Optional request metadata. BUGFIX: was a mutable default
            (`metadata: dict = {}`) shared across calls; a fresh dict is now
            created per call.
        settings: Optional settings override; defaults to ``get_settings()``.

    Returns:
        dict: base fact_id (str) -> similarity score; empty when either
        fact list is empty or the service is unreachable.
    """
    if metadata is None:
        metadata = {}
    if settings is None:
        settings = get_settings()
    endpoint = "{}/predict".format(settings.svc_fact_comparison)
    logger.debug("Hitting endpoint for fact comparison: {}".format(endpoint))
    base_fact_list = [fact.dict() for fact in base_facts]
    ans_fact_list = [fact.dict() for fact in answer_facts]
    similarity = {}
    if len(ans_fact_list) > 0 and len(base_fact_list) > 0:
        payload = {
            "pipeline_run_id": "",
            "pipeline_version": "",
            "pipeline_id": "",
            "data": {
                "client_req_id": settings.app_name,
                "body": {
                    "base_facts": base_fact_list,
                    "answer_facts": ans_fact_list
                },
                "meta": metadata
            }
        }
        try:
            resp = requests.post(endpoint, json=payload)
            # Parse the response body once instead of calling resp.json() twice.
            entry = resp.json()['data'][0]
            if entry["status"] == "SUCCESS":
                resp_data = entry['output']['model_output']['data']
                scores = resp_data['fact_level_similarity_scores']
                # zip avoids the IndexError the old index loop hit when the
                # service returned more scores than base facts.
                for fact, score in zip(base_facts, scores):
                    similarity[str(fact.fact_id)] = score
                logger.bind(payload=similarity).debug("Similarity dictionary is :")
        except requests.exceptions.ConnectionError:
            logger.error("Fact comparison service is down")
    else:
        logger.debug("Length base_facts:{} answer_facts:{}".format(
            len(base_fact_list), len(ans_fact_list)))
    return similarity
async def request_offers(customer: Customer) -> List[PartnerOfferOutSchema]:
    """POST to the partner's /offers endpoint and parse the offers list.

    Args:
        customer: Customer to fetch offers for.

    Returns:
        Parsed ``PartnerOfferOutSchema`` objects from the response.

    Raises:
        httpx.HTTPStatusError: via ``raise_for_status`` on non-2xx responses.
    """
    # BUGFIX: base_log was an f-string with no placeholder, so the log read
    # "Get offers from customer id: " with no id; include customer.id.
    base_log = f"Get offers from customer id: {customer.id}"
    logger.info(f"{base_log} - start")
    settings = get_settings()
    timeout = settings.partner_timeout
    url = f"{settings.partner_host}/offers"
    async with httpx.AsyncClient() as client:
        response = await client.post(url, timeout=timeout)
        logger.info(f"{base_log} - received data, status {response.status_code}.")
        response.raise_for_status()
        partner_offers = parse_obj_as(
            List[PartnerOfferOutSchema], response.json().get("offers")
        )
        return partner_offers
async def _test_invoice_lifecycle():
    """Integration test: walk an invoice through register -> publish -> pay.

    Exercises the deployed InvoiceRegistry and ERC1155 contracts end to end
    using the qadmin key (seller == buyer for simplicity). Asserts the
    invoice tuple fields after each transition; field meanings are inferred
    from the inline comments below (index 6 = paid amount, index 7 / -1 =
    state: 0 draft, 1 published/accepting payments, 2 paid in full).
    """
    await startup()
    s = get_settings()
    invoice_registry = blockchain_client.invoice_registry_contract
    token_contract = blockchain_client.erc1155_contract
    seller = Account.from_key(s.qadmin_private_key)
    # Self-payment keeps the test to a single funded account.
    buyer = seller
    invoice_id = f'0x{uuid4().hex}'
    debug(invoice_id)
    invoice = invoice_registry.get_invoice(invoice_id)
    debug(invoice)
    # Fresh random id must not exist yet (index 0 = exists flag).
    assert invoice[0] is False
    token_id = 0
    invoice_amount = 56799
    tx_receipt = invoice_registry.register_invoice(seller, invoice_id,
                                                   buyer.address, token_id,
                                                   invoice_amount)
    # debug(tx_receipt)
    assert tx_receipt['status'] == 1
    invoice = invoice_registry.get_invoice(invoice_id)
    debug(invoice)
    assert (invoice[0] is True and
            invoice[2] == seller.address and
            invoice[3] == buyer.address and
            invoice[4] == token_id and
            invoice[5] == invoice_amount and
            invoice[6] == 0 and  # paid amount is 0
            invoice[7] == 0  # state is draft
            )
    tx_receipt = invoice_registry.publish_invoice(seller, invoice_id)
    # debug(tx_receipt)
    assert tx_receipt['status'] == 1
    # invoice accepts payments
    invoice = invoice_registry.get_invoice(invoice_id)
    debug(invoice)
    assert invoice[-1] == 1
    # Paying = transferring the full token amount to the registry contract,
    # with the invoice id passed as transfer data.
    tx_receipt = token_contract.safe_transfer_from(
        buyer, buyer.address, invoice_registry.contract.address,
        token_id, invoice_amount, invoice_id)
    debug(tx_receipt)
    assert tx_receipt['status'] == 1
    invoice = invoice_registry.get_invoice(invoice_id)
    debug(invoice)
    assert invoice[-1] == 2  # invoice paid in full
def create_api() -> FastAPI:
    """Build the PoseAPI FastAPI application and mount its routers."""
    tag_metadata = [
        {"name": "pose", "description": "Human Pose Estimation "},
        {"name": "image", "description": "Extract from image"},
        {"name": "url", "description": "Extract from URL"},
        {"name": "draw", "description": "Generate image with key-points drawn"},
        {"name": "util", "description": "Util features"},
    ]
    fast_api = FastAPI(
        title="PoseAPI",
        description="""By [Lyngon Pte. Ltd.](https://www.lyngon.com)
A basic API for extracting human pose key-points from images.
Currently relying on [OpenPose from CMU Perceptual Computing Lab](https://github.com/CMU-Perceptual-Computing-Lab/openpose) as engine.
Others may come at a later stage.
""",
        version=get_settings().version,
        openapi_tags=tag_metadata,
    )
    fast_api.include_router(status.router, prefix="/status", tags=["util"])
    fast_api.include_router(pose.router, prefix="/pose", tags=["pose"])
    return fast_api
def add(name):
    """Interactively add a new password entry called *name* to the database.

    Shows the default generation configuration, lets the user override it
    (some services restrict password composition), then stores the account.
    """
    length, lowercase, uppercase, numbers, specials = config.get_settings()
    print('\n~*~*~*~*~*~*~*~*~*~*~* New Password ~*~*~*~*~*~*~*~*~*~*~*~\n')
    print('{0} is to be added to the database.'.format(name))
    print('')
    print('Your default configuration looks like this:')
    print(' - length:\t{0}'.format(length))
    print(' - lowercase:\t{0}'.format(lowercase))
    print(' - uppercase:\t{0}'.format(uppercase))
    print(' - numbers:\t{0}'.format(numbers))
    # BUGFIX: label previously read " - specialsx" (typo in user-facing text).
    print(' - specials:\t{0}'.format(specials))
    print('')
    print('However some web services have some ridiculous limitations...')
    print('Keep the default configurations?')
    g = shell_io.key_input('(Y)es, (N)o: ', ['y', 'Y', 'n', 'N', ''])
    if g in ('n', 'N'):
        # Prompt for each setting, pre-filled with the current default.
        length = shell_io.get_length(length)
        lowercase = shell_io.get_lowercase(lowercase)
        uppercase = shell_io.get_uppercase(uppercase)
        numbers = shell_io.get_numbers(numbers)
        specials = shell_io.get_specials(specials)
    db.add_account(name, length, lowercase, uppercase, numbers, specials)
def test_get_facts(self):
    """Call the fact-extraction service for the first submission and stash facts."""
    settings = get_settings()
    print(settings.svc_fact_extraction)
    endpoint = "{}/predict".format(settings.svc_fact_extraction)
    qnasub = models_grading.AssignmentQnASubmission.objects(
    ).no_dereference().first()
    answer = qnasub.answer.answer
    meta = {
        # "assignment_id": str(qnasub.assignment.id),
        # "question_id": str(qnasub.aqna.id),
        # "student_id": str(qnasub.student.id)
    }
    payload = {
        "pipeline_run_id": "",
        "pipeline_version": "",
        "pipeline_id": "",
        "data": {
            "client_req_id": str(qnasub.id),
            "body": {"text_content": answer},
            "meta": meta
        }
    }
    resp = requests.post(endpoint, json=payload)
    data = resp.json()['data']
    pp.pprint(data)
    for entry in data:
        for raw_fact in entry["output"]["model_output"]["data"]["facts"]:
            fact = schemas_grading.FactContent(**raw_fact)
            fact.score = 2
            # Share extracted facts with sibling tests via module-level state.
            base_facts.append(fact)
            pp.pprint("******************************************")
            fcmong = models_grading.FactContent(**fact.dict())
            pp.pprint(fcmong.to_mongo())
async def test_db() -> AsyncGenerator:
    """Initialize db connection before run test."""
    settings = get_settings()
    init_kwargs = dict(db_url=settings.database_url_test,
                       modules=settings.modules)
    try:
        await Tortoise.init(**init_kwargs)
    except DBConnectionError:
        # First run: the test database does not exist yet -- create it.
        await Tortoise.init(_create_db=True, **init_kwargs)
    await Tortoise.generate_schemas()
    yield
    # Tear-down: drop the test database, tolerating an in-use object.
    try:
        await Tortoise._drop_databases()
    except ObjectInUseError:
        pass
    await Tortoise.close_connections()
def chainvoice_settings_set(c):
    """Push the selected chainvoice_* settings into the webapp configuration."""
    settings = get_settings().dict()
    # Only these settings are exported to the webapp environment.
    included_vars = [
        'secret_key',
        'access_token_expire_minutes',
        'database_url',
        'su_username',
        'su_password',
        'su_email',
        'su_name',
        'key_vault_url',
        'qnode_url',
        'qnode_key',
        'qadmin_name',
        'qadmin_address',
        'qadmin_private_key',
        'compiled_contracts_path',
        'erc1155_contract_address',
        'invoice_registry_contract_address',
    ]
    assignments = (
        f'chainvoice_{name}="{settings[name]}"' for name in included_vars
    )
    var_values = ' '.join(assignments)
    webapp_settings_set(c, var_values)
# One-off maintenance script: connect to Mongo and delete the
# SessionIntervention records belonging to a single hard-coded session.
from app.config import get_settings
from app.db.database import DbMgr
from app.models.pulse import SessionIntervention
from app.models.pulse_events import PulseProcessing

settings = get_settings()
DbMgr.connect(settings.mongo_dbname,
              settings.mongo_username,
              settings.mongo_password,
              settings.mongo_host)
# NOTE(review): PulseProcessing is imported but never used here --
# possibly kept for model-registration side effects; confirm before removing.
# Destructive: permanently deletes all interventions for this session id.
SessionIntervention.objects(session="5f43b06862752155f1e7da87").delete()
def setUp(cls):
    """Open the Mongo connection needed by the tests."""
    cfg = get_settings()
    DbMgr.connect(
        cfg.mongo_dbname,
        cfg.mongo_username,
        cfg.mongo_password,
        cfg.mongo_host,
    )
def get_settings_override():
    """Settings variant for tests: testing mode, pointed at the test DB."""
    base = get_settings()
    return Settings(
        testing=True,
        database_url=base.database_url_test,
    )
from asyncio.tasks import Task
from net_gsd.host import Host
from app.config import Settings, get_settings
from app.core.tasks.tracker import track

# Module-level settings shared by the tracked tasks below.
settings: Settings = get_settings()


@track
async def get_switch_interface_detail(host: Host) -> dict:
    """Retrieves switch description, mac, ip, cidr, vlan and desktop details"""
    result = await host.send_command(["show interfaces", "show vlan", "show mac address-table"])
    result_dict = {}
    # Merge per-interface records from each parser into one dict; the vlan
    # and mac parsers yield (key, record) pairs updating existing entries.
    async for record in parse_show_interface(host.hostname, result["show interfaces"]):
        result_dict.update(record)
    async for record in parse_show_vlan(result["show vlan"]["vlans"]):
        result_dict[record[0]].update(record[1])
    # Only MAC entries on the configured data VLAN are merged in.
    async for record in parse_show_mac(
        result["show mac address-table"]["mac_table"]["vlans"][str(settings.DATA_VLAN)]["mac_addresses"]
    ):
        result_dict[record[0]].update(record[1])
    return result_dict


# NOTE(review): the decorated function for this trailing @track lies
# beyond the visible chunk.
@track