def get_telemetry_id() -> Optional[Text]:
    """Return the unique telemetry identifier for this Rasa X install.

    The identifier can be any string, but it should be a UUID.

    Returns:
        The identifier, if it is configured correctly, otherwise `None`.
    """
    try:
        if config.LOCAL_MODE:
            # Local mode stores the ID in the user's global Rasa config file.
            stored_config = rasa_utils.read_global_config_value(
                CONFIG_FILE_TELEMETRY_KEY
            )
            if isinstance(stored_config, dict):
                return stored_config.get(CONFIG_TELEMETRY_ID)
            return None

        # Server mode keeps the ID in the database-backed config service.
        with db_utils.session_scope() as session:
            return ConfigService(session).get_value(
                ConfigKey.TELEMETRY_UUID, expected_type=str
            )
    except Exception as e:
        # Best effort: telemetry must never break the application.
        logger.warning(f"Unable to retrieve telemetry ID: {e}")
        return None
def dump_changes(
    configuration_change: Optional[Dict[Text, Text]] = None,
    domain_changed: bool = False,
    story_changes: Optional[Set[Text]] = None,
    nlu_changes: Optional[Set[Text]] = None,
    lookup_table_changes: Optional[Set[int]] = None,
) -> None:
    """Dump all changes to disk.

    Args:
        configuration_change: Properties of the model config which has to be
            dumped.
        domain_changed: `True` if the domain was changed and has to be dumped.
        story_changes: Story files which have to be dumped.
        nlu_changes: NLU files which have to be dumped.
        lookup_table_changes: IDs of the lookup tables which have to be dumped.
    """
    from rasax.community.database import utils as db_utils

    # Guard clause: nothing to do unless dumping is enabled.
    if not utils.should_dump():
        return

    with db_utils.session_scope() as session:
        service = DumpService(session)
        service._dump_changes(
            configuration_change,
            domain_changed,
            story_changes,
            nlu_changes,
            lookup_table_changes,
        )
def run_background_synchronization(force_data_injection: bool = False) -> None:
    """Run one scheduled Git synchronization in a dedicated event loop.

    Args:
        force_data_injection: If `True`, force injection of the repository
            data during synchronization.
    """
    from rasax.community.database import utils as db_utils
    import asyncio  # pytype: disable=pyi-error

    logger.debug("Running scheduled Git synchronization.")
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        with db_utils.session_scope() as session:
            git_service = GitService(session, DEFAULT_GIT_REPOSITORY_DIRECTORY)
            loop.run_until_complete(
                git_service.synchronize_project(force_data_injection)
            )
    finally:
        # Close the loop even when synchronization raises; the original code
        # leaked the loop on error. Mirrors the try/finally pattern used by
        # `track_project_status`.
        loop.close()
def main(
    args: argparse.Namespace,
    project_path: Text,
    data_path: Text,
    token: Optional[Text] = None,
    config_path: Optional[Text] = config.default_config_path,
) -> None:
    """Start Rasa X in local mode.

    Args:
        args: Parsed command-line arguments (reads `rasa_x_port` and `port`).
        project_path: Path to the local Rasa project.
        data_path: Path to the project's training data.
        token: Optional authentication token for the local server.
        config_path: Path to the model configuration file.
    """
    config.LOCAL_MODE = True
    print_success("Starting Rasa X in local mode... 🚀")

    config.self_port = args.rasa_x_port

    _configure_for_local_server(data_path, config_path, token)
    rasax.community.jwt.initialise_jwt_keys()
    app = rasa_x_server.configure_app()
    with session_scope() as session:
        auth_endpoints = _enable_development_mode_and_get_additional_auth_endpoints(
            app
        )
        initialize_app(app, class_views=auth_endpoints)
        # Migrations must run before any service touches the schema.
        sql_migrations.run_migrations(session)
        initialise.create_community_user(session, app)
        _initialize_with_local_data(
            project_path, data_path, session, args.port, config_path
        )

        telemetry.track(telemetry.LOCAL_START_EVENT)
        telemetry.track_project_status(session)

    # this needs to run after initial database structures are created
    # otherwise projects assigned to events won't be present
    _start_event_service()

    scheduler.start_background_scheduler()

    # Blocking call: serves the Sanic app until shutdown.
    app.run(
        host="0.0.0.0",
        port=config.self_port,
        auto_reload=os.environ.get("SANIC_AUTO_RELOAD"),
        access_log=False,
    )
async def discover_models() -> None:
    """Synchronize model metadata with models stored on disk."""
    from rasax.community.database import utils as db_utils

    # In local mode models live in the project-local directory; otherwise use
    # the server-wide model directory.
    if config.LOCAL_MODE:
        from rasax.community.local import LOCAL_MODELS_DIR  # pytype: disable=pyi-error

        directory = LOCAL_MODELS_DIR
    else:
        directory = config.rasa_model_dir

    with db_utils.session_scope() as session:
        service = ModelService(directory, session, DEFAULT_RASA_ENVIRONMENT)
        await service.discover_models()
        if config.LOCAL_MODE:
            # Locally there is a single user, so the newest model is live.
            await service.mark_latest_as_production()
def run_analytics_caching() -> None:
    """Compute and persist cached analytics for every configured window."""
    import rasax.community.database.utils as database_utils

    with database_utils.session_scope() as session:
        # Use new session for all sql operations
        analytics_service = AnalyticsService(session)
        now = time.time()
        user_service = UserService(session)
        platform_users = user_service.fetch_all_users(config.team_name)
        # Usernames of Rasa X platform users (as opposed to end users).
        platform_user_ids = [u["username"] for u in platform_users]
        for k, v in CACHED_ANALYTICS_CONFIG.items():
            # `window` is the aggregation bucket size; `start` anchors the
            # beginning of the analyzed range relative to now.
            window = rasa_x_utils.duration_to_seconds(v["window"])
            start = now - rasa_x_utils.duration_to_seconds(v["range"])
            for include_platform_users in [False, True]:
                # NOTE(review): `include_platform_users` does not influence
                # `calculate_analytics` — both iterations compute an identical
                # result and only the flag persisted alongside it differs.
                # Confirm whether `platform_user_ids` should be passed
                # conditionally here.
                result = analytics_service.calculate_analytics(
                    start, now, window, platform_user_ids
                )
                analytics_service._persist_analytics(
                    k, result, include_platform_users
                )
def track_project_status(session: Optional[Session] = None) -> None:
    """Tracks an event which describes the current state of the project.

    Args:
        session: Optional database session to use. If not provided, create a
            new one with `session_scope`.
    """
    loop = asyncio.new_event_loop()
    try:
        if session:
            # Caller supplied a session — use it directly.
            status_event = loop.run_until_complete(
                _get_project_status_event(session)
            )
        else:
            # No session given: open (and automatically close) our own.
            with db_utils.session_scope() as own_session:
                status_event = loop.run_until_complete(
                    _get_project_status_event(own_session)
                )
        track(STATUS_EVENT, status_event)
    finally:
        # The loop is created solely for this call; always release it.
        loop.close()
def list_users(userservice, team_name):
    """Print a summary line and a formatted table of all users in a team.

    Args:
        userservice: Service used to fetch the team's users.
        team_name: Name of the team whose users are listed.
    """
    users = userservice.fetch_all_users(team_name)
    format_template = "{:<3}{:12}{:8}{:20}"
    print("Found {} user{}".format(len(users), "" if len(users) == 1 else "s"))
    if users:
        # `fetch_all_users` returns dicts (see the `u["username"]` access in
        # the analytics code), and dicts are not orderable in Python 3 —
        # `sorted(users)` would raise TypeError. Sort by username explicitly.
        print_user_table(
            sorted(users, key=lambda user: user["username"]), format_template
        )


if __name__ == "__main__":
    parser = create_argparser()
    args = parser.parse_args()

    with session_scope() as session:
        user_service, settings_service, domain_service, role_service = initialise_services(
            session
        )
        team_name = config.team_name

        if args.mode == "create":
            create_project_and_settings(settings_service, role_service, team_name)

        # Either update an existing user's password or create a new user.
        if args.update:
            change_password(user_service, args.username, args.password)
        else:
            create_user(
                user_service, args.username, args.password, args.role, team_name
            )