def register_sql_datastore(
    workspace: Workspace,
    sql_datastore_name: str,
    sql_server_name: str,
    sql_database_name: str,
    sql_username: str,
    sql_password: str,
) -> AzureSqlDatabaseDatastore:
    """
    Register an Azure SQL database as a datastore in an AML workspace.

    Thin wrapper around ``Datastore.register_azure_sql_database`` that
    forwards each argument under the SDK's expected keyword name.

    :param workspace: Azure Machine Learning Workspace
    :param sql_datastore_name: Name used to id the SQL Datastore
    :param sql_server_name: Azure SQL Server Name
    :param sql_database_name: Azure SQL Database Name
    :param sql_username: Azure SQL Database Username
    :param sql_password: Azure SQL Database Password
    :return: Pointer to Azure Machine Learning SQL Datastore
    """
    registration_kwargs = {
        "workspace": workspace,
        "datastore_name": sql_datastore_name,
        "server_name": sql_server_name,
        "database_name": sql_database_name,
        "username": sql_username,
        "password": sql_password,
    }
    return Datastore.register_azure_sql_database(**registration_kwargs)
)  # NOTE(review): closes a call that begins before this chunk — opening context not visible here.

# Run configuration for R-based steps: Docker-enabled, R framework section attached.
rc = RunConfiguration()
rc.framework = "R"
rc.environment.r = RSection()
# rc.environment.r.cran_packages = [aml]
rc.environment.docker.enabled = True

# Run configuration for Python-based steps.
# NOTE(review): `cd` (conda dependencies) is defined outside this chunk — presumably a
# CondaDependencies object; confirm against the earlier part of the file.
py_rc = RunConfiguration()
py_rc.framework = "Python"
py_rc.environment.python.conda_dependencies = cd

# Register the Azure SQL database as a workspace datastore; credentials are fetched
# from Key Vault via `kv` (defined outside this chunk) rather than hard-coded.
sql_datastore = Datastore.register_azure_sql_database(
    workspace=ws,
    datastore_name="modelling_db",
    server_name="dbserver-mlops-demo",
    database_name="asq-mlops-demo",
    username=kv.get_secret("db-user"),
    password=kv.get_secret("db-pass"),
)

# Tabular dataset backed by a SQL query against the datastore registered above.
traindata = Dataset.Tabular.from_sql_query(
    (sql_datastore, "SELECT * FROM dbo.traindata"))

# Intermediate pipeline output, written to the workspace's default datastore.
outdata = PipelineData("outdata", datastore=ws.get_default_datastore())

# Pipeline step that downloads the training dataset and writes it to `outdata`.
# NOTE(review): this call continues past the end of this chunk — the closing
# parenthesis and any remaining keyword arguments are not visible here.
download_step = PythonScriptStep(
    name="Load training data from database",
    script_name="download_dataset.py",
    arguments=["--dataset-name", "traindata", "--outpath", outdata],
    inputs=[traindata.as_named_input("traindata")],
    compute_target=compute_target,
    source_directory=".",