def execute_user_prompts(deployment_name: str, install_config: InstallConfiguration, resource_group: str,
                         subscription_id: str, validate_default_params: bool = True):
    """Collect all required deployment parameters interactively (or from defaults).

    Runs the install-configuration prompt flow and, when SQL authentication is
    enabled, pre-generates the database service passwords without prompting.

    :param deployment_name: name of this deployment, forwarded to the prompts
    :param install_config: mutable installation configuration being populated
    :param resource_group: Azure resource group of the deployment (unused here,
        kept for interface compatibility with the caller)
    :param subscription_id: Azure subscription id, forwarded to the prompts
    :param validate_default_params: whether pre-provided defaults are validated
    """
    install_config.prompt_all_required(deployment_name=deployment_name,
                                       subscription_id=subscription_id,
                                       validate_default_params=validate_default_params)
    if install_config.sql_auth:
        # Generate the service passwords without prompting; they are stored in
        # Azure Key Vault. Because the passwords are saved via the az CLI, only
        # this set of special characters is allowed. Defined once so both
        # passwords are guaranteed to use the same policy.
        allowed_special_chars = '#%^-_+{}\\:/~'
        install_config.wc_service_db_user_password = make_strong_password(length=12,
                                                                          special_chars=allowed_special_chars)
        install_config.jwc_db_user_password = make_strong_password(length=12,
                                                                   special_chars=allowed_special_chars)
def initialize_databricks_cluster(install_config: InstallConfiguration, resource_group: str,
                                  artifacts_path: str, tenant_id: str = None, subscription_id: str = None):
    """Provision the GDC Databricks cluster, save its API token to Key Vault
    and upload the pipeline artifacts onto DBFS.

    :param install_config: installation configuration; updated in place with
        the cluster provisioning details
    :param resource_group: Azure resource group holding the Databricks workspace
    :param artifacts_path: local directory with artifacts to push to DBFS
    :param tenant_id: AAD tenant used to acquire the Databricks access token
    :param subscription_id: Azure subscription used for the token request
    """
    print("Creating Databricks cluster ... ")
    keyvault = install_config.backend_keyvault_name
    service_sp_secret = install_config.gdc_service_principal['password']
    workspace_url = arm_ops.get_databricks_workspace_url(
        resource_group=resource_group,
        ws_name=install_config.databricks_workspace_name)
    # The workspace URL must be fully qualified for the API calls below.
    if not workspace_url.startswith("https://"):
        workspace_url = "https://" + workspace_url
    access_token = ad_ops.get_databricks_access_token(tenant_id, subscription_id)
    # Libraries to install on the cluster, as Databricks Libraries-API specs.
    with open("cluster_libraries.json", "r") as libs_file:
        libraries = json.load(libs_file)
    cluster_details = arm_ops.provision_databricks_cluster(
        install_config=install_config,
        workspace_url=workspace_url,
        oauth_access_token=access_token,
        gdc_sp_secret_value=service_sp_secret,
        managed_libraries=libraries)
    install_config.adb_cluster_details = cluster_details
    secrets_ops.set_secret(keyvault_name=keyvault,
                           secret_name="gdc-databricks-token",
                           value=cluster_details['api_token']['token_value'])
    print("Uploading artifacts onto DBFS ...")
    arm_ops.upload_artifacts(workspace_url=workspace_url,
                             oauth_access_token=access_token,
                             local_artifacts_path=artifacts_path,
                             dbfs_dir_path="/mnt/gdc-artifacts")
def create_gdc_deployer_identity(install_config: InstallConfiguration, resource_group: str):
    """Create the "gdc-deployer" managed identity, owned by the GDC admin AD
    group, and record its principalId/id/name in the install configuration."""
    admin_group_name = install_config.gdc_admin_ad_group['ad_group_name']
    deployer = ad_ops.add_gdc_deployer_identity(
        resource_group=resource_group,
        gdc_admin_ad_group=admin_group_name,
        identity_name="gdc-deployer")
    # Persist only the fields the rest of the installer relies on.
    install_config.gdc_deployer_identity = {
        field: deployer[field] for field in ("principalId", "id", "name")
    }
def initialize_databricks_cluster(install_config: InstallConfiguration, resource_group: str, artifacts_path: str, tenant_id: str = None, subscription_id: str = None):
    """Provision the Watercooler Databricks cluster and wire it to the runtime storage.

    Steps: read the runtime storage account key, provision the cluster, mount
    the "data" storage container, store the cluster API token in Key Vault and
    upload local artifacts to DBFS under /mnt/watercooler/scripts.
    """
    runtime_storage = install_config.runtime_storage_account_name
    # az CLI returns a JSON list of key objects; the first key is used below
    # as the secret for mounting the storage container.
    storage_account_keys_list_res = az.az_cli("storage account keys list --account-name " + runtime_storage)
    storage_account_access_key = storage_account_keys_list_res[0]["value"]
    print("Creating Databricks cluster ... ")
    backend_keyvault_name = install_config.backend_keyvault_name
    wc_sp_secret_value = install_config.wc_service_principal['password']
    adb_ws_name = install_config.databricks_workspace_name
    ws_url = arm_ops.get_databricks_workspace_url(resource_group=resource_group, ws_name=adb_ws_name)
    # The Databricks API calls below need a fully qualified URL.
    if not ws_url.startswith("https://"):
        ws_url = "https://" + ws_url
    adb_access_token = ad_ops.get_databricks_access_token(tenant_id, subscription_id)
    # Libraries to install on the cluster (Databricks Libraries-API format).
    managed_libraries = []
    with open("cluster_libraries.json", "r") as libs_file:
        managed_libraries = json.load(libs_file)
    provisioning_rsp = arm_ops.provision_databricks_cluster(install_config=install_config,
                                                            workspace_url=ws_url,
                                                            oauth_access_token=adb_access_token,
                                                            wc_sp_secret_value=wc_sp_secret_value,
                                                            managed_libraries=managed_libraries)
    # Capture the cluster details so later stages can reuse them.
    install_config.adb_cluster_details = provisioning_rsp
    # Upload the mount helper notebook/script, then run it on the new cluster
    # to mount the "data" container using the storage key fetched above.
    upload_mount_storage_file(ws_url, adb_access_token)
    execute_script_mount_storage_script(databricks_host=ws_url,
                                        token=adb_access_token,
                                        cluster_id=provisioning_rsp['cluster_id'],
                                        storage_account_name=runtime_storage,
                                        container_name="data",
                                        secret_key=storage_account_access_key)
    secrets_ops.set_secret(keyvault_name=backend_keyvault_name,
                           secret_name="wc-databricks-token",
                           value=provisioning_rsp['api_token']['token_value'])
    print("Uploading artifacts onto DBFS ...")
    arm_ops.upload_artifacts(workspace_url=ws_url,
                             oauth_access_token=adb_access_token,
                             local_artifacts_path=artifacts_path,
                             dbfs_dir_path="/mnt/watercooler/scripts")
def initialize_db_schema_via_arm(install_config: InstallConfiguration, template_base_uri: str, storage_sas: str,
                                 rs_group_name: str, custom_init_file: str):
    """Deploy the SQL schema via the init_sql_schema ARM template.

    Ensures the gdc-deployer managed identity exists, optionally uploads a
    custom-init.sql to the artifacts container, then validates and deploys the
    ARM template that initializes the database schema.

    :param install_config: installation configuration (identity may be added)
    :param template_base_uri: base URI of the uploaded ARM templates
    :param storage_sas: SAS token granting read access to the templates
    :param rs_group_name: target Azure resource group
    :param custom_init_file: optional local path to a custom-init.sql; skipped
        when falsy
    """
    if not install_config.gdc_deployer_identity:
        create_gdc_deployer_identity(install_config=install_config, resource_group=rs_group_name)
    if custom_init_file:
        # Extract the storage-account name (first host label) from the base URI.
        # Raw string replaces the original non-raw pattern whose "\/" escapes
        # trigger SyntaxWarning on modern Python; '/' needs no escaping in re.
        matcher = re.search(r"https://(\w+)\..*", template_base_uri, re.IGNORECASE)
        deployment_storage = matcher.group(1)
        blob_ops.copy_file(source_path=custom_init_file,
                           resource_group=rs_group_name,
                           runtime_storage=deployment_storage,
                           dest_container_name="gdc-artifacts",
                           dest_path="sql-server/custom-init.sql")
    else:
        print("No custom-init.sql provided")
    template_uri = template_base_uri + "sql-server/init_sql_schema.json" + "?" + storage_sas
    json_params = arm_ops.create_sql_init_arm_parameters(install_config=install_config,
                                                         base_uri=template_base_uri,
                                                         sas_token=storage_sas)
    # Parameters are written to disk because the deployment CLI takes a file path.
    arm_params_json_file = os.path.join(install_config.get_gdc_dir(), "gdc_sql_arm_params.json")
    with open(arm_params_json_file, "w") as param_file:
        param_file.write(json_params)
    print("Validating SQL schema ARM configuration ...")
    arm_ops.validate_templates(template_uri=template_uri,
                               param_file_path=arm_params_json_file,
                               resource_group=rs_group_name)
    print("Deploying SQL schema using managed identity %s " % str(install_config.gdc_deployer_identity.get('id')))
    arm_ops.deploy_arm_template(template_uri=template_uri,
                                param_file_path=arm_params_json_file,
                                resource_group=rs_group_name)
def init_active_directory_entities(deployment_name: str, install_config: InstallConfiguration, resource_group: str, tenant_id: str, non_interactive_mode: bool = False):
    """Verify/create the Active Directory entities GDC needs.

    Ensures: the admins AD group, the processed-employees AD group, the
    gdc-service and m365-reader service principals (with Graph permissions),
    and the jgraph AAD web-app registration. All results are stored on
    ``install_config``; each entity is only created when missing.
    """
    print(
        "GDC requires several records in your Active Directory. Let's verify them now... "
    )
    # NOTE(review): lookup is find_graph_user_read_all_role() but the message
    # says 'User.Read' — likely should say 'User.Read.All'; confirm before changing.
    graph_user_read_permission = ad_ops.find_graph_user_read_all_role()
    if not graph_user_read_permission:
        raise RuntimeError(
            "Couldn't find 'User.Read' permission in 'Microsoft Graph' for your tenant "
        )
    # --- Admins AD group (owners of all deployed resources) ---
    if not install_config.gdc_admin_ad_group:
        if not non_interactive_mode:
            print(
                "\nThe Project Staffing admins group defines a list of AD users which are going to have Owner role over all Azure resources created by this deployment."
            )
            print(
                "They will also have access to restricted application functionalities such as switching the ingestion mode or uploading new HR Data files."
            )
            print(
                "This Security group is mandatory and needs to be created before continuing. You can pause and create it now"
            )
        provided_admin_group_id = install_config.get_provided_param_value(
            "gdcAdmins.groupId")
        admin_ad_group = ad_ops.prompt_or_create_ad_group(
            "Enter the name or id of an existing Active Directory group for Project Staffing admins: ",
            add_signed_user=False,
            provided_ad_group_id=provided_admin_group_id,
            no_input=non_interactive_mode,
            create_if_not_exists=False)
        install_config.gdc_admin_ad_group = admin_ad_group
    # --- Employees AD group (restricts whose M365 data is processed) ---
    if not install_config.gdc_employees_ad_group:
        if not non_interactive_mode:
            print(
                "\nThe Project Staffing application ingests and processes employee M365 profiles and email data to infer skills and build better teams."
            )
            print(
                "You should select an AD group to restrict the list of processed accounts. Only the data of the members of this group will be processed by the application, and therefore, only the employees in this group will be recommended by the application in searches."
            )
            print(
                "This Security group is mandatory and needs to be created before continuing. You can pause and create it now"
            )
        provided_employee_group_id = install_config.get_provided_param_value(
            "gdc_employees_ad_group_id")
        employees_ad_group = ad_ops.prompt_or_create_ad_group(
            "Enter the name or id of an existing Active Directory group for processed employees: ",
            add_signed_user=False,
            provided_ad_group_id=provided_employee_group_id,
            no_input=non_interactive_mode,
            create_if_not_exists=False)
        install_config.gdc_employees_ad_group = employees_ad_group
    # --- gdc-service service principal with Graph User.Read.All ---
    if not install_config.gdc_service_principal:
        gdc_service_sp_name = None
        if non_interactive_mode:
            gdc_service_sp_name = install_config.get_provided_param_value(
                "gdc-service-sp.name")
        if not gdc_service_sp_name:
            # Fall back to a name derived from the app service name.
            gdc_service_sp_name = install_config.appservice_name + "-gdc-service"
        print("Creating %s service principal " % gdc_service_sp_name)
        graph_read_all_role = ad_ops.find_graph_user_read_all_role()
        if not graph_read_all_role:
            raise RuntimeError(
                "Couldn't find 'User.Read.All' permission in 'Microsoft Graph' for your tenant "
            )
        gdc_sp = ad_ops.get_or_create_service_principal(
            name=gdc_service_sp_name,
            tenant_id=tenant_id,
            non_interactive_mode=non_interactive_mode)
        install_config.gdc_service_principal = gdc_sp
        ad_ops.add_service_principal_app_permission(
            sp_app_id=gdc_sp['appId'],
            api_resource_id=graph_read_all_role['appId'],
            permission_id=graph_read_all_role['id'])
    # --- m365-reader service principal with User.Read.All + Mail.Read ---
    if not install_config.m365_reader_service_principal:
        gdc_m365_reader_sp_name = None
        if non_interactive_mode:
            gdc_m365_reader_sp_name = install_config.get_provided_param_value(
                "gdc-m365-reader-sp.name")
        if not gdc_m365_reader_sp_name:
            gdc_m365_reader_sp_name = install_config.appservice_name + "-gdc-m365-reader"
        print("Creating %s service principal " % gdc_m365_reader_sp_name)
        graph_user_read_all_role = ad_ops.find_graph_user_read_all_role()
        if not graph_user_read_all_role:
            raise RuntimeError(
                "Couldn't find 'User.Read.All' permission in 'Microsoft Graph' for your tenant "
            )
        graph_mail_read_role = ad_ops.find_graph_mail_read_role()
        if not graph_mail_read_role:
            raise RuntimeError(
                "Couldn't find 'Mail.Read' permission in 'Microsoft Graph' for your tenant "
            )
        m365_reader_sp = ad_ops.get_or_create_service_principal(
            gdc_m365_reader_sp_name,
            tenant_id=tenant_id,
            non_interactive_mode=non_interactive_mode)
        install_config.m365_reader_service_principal = m365_reader_sp
        ad_ops.add_service_principal_app_permission(
            sp_app_id=m365_reader_sp['appId'],
            api_resource_id=graph_user_read_all_role['appId'],
            permission_id=graph_user_read_all_role['id'])
        ad_ops.add_service_principal_app_permission(
            sp_app_id=m365_reader_sp['appId'],
            api_resource_id=graph_mail_read_role['appId'],
            permission_id=graph_mail_read_role['id'])
        # Best-effort: make every admin-group member an owner of the SP;
        # failures are reported but do not abort the installation.
        try:
            admin_group_members = ad_ops.get_group_members(
                group_object_id=install_config.gdc_admin_ad_group["objectId"])
            for member in admin_group_members:
                ad_ops.make_user_owner_for_app(
                    user_object_id=member['objectId'],
                    app_id=m365_reader_sp['appId'])
        except Exception as azError:
            print(
                "Failed to make members of admin group owners over %s service principal!"
                % gdc_m365_reader_sp_name)
            print(azError)
    # --- jgraph AAD web-app registration for the app service's AAD login ---
    if not install_config.jgraph_aad_app:
        app_registration_name = deployment_name + "-jgraph-aad-web-app"
        print("Creating %s app registration " % app_registration_name)
        appservice_name = install_config.appservice_name
        jgraph_aad_app = ad_ops.\
            get_or_create_service_principal(app_registration_name, is_web_app=True, credentials_valid_years=3,
                                            reply_url="https://%s.azurewebsites.net/.auth/login/aad/callback" % appservice_name,
                                            logout_url="https://%s.azurewebsites.net/.auth/logout" % appservice_name,
                                            tenant_id=tenant_id, non_interactive_mode=non_interactive_mode)
        install_config.jgraph_aad_app = jgraph_aad_app
# NOTE(review): fragment of a CLI entry-point script. The opening of the
# arg_parser.add_argument(...) call and the tail of the execute_user_prompts(...)
# call are outside this view, so the code below is kept verbatim.
# Flow visible here: parse CLI args -> load InstallConfiguration (from the
# optional parameter file) and DeploymentState -> enable az debug if requested
# -> if prompts were already answered in a previous run, optionally re-run them.
required=False, type=lambda x: bool(strtobool(str(x)))) parsed_args = arg_parser.parse_args() deployment_name = parsed_args.deployment_name tenant_id = parsed_args.tenant_id subscription_id = parsed_args.subscription_id resource_group = parsed_args.resource_group debug_enabled = parsed_args.debug parameter_file = parsed_args.parameter_file no_input = parsed_args.no_input if debug_enabled: az.DEBUG_ENABLED = True install_config: InstallConfiguration = InstallConfiguration.load( default_param_file=parameter_file) install_state = DeploymentState.load() install_config.sql_auth = parsed_args.sql_auth if install_state.is_user_prompts_taken(): prompt_again = False if not no_input: prompt_again = install_state.prompt_stage_repeat( "Previously entered values have been found. Would you like to ignore them and re-enter deployment parameters? (Y/n) " ) if prompt_again: execute_user_prompts( deployment_name=deployment_name, install_config=install_config, resource_group=resource_group,
# NOTE(review): fragment of the Databricks-cluster-init entry-point script; it
# starts mid-way through the "--resource-group" add_argument call (only the
# metavar/help keywords are visible), so the code below is kept verbatim.
# Flow visible here: finish arg parsing (--artifacts-path, --debug) -> load
# InstallConfiguration and DeploymentState -> provision the cluster via
# initialize_databricks_cluster -> mark the stage complete.
metavar='resource-group', type=str, help='Azure resource group of deployment', required=True) arg_parser.add_argument( "--artifacts-path", metavar="artifacts-path", help='Local path to artifacts to be uploaded to DBFS', default=DEFAULT_ARTIFACTS_LOCATION) arg_parser.add_argument('--debug', default=False, required=False, type=lambda x: bool(strtobool(str(x)))) parsed_args = arg_parser.parse_args() group = parsed_args.resource_group artifacts_local_path = parsed_args.artifacts_path debug_enabled = parsed_args.debug config: InstallConfiguration = InstallConfiguration.load() install_state = DeploymentState.load() if debug_enabled: az.DEBUG_ENABLED = True initialize_databricks_cluster(install_config=config, resource_group=group, artifacts_path=artifacts_local_path, tenant_id=parsed_args.tenant_id, subscription_id=parsed_args.subscription_id) install_state.complete_stage(Stages.DATABRICKS_CLUSTER_INITIALIZED) print("Databricks cluster has been initialized")
def init_active_directory_entities(deployment_name: str, install_config: InstallConfiguration, resource_group: str, tenant_id: str, non_interactive_mode: bool = False):
    """Verify/create the Active Directory entities Watercooler needs.

    Ensures: the admins AD group, the wc-service and m365-reader service
    principals (with the Graph permissions listed below), the jwc AAD web-app
    registration and the Watercooler meetings-organizer user, then assigns the
    custom application policy via a PowerShell script. Results are stored on
    ``install_config``; each entity is only created when missing.
    """
    print("Watercooler requires several records in your Active Directory. Let's verify them now... ")
    # NOTE(review): lookup is find_graph_user_read_all_role() but the message
    # says 'User.Read' — likely should say 'User.Read.All'; confirm before changing.
    graph_user_read_permission = ad_ops.find_graph_user_read_all_role()
    if not graph_user_read_permission:
        raise RuntimeError("Couldn't find 'User.Read' permission in 'Microsoft Graph' for your tenant ")
    # --- Admins AD group (owners of all deployed resources) ---
    if not install_config.wc_admin_ad_group:
        if not non_interactive_mode:
            print("\nThe Watercooler admins group defines a list of AD users which are going to have Owner role over all Azure resources created by this deployment.")
            print("This Security group is mandatory and needs to be created before continuing. You can pause and create it now")
        provided_admin_group_id = install_config.get_provided_param_value("wcAdmins.groupId")
        admin_ad_group = ad_ops.prompt_or_create_ad_group("Enter the name or id of an existing Active Directory group for Watercooler admins: ", add_signed_user=False, provided_ad_group_id=provided_admin_group_id, no_input=non_interactive_mode, create_if_not_exists=False)
        install_config.wc_admin_ad_group = admin_ad_group
    # --- wc-service SP with User.Read.All, OnlineMeetings.* and Calendars.* ---
    if not install_config.wc_service_principal:
        wc_service_sp_name = None
        if non_interactive_mode:
            wc_service_sp_name = install_config.get_provided_param_value("wc-service-sp.name")
        if not wc_service_sp_name:
            # Fall back to a name derived from the app service name.
            wc_service_sp_name = install_config.appservice_name + "-wc-service"
        print("Creating %s service principal " % wc_service_sp_name)
        # Resolve all required Graph permissions up front; fail fast if any is missing.
        graph_read_all_role = ad_ops.find_graph_user_read_all_role()
        online_meetings_read_all_role = ad_ops.find_online_meetings_read_all()
        online_meetings_readwrite_all_role = ad_ops.find_online_meetings_readwrite_all()
        calendars_read_role = ad_ops.find_calendar_read()
        calendars_readwrite_role = ad_ops.find_calendar_readwrite()
        if not graph_read_all_role:
            raise RuntimeError("Couldn't find 'User.Read.All' permission in 'Microsoft Graph' for your tenant ")
        if not online_meetings_read_all_role:
            raise RuntimeError("Couldn't find 'OnlineMeetings.Read.All' permission in 'Microsoft Graph' for your tenant ")
        if not online_meetings_readwrite_all_role:
            raise RuntimeError("Couldn't find 'OnlineMeetings.ReadWrite.All' permission in 'Microsoft Graph' for your tenant ")
        if not calendars_read_role:
            raise RuntimeError("Couldn't find 'Calendars.Read' permission in 'Microsoft Graph' for your tenant ")
        if not calendars_readwrite_role:
            raise RuntimeError("Couldn't find 'Calendars.ReadWrite' permission in 'Microsoft Graph' for your tenant ")
        wc_sp = ad_ops.get_or_create_service_principal(wc_service_sp_name, tenant_id=tenant_id, non_interactive_mode=non_interactive_mode)
        install_config.wc_service_principal = wc_sp
        ad_ops.add_service_principal_app_permission(sp_app_id=wc_sp['appId'], api_resource_id=graph_read_all_role['appId'], permission_id=graph_read_all_role['id'])
        ad_ops.add_service_principal_app_permission(sp_app_id=wc_sp['appId'], api_resource_id=online_meetings_read_all_role['appId'], permission_id=online_meetings_read_all_role['id'])
        ad_ops.add_service_principal_app_permission(sp_app_id=wc_sp['appId'], api_resource_id=online_meetings_readwrite_all_role['appId'], permission_id=online_meetings_readwrite_all_role['id'])
        ad_ops.add_service_principal_app_permission(sp_app_id=wc_sp['appId'], api_resource_id=calendars_read_role['appId'], permission_id=calendars_read_role['id'])
        ad_ops.add_service_principal_app_permission(sp_app_id=wc_sp['appId'], api_resource_id=calendars_readwrite_role['appId'], permission_id=calendars_readwrite_role['id'])
    # --- m365-reader SP with User.Read.All ---
    if not install_config.m365_reader_service_principal:
        wc_m365_reader_sp_name = None
        if non_interactive_mode:
            wc_m365_reader_sp_name = install_config.get_provided_param_value("wc-m365-reader-sp.name")
        if not wc_m365_reader_sp_name:
            wc_m365_reader_sp_name = install_config.appservice_name + "-wc-m365-reader"
        print("Creating %s service principal " % wc_m365_reader_sp_name)
        graph_user_read_all_role = ad_ops.find_graph_user_read_all_role()
        if not graph_user_read_all_role:
            raise RuntimeError("Couldn't find 'User.Read.All' permission in 'Microsoft Graph' for your tenant ")
        m365_reader_sp = ad_ops.get_or_create_service_principal(wc_m365_reader_sp_name, tenant_id=tenant_id, non_interactive_mode=non_interactive_mode)
        install_config.m365_reader_service_principal = m365_reader_sp
        ad_ops.add_service_principal_app_permission(sp_app_id=m365_reader_sp['appId'], api_resource_id=graph_user_read_all_role['appId'], permission_id=graph_user_read_all_role['id'])
        # Best-effort: make every admin-group member an owner of the SP;
        # failures are reported but do not abort the installation.
        try:
            admin_group_members = ad_ops.get_group_members(group_object_id=install_config.wc_admin_ad_group["objectId"])
            for member in admin_group_members:
                ad_ops.make_user_owner_for_app(user_object_id=member['objectId'], app_id=m365_reader_sp['appId'])
        except Exception as azError:
            print("Failed to make members of admin group owners over %s service principal!" % wc_m365_reader_sp_name)
            print(azError)
    # --- jwc AAD web-app registration for the app service's AAD login ---
    if not install_config.jwc_aad_app:
        app_registration_name = deployment_name + "-jwc-aad-web-app"
        print("Creating %s app registration " % app_registration_name)
        appservice_name = install_config.appservice_name
        jwc_aad_app = ad_ops.\
            get_or_create_service_principal(app_registration_name, is_web_app=True, credentials_valid_years=3, reply_url="https://%s.azurewebsites.net/.auth/login/aad/callback" % appservice_name, logout_url="https://%s.azurewebsites.net/.auth/logout" % appservice_name, tenant_id=tenant_id, non_interactive_mode=non_interactive_mode)
        install_config.jwc_aad_app = jwc_aad_app
    # --- Watercooler Meetings Organizer user ---
    if not install_config.watercooler_user:
        print("Updating Watercooler Meetings Organizer user")
        watercooler_user_sp = ad_ops.get_watercooler_user()
        install_config.watercooler_user = watercooler_user_sp
    # NOTE(review): indentation reconstructed from a collapsed source — the
    # policy assignment is assumed to run on every invocation (not only when
    # watercooler_user was just created); confirm against the original file.
    print('Creating custom application policy for creating online meetings on behalf of Watercooler Meetings Organizer')
    script_path = "../meetings/run_policy_assign.ps1"
    subprocess.call(["pwsh", script_path, str(install_config.watercooler_user['objectId']), str(install_config.wc_service_principal['appId'])])
def provision_databricks_cluster(install_config: InstallConfiguration, workspace_url: str, oauth_access_token: str, gdc_sp_secret_value: str, managed_libraries: list = None, gdc_sp_secret_name: str = "gdc-service-principal-secret", gdc_graph_api_sp_secret_name: str = "graph-api-service-principal-secret", secret_scope_name: str = "gdc", adb_cluster_name: str = "default-gdc-cluster", max_worker: int = 2, node_type_id: str = "Standard_DS3_v2", autotermination_minutes: int = 60):
    """Create (or reuse) the default GDC Databricks cluster and its secrets.

    Ensures the secret scope exists, stores the service-principal secrets,
    creates an API token and the cluster (if absent); when managed libraries
    are requested, waits for the cluster to be RUNNING and installs them.

    :param managed_libraries: list of json object in format
        https://docs.databricks.com/dev-tools/api/latest/libraries.html#example-request
    :param workspace_url: fully-qualified Databricks workspace URL
    :param oauth_access_token: AAD token used to call the workspace APIs
    :param gdc_sp_secret_value: gdc-service SP secret, stored under both secret names
    :param gdc_sp_secret_name: key for the pipeline SP secret
    :param gdc_graph_api_sp_secret_name: key for the Graph-API SP secret
    :param secret_scope_name: Databricks secret scope to create/use
    :param adb_cluster_name: name of the cluster to find or create
    :param max_worker: autoscale upper bound (lower bound is 1)
    :param node_type_id: VM type for both workers and driver
    :param autotermination_minutes: idle auto-termination timeout
    :return: dict { "cluster_id": cluster_id, "api_token": adb_api_token }
    """
    print("Provisioning ADB cluster ...")
    assert oauth_access_token is not None
    adb_client = DatabricksAPI(host=workspace_url, token=oauth_access_token)
    # Create the secret scope only if it does not already exist.
    scopes = adb_client.secret.list_scopes().get("scopes", [])
    if not any(x for x in scopes if x.get("name") == secret_scope_name):
        adb_client.secret.create_scope(scope=secret_scope_name, initial_manage_principal="users")
    adb_client.secret.put_secret(scope=secret_scope_name, key=gdc_sp_secret_name, string_value=gdc_sp_secret_value)
    # both databricks jobs use gdc-service service principal to access Graph API and other component
    # but we've introduce two secrets for flexibility even thought they have same value for now
    adb_client.secret.put_secret(scope=secret_scope_name, key=gdc_graph_api_sp_secret_name, string_value=gdc_sp_secret_value)
    adb_api_token = adb_client.token.create_token(comment="GDC Pipeline API token")
    cluster_id = None
    # Reuse an existing cluster with the requested name, otherwise create one.
    clusters = adb_client.cluster.list_clusters().get("clusters", [])
    cluster_rsp = list([x for x in clusters if x.get("cluster_name") == adb_cluster_name])
    if not cluster_rsp:
        print("Creating a new cluster %s" % adb_cluster_name)
        cluster_rsp = adb_client.cluster.create_cluster(cluster_name=adb_cluster_name,
                                                        autoscale={
                                                            "min_workers": 1,
                                                            "max_workers": max_worker
                                                        },
                                                        node_type_id=node_type_id,
                                                        driver_node_type_id=node_type_id,
                                                        autotermination_minutes=autotermination_minutes,
                                                        enable_elastic_disk=True,
                                                        spark_version="6.6.x-scala2.11")
    else:
        print("Cluster %s exists at %s" % (adb_cluster_name, workspace_url))
        cluster_rsp = cluster_rsp[0]
    # capture cluster details as soon as it's available
    install_config.adb_cluster_details = {
        "cluster_id": cluster_rsp['cluster_id'],
        "api_token": adb_api_token
    }
    cluster_id = cluster_rsp['cluster_id']
    if managed_libraries:
        cluster_info = adb_client.cluster.get_cluster(cluster_id=cluster_id)
        cluster_state = cluster_info['state']
        # possible values PENDING, TERMINATED and RUNNING
        if cluster_state == "TERMINATED":
            print("Starting cluster %s " % cluster_id)
            adb_client.cluster.start_cluster(cluster_id=cluster_id)
            cluster_state = "PENDING"
        # Poll until the cluster leaves any transitional state.
        while cluster_state == "PENDING" or cluster_state == "RESTARTING" or cluster_state == "RESIZING":
            print("Waiting cluster %s " % cluster_id)
            sleep(5)
            cluster_info = adb_client.cluster.get_cluster(cluster_id=cluster_id)
            cluster_state = cluster_info['state']
            print("Cluster is now in state %s " % cluster_state)
        if cluster_state == "TERMINATING" or cluster_state == "TERMINATED" or cluster_state == "ERROR":
            print("Can't install managed libraries, cluster %s is not running" % cluster_id)
            raise RuntimeError("Can't install managed libraries, cluster %s is not running. Check Databricks Workspace Portal for details and try again later" % cluster_id)
        else:
            # Library installation is best-effort: a failure is logged but the
            # provisioning result is still returned.
            try:
                print("Installing managed libraries on cluster %s " % cluster_id)
                install_managed_libraries(adb_client, cluster_id, managed_libraries)
            except BaseException as e:
                print("Failed to install libraries into cluster %s " % cluster_id)
                print(e)
    return {
        "cluster_id": cluster_id,
        "api_token": adb_api_token
    }
def create_main_arm_parameters(install_config: InstallConfiguration, base_uri: str = None, sas_token: str = None, docker_login: str = None, docker_password: str = None, app_version: str = "1.0.2", log_analytic_ws_name: str = None, admin_full_name: str = None, admin_email: str = None):
    """Build the JSON parameters document for the main ARM deployment.

    Starts from ``install_config.required_arm_params`` and layers on optional
    artifact-location, docker, AAD app, service-principal, group, alerting and
    pipeline parameters. Returns the JSON text (sorted keys, 4-space indent).
    """
    parameters = dict()
    for key, value in install_config.required_arm_params.items():
        parameters[key] = {
            "value": value
        }
    parameters["appservice.version"] = {
        "value": app_version
    }
    if base_uri:
        parameters["_artifactsLocation"] = {
            "value": base_uri
        }
    if sas_token:
        # ARM expects the SAS token with a leading '?'.
        _artifactsLocationSasToken = sas_token
        if not sas_token.startswith("?"):
            _artifactsLocationSasToken = "?" + sas_token
        parameters["_artifactsLocationSasToken"] = {
            "value": _artifactsLocationSasToken
        }
    if docker_login:
        parameters["docker.login"] = {
            "value": docker_login
        }
    if docker_password:
        parameters["docker.password"] = {
            "value": docker_password
        }
    if install_config.jgraph_aad_app and "appId" in install_config.jgraph_aad_app:
        parameters["appservice.aad.clientId"] = {
            "value": install_config.jgraph_aad_app["appId"]
        }
        parameters["appservice.aad.clientSecret"] = {
            "value": install_config.jgraph_aad_app["password"]
        }
    if install_config.gdc_service_principal and "appId" in install_config.gdc_service_principal:
        parameters["gdc-service.sp.clientId"] = {
            "value": install_config.gdc_service_principal["appId"]
        }
        if "gdc-service.sp.clientSecret" in install_config.arm_params:
            parameters["gdc-service.sp.clientSecret"] = {
                "value": install_config.gdc_service_principal["password"]
            }
        if "gdc-service.sp.objectId" in install_config.arm_params:
            parameters["gdc-service.sp.objectId"] = {
                "value": get_service_principal_object_id(app_id=install_config.gdc_service_principal["appId"])
            }
    if install_config.gdc_admin_ad_group and "objectId" in install_config.gdc_admin_ad_group:
        # NOTE(review): "sqlsever" (sic) must match the parameter names in the
        # ARM template — do not "fix" the spelling here without changing both.
        parameters["sqlsever.database.aad.admin-object-id"] = {
            "value": install_config.gdc_admin_ad_group["objectId"]
        }
        parameters["sqlsever.database.aad.admin-login"] = {
            "value": install_config.gdc_admin_ad_group["ad_group_name"]
        }
    if install_config.m365_reader_service_principal and "appId" in install_config.m365_reader_service_principal:
        parameters['m365Adf-reader.sp.clientId'] = {
            "value": install_config.m365_reader_service_principal['appId']
        }
        # NOTE(review): key is "365Adf-reader..." while the clientId key above
        # uses the "m365Adf-" prefix — confirm against the ARM template.
        # Also note the SP's objectId is passed to an app_id= parameter here,
        # whereas the gdc-service branch passes appId — verify intent.
        if "365Adf-reader.sp.objectId" in install_config.arm_params:
            parameters["365Adf-reader.sp.objectId"] = {
                "value": get_service_principal_object_id(app_id=install_config.m365_reader_service_principal["objectId"])
            }
    if install_config.gdc_employees_ad_group and "objectId" in install_config.gdc_employees_ad_group:
        parameters['gdc_employees_ad_group_id'] = {
            "value": install_config.gdc_employees_ad_group['objectId']
        }
    if log_analytic_ws_name:
        parameters['logs.workspace.name'] = {
            "value": log_analytic_ws_name
        }
    if admin_full_name:
        parameters['alert.admin.fullname'] = {
            "value": admin_full_name
        }
    if admin_email:
        parameters['alert.admin.email'] = {
            "value": admin_email
        }
    parameters['sqlsever.sql-auth'] = {
        "value": install_config.sql_auth
    }
    # NOTE(review): utcnow() returns a naive datetime (deprecated in 3.12);
    # consider datetime.now(timezone.utc) if _get_earliest_6am_in_past allows.
    utc_now = datetime.utcnow()
    pipeline_start_time_str = _get_earliest_6am_in_past(utc_now)
    parameters['pipeline_start_time'] = {
        "value": pipeline_start_time_str
    }
    parameters['pygraph_utils_library.name'] = {
        "value": install_config.get_pygraph_utils_library_name()
    }
    parameters["gdcAdmins.groupId"] = {
        "value": install_config.gdc_admin_ad_group['objectId']
    }
    json_data = {
        "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
        "contentVersion": "1.0.0.0",
        "parameters": parameters
    }
    return json.dumps(json_data, indent=4, sort_keys=True)