def connect(self, client_id: str = None, tenant_id: str = None, secret: str = None):
    """Authenticate with the SDK."""
    # Fall back to the AzureCLI section of the msticpy config when no
    # explicit service-principal details are supplied.
    if client_id is None and tenant_id is None and secret is None:
        cli_settings = config.settings.get("AzureCLI")
        if not cli_settings:
            raise MsticpyAzureException(
                "No AzureCLI configuration found in configuration settings."
            )
        cli_args = cli_settings["Args"]
        client_id, tenant_id, secret = (
            cli_args["clientId"],
            cli_args["tenantId"],
            cli_args["clientSecret"],
        )

    # Build service-principal credentials and validate them by creating
    # a subscription client.
    self.credentials = ServicePrincipalCredentials(
        client_id=client_id, secret=secret, tenant=tenant_id
    )
    if not self.credentials:
        raise CloudError("Could not obtain credentials.")
    self.sub_client = SubscriptionClient(self.credentials)
    if not self.sub_client:
        raise CloudError("Could not create a Subscription client.")
    self.connected = True
def upload_to_blob(self, blob: Any, container_name: str, blob_name: str, overwrite: bool = True):
    """
    Upload a blob of data.

    Parameters
    ----------
    blob : Any
        The data to upload.
    container_name : str
        The name of the container to upload the blob to.
    blob_name : str
        The name to give the blob.
    overwrite : bool, optional
        Whether or not you want to overwrite the blob if it exists, by default True.

    Returns
    -------
    bool
        True if the upload completed without an error code.

    Raises
    ------
    CloudError
        If the container does not exist or the upload reports an error code.

    """
    try:
        blob_client = self.abs_client.get_blob_client(  # type:ignore
            container=container_name, blob=blob_name)
        upload = blob_client.upload_blob(blob, overwrite=overwrite)
    except ResourceNotFoundError as err:
        raise CloudError(
            "Unknown container, check container name or create it first."
        ) from err
    # upload_blob returns a dict of response metadata; "error_code" may be
    # absent entirely on success, so use .get() rather than indexing
    # (direct indexing could raise KeyError on a successful upload).
    if not upload.get("error_code"):
        print("Upload complete")
    else:
        raise CloudError(
            f"There was a problem uploading the blob: {upload['error_code']}"
        )
    return True
def connect(self, client_id: str = None, tenant_id: str = None, secret: str = None):
    """Authenticate with the SDK."""
    # Use details from msticpy config if not provided
    if client_id is None and tenant_id is None and secret is None:
        data_provs = get_provider_settings(config_section="DataProviders")
        az_cli_config = data_provs.get("AzureCLI")
        if not az_cli_config:
            raise MsticpyAzureConfigError(
                "No AzureCLI section found in configuration settings.",
                title="no AzureCLI settings available.",
            )
        config_items = az_cli_config.args
        try:
            client_id = config_items["clientId"]
            tenant_id = config_items["tenantId"]
            secret = config_items["clientSecret"]
        except KeyError as key_err:
            key_name = key_err.args[0]
            raise MsticpyAzureConfigError(
                f"{key_name} is missing from AzureCLI section in your",
                "configuration.",
                # BUG FIX: was "missing f{key_name} ..." - the "f" prefix
                # was inside the literal, so the key name was never
                # interpolated into the title.
                title=f"missing {key_name} settings for AzureCLI.",
            ) from key_err
    # Create credentials and connect to the subscription client to validate
    self.credentials = ServicePrincipalCredentials(
        client_id=client_id, secret=secret, tenant=tenant_id
    )
    if not self.credentials:
        raise CloudError("Could not obtain credentials.")
    self.sub_client = SubscriptionClient(self.credentials)
    if not self.sub_client:
        raise CloudError("Could not create a Subscription client.")
    self.connected = True
def connect(
    self,
    auth_methods: List = None,
    silent: bool = False,
):
    """Authenticate with the SDK."""
    creds = az_connect(auth_methods=auth_methods, silent=silent)
    if not creds:
        raise CloudError("Could not obtain credentials.")
    self.credentials = creds
    # NOTE(review): _check_client may already build sub_client, which is
    # then unconditionally rebuilt below - confirm this is intended.
    self._check_client("sub_client")
    self.sub_client = SubscriptionClient(creds.legacy)
    if not self.sub_client:
        raise CloudError("Could not create a Subscription client.")
    self.connected = True
def connect(
    self,
    auth_methods: List = None,
    silent: bool = False,
):
    """Authenticate with the SDK."""
    creds = az_connect(auth_methods=auth_methods, silent=silent)
    if not creds:
        raise CloudError("Could not obtain credentials.")
    self.credentials = creds
    # Build the Blob Storage client against the configured account URL
    # using the modern (MSAL) credential set.
    self.abs_client = BlobServiceClient(self.abs_site, creds.modern)
    if not self.abs_client:
        raise CloudError("Could not create a Blob Storage client.")
    self.connected = True
def create_container(self, container_name: str, **kwargs) -> pd.DataFrame:
    """
    Create a new container within the Azure Blob Storage account.

    Parameters
    ----------
    container_name : str
        The name for the new container.
        Additional container parameters can be passed as kwargs

    Returns
    -------
    pd.DataFrame
        Details of the created container.

    Raises
    ------
    CloudError
        If a container with that name already exists.

    """
    try:
        container = self.abs_client.create_container(  # type: ignore
            container_name, **kwargs
        )
    except ResourceExistsError as err:
        raise CloudError(
            f"Container {container_name} already exists.") from err
    # Flatten the returned container properties into a dataframe row.
    props = container.get_container_properties()
    return _parse_returned_items([props], ["encryption_scope", "lease"])
def delete_blob(self, container_name: str, blob_name: str) -> bool:
    """
    Delete a blob from the Azure Blob Storage account.

    Parameters
    ----------
    container_name : str
        The container name that has the blob.
    blob_name : str
        The name of the blob to delete.
        Note deleting a blob also deletes associated snapshots.

    Returns
    -------
    bool
        True if blob successfully deleted

    Raises
    ------
    CloudError
        If the blob does not exist in the container.

    """
    blob_client = self.abs_client.get_blob_client(  # type: ignore
        container=container_name, blob=blob_name
    )
    # Guard clause: fail fast when the blob is missing.
    if not blob_client.exists():
        raise CloudError(
            f"The blob {blob_name} does not exist in {container_name}")
    # Remove snapshots along with the blob itself.
    blob_client.delete_blob(delete_snapshots="include")
    return True
def get_blob(self, container_name: str, blob_name: str) -> bytes:
    """
    Get a blob from the Azure Blob Storage account.

    Parameters
    ----------
    container_name : str
        The name of the container that holds the blob.
    blob_name : str
        The name of the blob to download.

    Returns
    -------
    bytes
        The content of the blob in bytes.

    Raises
    ------
    CloudError
        If the blob does not exist in the container.

    """
    blob_client = self.abs_client.get_blob_client(  # type: ignore
        container=container_name, blob=blob_name
    )
    # Guard clause: fail fast when the blob is missing.
    if not blob_client.exists():
        raise CloudError(
            f"The blob {blob_name} does not exist in {container_name}")
    stream = blob_client.download_blob()
    return stream.content_as_bytes()
def post_comment(
    self,
    incident_id: str,
    comment: str,
    res_id: str = None,
    sub_id: str = None,
    res_grp: str = None,
    ws_name: str = None,
):
    """
    Write a comment for an incident.

    Parameters
    ----------
    incident_id : str
        Incident ID GUID.
    comment : str
        Comment message to post.
    res_id : str, optional
        Resource ID of the workspace, if not provided details
        from config file will be used.
    sub_id : str, optional
        Sub ID of the workspace, to be used if not providing Resource ID.
    res_grp : str, optional
        Resource Group name of the workspace, to be used if not providing
        Resource ID.
    ws_name : str, optional
        Workspace name of the workspace, to be used if not providing
        Resource ID.

    Raises
    ------
    CloudError
        If message could not be posted.

    """
    # Build the workspace resource ID from explicit args or config.
    if not res_id:
        if not sub_id or not res_grp or not ws_name:
            config = self._check_config(
                ["subscription_id", "resource_group", "workspace_name"]
            )
            sub_id = config["subscription_id"]
            res_grp = config["resource_group"]
            ws_name = config["workspace_name"]
        res_id = (
            f"/subscriptions/{sub_id}/resourcegroups/{res_grp}"
            f"/providers/Microsoft.OperationalInsights/workspaces/{ws_name}"
        )
    # Each comment gets a freshly generated GUID in its URL.
    base_url = _build_paths(res_id)
    comment_url = (
        base_url
        + _PATH_MAPPING["incidents"]
        + f"/{incident_id}/comments/{str(uuid4())}"
    )
    payload = _build_data({"message": comment})
    response = requests.put(
        comment_url,
        headers=_get_api_headers(self.token),
        params={"api-version": "2020-01-01"},
        data=str(payload),
    )
    if response.status_code != 201:
        raise CloudError(response=response)
    print("Comment posted.")
def get_api(self, resource_id: str = None, sub_id: str = None, resource_provider: str = None) -> str:
    """
    Return the latest available API version for the resource.

    Parameters
    ----------
    resource_id : str, optional
        The ID of the resources to get an API version for
    sub_id : str, optional
        The ID of the subscription to get details from
    resource_provider : str, optional
        The resource provider namespace and service to get an API version for

    Returns
    -------
    str
        The latest available non-preview API version

    Raises
    ------
    MsticpyAzureException
        If not connected or the resource provider cannot be found.
    ValueError
        If neither a resource ID nor a resource provider is given.

    """
    if self.connected is False:
        # Consistency: use the package exception (as get_resource_details
        # does) rather than a bare Exception.
        raise MsticpyAzureException("Please connect before continuing")
    if self.resource_client is None:
        self.resource_client = ResourceManagementClient(
            self.credentials, sub_id)
        if not self.resource_client:
            raise CloudError(
                "Could not create a ResourceManagementClient.")

    # Extract provider namespace and service either from a full resource
    # ID (elements 6 and 7 of the path) or from "namespace/service".
    if resource_id is not None:
        namespace = resource_id.split("/")[6]
        service = resource_id.split("/")[7]
    elif resource_provider is not None:
        namespace = resource_provider.split("/")[0]
        service = resource_provider.split("/")[1]
    else:
        raise ValueError(
            "Please provide a resource ID or resource provider namespace")

    provider = self.resource_client.providers.get(namespace)
    resource_types = next(
        (t for t in provider.resource_types if t.resource_type == service), None)
    if not resource_types:
        raise MsticpyAzureException("Resource provider not found")
    # Prefer the newest stable API version; fall back to the newest
    # version of any kind (including previews) if no stable one exists.
    # (The original also checked "api_version is None", which was dead
    # code - a list comprehension never yields None.)
    stable_versions = [
        v for v in resource_types.api_versions if "preview" not in v.lower()
    ]
    api_ver = stable_versions[0] if stable_versions else resource_types.api_versions[0]
    return str(api_ver)
def get_bookmarks(
    self,
    res_id: str = None,
    sub_id: str = None,
    res_grp: str = None,
    ws_name: str = None,
) -> pd.DataFrame:
    """
    Return a list of Bookmarks from a Sentinel workspace.

    Parameters
    ----------
    res_id : str, optional
        Resource ID of the workspace, if not provided details
        from config file will be used.
    sub_id : str, optional
        Sub ID of the workspace, to be used if not providing Resource ID.
    res_grp : str, optional
        Resource Group name of the workspace, to be used if not providing
        Resource ID.
    ws_name : str, optional
        Workspace name of the workspace, to be used if not providing
        Resource ID.

    Returns
    -------
    pd.DataFrame
        A set of bookmarks.

    Raises
    ------
    CloudError
        If bookmark collection fails.

    """
    if not res_id:
        if not sub_id or not res_grp or not ws_name:
            config = self._check_config(
                ["subscription_id", "resource_group", "workspace_name"])
            sub_id = config["subscription_id"]
            res_grp = config["resource_group"]
            ws_name = config["workspace_name"]
        res_id = f"/subscriptions/{sub_id}/resourcegroups/{res_grp}"
        # BUG FIX: the workspaces segment was missing its f-string prefix,
        # so the literal text "{ws_name}" was appended to the resource ID.
        res_id = (
            res_id
            + f"/providers/Microsoft.OperationalInsights/workspaces/{ws_name}"
        )
    url = _build_paths(res_id)
    bookmarks_url = url + _PATH_MAPPING["bookmarks"]
    params = {"api-version": "2020-01-01"}
    response = requests.get(bookmarks_url,
                            headers=_get_api_headers(self.token),
                            params=params)
    if response.status_code == 200:
        bookmarks_df = _azs_api_result_to_df(response)
    else:
        raise CloudError("Could not get bookmarks.")
    return bookmarks_df
def get_hunting_queries(
    self,
    res_id: str = None,
    sub_id: str = None,
    res_grp: str = None,
    ws_name: str = None,
) -> pd.DataFrame:
    """
    Return all hunting queries in an Azure Sentinel workspace.

    Parameters
    ----------
    res_id : str, optional
        Resource ID of the workspace, if not provided details
        from config file will be used.
    sub_id : str, optional
        Sub ID of the workspace, to be used if not providing Resource ID.
    res_grp : str, optional
        Resource Group name of the workspace, to be used if not providing
        Resource ID.
    ws_name : str, optional
        Workspace name of the workspace, to be used if not providing
        Resource ID.

    Returns
    -------
    pd.DataFrame
        A table of the hunting queries.

    Raises
    ------
    CloudError
        If hunting queries could not be retrieved.

    """
    # If res_id isn't provided try and get them from config
    if not res_id:
        if not sub_id or not res_grp or not ws_name:
            config = self._check_config(
                ["subscription_id", "resource_group", "workspace_name"])
            sub_id = config["subscription_id"]
            res_grp = config["resource_group"]
            ws_name = config["workspace_name"]
        res_id = f"/subscriptions/{sub_id}/resourcegroups/{res_grp}"
        # BUG FIX: the workspaces segment was missing its f-string prefix,
        # so the literal text "{ws_name}" was appended to the resource ID.
        res_id = (
            res_id
            + f"/providers/Microsoft.OperationalInsights/workspaces/{ws_name}"
        )
    url = _build_paths(res_id)
    saved_searches_url = url + _PATH_MAPPING["ss_path"]
    params = {"api-version": "2017-04-26-preview"}
    response = requests.get(saved_searches_url,
                            headers=_get_api_headers(self.token),
                            params=params)
    if response.status_code == 200:
        queries_df = _azs_api_result_to_df(response)
    else:
        # BUG FIX: the message previously said "alert rules" although this
        # method retrieves hunting queries (saved searches).
        raise CloudError("Could not get hunting queries.")
    return queries_df[queries_df["properties.Category"] == "Hunting Queries"]
def az_connect(
    auth_methods: List[str] = None,
    silent: bool = False,
) -> AzCredentials:
    """
    Connect to Azure SDK/API.

    Parameters
    ----------
    auth_methods : List[str], optional
        List of authentication methods to try
        Possible options are:
        - "env" - to get authentication details from environment variables
        - "cli" - to use Azure CLI authentication details
        - "msi" - to use Managed Service Identity details
        - "interactive" - to prompt for interactive login
        Default is ["env", "cli", "msi", "interactive"]
    silent : bool, optional
        Set True to hide all output during connection, by default False

    Returns
    -------
    AzCredentials
        Named tuple of:
        - legacy (ADAL) credentials
        - modern (MSAL) credentials

    Raises
    ------
    CloudError
        If chained token credential creation fails.

    """
    # Prefer auth settings from the msticpy AzureCLI provider config.
    data_provs = get_provider_settings(config_section="DataProviders")
    az_cli_config = data_provs.get("AzureCLI")
    auth_methods = auth_methods or default_auth_methods()
    if az_cli_config and az_cli_config.args:
        if "auth_methods" in az_cli_config.args:
            auth_methods = az_cli_config.args.get("auth_methods")
        if isinstance(auth_methods, list) and "env" in auth_methods:
            # Export the service-principal details as the environment
            # variables consumed by EnvironmentCredential.
            for env_name, arg_name in (
                ("AZURE_CLIENT_ID", "clientId"),
                ("AZURE_TENANT_ID", "tenantId"),
                ("AZURE_CLIENT_SECRET", "clientSecret"),
            ):
                os.environ[env_name] = az_cli_config.args.get(arg_name) or ""
    credentials = az_connect_core(auth_methods=auth_methods, silent=silent)
    # Validate the credentials by creating a subscription client.
    sub_client = SubscriptionClient(credentials.modern)  # type: ignore
    if not sub_client:
        raise CloudError("Could not create a Subscription client.")
    return credentials
def containers(self) -> pd.DataFrame:
    """Return containers in the Azure Blob Storage Account."""
    try:
        found_containers = self.abs_client.list_containers()  # type:ignore
    except ServiceRequestError as err:
        raise CloudError(
            "Unable to connect check the Azure Blob Store account name"
        ) from err
    # Returns None (not an empty frame) when nothing was listed.
    if not found_containers:
        return None
    return _parse_returned_items(
        found_containers, remove_list=["lease", "encryption_scope"]
    )
def _check_client(self, client_name: str, sub_id: str):
    """
    Check that the required client is present, creating it if not.

    Parameters
    ----------
    client_name : str
        The name of the client attribute to be checked.
    sub_id : str
        The subscription ID for the client to connect to.

    Raises
    ------
    CloudError
        If the client could not be created.

    """
    client_cls = _CLIENT_MAPPING[client_name]
    if getattr(self, client_name) is None:
        setattr(self, client_name, client_cls(self.credentials, sub_id))
    if getattr(self, client_name) is None:
        raise CloudError("Could not create client")
def _check_client(self, client_name: str, sub_id: str = None):
    """
    Check that the required client is present, creating it if not.

    Parameters
    ----------
    client_name : str
        The name of the client to be checked.
    sub_id : str, optional
        The subscription ID for the client to connect to, by default None

    Raises
    ------
    CloudError
        If the client could not be created.

    """
    client_cls = _CLIENT_MAPPING[client_name]
    if getattr(self, client_name) is None:
        # Only pass the subscription ID when one was supplied.
        client_args = (
            (self.credentials.modern,)  # type: ignore
            if sub_id is None
            else (self.credentials.modern, sub_id)  # type: ignore
        )
        setattr(self, client_name, client_cls(*client_args))
    if getattr(self, client_name) is None:
        raise CloudError("Could not create client")
def get_resources(self, sub_id: str, rgroup: str = None, get_props: bool = False) -> pd.DataFrame:
    """
    Return details on all resources in a subscription or Resource Group.

    Parameters
    ----------
    sub_id : str
        The subscription ID to get resources for
    rgroup : str, optional
        The name of a Resource Group to get resources for
    get_props : bool, optional
        Set to True if you want to get the full properties of every resource
        Warning this may be a slow process depending on the number of resources

    Returns
    -------
    pd.DataFrame
        A dataframe of resource details

    Raises
    ------
    MsticpyAzureException
        If not connected (call `connect` first).
    CloudError
        If the ResourceManagementClient could not be created.

    """
    if self.connected is False:
        # Consistency: use the package exception (as get_resource_details
        # does) rather than a bare Exception.
        raise MsticpyAzureException("Please connect before continuing")

    self.resource_client = ResourceManagementClient(
        self.credentials, sub_id)
    if not self.resource_client:
        raise CloudError("Could not create a ResourceManagementClient.")

    if rgroup is None:
        resources = self.resource_client.resources.list()
    else:
        resources = self.resource_client.resources.list_by_resource_group(
            rgroup)

    if get_props:
        print(
            "Collecting properties for every resource may take some time..."
        )

    resource_items = []
    for resource in resources:
        if get_props:
            # Try a fixed recent API version first; fall back to
            # resolving the latest version for this resource type.
            try:
                props = self.resource_client.resources.get_by_id(
                    resource.id, "2019-08-01").properties
            except CloudError:
                props = self.resource_client.resources.get_by_id(
                    resource.id, self.get_api(resource.id)).properties
        else:
            props = resource.properties

        resource_items.append(
            attr.asdict(
                Items(
                    resource.id,
                    resource.name,
                    resource.type,
                    resource.location,
                    resource.tags,
                    resource.plan,
                    props,
                    resource.kind,
                    resource.managed_by,
                    resource.sku,
                    resource.identity,
                )
            )
        )
    return pd.DataFrame(resource_items)
def update_incident(
    self,
    incident_id: str,
    update_items: dict,
    res_id: str = None,
    sub_id: str = None,
    res_grp: str = None,
    ws_name: str = None,
):
    """
    Update properties of an incident.

    Parameters
    ----------
    incident_id : str
        Incident ID GUID.
    update_items : dict
        Dictionary of properties to update and their values.
        Ref: https://docs.microsoft.com/en-us/rest/api/securityinsights/incidents/createorupdate
    res_id : str, optional
        Resource ID of the workspace, if not provided details
        from config file will be used.
    sub_id : str, optional
        Sub ID of the workspace, to be used if not providing Resource ID.
    res_grp : str, optional
        Resource Group name of the workspace, to be used if not providing
        Resource ID.
    ws_name : str, optional
        Workspace name of the workspace, to be used if not providing
        Resource ID.

    Raises
    ------
    CloudError
        If incident could not be updated.

    """
    if not res_id:
        if not sub_id or not res_grp or not ws_name:
            config = self._check_config(
                ["subscription_id", "resource_group", "workspace_name"])
            sub_id = config["subscription_id"]
            res_grp = config["resource_group"]
            ws_name = config["workspace_name"]
        res_id = f"/subscriptions/{sub_id}/resourcegroups/{res_grp}"
        # BUG FIX: the workspaces segment was missing its f-string prefix,
        # so the literal text "{ws_name}" was appended to the resource ID.
        res_id = (
            res_id
            + f"/providers/Microsoft.OperationalInsights/workspaces/{ws_name}"
        )
    incident_dets = self.get_incident(incident_id=incident_id, res_id=res_id)
    url = _build_paths(res_id)
    incidents_url = url + _PATH_MAPPING["incidents"]
    incident_url = incidents_url + f"/{incident_id}"
    params = {"api-version": "2020-01-01"}
    # The API requires title and status to be present - carry them over
    # from the current incident if the caller did not supply them.
    if "title" not in update_items.keys():
        update_items.update(
            {"title": incident_dets.iloc[0]["properties.title"]})
    if "status" not in update_items.keys():
        update_items.update(
            {"status": incident_dets.iloc[0]["properties.status"]})
    data = _build_data(update_items, etag=incident_dets.iloc[0]["etag"])
    response = requests.put(
        incident_url,
        headers=_get_api_headers(self.token),
        params=params,
        data=str(data),
    )
    if response.status_code == 200:
        print("Incident updated.")
    else:
        # BUG FIX: the message previously said "get incident status"
        # although this method performs an update.
        raise CloudError(
            f"Could not update incident: {response.status_code}")
def get_incident(
    self,
    incident_id: str,
    res_id: str = None,
    sub_id: str = None,
    res_grp: str = None,
    ws_name: str = None,
) -> pd.DataFrame:
    """
    Get details on a specific incident.

    Parameters
    ----------
    incident_id : str
        Incident ID GUID.
    res_id : str, optional
        Resource ID of the workspace, if not provided details
        from config file will be used.
    sub_id : str, optional
        Sub ID of the workspace, to be used if not providing Resource ID.
    res_grp : str, optional
        Resource Group name of the workspace, to be used if not providing
        Resource ID.
    ws_name : str, optional
        Workspace name of the workspace, to be used if not providing
        Resource ID.

    Returns
    -------
    pd.DataFrame
        Table containing incident details.

    Raises
    ------
    CloudError
        If incident could not be retrieved.

    """
    if not res_id:
        if not sub_id or not res_grp or not ws_name:
            config = self._check_config(
                ["subscription_id", "resource_group", "workspace_name"])
            sub_id = config["subscription_id"]
            res_grp = config["resource_group"]
            ws_name = config["workspace_name"]
        res_id = f"/subscriptions/{sub_id}/resourcegroups/{res_grp}"
        # BUG FIX: the workspaces segment was missing its f-string prefix,
        # so the literal text "{ws_name}" was appended to the resource ID.
        res_id = (
            res_id
            + f"/providers/Microsoft.OperationalInsights/workspaces/{ws_name}"
        )
    url = _build_paths(res_id)
    incidents_url = url + _PATH_MAPPING["incidents"]
    incident_url = incidents_url + f"/{incident_id}"
    params = {"api-version": "2020-01-01"}
    response = requests.get(incident_url,
                            headers=_get_api_headers(self.token),
                            params=params)
    if response.status_code == 200:
        incident_df = _azs_api_result_to_df(response)
    else:
        raise CloudError(
            f"Could not get incident status: {response.status_code}")
    return incident_df
def az_connect_core(auth_methods: List[str] = None, silent: bool = False) -> AzCredentials:
    """
    Authenticate using multiple authentication sources.

    Parameters
    ----------
    auth_methods : List[str], optional
        List of authentication methods to try
        Possible options are:
        - "env" - to get authentication details from environment variables
        - "cli" - to use Azure CLI authentication details
        - "msi" - to use Managed Service Identity details
        - "interactive" - to prompt for interactive login
        Default is ["env", "cli", "msi", "interactive"]
    silent : bool, optional
        Whether to display any output during auth process. Default is False.

    Returns
    -------
    AzCredentials
        Named tuple of:
        - legacy (ADAL) credentials
        - modern (MSAL) credentials

    Raises
    ------
    CloudError
        If chained token credential creation fails.
    MsticpyAzureConnectionError
        If invalid auth options are presented.

    Notes
    -----
    The function tries to obtain credentials from the following sources:
    - Azure Auth Environment variables
    - Azure CLI (if an active session is logged on)
    - Managed Service Identity
    - Interactive browser logon
    If the authentication is successful both ADAL (legacy) and
    MSAL (modern) credential types are returned.

    """
    auth_methods = auth_methods or default_auth_methods()
    try:
        auth_providers = [_AUTH_OPTIONS[method] for method in auth_methods]
    except KeyError as err:
        raise MsticpyAzureConnectionError(
            "Unknown authentication option, valid options are; env, cli, msi, interactive"
        ) from err

    # Filter and replace error message when credentials not found
    stdout_handler = logging.StreamHandler(sys.stdout)
    warning_filter = _filter_all_warnings if silent else _filter_credential_warning
    stdout_handler.addFilter(warning_filter)
    logging.basicConfig(level=logging.WARNING, handlers=[stdout_handler])

    # Chain the selected credential types and wrap the result for
    # legacy (ADAL) consumers.
    creds = ChainedTokenCredential(*auth_providers)
    legacy_creds = CredentialWrapper(creds)
    if not creds:
        raise CloudError("Could not obtain credentials.")
    return AzCredentials(legacy_creds, creds)
def get_resource_details(self, resource_id: str = None, resource_details: dict = None, sub_id: str = None) -> dict:
    """
    Return the details of a specific Azure resource.

    Parameters
    ----------
    resource_id : str, optional
        The ID of the resource to get details on
    resource_details : dict, optional
        If ID is unknown provide the following details:
            -resource_group_name
            -resource_provider_namespace
            -resource_type
            -resource_name
            -parent_resource_path
    sub_id : str, optional
        The ID of the subscription to get resources from

    Returns
    -------
    dict
        The details of the requested resource

    Raises
    ------
    MsticpyAzureException
        If not connected (call `connect` first).
    ValueError
        If neither a resource ID nor resource details are provided.

    """
    if self.connected is False:
        raise MsticpyAzureException("Please connect before continuing")
    if self.resource_client is None:
        self.resource_client = ResourceManagementClient(
            self.credentials, sub_id)
        if not self.resource_client:
            raise CloudError(
                "Could not create a ResourceManagementClient.")

    if resource_id is not None:
        resource = self.resource_client.resources.get_by_id(
            resource_id, self.get_api(resource_id))
    elif resource_details is not None:
        resource = self.resource_client.resources.get(
            resource_details["resource_group_name"],
            resource_details["resource_provider_namespace"],
            resource_details["parent_resource_path"],
            resource_details["resource_type"],
            resource_details["resource_name"],
            self.get_api(resource_provider=(
                resource_details["resource_provider_namespace"]
                + "/"
                + resource_details["resource_type"])),
        )
    else:
        # Consistency fix: raise ValueError (as get_api does for its
        # analogous missing-argument case) instead of a bare Exception.
        raise ValueError(
            "Please provide either a resource ID or resource details")

    # Return a fresh dict rather than rebinding the resource_details
    # parameter (the original shadowed it with the return value).
    return attr.asdict(
        Items(
            resource.id,
            resource.name,
            resource.type,
            resource.location,
            resource.tags,
            resource.plan,
            resource.properties,
            resource.kind,
            resource.managed_by,
            resource.sku,
            resource.identity,
        )
    )