Example #1
 def credentials(self):
     return ClientSecretCredential(
         tenant_id=self.tenant_id,
         client_id=self.client_id,
         client_secret=self.client_secret,
     )
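A minimal usage sketch (not from the original source; the placeholder values and the ARM scope are purely illustrative): the credential built above can be handed to any Azure SDK client, or asked for a bearer token directly.

from azure.identity import ClientSecretCredential

credential = ClientSecretCredential(
    tenant_id="<tenant-id>",        # placeholder values, for illustration only
    client_id="<client-id>",
    client_secret="<client-secret>",
)
# get_token returns an AccessToken with .token and .expires_on fields.
token = credential.get_token("https://management.azure.com/.default")
print(token.expires_on)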
Example #2
    def _generate_oauth_token(self):

        return ClientSecretCredential(
            self.settings.ACTIVE_DIRECTORY_TENANT_ID,
            self.settings.ACTIVE_DIRECTORY_APPLICATION_ID,
            self.settings.ACTIVE_DIRECTORY_APPLICATION_SECRET)
Example #3
taskclusterOptions = {'rootUrl': os.environ['TASKCLUSTER_PROXY_URL']}

auth = taskcluster.Auth(taskclusterOptions)
queue = taskcluster.Queue(taskclusterOptions)
index = taskcluster.Index(taskcluster.optionsFromEnvironment())
secrets = taskcluster.Secrets(taskclusterOptions)

secret = secrets.get('project/relops/image-builder/dev')['secret']

azureDeployment = 'azure_gamma'  # if 'stage.taskcluster.nonprod' in os.environ['TASKCLUSTER_ROOT_URL'] else 'azure_alpha'
platformClient = {
    'azure':
    ComputeManagementClient(
        ClientSecretCredential(
            tenant_id=secret[azureDeployment]['tenant_id'],
            client_id=secret[azureDeployment]['app_id'],
            client_secret=secret[azureDeployment]['password']),
        secret[azureDeployment]['subscription_id'])
}

commitSha = os.getenv('GITHUB_HEAD_SHA')
allKeyConfigPaths = glob.glob('{}/../config/win*.yaml'.format(
    os.path.dirname(__file__)))
includeKeys = list(map(lambda x: pathlib.Path(x).stem, allKeyConfigPaths))
includePools = [
]  #[poolName for poolNames in map(lambda configPath: map(lambda pool: '{}/{}'.format(pool['domain'], pool['variant']), yaml.safe_load(open(configPath, 'r'))['manager']['pool']), allKeyConfigPaths) for poolName in poolNames]
includeRegions = sorted(
    list(
        set([
            region for regions in map(
                lambda configPath: map(
Example #4
def test_no_scopes():
    """The credential should raise ValueError when get_token is called with no scopes"""

    credential = ClientSecretCredential("tenant-id", "client-id", "client-secret")
    with pytest.raises(ValueError):
        credential.get_token()
        default_params = {
            k: (v if v not in ('True', 'False') else eval(v))
            for k, v in default_params.items()
        }

        args = SimpleNamespace(**default_params)

        SERVICE_PRINCIPAL_SECRET = json.load(
            open("config_test.json"))["SERVICE_PRINCIPAL_SECRET"]

    if args.log_analytics_workspace_id is None or not (
            args.log_analytics_workspace_id.strip()):
        logger = LogAnalyticsLogger(name="[calendar_information_extractor]")
    else:
        credential = ClientSecretCredential(
            tenant_id=args.directory_id,
            client_id=args.application_id,
            client_secret=SERVICE_PRINCIPAL_SECRET)

        client = SecretClient(vault_url=args.key_vault_url,
                              credential=credential)

        try:
            logAnalyticsApiKey = client.get_secret(
                name=args.log_analytics_workspace_key_name).value
            logger = LogAnalyticsLogger(
                workspace_id=args.log_analytics_workspace_id,
                shared_key=logAnalyticsApiKey,
                log_type="CalendarInformationExtractor",
                log_server_time=True,
                name="[calendar_information_extractor]")
        except Exception as e:
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The delete_storage_account function deletes the Storage Account created for the Terraform state files.
from azure.identity import ClientSecretCredential
from azure.mgmt.storage import StorageManagementClient
import os
import sys

# The following credentials need to be passed as environment variables.
credential = ClientSecretCredential(tenant_id=os.getenv('tenant_id'),
                                    client_id=os.getenv('client_id'),
                                    client_secret=os.getenv('client_secret'))
subscription_id = os.getenv('subscription_id')


def delete_storage_account(resource_group_name, storage_account_name):
    """Delete the Storage Account created for the Terraform state files."""
    storage_client = StorageManagementClient(credential, subscription_id)
    try:
        storage_client.storage_accounts.delete(resource_group_name,
                                               storage_account_name)
        print("Storage Account {} deleted".format(storage_account_name))
    except Exception:
        print("Storage Account does not exist")

Example #7
    def __init__(self,
                 cloud_endpoints,
                 authorization_file=None,
                 subscription_id_override=None):
        # type: (*str, *str) -> None

        if authorization_file:
            with open(authorization_file) as json_file:
                self._auth_params = json.load(json_file)
        else:
            self._auth_params = {
                'client_id':
                os.environ.get(constants.ENV_CLIENT_ID),
                'client_secret':
                os.environ.get(constants.ENV_CLIENT_SECRET),
                'access_token':
                os.environ.get(constants.ENV_ACCESS_TOKEN),
                'tenant_id':
                os.environ.get(constants.ENV_TENANT_ID),
                'use_msi':
                bool(os.environ.get(constants.ENV_USE_MSI)),
                'subscription_id':
                os.environ.get(constants.ENV_SUB_ID),
                'keyvault_client_id':
                os.environ.get(constants.ENV_KEYVAULT_CLIENT_ID),
                'keyvault_secret_id':
                os.environ.get(constants.ENV_KEYVAULT_SECRET_ID),
                'enable_cli_auth':
                True
            }

        self._auth_params[
            'authority'] = cloud_endpoints.endpoints.active_directory

        keyvault_client_id = self._auth_params.get('keyvault_client_id')
        keyvault_secret_id = self._auth_params.get('keyvault_secret_id')

        # If user provided KeyVault secret, we will pull auth params information from it
        try:
            if keyvault_secret_id:
                self._auth_params.update(
                    json.loads(
                        get_keyvault_secret(keyvault_client_id,
                                            keyvault_secret_id)))
        except HTTPError as e:
            e.message = 'Failed to retrieve SP credential ' \
                        'from Key Vault with client id: {0}'.format(keyvault_client_id)
            raise

        self._credential = None
        if self._auth_params.get('access_token') is not None:
            auth_name = 'Access Token'
            pass
        elif (self._auth_params.get('client_id')
              and self._auth_params.get('client_secret')
              and self._auth_params.get('tenant_id')):
            auth_name = 'Principal'
            self._credential = ClientSecretCredential(
                client_id=self._auth_params['client_id'],
                client_secret=self._auth_params['client_secret'],
                tenant_id=self._auth_params['tenant_id'],
                authority=self._auth_params['authority'])
        elif self._auth_params.get('use_msi'):
            auth_name = 'MSI'
            self._credential = ManagedIdentityCredential(
                client_id=self._auth_params.get('client_id'))
        elif self._auth_params.get('enable_cli_auth'):
            auth_name = 'Azure CLI'
            self._credential = AzureCliCredential()
            account_info = _run_command('az account show --output json')
            account_json = json.loads(account_info)
            self._auth_params['subscription_id'] = account_json['id']
            self._auth_params['tenant_id'] = account_json['tenantId']

        if subscription_id_override is not None:
            self._auth_params['subscription_id'] = subscription_id_override

        self._subscription_id = self._auth_params['subscription_id']
        self._tenant_id = self._auth_params['tenant_id']
        log.info('Authenticated [%s | %s%s]', auth_name, self.subscription_id,
                 ' | Authorization File' if authorization_file else '')
Example #8
 def connect(self):
     account = AzureAccount.get_azure_account()
     for connection_step in account.connection_steps:
         credential = ClientSecretCredential(tenant_id=connection_step.tenant_id, client_id=connection_step.client_id,
                                             client_secret=connection_step.secret)
         self._client = self.CLIENT_CLASS(credential, connection_step.subscription_id)
import json

#read config file
config = json.loads(open('config.json','r').read())

#App details and further config
tenantID = config["tenantID"]
clientID = config["clientID"]
clientSecret = config["clientSecret"]
ehaddress = config["eventhubNamespaceURI"] #needs to be in FQDN format without protocal prefix, e.g. "myeventhubnamespace.servicebus.windows.net"

eventhub_name = "mitest"
consumer_group = '$Default'

#create identity
identity = ClientSecretCredential(tenant_id=tenantID, client_id=clientID, client_secret=clientSecret )

#create client object for consumer
ehclient = EventHubConsumerClient(credential= identity, fully_qualified_namespace=ehaddress, eventhub_name=eventhub_name, consumer_group = consumer_group)

#function that needs to be implemented and passed as a parameter to receive()
def on_event(partition_context, event):
    print("Received event from partition {}".format(partition_context.partition_id))
    print(event.body_as_str())
    partition_context.update_checkpoint(event)

ehclient.receive(
    on_event=on_event, 
    starting_position="-1",  # "-1" is from the beginning of the partition.
)
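A hedged variant of the same call (same client and callback as above; nothing new is assumed): receive() blocks until the client is closed, so it is commonly wrapped in a with-block and stopped from the keyboard.

try:
    with ehclient:
        ehclient.receive(on_event=on_event, starting_position="-1")
except KeyboardInterrupt:
    print("Receiving stopped.")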
# receive events from specified partition:
Example #10
 def _get_secret_credential(credentials):
     return ClientSecretCredential(
         tenant_id=credentials['tenantId'],
         client_id=credentials['clientId'],
         client_secret=credentials['clientSecret'])
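For illustration only (all values are placeholders and the helper is treated here as directly callable): the mapping passed in must use the same keys the snippet reads.

creds = {
    'tenantId': '<tenant-id>',
    'clientId': '<client-id>',
    'clientSecret': '<client-secret>',
}
credential = _get_secret_credential(creds)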
Example #11
def cleanup_database(spark_args):
    client_secret = SERVICE_PRINCIPAL_SECRET
    database = spark_args.jdbc_database
    jdbcHost = spark_args.jdbc_host
    jdbcPort = spark_args.jdbc_port
    jdbc_username_key_name = spark_args.jdbc_username_key_name
    jdbc_password_key_name = spark_args.jdbc_password_key_name
    use_msi_azure_sql_auth = spark_args.use_msi_azure_sql_auth
    application_id = spark_args.application_id
    directory_id = spark_args.directory_id

    connectionProperties = {
        'databaseName': database,
        'url': 'watercooler-sql-wt.database.windows.net',
        'hostNameInCertificate': '*.database.windows.net',
        'encrypt': 'true',
        'Driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver',
        'ServerCertificate': 'false',
        'trustServerCertificate': 'false',
        'loginTimeout': '30'
    }

    if use_msi_azure_sql_auth:
        sts_url = "https://login.microsoftonline.com/" + directory_id
        auth_context = AuthenticationContext(sts_url)
        token_obj = auth_context.acquire_token_with_client_credentials(
            "https://database.windows.net/", application_id, client_secret)
        access_token = token_obj['accessToken']
        connectionProperties['accessToken'] = access_token

    else:
        service_principal_credential = ClientSecretCredential(
            tenant_id=spark_args.directory_id,
            client_id=spark_args.application_id,
            client_secret=SERVICE_PRINCIPAL_SECRET)
        secret_client = SecretClient(vault_url=spark_args.key_vault_url,
                                     credential=service_principal_credential)

        connectionProperties["user"] = secret_client.get_secret(
            name=jdbc_username_key_name).value
        connectionProperties["password"] = secret_client.get_secret(
            name=jdbc_password_key_name).value

    connection = jaydebeapi.connect(
        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
        f"jdbc:sqlserver://{jdbcHost}:{jdbcPort};databaseName={database};",
        connectionProperties)
    cursor = connection.cursor()

    truncate_groups_per_day = "truncate table groups_per_day;"
    truncate_groups_per_week = "truncate table groups_per_week;"
    truncate_members_group_personal_meetings = "truncate table members_group_personal_meetings;"
    members_to_group_participation = "truncate table members_to_group_participation;"

    cursor.execute(truncate_groups_per_day)
    logger.info("Truncated groups_per_day table.")

    cursor.execute(truncate_groups_per_week)
    logger.info("Truncated groups_per_week table.")

    cursor.execute(truncate_members_group_personal_meetings)
    logger.info("Truncated members_group_personal_meetings table.")

    cursor.execute(members_to_group_participation)
    logger.info("Truncated members_to_group_participation table.")

    cursor.close()
    connection.close()
Example #12
def export_data_to_azure_sql(spark_args):
    application_id = spark_args.application_id
    directory_id = spark_args.directory_id
    adb_secret_scope = spark_args.adb_secret_scope_name
    adb_sp_client_key_secret_name = spark_args.adb_sp_client_key_secret_name

    employee_profile = spark_args.employee_profile_file_name
    groups_per_day_file = spark_args.groups_per_day_file_name
    groups_per_week = spark_args.groups_per_weeks_file_name
    members_group_personal_meetings = spark_args.members_group_personal_meetings_file_name
    members_to_group_participation = spark_args.members_to_group_participation_file_name

    database = spark_args.jdbc_database
    jdbc_host = spark_args.jdbc_host
    jdbc_port = spark_args.jdbc_port
    use_msi_azure_sql_auth = spark_args.use_msi_azure_sql_auth
    key_vault_url = spark_args.key_vault_url
    jdbc_username_key_name = spark_args.jdbc_username_key_name
    jdbc_password_key_name = spark_args.jdbc_password_key_name
    jdbc_username = spark_args.jdbc_user  # this parameter should be used only for running the application locally
    jdbc_password = spark_args.jdbc_password  # this parameter should be used only for running the application locally
    export_batch_size = int(spark_args.export_batch_size)

    base_folder = spark_args.csv_input_data_path

    full_base_path = retrieve_latest_run(base_folder)

    logger.info(f"*******#####Result {full_base_path}")

    employee_profile_file = os.path.join(full_base_path, employee_profile)
    groups_per_day_file = os.path.join(full_base_path, groups_per_day_file)
    groups_per_week_file = os.path.join(full_base_path, groups_per_week)
    members_group_personal_meetings_file = os.path.join(
        full_base_path, members_group_personal_meetings)
    members_to_group_participation_file = os.path.join(
        full_base_path, members_to_group_participation)

    client_secret = SERVICE_PRINCIPAL_SECRET if SERVICE_PRINCIPAL_SECRET is not None else \
        dbutils.secrets.get(scope=adb_secret_scope, key=adb_sp_client_key_secret_name)

    if application_id is None:
        raise ValueError("Missing application_id parameter!")
    if directory_id is None:
        raise ValueError("Missing directory_id parameter!")
    if adb_secret_scope is None:
        raise ValueError("Missing adb_secret_scope_name parameter!")
    if adb_sp_client_key_secret_name is None:
        raise ValueError("Missing adb_sp_client_key_secret_name parameter!")
    if database is None:
        raise ValueError("Missing database parameter!")
    if jdbc_host is None:
        raise ValueError("Missing jdbc_host parameter!")
    if jdbc_port is None:
        raise ValueError("Missing jdbc_port parameter!")
    if use_msi_azure_sql_auth is None:
        raise ValueError("Missing use_msi_azure_sql_auth parameter!")
    if key_vault_url is None:
        raise ValueError("Missing key_vault_url parameter!")
    if jdbc_username_key_name is None:
        raise ValueError("Missing jdbc_username_key_name parameter!")
    if jdbc_password_key_name is None:
        raise ValueError("Missing jdbc_password_key_name parameter!")
    if jdbc_username is None:
        raise ValueError("Missing jdbc_username parameter!")
    if jdbc_password is None:
        raise ValueError("Missing jdbc_password parameter!")
    if export_batch_size is None:
        raise ValueError("Missing export_batch_size parameter!")

    connectionProperties = {
        'databaseName': database,
        'url': 'watercooler-sql-wt.database.windows.net',
        'hostNameInCertificate': '*.database.windows.net',
        'encrypt': 'true',
        'Driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver',
        'ServerCertificate': 'false',
        'trustServerCertificate': 'false',
        'loginTimeout': '30'
    }
    if use_msi_azure_sql_auth:
        sts_url = "https://login.microsoftonline.com/" + directory_id
        auth_context = AuthenticationContext(sts_url)
        token_obj = auth_context.acquire_token_with_client_credentials(
            "https://database.windows.net/", application_id, client_secret)
        access_token = token_obj['accessToken']
        connectionProperties['accessToken'] = access_token

    else:
        service_principal_credential = ClientSecretCredential(
            tenant_id=spark_args.directory_id,
            client_id=spark_args.application_id,
            client_secret=SERVICE_PRINCIPAL_SECRET)
        secret_client = SecretClient(vault_url=spark_args.key_vault_url,
                                     credential=service_principal_credential)

        if jdbc_username_key_name and jdbc_password_key_name:
            connectionProperties["user"] = secret_client.get_secret(
                name=jdbc_username_key_name).value
            connectionProperties["password"] = secret_client.get_secret(
                name=jdbc_password_key_name).value
        else:
            connectionProperties["user"] = jdbc_username
            connectionProperties["password"] = jdbc_password
            connectionProperties["encrypt"] = "false"

    connection = jaydebeapi.connect(
        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
        f"jdbc:sqlserver://{jdbc_host}:{jdbc_port};databaseName={database};",
        connectionProperties)

    cursor = connection.cursor()

    # logger.info(f"*******#####Processing {employee_profile_file}")
    # write_employee_profile_to_az_sql(employee_profile_file, export_batch_size, cursor)

    logger.info("*******#####Processing {groups_per_day_file}")
    write_groups_per_day_to_az_sql(groups_per_day_file, export_batch_size,
                                   cursor)

    logger.info(f"*******#####Processing {groups_per_week_file}")
    write_groups_per_week_to_az_sql(groups_per_week_file, export_batch_size,
                                    cursor)

    logger.info(
        f"*******#####Processing {members_group_personal_meetings_file}")
    write_members_group_personal_meetings_to_az_sql(
        members_group_personal_meetings_file, export_batch_size, cursor)

    logger.info(
        f"*******#####Processing {members_to_group_participation_file}")
    write_members_to_group_participation_to_az_sql(
        members_to_group_participation_file, export_batch_size, cursor)

    cursor.close()
    connection.close()
    def init(self):

        self.provisioning_host = self.data["ProvisioningHost"]

        if self.data["UseKeyVault"]:

            self.logger.info("[USING KEY VAULT SECRETS]")

            # key vault account uri
            key_vault_uri = self.data["KeyVaultSecrets"]["KeyVaultUri"]
            self.logger.debug("[KEY VAULT URI] %s" % key_vault_uri)

            tenant_id = self.data["KeyVaultSecrets"]["TenantId"]
            client_id = self.data["KeyVaultSecrets"]["ClientId"]
            client_secret = self.data["KeyVaultSecrets"]["ClientSecret"]

            # Get access to Key Vault Secrets
            credential = ClientSecretCredential(tenant_id, client_id,
                                                client_secret)
            secret_client = SecretClient(vault_url=key_vault_uri,
                                         credential=credential)

            self.logger.debug("[credential] %s" % credential)
            self.logger.debug("[secret_client] %s" % secret_client)

            # Read all of our Secrets for Accessing IoT Central
            self.scope_id = secret_client.get_secret(
                self.data["KeyVaultSecrets"]["ScopeId"])
            self.device_primary_key = secret_client.get_secret(
                self.data["KeyVaultSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Primary"])
            self.device_secondary_key = secret_client.get_secret(
                self.data["KeyVaultSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Secondary"])
            self.gateway_primary_key = secret_client.get_secret(
                self.data["KeyVaultSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Primary"])
            self.gateway_secondary_key = secret_client.get_secret(
                self.data["KeyVaultSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Secondary"])

        else:

            # Read all of our LOCAL Secrets for Accessing IoT Central
            self.logger.info("[USING LOCAL SECRETS]")
            self.scope_id = self.data["LocalSecrets"]["ScopeId"]
            self.device_primary_key = self.data["LocalSecrets"][
                "DeviceConnect"]["SaSKeys"]["Primary"]
            self.device_secondary_key = self.data["LocalSecrets"][
                "DeviceConnect"]["SaSKeys"]["Secondary"]
            self.gateway_primary_key = self.data["LocalSecrets"][
                "GatewayConnect"]["SaSKeys"]["Primary"]
            self.gateway_secondary_key = self.data["LocalSecrets"][
                "GatewayConnect"]["SaSKeys"]["Secondary"]

        # Debug Only
        self.logger.debug("[SCOPE ID]: %s" % self.scope_id)
        self.logger.debug("[DEVICE PRIMARY KEY]: %s" % self.device_primary_key)
        self.logger.debug("[DEVICE SECONDARY KEY]: %s" %
                          self.device_secondary_key)
        self.logger.debug("[GATEWAY PRIMARY KEY]: %s" %
                          self.gateway_primary_key)
        self.logger.debug("[GATEWAY SECONDARY KEY]: %s" %
                          self.gateway_secondary_key)
        return
Example #14
 def get_conn(self) -> DataFactoryManagementClient:
     credentials = ClientSecretCredential(client_id=self.conn.login,
                                          client_secret=self.conn.password,
                                          tenant_id=self.tenant_id)
     return DataFactoryManagementClient(credentials, self.subscription_id)
    async def provision_devices(self):

        # Load the Devices Cache File for any devices
        # that have already been provisioned
        devicescache = DevicesCache(self.logger)
        self.logger.info("[DEVICES] devicescache.data Count %s" %
                         str(len(devicescache.data["Devices"])))

        # Make a working copy of the cache file
        self.data = devicescache.data
        self.data["Devices"] = [
            x for x in devicescache.data["Devices"]
            if x["DeviceName"] == "Simulated Device"
        ]
        self.logger.info("[DEVICES] self.data Count %s" %
                         str(len(self.data["Devices"])))
        devicescache.load_file()
        self.devices_provision = devicescache.data
        self.devices_provision["Devices"] = [
            x for x in devicescache.data["Devices"]
            if x["DeviceName"] != "Simulated Device"
        ]
        self.logger.info("[DEVICES] self.devices_provision.data Count %s" %
                         str(len(self.devices_provision["Devices"])))

        # secrets
        scope_id = None
        device_primary_key = None
        device_secondary_key = None
        gateway_primary_key = None
        gateway_secondary_key = None

        # load the secrets
        secrets = Secrets(self.logger)
        if secrets.data["KeyVaultSecrets"]:
            self.logger.info("[USING KEY VAULT SECRETS]")

            # key vault account uri
            key_vault_uri = secrets.data["KeyVaultSecrets"]["KeyVaultUri"]
            self.logger.info("[KEY VAULT URI] %s" % key_vault_uri)

            tenant_id = secrets.data["KeyVaultSecrets"]["TenantId"]
            client_id = secrets.data["KeyVaultSecrets"]["ClientId"]
            client_secret = secrets.data["KeyVaultSecrets"]["ClientSecret"]

            # Get access to Key Vault Secrets
            credential = ClientSecretCredential(tenant_id, client_id,
                                                client_secret)
            self.logger.info("[credential] %s" % credential)
            secret_client = SecretClient(vault_url=key_vault_uri,
                                         credential=credential)
            self.logger.info("[secret_client] %s" % secret_client)

            # Read all of our Secrets for Accessing IoT Central
            scope_id = secret_client.get_secret(
                secrets.data["KeyVaultSecrets"]["ScopeId"])
            device_primary_key = secret_client.get_secret(
                secrets.data["KeyVaultSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Primary"])
            device_secondary_key = secret_client.get_secret(
                secrets.data["KeyVaultSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Secondary"])
            gateway_primary_key = secret_client.get_secret(
                secrets.data["KeyVaultSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Primary"])
            gateway_secondary_key = secret_client.get_secret(
                secrets.data["KeyVaultSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Secondary"])

        else:
            # Read all of our LOCAL Secrets for Accessing IoT Central
            self.logger.info("[USING LOCAL SECRETS]")
            scope_id = secret_client.get_secret(
                secrets.data["LocalSecrets"]["ScopeId"])
            device_primary_key = secret_client.get_secret(
                secrets.data["LocalSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Primary"])
            device_secondary_key = secret_client.get_secret(
                secrets.data["LocalSecrets"]["DeviceConnect"]["SaSKeys"]
                ["Secondary"])
            gateway_primary_key = secret_client.get_secret(
                secrets.data["LocalSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Primary"])
            gateway_secondary_key = secret_client.get_secret(
                secrets.data["LocalSecrets"]["GatewayConnect"]["SaSKeys"]
                ["Secondary"])

            # Verbose
            self.logger.info("[SCOPE ID]: %s" % scope_id.value)
            self.logger.info("[DEVICE PRIMARY KEY]: %s" %
                             device_primary_key.value)
            self.logger.info("[DEVICE SECONDARY KEY]: %s" %
                             device_secondary_key.value)
            self.logger.info("[GATEWAY PRIMARY KEY]: %s" %
                             gateway_primary_key.value)
            self.logger.info("[GATEWAY SECONDARY KEY]: %s" %
                             gateway_secondary_key.value)

        # Symmetric Key for handling Device Specific SaS Keys
        symmetrickey = SymmetricKey(self.logger)

        try:
            # Iterate the Discovered Devices and Provision
            # the devicescache.json file element [DeviceNamePrefix]...
            for device in self.devices_provision["Devices"]:
                provision_this_device = False

                if (self.provisioning_scope == "ALL"):
                    provision_this_device = True
                elif (self.provisioning_scope == "NEW"
                      and device["LastProvisioned"] == None):
                    provision_this_device = True
                elif (self.provisioning_scope == device["DeviceName"]):
                    provision_this_device = True

                if provision_this_device:
                    # Get a Device Specific Symmetric Key
                    device_symmetrickey = symmetrickey.compute_derived_symmetric_key(
                        device["DeviceName"], device_secondary_key.value)
                    self.logger.info("[SYMMETRIC KEY] %s" % device_symmetrickey)

                    # Provision the Device
                    self.logger.warning("[PROVISIONING] %s" %
                                        device["DeviceName"])
                    provisioning_device_client = ProvisioningDeviceClient.create_from_symmetric_key(
                        provisioning_host=secrets.data["ProvisioningHost"],
                        registration_id=device["DeviceName"],
                        id_scope=scope_id.value,
                        symmetric_key=device_symmetrickey,
                        websockets=True)

                    provisioning_device_client.provisioning_payload = '{"iotcModelId":"%s"}' % (
                        device["DCM"])
                    registration_result = await provisioning_device_client.register(
                    )

                    newDevice = {
                        "DeviceName":
                        device["DeviceName"],
                        "Address":
                        device["Address"],
                        "LastRSSI":
                        device["LastRSSI"],
                        "DCM":
                        device["DCM"],
                        "DeviceInfoInterface":
                        device["DeviceInfoInterface"],
                        "DeviceInfoInterfaceInstanceName":
                        device["DeviceInfoInterfaceInstanceName"],
                        "NanoBLEInterface":
                        device["NanoBLEInterface"],
                        "NanoBLEInterfaceInstanceName":
                        device["NanoBLEInterfaceInstanceName"],
                        "LastProvisioned":
                        str(datetime.datetime.now())
                    }
                    self.data["Devices"].append(newDevice)
                    continue
                else:
                    newDevice = {
                        "DeviceName":
                        device["DeviceName"],
                        "Address":
                        device["Address"],
                        "LastRSSI":
                        device["LastRSSI"],
                        "DCM":
                        device["DCM"],
                        "DeviceInfoInterface":
                        device["DeviceInfoInterface"],
                        "DeviceInfoInterfaceInstanceName":
                        device["DeviceInfoInterfaceInstanceName"],
                        "NanoBLEInterface":
                        device["NanoBLEInterface"],
                        "NanoBLEInterfaceInstanceName":
                        device["NanoBLEInterfaceInstanceName"],
                        "LastProvisioned":
                        None
                    }
                    self.data["Devices"].append(newDevice)
                    continue

        except Exception as ex:
            self.logger.error("[ERROR] %s" % ex)
            self.logger.error(
                "[TERMINATING] We encountered an error provisioning for BLE Devices"
            )
            return

        # Update the Cache
        devicescache.update_file(self.data)
        return
Example #16
 def __init__(self, sp_client_id, sp_pwd, tenant_id, subscription_id):
     self.sp_client_id = sp_client_id
     self.sp_pwd = sp_pwd
     self.tenant_id = tenant_id
     self.subscription_id = subscription_id
     self.credentials = ClientSecretCredential(self.tenant_id, self.sp_client_id, self.sp_pwd)
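A follow-on sketch, not part of the original class (ComputeManagementClient is an arbitrary example): the stored credential and subscription id would typically be handed to an Azure management client.

 def compute_client(self):
     # Illustrative helper only: any Azure management client accepts the
     # credential and subscription id stored in __init__ above.
     from azure.mgmt.compute import ComputeManagementClient
     return ComputeManagementClient(self.credentials, self.subscription_id)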
Example #17
def main(mytimer: func.TimerRequest) -> None:
    logging.info('Build 060121-1704')
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    if mytimer.past_due:
        logging.info('The timer is past due!')

    logging.info('Python timer trigger function ran at %s', utc_timestamp)

    # Verify the environment variables have been set
    #def verify_env_variables():
    logging.info("Verifying variables...")
    try:
        if 'CG_API_KEY' in os.environ:
            pass
        else:
            logging.info(
                f"ERROR : The CloudGuard API key has not been defined in environment variables"
            )
            sys.exit(0)
        if 'CG_API_SECRET' in os.environ:
            pass
        else:
            logging.info(
                f"ERROR : The CloudGuard API key secret not been defined in environment variables"
            )
            sys.exit(0)
        if 'AZURE_TENANT_ID' in os.environ:
            pass
        else:
            logging.info(
                f"ERROR : The Azure AD tenant ID has not been defined in environment variables"
            )
            sys.exit(0)
        if 'AZURE_CLIENT_ID' in os.environ:
            pass
        else:
            logging.info(
                f"ERROR : The Azure AD application ID has not been defined in environment variables"
            )
            sys.exit(0)
        if 'AZURE_CLIENT_SECRET' in os.environ:
            pass
        else:
            logging.info(
                f"ERROR : The Azure AD application secret key has not been defined in environment variables"
            )
            sys.exit(0)
    except:
        sys.exit(0)

#verify_env_variables()

# Set Azure AD credentials from the environment variables
    credentials = ClientSecretCredential(
        client_id=os.environ['AZURE_CLIENT_ID'],
        client_secret=os.environ['AZURE_CLIENT_SECRET'],
        tenant_id=os.environ['AZURE_TENANT_ID'])

    # Instantiate an instance of the Azure SDK Subscription Client
    sub_client = SubscriptionClient(credentials)

    # Read in required environment variables
    cg_api_key = os.environ['CG_API_KEY']
    cg_api_secret = os.environ['CG_API_SECRET']
    az_tenant = os.environ['AZURE_TENANT_ID']
    az_appid = os.environ['AZURE_CLIENT_ID']
    az_appkey = os.environ['AZURE_CLIENT_SECRET']

    # Set header parameters for CloudGuard HTTP POST
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }

    # Set the account mode for automatic CloudGuard onboarding - values are Read (default) or Manage
    cg_operation_mode = 'Read'

    # Run the subscription loop to add new or missing subscriptions to CloudGuard in read only mode
    #def list_subscriptions():
    try:
        for sub in sub_client.subscriptions.list():
            logging.info(
                f'Subscription found:, {sub.subscription_id}, {sub.display_name}'
            )
            payload = {
                'name': sub.display_name,
                'subscriptionId': sub.subscription_id,
                'tenantId': az_tenant,
                'credentials': {
                    'clientId': az_appid,
                    'clientPassword': az_appkey
                },
                'operationMode': cg_operation_mode,
                'vendor': 'azure'
            }
            r = requests.post('https://api.dome9.com/v2/AzureCloudAccount',
                              json=payload,
                              headers=headers,
                              auth=(cg_api_key, cg_api_secret))
            if r.status_code == 201:
                logging.info(
                    f'Subscription successfully added to CloudGuard: {sub.subscription_id}'
                )
            elif r.status_code == 400:
                logging.info(
                    f'There was an error with the subscription {sub.subscription_id}, please check credentials and that it does not already exist in CloudGuard'
                )
            elif r.status_code == 401:
                logging.info(
                    f'Bad credentials onboarding subscription to CloudGuard: {sub.subscription_id}'
                )
            else:
                logging.info(
                    f'Unknown error onboarding subscription to CloudGuard: {sub.subscription_id} Status Code: {r.status_code}'
                )
            logging.info(r.content)
        msg = "Operation complete"
        return msg
    except CloudError as e:
        logging.info(e)
# limitation of liability; and (iv) to indemnify, hold harmless, and defend Microsoft, its affiliates and
# suppliers from and against any third party claims or lawsuits, including attorneys’ fees, that arise or result
# from the use or distribution of the sample code.

# The sample below connects to an ADLS Gen2 account via a Service Principal and then lists its file systems

import os
from azure.storage.filedatalake import DataLakeServiceClient
from azure.identity import ClientSecretCredential

AZURE_CLIENT_ID = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
AZURE_TENANT_ID = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
AZURE_CLIENT_SECRET = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
AZURE_STORAGE_ACCOUNT_NAME = 'XXXXXXXXXXXXXXXXXXXXXXXXXX'

configcredentials = ClientSecretCredential(
    client_id=AZURE_CLIENT_ID,
    tenant_id=AZURE_TENANT_ID,
    client_secret=AZURE_CLIENT_SECRET)

# Construct the DatalakeServiceClient
service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format(
        "https",
        AZURE_STORAGE_ACCOUNT_NAME),
        credential=configcredentials)

# Performing Listing
file_systems = service_client.list_file_systems()
for file_system in file_systems:
    print(file_system.name)
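As a follow-on sketch (not part of the original sample; the file system name is hypothetical), the same service client can drill into a single file system and list its paths:

file_system_client = service_client.get_file_system_client("my-filesystem")
for path in file_system_client.get_paths():
    print(path.name)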
Example #19
An example to show authentication using AAD credentials
"""

import os
from azure.eventhub import EventHubClient
from azure.eventhub import EventData
from azure.identity import ClientSecretCredential

HOSTNAME = os.environ[
    'EVENT_HUB_HOSTNAME']  # <mynamespace>.servicebus.windows.net
EVENT_HUB = os.environ['EVENT_HUB_NAME']

USER = os.environ['EVENT_HUB_SAS_POLICY']
KEY = os.environ['EVENT_HUB_SAS_KEY']

CLIENT_ID = os.environ.get('AAD_CLIENT_ID')
SECRET = os.environ.get('AAD_SECRET')
TENANT_ID = os.environ.get('AAD_TENANT_ID')

credential = ClientSecretCredential(client_id=CLIENT_ID,
                                    client_secret=SECRET,
                                    tenant_id=TENANT_ID)
client = EventHubClient(host=HOSTNAME,
                        event_hub_path=EVENT_HUB,
                        credential=credential)

producer = client.create_producer(partition_id='0')
with producer:
    event = EventData(body='A single message')
    producer.send(event)
Example #20
import requests.exceptions

if len(sys.argv) == 2:
    config = json.load(open(sys.argv[1]))
else:
    config = json.load(open("parameters.local.json"))

tenant_id = config["tenant_id"]
client_id = config["client_id"]
client_secret = config["client_secret"]
scopes = config["scope"]
sample_user = config["sample_user"]

credential = ClientSecretCredential(
    tenant_id = tenant_id,
    client_id = client_id,
    client_secret = client_secret
)

graph_session = GraphSession(credential, scopes)

users = graph_session.get('/users')
print(json.dumps(users.json(), indent=2))

single_user = graph_session.get('/users/' + sample_user)
print(json.dumps(single_user.json(), indent=2))

body = {
    'message': {
        'subject': 'Python SDK Meet for lunch?',
        'body': {
SCHEMA_GROUP='default'
SCHEMA_STRING = """
{"namespace": "example.avro",
 "type": "record",
 "name": "User",
 "fields": [
     {"name": "name", "type": "string"},
     {"name": "favorite_number",  "type": ["int", "null"]},
     {"name": "favorite_color", "type": ["string", "null"]}
 ]
}"""


token_credential = ClientSecretCredential(
    tenant_id=TENANT_ID,
    client_id=CLIENT_ID,
    client_secret=CLIENT_SECRET
)

# For Managed Identity
# token_credential = DefaultAzureCredential()


def serialize(serializer):
    dict_data_ben = {"name": u"Ben", "favorite_number": 7, "favorite_color": u"red"}
    dict_data_alice = {"name": u"Alice", "favorite_number": 15, "favorite_color": u"green"}

    # Schema would be automatically registered into Schema Registry and cached locally.
    payload_ben = serializer.serialize(dict_data_ben, SCHEMA_STRING)
    # The second call won't trigger a service call.
    payload_alice = serializer.serialize(dict_data_alice, SCHEMA_STRING)
Example #22
    def get_conn(self) -> BlobServiceClient:
        """Return the BlobServiceClient object."""
        conn = self.get_connection(self.conn_id)
        extra = conn.extra_dejson or {}

        if self.public_read:
            # Here we use anonymous public read
            # more info
            # https://docs.microsoft.com/en-us/azure/storage/blobs/storage-manage-access-to-resources
            return BlobServiceClient(account_url=conn.host, **extra)

        connection_string = extra.pop(
            'connection_string',
            extra.pop('extra__wasb__connection_string', None))
        if connection_string:
            # connection_string auth takes priority
            return BlobServiceClient.from_connection_string(
                connection_string, **extra)

        shared_access_key = extra.pop(
            'shared_access_key',
            extra.pop('extra__wasb__shared_access_key', None))
        if shared_access_key:
            # using shared access key
            return BlobServiceClient(account_url=conn.host,
                                     credential=shared_access_key,
                                     **extra)

        tenant = extra.pop('tenant_id',
                           extra.pop('extra__wasb__tenant_id', None))
        if tenant:
            # use Active Directory auth
            app_id = conn.login
            app_secret = conn.password
            token_credential = ClientSecretCredential(tenant, app_id,
                                                      app_secret)
            return BlobServiceClient(account_url=conn.host,
                                     credential=token_credential,
                                     **extra)

        sas_token = extra.pop('sas_token',
                              extra.pop('extra__wasb__sas_token', None))
        if sas_token:
            if sas_token.startswith('https'):
                return BlobServiceClient(account_url=sas_token, **extra)
            else:
                return BlobServiceClient(
                    account_url=
                    f'https://{conn.login}.blob.core.windows.net/{sas_token}',
                    **extra)

        # Fall back to old auth (password) or use managed identity if not provided.
        credential = conn.password
        if not credential:
            credential = DefaultAzureCredential()
            self.log.info("Using DefaultAzureCredential as credential")
        return BlobServiceClient(
            account_url=f"https://{conn.login}.blob.core.windows.net/",
            credential=credential,
            **extra,
        )
Example #23
            pass
        else:
            print(
                "ERROR : The Azure AD application secret key has not been defined in environment variables"
            )
            sys.exit(0)
    except:
        sys.exit(0)


verify_env_variables()

# Set Azure AD credentials from the environment variables

credentials = ClientSecretCredential(
    client_id=os.environ['AZURE_CLIENT_ID'],
    client_secret=os.environ['AZURE_CLIENT_SECRET'],
    tenant_id=os.environ['AZURE_TENANT_ID'])

# Read in required environment variables
az_tenant = os.environ['AZURE_TENANT_ID']
az_appid = os.environ['AZURE_CLIENT_ID']
az_appkey = os.environ['AZURE_CLIENT_SECRET']

# INSTANTIATE SDK CLIENT INSTANCES
sub_client = SubscriptionClient(credentials)


# Connect to each subscription in turn and list all VMs, Functions and Azure SQL servers, collecting CloudGuard billable asset counts
def run_sizer():
    total_number_sql_servers = 0
    total_number_vms = 0
Example #24
import os    
from azure.storage.blob import BlockBlobService
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential


CLIENT_ID = '4b53bd8b-c698-479c-a9bb-464bcaf10ba0'
CLIENT_SECRET = 'QT65-xIYJv4_kLxDRN8B5Ow4E1k0-64~mG'
TENANT_ID = 'f25493ae-1c98-41d7-8a33-0be75f5fe603'
VAULT_NAME = 'smleurope-dev-weu-rg'
VAULT_URI = 'https://smleurope-dev-weu-kv.vault.azure.net/'

credential = ClientSecretCredential(TENANT_ID, CLIENT_ID, CLIENT_SECRET)
client = SecretClient(vault_url=VAULT_URI, credential=credential)

#client.get_secret('github-da')


STORAGE_ACCOUNT = 'smleuropedevweusa'
STORAGE_KEY = 'x3dMwsiGRfceIPoFjej78WbWculgS01LJm+NSbfpu6WLUNdeld7dBPb3O5xQhaeTy3EF+0xJ3jHAFLpoyH2T/Q=='
CONTAINER = 'excel-files'

os.environ['http_proxy']="http://httppxgot.srv.volvo.com:8080"
os.environ['https_proxy']="https://httppxgot.srv.volvo.com:8080"
    
blob_service = BlockBlobService(STORAGE_ACCOUNT, STORAGE_KEY)


def get_file(filename):
    
    blob_service.get_blob_to_path(CONTAINER, filename, filename)
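A usage sketch only (the blob name is hypothetical): calling the helper downloads that blob from the 'excel-files' container into the current working directory.

get_file('example.xlsx')  # hypothetical blob name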
def run_spark_job(spark_args):
    """
    :param spark_args:
    :type spark_args:
    :return:
    :rtype:
    """
    storage_account_name = spark_args.storage_account_name
    input_container = spark_args.input_container_name
    output_container = spark_args.output_container_name
    input_folder = spark_args.input_folder_path
    output_folder = spark_args.output_folder_path
    application_id = spark_args.application_id
    directory_id = spark_args.directory_id
    adb_secret_scope = spark_args.adb_secret_scope_name
    adb_sp_client_key_secret_name = spark_args.adb_sp_client_key_secret_name

    if storage_account_name is None:
        raise ValueError("Missing storage_account_name parameter!")
    if output_container is None:
        raise ValueError("Missing output_container_name parameter!")
    if output_folder is None:
        raise ValueError("Missing output_folder_path parameter!")
    if application_id is None:
        raise ValueError("Missing application_id parameter!")
    if directory_id is None:
        raise ValueError("Missing directory_id parameter!")
    if adb_secret_scope is None:
        raise ValueError("Missing adb_secret_scope_name parameter!")
    if adb_sp_client_key_secret_name is None:
        raise ValueError("Missing adb_sp_client_key_secret_name parameter!")

    client_secret = SERVICE_PRINCIPAL_SECRET if SERVICE_PRINCIPAL_SECRET is not None else \
        dbutils.secrets.get(scope=adb_secret_scope, key=adb_sp_client_key_secret_name)

    # conf  = SparkConf().setAppName("CalendarProcesor").setMaster("local")
    spark = SparkSession.builder.master("local").getOrCreate()
    # spark = SparkSession.builder.config(conf).getOrCreate()
    # spark.conf.set("spark.app.name", "CalendarProcesor")
    spark.sparkContext.setLogLevel("ERROR")

    spark.conf.set(
        f"fs.azure.account.auth.type.{storage_account_name}.dfs.core.windows.net",
        "OAuth")
    spark.conf.set(
        f"fs.azure.account.oauth.provider.type.{storage_account_name}.dfs.core.windows.net",
        "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider")
    spark.conf.set(
        f"fs.azure.account.oauth2.client.id.{storage_account_name}.dfs.core.windows.net",
        application_id)
    spark.conf.set(
        f"fs.azure.account.oauth2.client.secret.{storage_account_name}.dfs.core.windows.net",
        client_secret)
    spark.conf.set(
        f"fs.azure.account.oauth2.client.endpoint.{storage_account_name}.dfs.core.windows.net",
        f"https://login.microsoftonline.com/{directory_id}/oauth2/token")

    spark.sparkContext.setJobGroup("Running calendar processing",
                                   f"[calendar_information_extractor]")

    logger.info(
        "[calendar_information_extractor] Preparing the jobs for analyzing the calendar data"
    )
    container_client_credential = ClientSecretCredential(
        tenant_id=directory_id,
        client_id=application_id,
        client_secret=client_secret)

    blob_service_client = BlobServiceClient(
        account_url=f"https://{storage_account_name}.blob.core.windows.net",
        credential=container_client_credential)

    container_client = blob_service_client.get_container_client(
        input_container)

    if input_container is not None and input_folder is not None:

        input_folder_name = input_folder
        list_of_json_files_from_folder = []
        if input_folder_name.startswith("/"):
            input_folder_name = input_folder_name[1:]
        # next will determine the latest folder to be used
        for entry in container_client.list_blobs(
                name_starts_with=input_folder_name + "/"):
            list_of_json_files_from_folder.append(entry.name)

        collected_folders = []
        for entry in list_of_json_files_from_folder:
            if "events_" not in entry: continue
            tokens = entry.split("/")
            standard_folder_name = tokens[1]
            try:
                tag = int(standard_folder_name.replace("events_", ""))
                collected_folders.append(tag)
            except Exception as e:
                pass
        if not len(collected_folders):
            raise Exception("Could not retrieve the latest run")
        collected_folders.sort(reverse=True)
        input_folder_name = input_folder_name + "/" + "events_" + str(
            collected_folders[0])

        wasb_file_path = f"abfss://{input_container}@{storage_account_name}.dfs.core.windows.net/{input_folder_name}"
        logger.info(
            f"[calendar_information_extractor] input wasb_file_path: {wasb_file_path}"
        )

        input_df = spark.read.json(wasb_file_path)

        extract_essential_calendar_info_and_write_to_output(
            input_df=input_df,
            output_folder=output_folder,
            output_container=output_container,
            storage_account_name=storage_account_name,
            container_client=container_client)
    else:
        raise ValueError(
            "Not enough arguments given in order to read input data: jdbc-database & input-container are missing."
        )
Example #26
def run_spark_job(args):
    storage_account_name = args.storage_account_name
    input_container = args.input_container_name
    output_container = args.output_container_name
    input_folder = args.input_folder_path
    output_folder = args.output_folder_path
    application_id = args.application_id
    directory_id = args.directory_id
    adbSecretScope = args.adb_secret_scope_name
    adbSPClientKeySecretName = args.adb_sp_client_key_secret_name
    database = args.jdbc_database
    jdbcHost = args.jdbc_host
    jdbcPort = args.jdbc_port
    jdbc_username_key_name = args.jdbc_username_key_name
    jdbc_password_key_name = args.jdbc_password_key_name
    use_msi_azure_sql_auth = args.use_msi_azure_sql_auth

    if storage_account_name is None:
        raise ValueError("Missing storage_account_name parameter!")
    if output_container is None:
        raise ValueError("Missing output_container_name parameter!")
    if output_folder is None:
        raise ValueError("Missing output_folder_path parameter!")
    if application_id is None:
        raise ValueError("Missing application_id parameter!")
    if directory_id is None:
        raise ValueError("Missing directory_id parameter!")
    if adbSecretScope is None:
        raise ValueError("Missing adb_secret_scope_name parameter!")
    if adbSPClientKeySecretName is None:
        raise ValueError("Missing adb_sp_client_key_secret_name parameter!")

    client_secret = SERVICE_PRINCIPAL_SECRET if SERVICE_PRINCIPAL_SECRET is not None else dbutils.secrets.get(scope=adbSecretScope, key=adbSPClientKeySecretName)

    spark = SparkSession.builder.master("local").getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    spark.conf.set(f"fs.azure.account.auth.type.{storage_account_name}.dfs.core.windows.net", "OAuth")
    spark.conf.set(f"fs.azure.account.oauth.provider.type.{storage_account_name}.dfs.core.windows.net",
                   "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider")
    spark.conf.set(f"fs.azure.account.oauth2.client.id.{storage_account_name}.dfs.core.windows.net", application_id)
    spark.conf.set(f"fs.azure.account.oauth2.client.secret.{storage_account_name}.dfs.core.windows.net", client_secret)
    spark.conf.set(f"fs.azure.account.oauth2.client.endpoint.{storage_account_name}.dfs.core.windows.net",
                   f"https://login.microsoftonline.com/{directory_id}/oauth2/token")

    spark.sparkContext.setJobGroup("Running mail processing", f"[employee_profile_processor]")

    logger.info("[employee_profile_processor] Preparing the jobs for data lemmatization and augmentation")
    container_client_credential = ClientSecretCredential(tenant_id=directory_id, client_id=application_id,
                                                         client_secret=client_secret)
    blob_service_client = BlobServiceClient(account_url=f"https://{storage_account_name}.blob.core.windows.net",
                                            credential=container_client_credential)
    containerClient = blob_service_client.get_container_client(output_container)

    index = 0

    print(f"Database: ", database)
    print(f"Host: ", jdbcHost)
    print(f"Port: ", jdbcPort)

    employees_res_df = None

    if database is not None and jdbcHost is not None and jdbcPort is not None:

        table = f"""
            (select ep.id, ep.mail, ep.display_name , ep.about_me , ep.job_title , ep.company_name , ep.department , ep.office_location , ep.city , ep.state , ep.country, e_skills.skills, e_responsibilities.responsibilities from {database}.dbo.employee_profile ep
            left join (select cs.employee_profile_id, cs.employee_profile_version, string_agg(cs.skill, ',') as skills from {database}.dbo.employee_skills cs group by cs.employee_profile_id, cs.employee_profile_version ) as e_skills on ep.id=e_skills.employee_profile_id and ep.version=e_skills.employee_profile_version 
            left join (select er.employee_profile_id, er.employee_profile_version, string_agg(er.responsibility, ',') as responsibilities from {database}.dbo.employee_responsibilities er group by er.employee_profile_id, er.employee_profile_version) as e_responsibilities on ep.id=e_responsibilities.employee_profile_id and ep.version=e_responsibilities.employee_profile_version 
            where ep.version=(select CONVERT(datetime2, JSON_VALUE(configs, '$.date')) from configurations c where c.[type] = 'LatestVersionOfEmployeeProfile')
            ) foo
            """

        input_df = generate_dataframe_from_table(spark, args, table)

        employees_res_df = enrich_user_profiles(input_df=input_df,
                                                output_folder=output_folder,
                                                containerClient=containerClient)

    elif input_container is not None and input_folder is not None:

        input_folder_name = input_folder
        list_of_json_files_from_folder = []
        if input_folder_name.startswith("/"):
            input_folder_name = input_folder_name[1:]

        for entry in containerClient.list_blobs():
            if entry.name.startswith(input_folder_name + "/") and entry.size \
                    and entry.name.lower().endswith("json"):
                list_of_json_files_from_folder.append(entry.name)

        last_output_full_path = ""

        for index, json_file_to_process in enumerate(list_of_json_files_from_folder):
            logger.info(f"[employee_profile_processor] processing: {json_file_to_process}")
            wasb_file_path = f"abfss://{input_container}@{storage_account_name}.dfs.core.windows.net/{json_file_to_process}"
            logger.info(f"[employee_profile_processor] input wasb_file_path: {wasb_file_path}")

            input_df = spark.read.json(wasb_file_path)

            result_df = enrich_user_profiles(input_df=input_df,
                                             output_folder=output_folder,
                                             containerClient=containerClient)

            if employees_res_df is None:
                employees_res_df = result_df
            else:
                employees_res_df = employees_res_df.union(result_df)

    else:
        raise ValueError(
            "Not enough arguments given in order to read input data: jdbc-database & input-container are missing.")

    hr_df = get_hr_df(spark, args)

    employees_res_df = employees_res_df.join(hr_df, on=['mail'], how='left')

    out_file_name = f"out_{str(index).zfill(4)}" + str(datetime.now().strftime("%Y_%m_%d_%H_%M"))
    out_file_full_path = os.path.join(output_folder, out_file_name)

    wasb_output_file_path = f"abfss://{output_container}@{storage_account_name}.dfs.core.windows.net/{out_file_full_path}"
    logger.info(f"[employee_profile_processor] output wasb_file_path: {wasb_output_file_path}")

    employees_res_df.write.mode("overwrite").json(wasb_output_file_path)

    list_of_files_to_clean = []
    for entry in containerClient.list_blobs(name_starts_with=out_file_full_path + "/"):
        if entry.name.lower().endswith("json") == False or entry.size == 0:
            logger.debug("detected file to delete: " + str(entry.name))
            list_of_files_to_clean.append(entry.name)

    for file_to_del in list_of_files_to_clean:
        blobclient = containerClient.get_blob_client(blob=file_to_del)
        logger.debug(f"Delete {file_to_del}")
        blobclient.delete_blob()

        last_output_full_path = out_file_full_path
Example #27
#!/usr/bin/env python3
import os
import logging
from azure.identity import ClientSecretCredential
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.subscription import SubscriptionClient
from azure.mgmt.compute import ComputeManagementClient

client_id = os.environ["ARM_CLIENT_ID"]
secret = os.environ["ARM_CLIENT_SECRET"]
tenant = os.environ["TENANT_ID"]

CREDENTIALS = ClientSecretCredential(
    client_id=client_id,
    client_secret=secret,
    tenant_id=tenant,
)


def list_subscriptions():
    client = SubscriptionClient(CREDENTIALS)
    # ignore disabled subscriptions
    subs = [
        sub.subscription_id for sub in client.subscriptions.list()
        if sub.state == "Enabled"
    ]

    return subs


def list_resource_groups():
 def get_access_token(tenant_id, client_id, client_secret, scope):
     credentials = ClientSecretCredential(tenant_id=tenant_id,
                                          client_id=client_id,
                                          client_secret=client_secret)
     return credentials.get_token(scope)
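An illustrative call (the ARM scope below is just an example, not taken from the original): get_token returns an AccessToken whose .token and .expires_on fields can be inspected.

token = get_access_token("<tenant-id>", "<client-id>", "<client-secret>",
                         "https://management.azure.com/.default")
print(token.expires_on)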
#!/usr/bin/env python

from azure.identity import ClientSecretCredential
from azure.mgmt.compute import ComputeManagementClient
from relay_sdk import Interface, Dynamic as D
import logging

logging.basicConfig(level=logging.WARNING)

relay = Interface()

credentials = ClientSecretCredential(
    client_id=relay.get(D.azure.connection.clientID),
    client_secret=relay.get(D.azure.connection.secret),
    tenant_id=relay.get(D.azure.connection.tenantID))
subscription_id = relay.get(D.azure.connection.subscriptionID)
compute_client = ComputeManagementClient(credentials, subscription_id)

# Getting resource ids & options
resource_ids = None
wait = False

try:
    resource_ids = relay.get(D.resourceIDs)
except:
    print('No Resource IDs found. Exiting.')
    exit()

try:
    wait = relay.get(D.waitForDeletion)
except:
Example #30
auth = taskcluster.Auth(taskclusterOptions)
queue = taskcluster.Queue(taskclusterOptions)
index = taskcluster.Index(taskclusterOptions)
secrets = taskcluster.Secrets(taskclusterOptions)

secret = secrets.get('project/relops/image-builder/dev')['secret']

platformClient = {
    'azure': ComputeManagementClient(
        #ServicePrincipalCredentials(
        #    client_id = secret['azure']['id'],
        #    secret = secret['azure']['key'],
        #    tenant = secret['azure']['account']),
        ClientSecretCredential(
            tenant_id=secret['azure']['account'],
            client_id=secret['azure']['id'],
            client_secret=secret['azure']['key']),
        secret['azure']['subscription'])
}

if runEnvironment == 'travis':
    commitSha = os.getenv('TRAVIS_COMMIT')
    taskGroupId = slugid.nice()
    createTask(
        queue = queue,
        taskId = taskGroupId,
        taskName = '00 :: task group placeholder',
        taskDescription = 'this task only serves as a task grouping when triggered from travis. it does no actual work',
        provisioner = 'relops-3',
        workerType = 'win2019',
        commands = [ 'echo "task: {}, sha: {}"'.format(taskGroupId, commitSha) ])