def create_connection_from_config():
    """Create Azure API clients (resource, compute, network).

    Prefers an SDK auth file pointed to by the ``AZURE_AUTH_LOCATION``
    environment variable; otherwise falls back to service-principal
    credentials taken from environment variables, exiting the process
    when neither source is configured.

    :return: tuple of (resource_client, compute_client, network_client)
    """
    if 'AZURE_AUTH_LOCATION' in os.environ:
        # Auth file configured: let the SDK read credentials from it.
        resource_client = get_client_from_auth_file(ResourceManagementClient)
        compute_client = get_client_from_auth_file(ComputeManagementClient)
        network_client = get_client_from_auth_file(NetworkManagementClient)
        return resource_client, compute_client, network_client

    try:
        subscription_id = os.environ['AZURE_SUBSCRIPTION_ID']
        credentials = ServicePrincipalCredentials(
            client_id=os.environ['AZURE_CLIENT_ID'],
            secret=os.environ['AZURE_CLIENT_SECRET'],
            tenant=os.environ['AZURE_TENANT_ID'],
        )
    except KeyError:
        # No auth file and incomplete service-principal settings.
        sys.exit("No Azure Connection Defined")

    resource_client = ResourceManagementClient(credentials, subscription_id)
    compute_client = ComputeManagementClient(credentials, subscription_id)
    network_client = NetworkManagementClient(credentials, subscription_id)
    return resource_client, compute_client, network_client
def get_conn(self):
    """Return an authenticated ``ContainerInstanceManagementClient``.

    Resolution order:
      1. ``key_path`` from the connection's extra JSON (must end in .json);
      2. the ``AZURE_AUTH_LOCATION`` environment variable (same constraint);
      3. a service principal built from the connection's login/password
         plus the ``tenantId``/``subscriptionId`` extras.

    :raises AirflowException: if a configured key file is not a JSON file.
    """

    def _client_from_key_file(path):
        # Both key-file sources share identical validation, previously
        # duplicated verbatim in two branches.
        if not path.endswith('.json'):
            raise AirflowException('Unrecognised extension for key file.')
        self.log.info('Getting connection using a JSON key file.')
        return get_client_from_auth_file(
            ContainerInstanceManagementClient, path)

    conn = self.get_connection(self.conn_id)
    key_path = conn.extra_dejson.get('key_path', False)
    if key_path:
        return _client_from_key_file(key_path)

    env_key_path = os.environ.get('AZURE_AUTH_LOCATION')
    if env_key_path:
        return _client_from_key_file(env_key_path)

    credentials = ServicePrincipalCredentials(
        client_id=conn.login,
        secret=conn.password,
        tenant=conn.extra_dejson['tenantId'],
    )
    subscription_id = conn.extra_dejson['subscriptionId']
    return ContainerInstanceManagementClient(credentials, str(subscription_id))
def __init__(self, options):
    """Set up Azure clients and unique resource names for an attack run.

    :param options: dict providing ``azureregion``,
        ``attackinstancepassword``, ``victimauthfile`` and
        ``attackerauthfile``.
    """
    self.region = options["azureregion"]
    self.vmPassword = options["attackinstancepassword"]
    # Randomized names so repeated runs never collide in the subscription.
    self.myAvailabilitySetName = "av-" + str(uuid.uuid4())
    self.myIpAddressName = "myip-" + str(uuid.uuid4())
    self.myVmNetName = "myvmnet-" + str(uuid.uuid4())
    self.mySubetName = "mysubnet-" + str(uuid.uuid4())
    self.myIpConfig = "myipconfig-" + str(uuid.uuid4())
    self.myNic = "mynic-" + str(uuid.uuid4())
    self.vmName = "vm-" + str(uuid.uuid4())
    self.resourceId = ''
    self.controlledSnapshot = None
    self.vm = None
    self.diskId = ''
    # Resolve each auth file once instead of re-deriving the same
    # absolute path for every client construction.
    victim_auth = Path(options['victimauthfile']).absolute()
    attacker_auth = Path(options['attackerauthfile']).absolute()
    self.victimComputeClient = get_client_from_auth_file(
        ComputeManagementClient, victim_auth)
    self.victimResourceClient = get_client_from_auth_file(
        ResourceManagementClient, victim_auth)
    self.attackerComputeClient = get_client_from_auth_file(
        ComputeManagementClient, attacker_auth)
    self.attackerStorageClient = get_client_from_auth_file(
        StorageManagementClient, attacker_auth)
    self.network_client = get_client_from_auth_file(
        NetworkManagementClient, attacker_auth)
    # Operations default to the victim's subscription context.
    self.clientContext = self.victimComputeClient
def __init__(self, resource_group_name):
    """Authenticate against Azure via AZURE_AUTH_LOCATION and cache clients.

    On success ``self.aci_client`` and ``self.resource_group`` are set;
    when the environment variable is missing they are left as ``None``
    (the original left them undefined, so any later use raised
    AttributeError instead of allowing a clean ``None`` check).

    :param resource_group_name: resource group to look up and cache.
    """
    self.aci_client = None
    self.resource_group = None
    auth_file_path = os.getenv('AZURE_AUTH_LOCATION', None)
    if auth_file_path is not None:
        cloudhunky_logger.info(
            "Authenticating with Azure using credentials in file at {0}"
            .format(auth_file_path))
        self.aci_client = get_client_from_auth_file(
            ContainerInstanceManagementClient)
        res_client = get_client_from_auth_file(ResourceManagementClient)
        self.resource_group = res_client.resource_groups.get(
            resource_group_name)
    else:
        cloudhunky_logger.warning(
            "\nFailed to authenticate to Azure. Have you set the"
            " AZURE_AUTH_LOCATION environment variable?\n")
def __init__(self, resource_group_name='virriusResourceGroup',
             container_image_name="virrius/worker:v1",
             auth_file_path="my.azureauth"):
    """Create ACI and resource clients from an auth file beside this module.

    :param resource_group_name: resource group to resolve and cache.
    :param container_image_name: default worker image for containers.
    :param auth_file_path: auth file name, resolved relative to this
        source file rather than the current working directory.
    """
    self.resource_group_name = resource_group_name
    self.container_image_name = container_image_name
    module_dir = os.path.dirname(__file__)
    self.auth_file_path = os.path.join(module_dir, auth_file_path)
    self.aciclient = get_client_from_auth_file(
        ContainerInstanceManagementClient, self.auth_file_path)
    self.resclient = get_client_from_auth_file(
        ResourceManagementClient, self.auth_file_path)
    # Cache the resource-group object for later container deployments.
    self.resource_group = self.resclient.resource_groups.get(
        resource_group_name)
def get_conn(self):
    """
    Return a HDInsight client.

    This hook requires a service principal in order to work.
    After creating this service principal
    (Azure Active Directory/App Registrations), you need to fill in the
    client_id (Application ID) as login, the generated password as
    password, and tenantId and subscriptionId in the extra's field as a
    json.

    References
    https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal
    https://docs.microsoft.com/en-us/python/api/overview/azure/key-vault?toc=%2Fpython%2Fazure%2FTOC.json&view=azure-python

    :return: HDInsight manage client
    :rtype: HDInsightManagementClient
    """

    def _client_from_key_file(path):
        # Shared validation for both key-file sources (connection extra
        # and AZURE_AUTH_LOCATION), previously duplicated verbatim.
        if not path.endswith('.json'):
            raise AirflowException('Unrecognised extension for key file.')
        self.log.info('Getting connection using a JSON key file.')
        return get_client_from_auth_file(HDInsightManagementClient, path)

    conn = self.get_connection(self.conn_id)
    extra_options = conn.extra_dejson
    # Cached on the hook for later cluster operations.
    self.resource_group_name = str(extra_options.get("resource_group_name"))
    self.resource_group_location = str(
        extra_options.get("resource_group_location"))

    key_path = extra_options.get('key_path', False)
    if key_path:
        return _client_from_key_file(key_path)

    env_key_path = os.environ.get('AZURE_AUTH_LOCATION')
    if env_key_path:
        return _client_from_key_file(env_key_path)

    credentials = ServicePrincipalCredentials(
        client_id=conn.login,
        secret=conn.password,
        tenant=conn.extra_dejson['tenantId'])
    subscription_id = conn.extra_dejson['subscriptionId']
    return HDInsightManagementClient(credentials, str(subscription_id))
def get_conn(self):
    """Return a cached ``DataFactoryManagementClient``, creating it lazily.

    Credential lookup order: the ``AZURE_AUTH_LOCATION`` environment
    variable, then the connection's ``key_path`` extra, then a service
    principal built from the connection's login/password and its
    ``tenantId``/``subscriptionId`` extras.

    :return: the authenticated Data Factory client.
    """
    if self._adf_client:
        return self._adf_client

    env_path = os.environ.get('AZURE_AUTH_LOCATION', False)
    if env_path:
        self.log.info('Getting connection using a JSON key file.')
        self._adf_client = get_client_from_auth_file(
            DataFactoryManagementClient, env_path)
        return self._adf_client

    conn = self.get_connection(self.conn_id)
    file_path = conn.extra_dejson.get('key_path', False)
    if file_path:
        self.log.info('Getting connection using a JSON key file.')
        self._adf_client = get_client_from_auth_file(
            DataFactoryManagementClient, file_path)
        return self._adf_client

    self.log.info('Getting connection using a service principal.')
    credentials = ServicePrincipalCredentials(
        client_id=conn.login,
        secret=conn.password,
        tenant=conn.extra_dejson['tenantId'])
    self._adf_client = DataFactoryManagementClient(
        credentials, conn.extra_dejson['subscriptionId'])
    return self._adf_client
def get_conn(self) -> Any:
    """
    Authenticates the resource using the connection id passed during init.

    :return: the authenticated client.
    """
    conn = self.get_connection(self.conn_id)
    extras = conn.extra_dejson

    key_path = extras.get('key_path')
    if key_path:
        if not key_path.endswith('.json'):
            raise AirflowException('Unrecognised extension for key file.')
        self.log.info('Getting connection using a JSON key file.')
        return get_client_from_auth_file(
            client_class=self.sdk_client, auth_path=key_path)

    key_json = extras.get('key_json')
    if key_json:
        self.log.info('Getting connection using a JSON config.')
        return get_client_from_json_dict(
            client_class=self.sdk_client, config_dict=key_json)

    # Fall back to an explicit service principal. Prefer the namespaced
    # extras but accept the bare keys for older-style connections.
    tenant = (extras.get('extra__azure__tenantId')
              or extras.get('tenantId'))
    subscription_id = (extras.get('extra__azure__subscriptionId')
                       or extras.get('subscriptionId'))
    self.log.info('Getting connection using specific credentials and subscription_id.')
    credentials = ServicePrincipalCredentials(
        client_id=conn.login, secret=conn.password, tenant=tenant
    )
    return self.sdk_client(
        credentials=credentials,
        subscription_id=subscription_id,
    )
def _get_aci_client():
    """Return the module-wide ``ContainerInstanceManagementClient``.

    The client is built lazily on first call and cached in the
    ``_ACI_CLIENT`` module global for every subsequent call.
    """
    global _ACI_CLIENT
    if not _ACI_CLIENT:
        # First use: authenticate via the SDK auth-file mechanism.
        _ACI_CLIENT = get_client_from_auth_file(
            ContainerInstanceManagementClient)
    return _ACI_CLIENT
def is_configured():
    """Return if Azure account is configured.

    Verifies authentication by attempting to list resource groups; any
    failure (missing auth file, bad credentials, network error) yields
    ``False``.
    """
    try:
        client = get_client_from_auth_file(ResourceManagementClient)
        # Pull at most one item to force an authenticated API round-trip.
        next(iter(client.resource_groups.list()), None)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit propagate instead of being swallowed.
        return False
    return True
def set_client(self, service):
    """Store ``service`` and build an authenticated client for it.

    Terminates the process when the auth file referenced by the
    ``AZURE_AUTH_LOCATION`` environment variable is missing or lacks a
    required key.

    :param service: Azure management client class to instantiate.
    """
    self.service = service
    try:
        self.client = get_client_from_auth_file(service)
    except (FileNotFoundError, KeyError):
        print(
            '[!] Credentials file not found. Set path in "AZURE_AUTH_LOCATION" environment variable'
        )
        # Exit with a non-zero status: this is a failure, not success
        # (the original exited 0, hiding the error from shell scripts).
        sys.exit(1)
def test_azure_config():
    """Smoke-test Azure auth: building an ACI client must succeed.

    Initializing ``aci_client`` up front makes the final assert fail
    cleanly when AZURE_AUTH_LOCATION is unset; the original left the
    name unbound on that path and raised NameError instead.
    """
    aci_client = None
    auth_file_path = os.getenv('AZURE_AUTH_LOCATION', None)
    if auth_file_path is not None:
        print("Authenticating with Azure using credentials in file at {0}".
              format(auth_file_path))
        aci_client = get_client_from_auth_file(
            ContainerInstanceManagementClient)
    else:
        print("\nFailed to authenticate to Azure. Have you set the"
              " AZURE_AUTH_LOCATION environment variable?\n")
    assert aci_client is not None
def __init__(self):
    """Create Azure management clients and Data Lake filesystem creds.

    Hard-codes the auth file at /root/azure_auth.json and exports it via
    AZURE_AUTH_LOCATION so ``get_client_from_auth_file`` can find it.
    """
    os.environ['AZURE_AUTH_LOCATION'] = '/root/azure_auth.json'
    self.compute_client = get_client_from_auth_file(
        ComputeManagementClient)
    self.resource_client = get_client_from_auth_file(
        ResourceManagementClient)
    self.network_client = get_client_from_auth_file(
        NetworkManagementClient)
    self.storage_client = get_client_from_auth_file(
        StorageManagementClient)
    self.datalake_client = get_client_from_auth_file(
        DataLakeStoreAccountManagementClient)
    #self.authorization_client = get_client_from_auth_file(AuthorizationManagementClient)
    # Close the auth file deterministically (the original leaked the
    # handle via open(...).read()).
    with open(os.environ['AZURE_AUTH_LOCATION']) as auth_file:
        self.sp_creds = json.load(auth_file)
    # Pass the credential strings directly; the original round-tripped
    # each value through json.dumps(...).replace('"', ''), which is both
    # redundant and wrong for values containing quote characters.
    self.dl_filesystem_creds = lib.auth(
        tenant_id=self.sp_creds['tenantId'],
        client_secret=self.sp_creds['clientSecret'],
        client_id=self.sp_creds['clientId'],
        resource='https://datalake.azure.net/')
def return_session(self, classname, method="authfile"):
    """Return an Azure SDK client session for ``classname``.

    :param classname: management client class to instantiate.
    :param method: "authfile" (default) authenticates from
        ``self.authpath``; "client" uses the local Azure CLI profile.
    :return: the client instance, or ``False`` when creation fails.
    """
    # NOTE(review): an unrecognized ``method`` leaves ``session`` unbound
    # and raises NameError at the return — confirm callers only pass the
    # two supported values.
    try:
        if method == "client":
            session = get_client_from_cli_profile(classname)
        elif method == "authfile":
            session = get_client_from_auth_file(
                classname, auth_path=self.authpath)
    except Exception:
        # Narrowed from a bare except (which also caught SystemExit /
        # KeyboardInterrupt); log the traceback for diagnosis.
        logging.exception(
            "Cannot get a session (class: {}, method: {})".format(
                classname, method))
        return False
    return session
def get_all_fleets(c):
    """Get all Spot Fleet requests.

    Follows ``NextToken`` pagination until the service stops returning
    one, accumulating every entry of ``SpotFleetRequestConfigs``.

    :param c: client exposing ``describe_spot_fleet_requests``; when
        ``None``, one is built from the Azure auth file.
    :return: list of all spot-fleet request configs.
    """
    if c is None:
        c = get_client_from_auth_file(ComputeManagementClient)
    fleets = []
    next_token = None
    while True:
        # Only pass NextToken on continuation pages.
        kwargs = {} if next_token is None else {'NextToken': next_token}
        resp = c.describe_spot_fleet_requests(**kwargs)
        fleets.extend(resp['SpotFleetRequestConfigs'])
        next_token = resp.get('NextToken')
        if next_token is None:
            return fleets
def graceful_shutdown(as_group, spot_fleet, id, logger=None):
    """Gracefully shutdown supervisord, detach from AutoScale group or
    spot fleet, and shutdown.

    :param as_group: scale-set/autoscale group name, or None.
    :param spot_fleet: spot fleet id, or None.
    :param id: instance id to detach.
    :param logger: unused; kept for interface compatibility.
    """
    # stop docker containers (best-effort; failures are deliberately ignored)
    try:
        logging.info("Stopping all docker containers.")
        os.system("/usr/bin/docker stop --time=30 $(/usr/bin/docker ps -aq)")
    except Exception:
        pass
    # shutdown supervisord (best-effort)
    try:
        logging.info("Stopping supervisord.")
        call(["/usr/bin/sudo", "/usr/bin/systemctl", "stop", "supervisord"])
    except Exception:
        pass
    # let supervisord shutdown its processes
    time.sleep(60)
    # detach and die
    logging.info("Committing seppuku.")
    # detach if part of a spot fleet or autoscaling group
    try:
        if as_group is not None:
            c = get_client_from_auth_file(ComputeManagementClient)
            detach_instance(c, as_group, id)
        if spot_fleet is not None:
            c = get_client_from_auth_file(ComputeManagementClient)
            decrement_fleet(c, spot_fleet)
    except Exception as e:
        # Fixed Python-2-only "except Exception, e" syntax, which is a
        # SyntaxError under Python 3.
        logging.error("Got exception in graceful_shutdown(): %s\n%s" %
                      (str(e), traceback.format_exc()))
def upload_azure_blob():
    """Upload the test dataset files to an Azure blob container.

    Locates the storage account whose 'Name' tag matches the container
    name (with 'container' swapped for 'storage'), reads its resource
    group from the 'SBN' tag, fetches an account key, and uploads every
    file in ``dataset_file``. Exits the process on any failure.
    """
    try:
        from azure.mgmt.storage import StorageManagementClient
        from azure.storage.blob import BlockBlobService
        from azure.common.client_factory import get_client_from_auth_file

        storage_client = get_client_from_auth_file(StorageManagementClient)
        wanted_name = args.storage.replace('container', 'storage')
        resource_group_name = ''
        for account in storage_client.storage_accounts.list():
            if wanted_name == str(account.tags.get('Name')):
                resource_group_name = str(account.tags.get('SBN'))
        account_keys = storage_client.storage_accounts.list_keys(
            resource_group_name, args.azure_storage_account)
        block_blob_service = BlockBlobService(
            account_name=args.azure_storage_account,
            account_key=account_keys.keys[0].value)
        for f in dataset_file:
            block_blob_service.create_blob_from_path(
                args.storage,
                '{0}_dataset/{1}'.format(args.notebook, f),
                '/tmp/{0}'.format(f))
    except Exception as err:
        print('Failed to upload test dataset to blob storage', str(err))
        sys.exit(1)
def get_dns_client(self):
    """Get a connection to the Azure DNS service.

    Prefers auth-file authentication when ``FILE_PATH`` is set; falls
    back to a service principal when ``KEY`` is set. Returns ``None``
    implicitly when neither is configured.
    """
    if self.FILE_PATH is not None and self.FILE_PATH != "":
        from azure.common.client_factory import get_client_from_auth_file
        return get_client_from_auth_file(DnsManagementClient)
    if self.KEY is not None and self.KEY != "":
        from azure.common.credentials import ServicePrincipalCredentials
        credentials = ServicePrincipalCredentials(
            client_id=self.CLIENT_ID,
            secret=self.KEY,
            tenant=self.TENANT_ID)
        return DnsManagementClient(credentials, self.SUBSCRIPTION_ID)
def _get_management_client(self, client_class):
    """Return instance of resource management client.

    Wraps auth-file failures in ``AzureCloudException`` with a message
    specific to the failure mode (bad format, missing key, or other).
    """
    try:
        return get_client_from_auth_file(
            client_class, auth_path=self.service_account_file)
    except ValueError as error:
        # The SDK raises ValueError for a malformed auth file.
        raise AzureCloudException(
            'Service account file format is invalid: {0}.'.format(error))
    except KeyError as error:
        # Raised when a required field is absent from the auth file.
        raise AzureCloudException(
            'Service account file missing key: {0}.'.format(error))
    except Exception as error:
        raise AzureCloudException(
            'Unable to create resource management client: '
            '{0}.'.format(error))
def seppuku(logger=None):
    """Shutdown supervisord and the instance if it detects that it is
    currently part of an autoscale group.

    :param logger: passed through to graceful_shutdown.
    """
    logging.info("Initiating seppuku.")
    # introduce random sleep so simultaneous shutdowns are staggered
    meditation_time = randint(0, 600)
    logging.info("Meditating for %s seconds to avoid thundering herd." %
                 meditation_time)
    time.sleep(meditation_time)
    # instances may be part of autoscaling group or spot fleet
    as_group = 'vmss'
    instance = None
    spot_fleet = None
    # resolve this VM's id from the Azure instance metadata service
    id = str(
        requests.get(
            'http://169.254.169.254/metadata/instance/compute/vmId?api-version=2017-08-01&format=text',
            headers={
                "Metadata": "true"
            }).content)
    logging.info("Our instance id: %s" % id)
    c = get_client_from_auth_file(ComputeManagementClient)
    instances = c.virtual_machine_scale_set_vms.list('HySDS', as_group)
    for ins in instances:
        if id == ins.vm_id:
            instance = ins.instance_id
            id = instance
    # gracefully shutdown; retry forever with a random back-off
    while True:
        try:
            graceful_shutdown(as_group, spot_fleet, id, logger)
        except Exception as e:
            # Fixed Python-2-only "except Exception, e" syntax, which is
            # a SyntaxError under Python 3.
            logging.error("Got exception in graceful_shutdown(): %s\n%s" %
                          (str(e), traceback.format_exc()))
        time.sleep(randint(0, 600))
def get_azure_api_client(cls, **kwargs):
    """
    Returns an Azure API client

    Returns an API client class either from a service principal auth file
    (if specified by the AZURE_AUTH_LOCATION environment variable), or
    from a CLI profile. This function is just a helper for
    AzureClient.get()

    Args:
        cls(class): The client class, for example
            azure.mgmt.resource.SubscriptionClient
        kwargs(dict): optional keyword arguments used by either
            get_client_from_auth_file() or get_client_from_cli_profile(),
            for example client_id, secret, tenant
            https://github.com/Azure/azure-sdk-for-python/blob/master/azure-common/azure/common/client_factory.py # noqa
    """
    # Pick the factory based on whether an auth file location is set.
    if os.environ.get("AZURE_AUTH_LOCATION"):
        factory = get_client_from_auth_file
    else:
        factory = get_client_from_cli_profile
    return factory(cls, **kwargs)
def get_lcs(c=None):
    """List all launch configurations."""
    # NOTE(review): this function appears unfinished — it only builds a
    # client and implicitly returns None; no listing call is made.
    # Confirm intended behavior against callers. Author's original note:
    """ the return is in VirtualMachineScaleSetSku object format and the resource group and scale group name is still hard coded """
    if c is None:
        # Fall back to auth-file credentials when no client is supplied.
        c = get_client_from_auth_file(ComputeManagementClient)
def get_images(c=None, **kargs):
    """List all AMIs.

    :param c: client exposing ``images.list_by_resource_group``; when
        ``None``, one is built from the Azure auth file.
    :param kargs: accepted for interface compatibility; unused.
    :return: images in the hard-coded 'HySDS' resource group.
    """
    client = c if c is not None else get_client_from_auth_file(
        ComputeManagementClient)
    return client.images.list_by_resource_group('HySDS')
#!/usr/bin/env python3 # # Referenced example https://docs.microsoft.com/en-us/azure/virtual-machines/windows/python # from azure.common.client_factory import get_client_from_auth_file from azure.mgmt.network import NetworkManagementClient network_client = get_client_from_auth_file(NetworkManagementClient) GROUP_NAME = 'Mastering-Python-Networking' LOCATION = 'westus2' def create_subnet(network_client): subnet_params = {'address_prefix': '192.168.0.128/25'} creation_result = network_client.subnets.create_or_update( GROUP_NAME, 'WEST-US-2_VNet_1', 'WEST-US-2_VNet_1_Subnet_2', subnet_params) return creation_result.result() creation_result = create_subnet(network_client) print(creation_result)
def get_asgs(c=None):
    """List all Autoscaling groups."""
    # NOTE(review): this function appears unfinished — it only builds a
    # client and implicitly returns None; no listing call is made.
    # Confirm intended behavior against callers. Author's original note:
    """ the return is in VirtualMachineScaleSet object format and the resource group name is still hard coded """
    if c is None:
        # Fall back to auth-file credentials when no client is supplied.
        c = get_client_from_auth_file(ComputeManagementClient)
from azure.mgmt.storage import StorageManagementClient
from azure.common.client_factory import get_client_from_auth_file

# Authenticates using the SDK auth file (located via AZURE_AUTH_LOCATION).
storage_client = get_client_from_auth_file(StorageManagementClient)

# For every storage account visible to these credentials, print whether
# file-service encryption is enabled.
for item in storage_client.storage_accounts.list():
    foo = vars(item)
    encrypt_obj = foo['encryption']
    print(encrypt_obj.services.file.enabled)
            'vm_size': 'Standard_DS1_v2'
        },
        'network_profile': {
            'network_interfaces': [{
                # NOTE(review): subscription and NIC ids are hard-coded;
                # this only works in the original author's subscription.
                'id': '/subscriptions/a9e7f5b3-273a-4ebf-8ea5-81dec14515ee/resourceGroups/NetworkWatcherRG/providers/Microsoft.Network/networkInterfaces/test1ss372',
            }]
        },
    }
    compute_vm = compute_client.virtual_machines.create_or_update(
        GROUP_NAME, VM_NAME, VM_PARAMETERS)
    # Block until the VM create/update operation finishes.
    return compute_vm.result()


print('\nCreate Resource Group')
# Clients authenticate from the SDK auth file (AZURE_AUTH_LOCATION).
resource_client = get_client_from_auth_file(ResourceManagementClient)
compute_client = get_client_from_auth_file(ComputeManagementClient)
resource_client.resource_groups.create_or_update(GROUP_NAME, {'location': LOCATION})
network_client = get_client_from_auth_file(NetworkManagementClient)
nic = create_nic(network_client)
async_vm_creation = create_vm(compute_client)
# Tag the VM
print('\nTag Virtual Machine')
async_vm_update = compute_client.virtual_machines.create_or_update(
    GROUP_NAME, VM_NAME, {
        'location': LOCATION,
        'tags': {
            'who-rocks': 'python',
            'where': 'on azure'
        }
def __init__(self, resource_group, account_json=None):
    """Bind to a resource group and build an Azure DNS client.

    :param resource_group: name of the resource group holding the zones.
    :param account_json: path to an Azure SDK auth file; presumably
        ``None`` defers to the SDK's default lookup — confirm against
        get_client_from_auth_file's behavior.
    """
    self.resource_group = resource_group
    self.dns_client = get_client_from_auth_file(
        DnsManagementClient, auth_path=account_json)
def __init__(self, region):
    """Remember the target region and build a storage management client.

    :param region: Azure region name to operate in.
    """
    self.region = region
    # Credentials come from the SDK auth-file mechanism.
    self.storage_client = get_client_from_auth_file(
        StorageManagementClient)
from azure.common.client_factory import get_client_from_auth_file
from azure.mgmt.compute import ComputeManagementClient
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.resource import ResourceManagementClient
from msrestazure.azure_exceptions import CloudError

# Default resource group for all Hestia-managed resources.
DEFAULT_GROUP_NAME = "Hestia"
# All Azure regions known to this tool; only the first two are active.
ZONES = ['westus', 'eastus', 'northeurope', 'westeurope', 'eastasia',
         'southeastasia', 'northcentralus', 'southcentralus', 'centralus',
         'eastus2', 'japaneast', 'japanwest', 'brazilsouth', 'australiaeast',
         'australiasoutheast', 'centralindia', 'southindia', 'westindia',
         'canadacentral', 'canadaeast', 'westcentralus', 'westus2', 'ukwest',
         'uksouth', 'koreacentral', 'koreasouth', 'francecentral',
         'australiacentral', 'southafricanorth', 'uaenorth',
         'switzerlandnorth', 'germanywestcentral', 'norwayeast']
zones = ZONES[:2]

# Module-level clients, all authenticated from the same auth file.
compute_client = get_client_from_auth_file(ComputeManagementClient, auth_path=os.path.expanduser("~/keys/azure-credentials.json"))
resource_client = get_client_from_auth_file(ResourceManagementClient, auth_path=os.path.expanduser("~/keys/azure-credentials.json"))
network_client = get_client_from_auth_file(NetworkManagementClient, auth_path=os.path.expanduser("~/keys/azure-credentials.json"))


def create_resource_group(group_name=DEFAULT_GROUP_NAME):
    """Return the resource group, creating it if it does not exist."""
    try:
        res = resource_client.resource_groups.get(group_name)
        return res
    except CloudError as e:
        # Group not found (or inaccessible): fall through and create it.
        pass
    res = resource_client.resource_groups.create_or_update(
        group_name, {