def create_resource(self, name, **kwargs):
    if self.is_live:
        self.client = self.create_mgmt_client(
            azure.mgmt.batch.BatchManagementClient)
        group = self._get_resource_group(**kwargs)
        batch_account = models.BatchAccountCreateParameters(
            location=self.location,
        )
        storage = self._get_storage_account(**kwargs)
        if storage:
            storage_resource = '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}'.format(
                self.test_class_instance.settings.SUBSCRIPTION_ID,
                group.name,
                storage.name)
            batch_account.auto_storage = models.AutoStorageBaseProperties(
                storage_account_id=storage_resource)
        account_setup = self.client.batch_account.create(
            group.name, name, batch_account)
        self.resource = account_setup.result()
        keys = self.client.batch_account.get_keys(group.name, name)
        credentials = SharedKeyCredentials(keys.account_name, keys.primary)
        if storage:
            self._add_app_package(group.name, name)
        self.test_class_instance.scrubber.register_name_pair(
            name, self.resource_moniker)
    else:
        self.resource = FakeAccount(
            name=name,
            account_endpoint="https://{}.{}.batch.azure.com".format(
                name, self.location))
        credentials = SharedKeyCredentials(name, 'ZmFrZV9hY29jdW50X2tleQ==')
    return {
        self.parameter_name: self.resource,
        self.creds_parameter: credentials
    }
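In playback mode, the preparer above only needs FakeAccount to look like a Batch account to the code under test. A minimal sketch, assuming a simple namedtuple is enough:

from collections import namedtuple

# Hypothetical definition; the real test suite may define FakeAccount differently.
FakeAccount = namedtuple('FakeAccount', ['name', 'account_endpoint'])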
def _configure_plugin(self):
    """Set up the config file, authenticate the SDK clients
    and set up the log file.
    """
    if not os.path.exists(self._data_dir):
        os.makedirs(self._data_dir)
    config_file = os.path.join(self._data_dir, self._ini_file)
    try:
        self._cfg.read(config_file)
        self._storage = storage.BlockBlobService(
            self._cfg.get('AzureBatch', 'storage_account'),
            self._cfg.get('AzureBatch', 'storage_key'),
            endpoint_suffix=self._cfg.get('AzureBatch', 'storage_suffix'))
        self._storage.MAX_SINGLE_PUT_SIZE = 2 * 1024 * 1024
        credentials = SharedKeyCredentials(
            self._cfg.get('AzureBatch', 'batch_account'),
            self._cfg.get('AzureBatch', 'batch_key'))
        self._client = batch.BatchExtensionsClient(
            credentials,
            base_url=self._cfg.get('AzureBatch', 'batch_url'),
            storage_client=self._storage)
        self._client.threads = self._cfg.getint('AzureBatch', 'threads')
        self._client.config.add_user_agent(self._user_agent)
        self._log = self._configure_logging(
            self._cfg.get('AzureBatch', 'logging'))
    except Exception as exp:
        # We should only worry about this if it happens when authenticating
        # using the UI, otherwise it's expected.
        if self.ui:
            raise ValueError("Invalid Configuration: {}".format(exp))
        else:
            # We'll need a placeholder logger
            self._log = self._configure_logging(LOG_LEVELS['debug'])
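A sketch of bootstrapping the INI file _configure_plugin reads. The [AzureBatch] section and option names come from the cfg.get calls above; the file name and every value are placeholders:

import configparser

cfg = configparser.ConfigParser()
cfg['AzureBatch'] = {
    'batch_account': 'mybatchaccount',    # placeholder
    'batch_key': '<shared-key>',          # placeholder
    'batch_url': 'https://mybatchaccount.westus.batch.azure.com',
    'storage_account': 'mystorageaccount',  # placeholder
    'storage_key': '<storage-key>',       # placeholder
    'storage_suffix': 'core.windows.net',
    'threads': '20',
    'logging': '10',
}
with open('azure_batch.ini', 'w') as f:   # hypothetical file name
    cfg.write(f)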
def __init__(self, args):
    super().__init__(args)
    credentials: SharedKeyCredentials = SharedKeyCredentials(
        self.args.account_name, self.args.account_key)
    self._batch_client: BatchServiceClient = BatchServiceClient(
        credentials, batch_url=self.args.service_url)
    self._batch_client.config.retry_policy.retries = 1
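The constructor above expects args carrying account_name, account_key and service_url attributes. A sketch of how those might be parsed, assuming hypothetical flag names:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--account-name', dest='account_name', required=True)
parser.add_argument('--account-key', dest='account_key', required=True)
parser.add_argument('--service-url', dest='service_url', required=True)
args = parser.parse_args()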
def __init__(self, image=None, **kwargs):
    """
    Args:
        image (azure.batch.models.ImageReference): The VM image to use for
            the pool nodes; defaults to

            ```python
            azure.batch.models.ImageReference(
                publisher="microsoft-azure-batch",
                offer="ubuntu-server-container",
                sku="16-04-lts",
                version="latest",
            )
            ```

        **kwargs: Additional arguments passed to :class:`super_batch.BatchConfig`
    """
    self.image = image if image is not None else _IMAGE_REF
    self.config = BatchConfig(**kwargs)
    self.output_files = []
    self.tasks = []

    # --------------------------------------------------
    # BLOB STORAGE CONFIGURATION:
    # --------------------------------------------------
    # Create the blob client, for use in obtaining references to
    # blob storage containers and uploading files to containers.
    self.blob_client = BlobServiceClient.from_connection_string(
        self.config.STORAGE_ACCOUNT_CONNECTION_STRING)

    # Use the blob client to create the containers in Azure Storage if they
    # don't yet exist.
    self.container_client = self.blob_client.get_container_client(
        self.config.BLOB_CONTAINER_NAME)
    try:
        self.container_client.create_container()
    except ResourceExistsError:
        pass

    # --------------------------------------------------
    # AZURE BATCH CONFIGURATION
    # --------------------------------------------------
    # Create a Batch service client. We'll now be interacting with the Batch
    # service in addition to Storage
    self.batch_client = BatchServiceClient(
        SharedKeyCredentials(self.config.BATCH_ACCOUNT_NAME,
                             self.config.BATCH_ACCOUNT_KEY),
        batch_url=self.config.BATCH_ACCOUNT_URL,
    )
def __init__(self, source_control: GithubService, storage: BlockBlobService):
    from azure.batch.batch_auth import SharedKeyCredentials
    batch_account = Setting.objects.get(name__exact='BATCH_ACCOUNT').value
    batch_account_key = Setting.objects.get(
        name__exact='BATCH_ACCOUNT_KEY').value
    batch_account_endpoint = Setting.objects.get(
        name__exact='BATCH_ACCOUNT_ENDPOINT').value
    self.client = BatchServiceClient(
        SharedKeyCredentials(batch_account, batch_account_key),
        batch_account_endpoint)
    self.logger = logging.getLogger(AzureBatchClient.__name__)
    self.source = source_control
    self.storage = storage
def _authenticate(cfg_path):
    global batch_client, storage_client
    cfg = ConfigParser.ConfigParser()
    try:
        cfg.read(cfg_path)
        credentials = SharedKeyCredentials(
            cfg.get("AzureBatch", "batch_account"),
            cfg.get("AzureBatch", "batch_key"))
        storage_client = storage.BlockBlobService(
            cfg.get("AzureBatch", "storage_account"),
            cfg.get("AzureBatch", "storage_key"),
            endpoint_suffix=cfg.get("AzureBatch", "storage_suffix"))
        batch_client = batch.BatchExtensionsClient(
            credentials,
            base_url=cfg.get("AzureBatch", "batch_url"),
            storage_client=storage_client)
        try:
            # getint, not get: threads must be an integer, not a string.
            batch_client.threads = cfg.getint("AzureBatch", "threads")
        except ConfigParser.NoOptionError:
            batch_client.threads = 20
    except (EnvironmentError, ConfigParser.NoOptionError,
            ConfigParser.NoSectionError) as exp:
        raise ValueError("Failed to authenticate.\n"
                         "Using Maya configuration file: {}\n"
                         "Error: {}".format(cfg_path, exp))
def __init__(self, image=IMAGE_REF, **kwargs):
    self.image = image
    self.config = BatchConfig(**kwargs)
    self.output_files = []
    self.tasks = []

    # --------------------------------------------------
    # BLOB STORAGE CONFIGURATION:
    # --------------------------------------------------
    # Create the blob client, for use in obtaining references to
    # blob storage containers and uploading files to containers.
    self.blob_client = BlobServiceClient.from_connection_string(
        self.config.STORAGE_ACCOUNT_CONNECTION_STRING)

    # Use the blob client to create the containers in Azure Storage if they
    # don't yet exist.
    self.container_client = self.blob_client.get_container_client(
        self.config.BLOB_CONTAINER_NAME)
    try:
        self.container_client.create_container()
    except ResourceExistsError:
        pass

    # --------------------------------------------------
    # AZURE BATCH CONFIGURATION
    # --------------------------------------------------
    # Create a Batch service client. We'll now be interacting with the Batch
    # service in addition to Storage
    self.batch_client = BatchServiceClient(
        SharedKeyCredentials(self.config.BATCH_ACCOUNT_NAME,
                             self.config.BATCH_ACCOUNT_KEY),
        batch_url=self.config.BATCH_ACCOUNT_URL,
    )
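A usage sketch for the two constructors above. BatchClient is a hypothetical class name, and the keyword arguments are assumed to map onto the BatchConfig attributes the constructors read:

client = BatchClient(  # hypothetical class name
    BATCH_ACCOUNT_NAME='mybatchaccount',
    BATCH_ACCOUNT_KEY='<shared-key>',
    BATCH_ACCOUNT_URL='https://mybatchaccount.westus.batch.azure.com',
    STORAGE_ACCOUNT_CONNECTION_STRING='<connection-string>',
    BLOB_CONTAINER_NAME='inputs',
)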
def get_client(*args):  # pylint: disable=unused-argument
    creds = SharedKeyCredentials('test1', 'ZmFrZV9hY29jdW50X2tleQ==')
    return BatchServiceClient(creds, 'https://test1.westus.batch.azure.com/')
def get_client(*_):
    creds = SharedKeyCredentials('test1', 'ZmFrZV9hY29jdW50X2tleQ==')
    return BatchServiceClient(creds, 'https://test1.westus.batch.azure.com/')
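The two get_client fakes above let tests build a BatchServiceClient with a dummy key and no network access. A usage sketch, assuming a hypothetical module path for the real factory being patched:

from unittest import mock

def test_uses_fake_client():
    # 'myapp.clients.make_batch_client' is a hypothetical patch target.
    with mock.patch('myapp.clients.make_batch_client', get_client):
        client = get_client()
        assert isinstance(client, BatchServiceClient)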
#print("Node: {}".format(node_id)) try: write_output(batch_client, job_id, task.id, 'stdout.txt', 'stdout.txt') except: pass try: write_output(batch_client, job_id, task.id, 'stderr.txt', 'stderr.txt') except: pass def write_output(batch_client, job_id, task_id, path_file, file_name): with open(os.path.join(OUTPUT_DIR, f'{job_id}_{task_id}_{file_name}'), 'wb') as file_output: output = batch_client.file.get_from_task(job_id=job_id, task_id=task_id, file_path=path_file) for data in output: file_output.write(data) if __name__ == "__main__": credentials_batch = SharedKeyCredentials( account_name=config_resources.batch['name'], key=config_resources.batch['key']) batch_client = azure.batch.BatchServiceClient( credentials=credentials_batch, batch_url=config_resources.batch['url']) output_tasks_by_job(batch_client)
"""Generate SAS token for file group container with read and list permissions. TODO: Move this into BatchExtensions file utils. """ container_name = fileutils.get_container_name(file_group) container_url = fileutils.generate_container_sas_token( container_name, storage_client, permission='rl') return container_url if __name__ == '__main__': # Setup client storage_client = BlockBlobService(STORAGE_ACCOUNT, STORAGE_KEY, endpoint_suffix="core.windows.net") credentials = SharedKeyCredentials(BATCH_ACCOUNT, BATCH_KEY) client = batch.BatchExtensionsClient(credentials, base_url=BATCH_ENDPOINT, storage_client=storage_client) # Setup test render input data scene_file = 'test_scene.mb' maya_data = 'maya-data-{}'.format(uuid.uuid4()) client.file.upload(SAMPLE_DIR, maya_data, flatten=True) client.file.upload(os.path.join(SCRIPT_DIR, 'generate_thumbnails.py'), maya_data, flatten=True) # Create pool using existing pool template file pool_ref = client.pool.get(POOL_ID) os_flavor = os_flavor(pool_ref.virtual_machine_configuration.image_reference) pool_info = {'poolId': POOL_ID} # Create a pool model with an application template reference job_id = 'maya_test_{}_{}'.format(os_flavor.lower(), uuid.uuid4())
# The collection of data files that are to be processed by the tasks.
input_file_paths = [
    os.path.join(sys.path[0], 'taskdata0.txt'),
    os.path.join(sys.path[0], 'taskdata1.txt'),
    os.path.join(sys.path[0], 'taskdata2.txt')
]

# Upload the data files.
input_files = [
    upload_file_to_container(blob_service_client, input_container_name, file_path)
    for file_path in input_file_paths
]

# Create a Batch service client. We'll now be interacting with the Batch
# service in addition to Storage
credentials = SharedKeyCredentials(config.BATCH_ACCOUNT_NAME,
                                   config.BATCH_ACCOUNT_KEY)
batch_client = BatchServiceClient(credentials,
                                  batch_url=config.BATCH_ACCOUNT_URL)

try:
    # Create the pool that will contain the compute nodes that will execute
    # the tasks.
    create_pool(batch_client, config.POOL_ID)

    # Create the job that will run the tasks.
    create_job(batch_client, config.JOB_ID, config.POOL_ID)

    # Add the tasks to the job.
    add_tasks(batch_client, config.JOB_ID, input_files)
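A sketch of the upload_file_to_container() helper used above, assuming it uploads a local file with azure-storage-blob v12 and returns a ResourceFile whose SAS URL the Batch tasks can read; the names and the one-hour expiry are assumptions:

import datetime
import os

from azure.batch import models as batchmodels
from azure.storage.blob import BlobSasPermissions, generate_blob_sas

def upload_file_to_container(blob_service_client, container_name, file_path):
    blob_name = os.path.basename(file_path)
    blob_client = blob_service_client.get_blob_client(container_name, blob_name)
    with open(file_path, 'rb') as data:
        blob_client.upload_blob(data, overwrite=True)

    # Grant read access long enough for the tasks to fetch their input.
    sas_token = generate_blob_sas(
        blob_service_client.account_name,
        container_name,
        blob_name,
        account_key=blob_service_client.credential.account_key,
        permission=BlobSasPermissions(read=True),
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1))
    sas_url = '{}?{}'.format(blob_client.url, sas_token)
    return batchmodels.ResourceFile(http_url=sas_url, file_path=blob_name)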
import configparser

from mpi4py import MPI
import azure.batch.models as batchmodel
import azure.storage.blob as blob
import azure.storage.file as file
from azure.batch import BatchServiceClient
from azure.batch.batch_auth import SharedKeyCredentials

# Configurations
config_file = 'config.ini'
config = configparser.ConfigParser()
config.read(config_file)
config_azure = config['AZURE']

# Azure
batch_account_name = config_azure['batch_account_name']
batch_account_key = config_azure['batch_account_key']
batch_account_url = config_azure['batch_account_url']
storage_account_name = config_azure['storage_account_name']
storage_account_key = config_azure['storage_account_key']

batch_credential = SharedKeyCredentials(batch_account_name, batch_account_key)
batch_service = BatchServiceClient(batch_credential, batch_account_url)
block_blob_service = blob.BlockBlobService(storage_account_name,
                                           storage_account_key)
file_service = file.FileService(storage_account_name, storage_account_key)

source_container = config_azure['source_container']
input_container = config_azure['input_container']


def application_source_upload():
    '''
    Upload related source files to Azure Blob
    '''
    source_files = []
    unique_files = []
def get_batch_client() -> BatchServiceClient:
    from azure.batch.batch_auth import SharedKeyCredentials
    account_info = get_batch_account_info()
    cred = SharedKeyCredentials(account_info.account, account_info.key)
    return BatchServiceClient(cred, account_info.endpoint)
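get_batch_account_info() above just has to return an object with account, key and endpoint attributes. A minimal sketch, assuming the values come from environment variables:

import os
from typing import NamedTuple

class BatchAccountInfo(NamedTuple):
    account: str
    key: str
    endpoint: str

def get_batch_account_info() -> BatchAccountInfo:
    # Hypothetical environment variable names.
    return BatchAccountInfo(
        account=os.environ['BATCH_ACCOUNT_NAME'],
        key=os.environ['BATCH_ACCOUNT_KEY'],
        endpoint=os.environ['BATCH_ACCOUNT_ENDPOINT'])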