def _upload_to_multi_tenant(self, file_to_upload):
    """Upload a local file to Auger multi-tenant cloud storage.

    Asks the Auger REST API for an upload destination, then either:
      * performs a multipart upload via ``FileUploader`` when the API
        response contains a ``multipart`` section, or
      * performs a single presigned S3 POST via ``requests``.

    Args:
        file_to_upload (str): Path of the local file to upload
            (readable by ``fsclient``).

    Returns:
        str: Remote location of the uploaded file. For the presigned-POST
        branch this is an ``s3://bucket/key`` URI; for the multipart branch
        it is whatever ``FileUploader.multipart_upload_obj`` returns
        (presumably also the remote location — confirm in FileUploader).

    Raises:
        AugerException: If the API call fails, the upload exceeds the
            server-side size limit, or any other HTTP error occurs.
    """
    # Remote object name: project name + random uuid prefix keeps
    # concurrent uploads of same-named files from colliding.
    file_path = 'workspace/projects/%s/files/%s-%s' % \
        (self.parent_api.object_name, shortuuid.uuid(),
         os.path.basename(file_to_upload))
    # Ask the API server where/how to upload; file_size lets the server
    # decide whether to hand back multipart upload credentials.
    res = self.rest_api.call(
        'create_project_file_url', {
            'project_id': self.parent_api.object_id,
            'file_path': file_path,
            'file_size': fsclient.get_file_size(file_to_upload)
        })
    if res is None:
        raise AugerException(
            'Error while uploading file to Auger Cloud...')

    if 'multipart' in res:
        # Server opted for multipart upload: build an uploader from the
        # temporary credentials it returned and stream the file in parts.
        upload_details = res['multipart']
        config = upload_details['config']

        uploader = FileUploader(upload_details['bucket'], config['endpoint'],
            config['access_key'], config['secret_key'],
            config['security_token'])

        # Binary mode, no decompression: the raw bytes must reach storage
        # unchanged.
        with fsclient.open_file(file_to_upload, 'rb', encoding=None,
                                auto_decompression=False) as f:
            return uploader.multipart_upload_obj(
                f, upload_details['key'],
                callback=NewlineProgressPercentage(file_to_upload))
    else:
        # Presigned POST branch: the server returned a URL plus form fields
        # (including the object key) for a direct S3 form upload.
        url = res['url']
        file_path = res['fields']['key']
        with fsclient.open_file(file_to_upload, 'rb', encoding=None,
                                auto_decompression=False) as f:
            files = {'file': (file_path, f)}
            res = requests.post(url, data=res['fields'], files=files)

            if res.status_code == 201 or res.status_code == 200:
                # Virtual-hosted-style S3 URL: first netloc component is
                # the bucket name — assumes that URL style; TODO confirm.
                bucket = urllib.parse.urlparse(url).netloc.split('.')[0]
                return 's3://%s/%s' % (bucket, file_path)
            else:
                if res.status_code == 400 and b'EntityTooLarge' in res.content:
                    # S3 returns an XML error body; surface its size limit
                    # to the user in MB.
                    max_size = ElementTree.fromstring(
                        res.content).find('MaxSizeAllowed').text
                    max_size_mb = int(max_size) / 1024 / 1024
                    raise AugerException(
                        'Data set size is limited to %.1f MB' % max_size_mb)
                else:
                    raise AugerException(
                        'HTTP error [%s] "%s" while uploading file'
                        ' to Auger Cloud...' % (res.status_code, res.content))
def _upload_to_multi_tenant(self, file_to_upload):
    """Upload a local file to Auger multi-tenant cloud storage.

    Requests a presigned S3 POST target from the Auger REST API, then
    submits the file as a form upload.

    Args:
        file_to_upload (str): Path of the local file to upload.

    Returns:
        str: ``s3://bucket/key`` URI of the uploaded object.

    Raises:
        AugerException: If the API call fails, the file exceeds the
            server-side size limit, or any other HTTP error occurs.
    """
    # Remote object name: project name + random uuid prefix avoids
    # collisions between uploads of same-named files.
    remote_name = 'workspace/projects/%s/files/%s-%s' % \
        (self.parent_api.object_name, shortuuid.uuid(),
         os.path.basename(file_to_upload))

    res = self.rest_api.call('create_project_file_url', {
        'project_id': self.parent_api.object_id,
        'file_path': remote_name})
    if res is None:
        raise AugerException(
            'Error while uploading file to Auger Cloud...')

    url = res['url']
    fields = res['fields']
    object_key = fields['key']

    # Presigned S3 form POST: the API-supplied fields carry the policy
    # and the destination key.
    with fsclient.open_file(file_to_upload, 'rb') as stream:
        response = requests.post(
            url, data=fields, files={'file': (object_key, stream)})

        if response.status_code in (200, 201):
            # Virtual-hosted-style S3 URL: first netloc component is
            # the bucket name.
            bucket = urllib.parse.urlparse(url).netloc.split('.')[0]
            return 's3://%s/%s' % (bucket, object_key)

        if response.status_code == 400 and b'EntityTooLarge' in response.content:
            # S3 reports its limit in an XML error body; show it in MB.
            max_size = ElementTree.fromstring(
                response.content).find('MaxSizeAllowed').text
            max_size_mb = int(max_size) / 1024 / 1024
            raise AugerException(
                'Data set size is limited to %.1f MB' % max_size_mb)

        raise AugerException(
            'HTTP error [%s] "%s" while uploading file'
            ' to Auger Cloud...' % (response.status_code, response.content))
def load_from_file(self, filename):
    """Load a YAML document from *filename* into ``self.yaml``.

    Uses ruamel's round-trip loader so comments and ordering are
    preserved for later write-back. Stores the path on ``self.filename``.

    Args:
        filename (str): Path of the YAML file; must be a non-empty string.

    Returns:
        The object itself, for call chaining.

    Raises:
        ValueError: If *filename* is not a non-empty string.
    """
    if not isinstance(filename, str) or not filename:
        raise ValueError("please provide yaml file name")

    self.filename = filename
    with fsclient.open_file(filename, 'r') as stream:
        self.yaml = ruamel.yaml.load(
            stream, Loader=ruamel.yaml.RoundTripLoader)
    return self
def load(self):
    """Resolve Azure credentials and store them as attributes on self.

    Sources are tried in order:
      1. ``self.ctx.credentials`` if the context carries them;
      2. the ``AZURE_CREDENTIALS`` environment variable (JSON string);
      3. the local credentials file at ``self.creds_file``;
      4. best-effort fallback to the Azure CLI profile
         (``~/.azureml/auth/azureProfile.json``), taking the first
         subscription id.

    Sets ``subscription_id``, ``service_principal_tenant_id``,
    ``service_principal_id`` and ``service_principal_password``
    (``None`` for any key the source did not provide).

    Returns:
        The object itself, for call chaining.
    """
    content = {}
    if hasattr(self.ctx, 'credentials'):
        content = self.ctx.credentials
    elif 'AZURE_CREDENTIALS' in os.environ:
        raw = os.environ.get('AZURE_CREDENTIALS', None)
        content = json.loads(raw) if raw else {}
    elif self._credentials_file_exist():
        with open(self.creds_file, 'r') as file:
            content = json.loads(file.read())
    else:
        # Fallback: read the subscription id from the Azure CLI profile.
        azure_creds_file = os.path.abspath(
            '%s/.azureml/auth/azureProfile.json' % os.environ.get('HOME', ''))
        if os.path.exists(azure_creds_file):
            from a2ml.api.utils import fsclient
            try:
                # utf-8-sig: the profile file may start with a BOM.
                with fsclient.open_file(azure_creds_file, "r",
                                        encoding='utf-8-sig',
                                        num_tries=0) as file:
                    profile = json.load(file)
                    content = {
                        'subscription_id': profile['subscriptions'][0]['id']
                    }
            except Exception as e:
                # Best-effort: a malformed/missing profile is not fatal.
                if self.ctx.debug:
                    import traceback
                    traceback.print_exc()

    for name in ('subscription_id', 'service_principal_tenant_id',
                 'service_principal_id', 'service_principal_password'):
        setattr(self, name, content.get(name))

    return self
def write_text_file(self, path, data, atomic=False):
    """Write *data* as text to *path*, creating parent folders as needed.

    Args:
        path (str): Destination file path.
        data (str): Text content to write.
        atomic (bool): When True, write through ``self.open_atomic`` so
            readers never observe a partially written file; otherwise
            remove any existing file and write directly, forcing the
            bytes to disk with fsync.
    """
    self.create_parent_folder(path)
    if atomic:
        with self.open_atomic(path, "w") as file:
            file.write(data)
    else:
        # NOTE(review): mixes instance helpers (self.remove_file) with the
        # fsclient module import — presumably intentional; confirm.
        from a2ml.api.utils import fsclient
        self.remove_file(path)
        with fsclient.open_file(path, "w") as file:
            try:
                file.write(data)
            finally:
                file.flush()  # flush file buffers
                # Force the OS to commit the bytes to disk before close.
                os.fsync(file.fileno())
        # Read-back of the just-written file; the result is discarded,
        # so this presumably serves as a verification read — TODO confirm.
        self.read_text_file(path)
def _load_azure_cred_file(self): content = {} azure_creds_file = os.path.abspath( '%s/.azureml/auth/azureProfile.json' % os.environ.get('HOME', '')) if os.path.exists(azure_creds_file): from a2ml.api.utils import fsclient try: with fsclient.open_file(azure_creds_file, "r", encoding='utf-8-sig', num_tries=0) as file: res = json.load(file) content = { 'subscription_id': res['subscriptions'][0]['id'] } except Exception as e: if self.ctx.debug: import traceback traceback.print_exc() return content