def create(self, identifier, disk_size_in_gb, label=None):
    """
    Create new data disk
    """
    footer = self.__generate_vhd_footer(disk_size_in_gb)
    disk_name = self.__generate_filename(identifier)
    size_in_bytes = int(disk_size_in_gb) * 1073741824 + 512
    try:
        storage = Storage(
            self.account, self.account.storage_container()
        )
        storage.upload_empty_image(size_in_bytes, footer, disk_name)
    except Exception as e:
        raise AzureDataDiskCreateError(
            '%s: %s' % (type(e).__name__, format(e))
        )
    disk_url = self.__data_disk_url(disk_name)
    args = {
        'media_link': disk_url,
        'name': self.__strip_platform_extension(disk_name),
        'has_operating_system': False,
        'os': 'Linux'
    }
    args['label'] = label if label else identifier
    try:
        self.service.add_disk(**args)
    except Exception as e:
        raise AzureDataDiskCreateError(
            '%s: %s' % (type(e).__name__, format(e))
        )
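
# Note on the size arithmetic in create() above: a fixed VHD blob is the raw
# disk payload followed by a 512-byte footer, hence
# disk_size_in_gb * 1073741824 + 512. A minimal standalone sketch of that
# calculation (the helper name is illustrative, not part of azurectl):
GIB = 1024 ** 3  # 1073741824 bytes


def vhd_blob_size(disk_size_in_gb):
    """Disk payload in GiB plus the 512-byte fixed-VHD footer."""
    return int(disk_size_in_gb) * GIB + 512


assert vhd_blob_size(10) == 10 * 1073741824 + 512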
class TestStorage:
    def setup(self):
        account = mock.Mock()
        account.get_blob_service_host_base = mock.Mock(
            return_value='core.windows.net'
        )
        account.storage_name = mock.Mock(return_value='mock-storage-name')
        account.storage_key = mock.Mock(
            return_value='bW9jay1zdG9yYWdlLWtleQ=='  # base64-encoding of 'mock-storage-key'
        )
        credentials = namedtuple(
            'credentials',
            ['private_key', 'certificate', 'subscription_id']
        )
        account.publishsettings = mock.Mock(return_value=credentials(
            private_key='abc',
            certificate='abc',
            subscription_id='4711'
        ))
        self.storage = Storage(account, 'some-container')

    @raises(AzureStorageFileNotFound)
    @patch('os.path.exists')
    def test_upload_storage_file_not_found(self, mock_exists):
        mock_exists.return_value = False
        self.storage.upload('some-blob', None)

    @raises(AzureStorageStreamError)
    @patch('azurectl.storage.storage.XZ.open')
    def test_upload_error_put_blob(self, mock_xz_open):
        mock_xz_open.side_effect = Exception
        self.storage.upload('../data/blob.xz')

    @raises(AzureStorageUploadError)
    @patch('azurectl.storage.storage.PageBlob')
    @patch('azurectl.storage.storage.XZ.open')
    def test_upload_raises(self, mock_xz_open, mock_page_blob):
        stream = mock.Mock()
        stream.close = mock.Mock()
        mock_xz_open.return_value = stream
        mock_page_blob.side_effect = Exception
        self.storage.upload('../data/blob.xz')
        stream.close.assert_called_once_with()

    @patch('azurectl.storage.storage.PageBlob')
    @patch('azurectl.storage.storage.XZ.uncompressed_size')
    @patch('azurectl.storage.storage.XZ.open')
    def test_upload(
        self, mock_xz_open, mock_uncompressed_size, mock_page_blob
    ):
        stream = mock.Mock()
        stream.close = mock.Mock()
        mock_xz_open.return_value = stream
        page_blob = mock.Mock()
        next_results = [3, 2, 1]

        def side_effect(stream, max_chunk_size, max_attempts):
            try:
                return next_results.pop()
            except IndexError:
                raise StopIteration

        page_blob.next.side_effect = side_effect
        mock_page_blob.return_value = page_blob
        mock_uncompressed_size.return_value = 1024
        self.storage.upload('../data/blob.xz')
        assert page_blob.next.call_args_list == [
            call(stream, None, 5),
            call(stream, None, 5),
            call(stream, None, 5),
            call(stream, None, 5)
        ]
        stream.close.assert_called_once_with()

    @patch('azurectl.storage.storage.PageBlob')
    @patch('__builtin__.open')
    @patch('os.path.getsize')
    def test_upload_uncompressed(
        self, mock_uncompressed_size, mock_open, mock_page_blob
    ):
        stream = mock.Mock()
        stream.close = mock.Mock()
        mock_open.return_value = stream
        page_blob = mock.Mock()
        next_results = [3, 2, 1]

        def side_effect(stream, max_chunk_size, max_attempts):
            try:
                return next_results.pop()
            except IndexError:
                raise StopIteration

        page_blob.next.side_effect = side_effect
        mock_page_blob.return_value = page_blob
        mock_uncompressed_size.return_value = 1024
        self.storage.upload('../data/blob.raw')
        assert page_blob.next.call_args_list == [
            call(stream, None, 5),
            call(stream, None, 5),
            call(stream, None, 5),
            call(stream, None, 5)
        ]
        stream.close.assert_called_once_with()

    @patch('azurectl.storage.storage.PageBlobService.delete_blob')
    @raises(AzureStorageDeleteError)
    def test_delete(self, mock_delete_blob):
        mock_delete_blob.side_effect = Exception
        self.storage.delete('some-blob')

    def test_print_upload_status(self):
        self.storage.print_upload_status()
        assert self.storage.upload_status == {
            'current_bytes': 0, 'total_bytes': 0
        }

    def test_disk_image_sas(self):
        container = 'mock-container'
        image = 'foo.vhd'
        start = datetime.datetime(2015, 1, 1)
        expiry = datetime.datetime(2015, 12, 31)
        permissions = 'rl'
        parsed = urlparse(
            self.storage.disk_image_sas(
                container, image, start, expiry, permissions
            )
        )
        assert parsed.scheme == 'https'
        assert parsed.netloc == self.storage.account_name + \
            '.blob.core.windows.net'
        assert parsed.path == '/' + container + '/' + image
        assert 'st=2015-01-01T00%3A00%3A00Z&' in parsed.query
        assert 'se=2015-12-31T00%3A00%3A00Z' in parsed.query
        assert 'sp=rl&' in parsed.query
        assert 'sr=b&' in parsed.query
        assert 'sig=' in parsed.query  # can't actively validate the signature
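
# For orientation, the assertions in test_disk_image_sas pin down the expected
# shape of the SAS URL: an https link to the blob with st/se (validity window),
# sp (permissions), sr=b (blob-level resource) and sig (the signature) in the
# query string. An illustrative URL matching those assertions would look like
# this (values made up from the mocked account; the signature is elided):
#
#   https://mock-storage-name.blob.core.windows.net/mock-container/foo.vhd
#       ?st=2015-01-01T00%3A00%3A00Z&se=2015-12-31T00%3A00%3A00Z&sp=rl&sr=b&sig=...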
class StorageDiskTask(CliTask):
    """
    Process disk commands
    """
    def process(self):
        self.manual = Help()
        if self.__help():
            return

        self.load_config()

        self.account = AzureAccount(self.config)
        container_name = self.account.storage_container()
        self.storage = Storage(self.account, container_name)

        # default to 1 minute ago (skew around 'now')
        if self.command_args['--start-datetime'] == 'now':
            start = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
        else:
            start = self.validate_date('--start-datetime')

        # default to 30 days from start
        if self.command_args['--expiry-datetime'] == '30 days from start':
            expiry = start + datetime.timedelta(days=30)
        else:
            expiry = self.validate_date('--expiry-datetime')

        self.validate_sas_permissions('--permissions')

        if self.command_args['upload']:
            self.__upload()
        elif self.command_args['delete']:
            self.__delete()
        elif self.command_args['sas']:
            self.__sas(
                container_name, start, expiry,
                self.command_args['--permissions']
            )

    def __help(self):
        if self.command_args['help']:
            self.manual.show('azurectl::storage::disk')
        else:
            return False
        return self.manual

    def __upload(self):
        if self.command_args['--quiet']:
            self.__upload_no_progress()
        else:
            self.__upload_with_progress()

    def __upload_no_progress(self):
        try:
            self.__process_upload()
        except KeyboardInterrupt:
            raise SystemExit('azurectl aborted by keyboard interrupt')

    def __upload_with_progress(self):
        image = self.command_args['--source']
        progress = BackgroundScheduler(timezone=utc)
        progress.add_job(
            self.storage.print_upload_status, 'interval', seconds=3
        )
        progress.start()
        try:
            self.__process_upload()
            self.storage.print_upload_status()
            progress.shutdown()
        except KeyboardInterrupt:
            progress.shutdown()
            raise SystemExit('azurectl aborted by keyboard interrupt')
        print()
        log.info('Uploaded %s', image)

    def __process_upload(self):
        self.storage.upload(
            self.command_args['--source'],
            self.command_args['--blob-name'],
            self.command_args['--max-chunk-size']
        )

    def __sas(self, container_name, start, expiry, permissions):
        result = DataCollector()
        out = DataOutput(
            result,
            self.global_args['--output-format'],
            self.global_args['--output-style']
        )
        result.add(
            self.command_args['--blob-name'] + ':sas_url',
            self.storage.disk_image_sas(
                container_name,
                self.command_args['--blob-name'],
                start, expiry, permissions
            )
        )
        out.display()

    def __delete(self):
        image = self.command_args['--blob-name']
        self.storage.delete(image)
        log.info('Deleted %s', image)
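
# The datetime defaults in StorageDiskTask.process() work out as follows
# (standalone sketch, independent of azurectl):
#
#   import datetime
#
#   now = datetime.datetime.utcnow()
#   start = now - datetime.timedelta(minutes=1)    # '--start-datetime now'
#   expiry = start + datetime.timedelta(days=30)   # '30 days from start'
#
# Starting the SAS validity window one minute in the past tolerates small
# clock skew between the client and the Azure storage service.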