def test_creates_backups_with_multiple_paths(self):
    """A backup run over several configured source paths lands a zip in the store."""
    # Prepare: an isolated working dir containing the store dir plus two
    # separate source directories, each seeded with one placeholder file.
    workdir = os.path.join(self.home_path, '{}-{}'.format(self.project.slug, now()))
    store_path = os.path.join(workdir, 'backups')
    source_dirs = [
        os.path.join(workdir, 'files-1'),
        os.path.join(workdir, 'files-2'),
    ]

    self.store.load_config({'path': store_path})
    self.driver.load_config({
        'frequency': 42,
        'paths': source_dirs,
    })

    for source_dir in source_dirs:
        touch(os.path.join(source_dir, str_slug(self.faker.word())))

    # Execute
    self.driver.perform_backup()

    # Assert: the store directory was created and holds the latest backup archive.
    assert os.path.exists(store_path)
    backup = self.store.get_last_backup()
    assert backup is not None
    assert os.path.exists(os.path.join(store_path, backup.name + '.zip'))
def test_new_project_with_files_driver(self):
    """Adding a project via the CLI persists the files-driver config to disk."""
    # Prepare
    project_name = self.faker.name()
    driver_name = 'files'
    driver_frequency_name = 'daily'
    driver_frequency_minutes = 1440
    driver_paths = [
        '/tmp',
        os.path.join('tmp', str_slug(self.faker.word())),
    ]

    # Execute: drive the interactive `add` command with canned answers.
    result = Cli.run(
        'add',
        project_name,
        '--driver=' + driver_name,
        '--store=local',
        input=self.__get_new_project_with_files_driver_input(
            driver_frequency_name,
            driver_paths,
        ),
    )

    # Assert: command succeeded and the written config mirrors the answers.
    assert result.exit_code == 0
    config_path = '%s/projects/%s.conf' % (self.home_path, project_name)
    config = toml.load(config_path)
    assert 'driver' in config
    driver_config = config['driver']
    assert driver_config['name'] == driver_name
    assert driver_config['frequency'] == driver_frequency_minutes
    assert driver_config['paths'] == driver_paths
def test_upload_files(self, client):
    """create_backup pushes every file in the backup dir to the S3 bucket."""
    # Prepare: store config plus a single placeholder file to upload.
    region = self.faker.word()
    access_key = self.faker.sentence()
    access_secret = self.faker.sentence()
    bucket = self.faker.word()
    path = str_slug(self.faker.word())
    tmp_path = os.path.join(self.home_path, '{}-{}'.format(self.project.slug, now()))
    file_name = str_slug(self.faker.word())
    file_path = os.path.join(tmp_path, file_name)

    self.store.load_config({
        'region': region,
        'access_key': access_key,
        'access_secret': access_secret,
        'bucket': bucket,
        'path': path,
    })
    touch(file_path)

    mock_client = Mock()
    client.return_value = mock_client

    # Execute
    self.store.create_backup(tmp_path)

    # Assert: the client targets the region's Spaces endpoint with our creds...
    client.assert_called_once_with(
        's3',
        region_name=region,
        endpoint_url='https://%s.digitaloceanspaces.com' % region,
        aws_access_key_id=access_key,
        aws_secret_access_key=access_secret,
    )
    # ...and the remote key is rooted at the configured path and keeps the name.
    mock_client.upload_file.assert_called_once_with(
        file_path,
        bucket,
        ANY,
    )
    remote_key = mock_client.upload_file.call_args[0][2]
    assert remote_key.startswith(path)
    assert remote_key.endswith(file_name)
def test_get_folders(self, client):
    """get_last_backup pages the bucket listing and finds the newest backup."""
    # Prepare
    region = self.faker.word()
    access_key = self.faker.sentence()
    access_secret = self.faker.sentence()
    bucket = self.faker.word()
    path = str_slug(self.faker.word())

    self.store.load_config({
        'region': region,
        'access_key': access_key,
        'access_secret': access_secret,
        'bucket': bucket,
        'path': path,
    })

    # Fake the boto3 client -> paginator chain with one page of two objects.
    mock_client = Mock()
    mock_paginator = Mock()
    client.return_value = mock_client
    mock_client.get_paginator.return_value = mock_paginator
    mock_paginator.paginate.return_value = [{
        'Contents': [
            {'Key': os.path.join(path, str(now()), 'backup.tar.gz')},
            {'Key': os.path.join(path, str(now()), 'logs.json')},
        ],
    }]

    # Execute
    backup = self.store.get_last_backup()

    # Assert
    assert backup is not None
    client.assert_called_once_with(
        's3',
        region_name=region,
        endpoint_url='https://%s.digitaloceanspaces.com' % region,
        aws_access_key_id=access_key,
        aws_secret_access_key=access_secret,
    )
    mock_client.get_paginator.assert_called_once_with('list_objects')
    mock_paginator.paginate.assert_called_once_with(Bucket=bucket, Prefix=path)
def slug(self):
    """Return the slugified form of this object's name."""
    name_slug = str_slug(self.name)
    return name_slug
def test_slug(self):
    """str_slug lowercases, hyphenates spaces, and drops apostrophes."""
    cases = {
        'Miyamoto Musashi': 'miyamoto-musashi',
        "Let's Go": 'lets-go',
    }
    for raw, expected in cases.items():
        assert str_slug(raw) == expected