def test_get_folders(self, client):
    # Prepare
    region = self.faker.word()
    access_key = self.faker.sentence()
    access_secret = self.faker.sentence()
    bucket = self.faker.word()
    path = str_slug(self.faker.word())
    self.store.load_config({
        'region': region,
        'access_key': access_key,
        'access_secret': access_secret,
        'bucket': bucket,
        'path': path,
    })

    mock_client = Mock()
    mock_paginator = Mock()
    client.return_value = mock_client
    mock_client.get_paginator.return_value = mock_paginator
    mock_paginator.paginate.return_value = [
        {
            'Contents': [
                {'Key': os.path.join(path, str(now()), 'backup.tar.gz')},
                {'Key': os.path.join(path, str(now()), 'logs.json')},
            ],
        },
    ]

    # Execute
    backup = self.store.get_last_backup()

    # Assert
    assert backup is not None
    client.assert_called_once_with(
        's3',
        region_name=region,
        endpoint_url='https://%s.digitaloceanspaces.com' % region,
        aws_access_key_id=access_key,
        aws_secret_access_key=access_secret,
    )
    mock_client.get_paginator.assert_called_once_with('list_objects')
    mock_paginator.paginate.assert_called_once_with(Bucket=bucket, Prefix=path)

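# Sketch, not part of the test above: the call asserted on suggests the store builds
# its boto3 client roughly as below. The helper name _build_client and the config
# argument are hypothetical; only the shape of the boto3.client() call is taken from
# the assertions in the test.
import boto3


def _build_client(config):
    # Point the S3 client at the DigitalOcean Spaces endpoint for the configured region.
    return boto3.client(
        's3',
        region_name=config['region'],
        endpoint_url='https://%s.digitaloceanspaces.com' % config['region'],
        aws_access_key_id=config['access_key'],
        aws_secret_access_key=config['access_secret'],
    )
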
def test_with_custom_driver(self):
    # Prepare
    time = now()
    command_output = self.faker.sentence()
    store_path = '/tmp/rireki_testing/store'
    project = self._create_project(
        driver='custom',
        driver_config={'command': 'echo "%s"' % command_output},
        store='local',
        store_config={'path': store_path},
    )
    set_testing_now(time)

    # Execute
    result = Cli.run('backup')

    # Assert
    assert result.exit_code == 0
    assert ('Backing up %s...' % project.name) in result.output
    assert 'Done' in result.output

    backup_path = os.path.join(
        store_path,
        '{}-backup-{}-{}'.format(project.slug, format_time(time, 'date'), time),
        'logs.json',
    )
    assert os.path.exists(backup_path)

    logs = json.loads(file_get_contents(backup_path))
    assert command_output in logs.get('stdout')

def test_creates_backups_with_multiple_paths(self):
    # Prepare
    tmp_path = os.path.join(self.home_path, '{}-{}'.format(self.project.slug, now()))
    store_path = os.path.join(tmp_path, 'backups')
    driver_paths = [
        os.path.join(tmp_path, 'files-1'),
        os.path.join(tmp_path, 'files-2'),
    ]
    self.store.load_config({'path': store_path})
    self.driver.load_config({
        'frequency': 42,
        'paths': driver_paths,
    })
    touch(os.path.join(driver_paths[0], str_slug(self.faker.word())))
    touch(os.path.join(driver_paths[1], str_slug(self.faker.word())))

    # Execute
    self.driver.perform_backup()

    # Assert
    assert os.path.exists(store_path)

    backup = self.store.get_last_backup()
    assert backup is not None
    assert os.path.exists(os.path.join(store_path, backup.name + '.zip'))

def _create_temporary_folder(self):
    path = '/tmp/rireki-{}-{}-{}'.format(self.name, self.project.slug, now())

    os.makedirs(path)

    return path

def create_backup(self, files_path):
    backup_name = '{slug}-backup-{date}-{timestamp}'.format(
        slug=self.project.slug,
        date=format_time(now(), 'date'),
        timestamp=now(),
    )

    if os.path.isfile(files_path):
        self._upload_file(
            files_path,
            '{}.{}'.format(backup_name, file_get_extension(files_path)),
        )
    elif os.path.isdir(files_path):
        for file in os.listdir(files_path):
            self._upload_file(
                os.path.join(files_path, file),
                os.path.join(backup_name, file),
            )

def __enter__(self):
    self.path = os.path.join(
        '/tmp',
        'rireki-files-backup-{}-{}'.format(self.driver.project.slug, now()),
    )

    os.makedirs(self.path)

    for path in self.driver.paths:
        shutil.copytree(path, os.path.join(self.path, os.path.basename(path)))

    return self

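# Sketch only: the matching __exit__ is not shown in this section. Assuming the
# temporary copy created above should be discarded once the backup is done, it
# could look like this (the ignore_errors choice is an assumption):
def __exit__(self, exc_type, exc_value, traceback):
    # Remove the temporary copy created in __enter__ (hypothetical cleanup).
    shutil.rmtree(self.path, ignore_errors=True)
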
def test_without_pending_backups(self):
    # Prepare
    project = self._create_project(
        store='local',
        store_config={'path': '/tmp/rireki_testing/store'},
    )
    touch('/tmp/rireki_testing/store/%s/backup' % now())

    # Execute
    result = Cli.run('backup')

    # Assert
    assert result.exit_code == 0
    assert ('Project "%s" does not have any pending backups' % project.name) in result.output
    assert 'Done' in result.output

def test_upload_files(self, client):
    # Prepare
    region = self.faker.word()
    access_key = self.faker.sentence()
    access_secret = self.faker.sentence()
    bucket = self.faker.word()
    path = str_slug(self.faker.word())
    tmp_path = os.path.join(self.home_path, '{}-{}'.format(self.project.slug, now()))
    placeholder_file_name = str_slug(self.faker.word())
    placeholder_file_path = os.path.join(tmp_path, placeholder_file_name)
    self.store.load_config({
        'region': region,
        'access_key': access_key,
        'access_secret': access_secret,
        'bucket': bucket,
        'path': path,
    })
    touch(placeholder_file_path)

    mock_client = Mock()
    client.return_value = mock_client

    # Execute
    self.store.create_backup(tmp_path)

    # Assert
    client.assert_called_once_with(
        's3',
        region_name=region,
        endpoint_url='https://%s.digitaloceanspaces.com' % region,
        aws_access_key_id=access_key,
        aws_secret_access_key=access_secret,
    )
    mock_client.upload_file.assert_called_once_with(
        placeholder_file_path,
        bucket,
        ANY,
    )
    assert mock_client.upload_file.call_args[0][2].startswith(path)
    assert mock_client.upload_file.call_args[0][2].endswith(placeholder_file_name)

def test_with_one_project_backed_up(self):
    # Prepare
    project = self._create_project(
        store='local',
        store_config={'path': '/tmp/rireki_testing/store'},
    )
    touch('/tmp/rireki_testing/store/%s/backup' % now())

    # Execute
    result = Cli.run('status')

    # Assert
    assert result.exit_code == 0

    output_lines = result.output.splitlines()
    assert len(output_lines) == 2
    assert project.name in output_lines[1]
    assert re.search('Backed up \\d seconds ago', output_lines[1])

def has_pending_backups(self, last_backup_time):
    frequency_in_seconds = self.frequency * 60

    return last_backup_time < now() - frequency_in_seconds

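# Illustrative usage (hypothetical driver instance; timestamps assumed to be seconds,
# as implied by the * 60 conversion above): frequency is configured in minutes, so a
# driver set to back up every 60 minutes only reports a pending backup once the last
# backup is more than an hour old.
driver.frequency = 60
assert driver.has_pending_backups(now() - 2 * 60 * 60)   # last backup 2 hours ago -> pending
assert not driver.has_pending_backups(now() - 30 * 60)   # last backup 30 minutes ago -> not pending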