def _rsync(*args, **kwargs):
    ''' Pull files from a remote host with rsync, copy them into the landing
    directory, and archive them to S3. '''
    with tempfile.TemporaryDirectory(prefix='file_grabber') as tmp:
        settings = args[0]
        # -r recursive, -l copy symlinks as symlinks, -t preserve mtimes,
        # -z compress in transit, -u skip files that are newer on the receiver;
        # --remove-source-files deletes each file from the remote once copied
        rsync_command = [
            'rsync', '-rltzu', '--exclude', '*.partial',
            '--remove-source-files'
        ]
        remote_user = settings.get('file-grabber.args.remote_user')
        remote_host = settings.get('file-grabber.args.remote_host')
        remote_dir = settings.get('file-grabber.args.remote_dir')
        landing = settings.get('file-grabber.args.landing')
        s3_bucket = settings.get('file-grabber.args.archive_s3_bucket_name')
        s3_to_glacier_after_days = int(
            settings.get('file-grabber.args.archive_s3_to_glacier_after_days',
                         '30'))
        s3_prefix = settings.get('file-grabber.args.archive_s3_prefix')
        private_key = settings.get('file-grabber.args.private_key')

        rsync_ssh = 'ssh -o StrictHostKeyChecking=no'
        if private_key is not None:
            rsync_ssh += ' -i ' + private_key
        rsync_command.append('-e')
        rsync_command.append(rsync_ssh)

        # normalize both directories to a trailing slash: for the rsync source
        # this means "copy the contents of remote_dir", and it keeps the path
        # joins below consistent
        if not remote_dir.endswith('/'):
            remote_dir += '/'
        if not landing.endswith('/'):
            landing += '/'

        rsync_command.append(remote_user + '@' + remote_host + ':' + remote_dir)
        rsync_command.append(tmp)

        returncode = subprocess.call(rsync_command)
        if returncode != 0:
            logger.error('rsync failed with return code %s', returncode)
        else:
            # copy from the temporary directory to the actual landing directory;
            # the two-step copy keeps the files in tmp so they can be archived
            for dirpath, dirs, files in os.walk(tmp):
                for filename in files:
                    fname = os.path.abspath(os.path.join(dirpath, filename))
                    dest_path = os.path.join(landing, dirpath[len(tmp) + 1:])
                    os.makedirs(dest_path, exist_ok=True)
                    shutil.copy2(fname, dest_path)
            archive_files(tmp, s3_bucket, s3_to_glacier_after_days,
                          prefix=s3_prefix)
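# Minimal invocation sketch (illustrative only, not part of the module):
# _rsync only needs `settings` to expose a dict-style .get(), so a plain dict
# with the keys read above is enough. All values here are hypothetical
# placeholders.
#
#     settings = {
#         'file-grabber.args.remote_user': 'grabber',
#         'file-grabber.args.remote_host': 'files.example.com',
#         'file-grabber.args.remote_dir': '/srv/outgoing',
#         'file-grabber.args.landing': '/data/landing',
#         'file-grabber.args.archive_s3_bucket_name': 'my-archive-bucket',
#         'file-grabber.args.archive_s3_to_glacier_after_days': '30',
#         'file-grabber.args.archive_s3_prefix': 'file-grabber',
#         'file-grabber.args.private_key': '/home/grabber/.ssh/id_rsa',
#     }
#     _rsync(settings)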
def test_archive_file_from_backup_file(self, mock_S3_buckup, mock_archive,
                                       mock_strftime):
    # files left over in an earlier backup location (backup_of_backup) should
    # be picked up and archived as well
    mock_S3_buckup.return_value = None
    mock_archive.return_value = True
    mock_strftime.return_value = self.__prefix
    rtn = archive_files(os.path.join(self.__tmp_dir.name, 'arrival'),
                        's3_bucket', 1, prefix=self.__prefix,
                        backup_of_backup=self.__src_dir)
    self.assertTrue(rtn)
    arg_list = mock_archive.call_args_list
    self.assertEqual(6, len(arg_list))
    for arg in arg_list:
        args, kwargs = arg
        (src_file, dest_file) = args
        self.assertIn(src_file, self.__src_files)
        self.assertIn(dest_file, self.__dest_files)
def test_archive_files_ok(self, mock_S3_buckup, mock_archive, mock_strftime):
    mock_strftime.return_value = self.__prefix
    mock_S3_buckup.return_value = None
    mock_archive.return_value = True
    rtn = archive_files(self.__src_dir, 's3_bucket', 1, prefix=self.__prefix,
                        backup_of_backup=self.__backup_dir)
    self.assertTrue(rtn)
    # on success the local backup directory must be cleaned up
    self.assertFalse(os.path.exists(self.__backup_dir))
    arg_list = mock_archive.call_args_list
    self.assertEqual(6, len(arg_list))
    for arg in arg_list:
        args, kwargs = arg
        (src_file, dest_file) = args
        self.assertIn(src_file, self.__src_files)
        self.assertIn(dest_file, self.__dest_files)
def test_archive_file_s3_not_available(self, mock_S3_buckup, mock_archive,
                                       mock_strftime):
    mock_strftime.return_value = self.__prefix
    mock_S3_buckup.side_effect = S3CreateError(403, '')
    rtn = archive_files(self.__src_dir, 's3_bucket', 1, prefix=self.__prefix,
                        backup_of_backup=self.__backup_dir)
    self.assertFalse(rtn)
    arg_list = mock_archive.call_args_list
    self.assertEqual(0, len(arg_list))
    # when S3 is not available nothing is archived; all files must be kept in
    # the backup directory for a later retry
    self.assertTrue(os.path.exists(self.__backup_dir))
    backup_filenames = []
    for dirpath, dirs, files in os.walk(self.__backup_dir):
        for filename in files:
            fname = os.path.abspath(os.path.join(dirpath, filename))
            backup_filenames.append(fname[len(self.__backup_dir) + 1:])
    self.assertEqual(6, len(backup_filenames))
    for file in backup_filenames:
        self.assertIn(file, self.__dest_files)
def test_archive_file_backup_to_backup(self, mock_S3_buckup, mock_archive):
    mock_S3_buckup.return_value = None
    # if the underlying per-file archive step fails, archive_files must
    # report failure
    mock_archive.return_value = False
    rtn = archive_files(self.__src_dir, 's3_bucket', 1, prefix=self.__prefix,
                        backup_of_backup=self.__backup_dir)
    self.assertFalse(rtn)
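# Taken together, the tests above pin down the archive_files contract (as
# inferred from the calls here; the authoritative signature lives in the
# module under test):
#
#     archive_files(src_dir, s3_bucket_name, s3_to_glacier_after_days,
#                   prefix=..., backup_of_backup=...) -> bool
#
# True means every file was archived to S3 and the local backup directory was
# removed; False means S3 was unavailable or a copy step failed, in which case
# the files remain under backup_of_backup for a later retry.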