def test_load_from_file_success(self):
    """Loading a tracked S3 repo should first download its state file
    from the configured bucket/prefix into the local tracked directory,
    then hand off to the parent loader."""
    repo_name = 'name'
    hashed_name = hashlib.sha512(repo_name.encode('utf-8')).hexdigest()

    repo_config = RepoConfig(
        base_tmp_dir=DEFAULT_BASE_TMP_DIR,
        baseline='baseline',
        exclude_regex='',
    )
    s3_config = S3Config(
        s3_creds_file='s3_creds_file',
        bucket_name='bucket_name',
        prefix='prefix_value',
    )

    download_mock = mock.Mock()
    parent_loader_mock = mock.Mock()
    with mock.patch.object(S3TrackedRepo, '_download', download_mock), \
            mock.patch.object(
                S3TrackedRepo, '_load_from_file', parent_loader_mock), \
            mock.patch.object(S3TrackedRepo, '_initialize_s3_client'):
        S3TrackedRepo.load_from_file(repo_name, repo_config, s3_config)

    # The file should be fetched to <base_tmp_dir>/tracked/<sha512>.json.
    download_mock.assert_called_once_with(
        'bucket_name',
        'prefix_value',
        hashed_name,
        '%s/tracked/%s.json' % (DEFAULT_BASE_TMP_DIR, hashed_name),
    )
def test_load_from_file_success(self, mock_isdir):
    """A well-formed tracked file on disk should be parsed into a repo
    object whose attributes mirror the saved state."""
    mock_isdir.return_value = True

    # Emulate the file that will be written to disk, for state saving.
    saved_state = {
        'repo': 'repo-uri',
        'sha': 'sha256-hash',
        'cron': '* * * * *',
        'plugins': {
            'HexHighEntropyString': 3,
        },
        'baseline_file': 'foobar',
        's3_config': 'make_sure_can_be_here_without_affecting_anything',
    }
    mock_file = mock.mock_open(read_data=json.dumps(saved_state, indent=2))

    with mock.patch(
            'detect_secrets_server.repos.base_tracked_repo.codecs.open',
            mock_file,
    ):
        repo = BaseTrackedRepo.load_from_file(
            'will_be_mocked',
            repo_config=RepoConfig(
                base_tmp_dir=DEFAULT_BASE_TMP_DIR,
                baseline='baseline',
                exclude_regex='',
            ),
        )

    assert repo.repo == 'repo-uri'
    assert repo.last_commit_hash == 'sha256-hash'
    assert repo.crontab == '* * * * *'
    assert repo.plugin_config.hex_limit == 3
    # Only plugins present in the file are configured; others stay unset.
    assert repo.plugin_config.base64_limit is None
def mock_tracked_repo(cls=BaseTrackedRepo, **kwargs):
    """Returns a mock TrackedRepo for testing.

    Any keyword argument supplied by the caller overrides the
    corresponding default below.
    """
    base_defaults = {
        'sha': 'does_not_matter',
        'repo': '[email protected]:pre-commit/pre-commit-hooks.git',
        'cron': '* * 4 * *',
        'repo_config': RepoConfig(
            base_tmp_dir='foo/bar',
            baseline='.secrets.baseline',
            exclude_regex='',
        ),
        'plugin_sensitivity': SensitivityValues(
            base64_limit=4.5,
            hex_limit=3,
        ),
    }
    # Caller-supplied kwargs win over the defaults.
    init_args = {**base_defaults, **kwargs}

    # Constructor checks that the repo directory exists; pretend it does.
    with mock.patch(
        'detect_secrets_server.repos.base_tracked_repo.os.path.isdir'
    ) as isdir_mock:
        isdir_mock.return_value = True
        return cls(**init_args)
def load_from_file(cls, repo_name, repo_config, *args, **kwargs):
    """This will load a TrackedRepo to memory, from a given tracked
    file. For automated management without a database.

    :type repo_name: string
    :param repo_name: git URL or local path of repo

    :type repo_config: RepoConfig
    :param repo_config: values to configure repos,
                        See `server_main` for more details.

    :return: TrackedRepo, or None if the tracked file could not be read.
    """
    tracked_data = cls._read_tracked_file(
        cls._get_repo_name(repo_name),
        repo_config.base_tmp_dir,
    )
    if tracked_data is None:
        return None

    tracked_data = cls._modify_tracked_file_contents(tracked_data)

    # Add server-side configuration to repo, keeping the baseline that
    # was saved with the tracked file itself.
    tracked_data['repo_config'] = RepoConfig(
        base_tmp_dir=repo_config.base_tmp_dir,
        exclude_regex=repo_config.exclude_regex,
        baseline=tracked_data['baseline_file'],
    )

    return cls(**tracked_data)
def parse_repo_config(args):
    """Build a RepoConfig from CLI arguments, letting values from the
    optional config file's `default` section take precedence.

    :param args: parsed arguments from parse_args.
    :return: RepoConfig
    """
    file_defaults = {}
    if args.config_file:
        file_defaults = open_config_file(args.config_file[0]).get('default', {})

    # A missing or empty baseline in the config file falls back to the
    # command-line value.
    baseline = file_defaults.get('baseline', '')
    if not baseline:
        baseline = args.baseline[0]

    return RepoConfig(
        base_tmp_dir=file_defaults.get('base_tmp_dir', DEFAULT_BASE_TMP_DIR),
        baseline=baseline,
        exclude_regex=file_defaults.get('exclude_regex', ''),
    )
def test_load_from_file_failures(self, mock_filepath):
    """load_from_file should return None, rather than raise, when the
    tracked file is missing, malformed, or its path cannot be built."""
    repo_config = RepoConfig(
        base_tmp_dir=DEFAULT_BASE_TMP_DIR,
        baseline='baseline',
        exclude_regex='',
    )

    # IOError: no tracked file exists at this path.
    mock_filepath.return_value = '/blah'
    assert BaseTrackedRepo.load_from_file('repo', repo_config) is None

    # JSONDecodeError: the tracked file holds invalid json.
    bad_json_file = mock.mock_open(read_data='not a json')
    with mock.patch(
            'detect_secrets_server.repos.base_tracked_repo.codecs.open',
            bad_json_file,
    ):
        assert BaseTrackedRepo.load_from_file('repo', repo_config) is None

    # TypeError: no filepath could be determined at all.
    mock_filepath.return_value = None
    assert BaseTrackedRepo.load_from_file('repo', repo_config) is None
def _mock_repo_config(self):
    """Return a minimal RepoConfig suitable for tests."""
    config_values = {
        'base_tmp_dir': 'default_base_tmp_dir',
        'baseline': 'baseline',
        'exclude_regex': '',
    }
    return RepoConfig(**config_values)
def initialize_repos_from_repo_yaml(
        repo_yaml,
        plugin_sensitivity,
        repo_config,
        s3_config=None
):
    """For expected yaml file format, see `repos.yaml.sample`

    Builds one TrackedRepo (of the appropriate subclass, chosen by
    `tracked_repo_factory`) per entry under the yaml's `tracked` key.
    Per-entry `plugins` and `baseline_file` values override the
    global plugin sensitivity and repo config, respectively.

    :type repo_yaml: string
    :param repo_yaml: filename of config file to read and parse

    :type plugin_sensitivity: SensitivityValues
    :type repo_config: RepoConfig
    :type s3_config: S3Config

    :return: list of TrackedRepos
    :raises: IOError
    """
    data = open_config_file(repo_yaml)

    output = []
    # No `tracked` key at all means nothing to initialize.
    if data.get('tracked') is None:
        return output

    for entry in data['tracked']:
        sensitivity = plugin_sensitivity
        if entry.get('plugins'):
            # Merge plugin sensitivities
            plugin_dict = plugin_sensitivity._asdict()

            # Use SensitivityValues constructor to convert values
            # NOTE(review): `_asdict()` on the entry-level values will also
            # contain None for every field the entry did NOT set; the
            # update() below therefore appears to overwrite global defaults
            # with None for unset fields — confirm against SensitivityValues'
            # constructor semantics whether this is intended.
            entry_sensitivity = SensitivityValues(**entry['plugins'])
            plugin_dict.update(entry_sensitivity._asdict())

            sensitivity = SensitivityValues(**plugin_dict)
        entry['plugin_sensitivity'] = sensitivity

        # Per-entry baseline overrides the server-wide one, keeping the
        # shared base_tmp_dir and exclude_regex.
        config = repo_config
        if 'baseline_file' in entry:
            config = RepoConfig(
                base_tmp_dir=repo_config.base_tmp_dir,
                exclude_regex=repo_config.exclude_regex,
                baseline=entry['baseline_file'],
            )
        entry['repo_config'] = config

        # An s3-backed entry without s3 credentials is skipped (logged),
        # not fatal — remaining entries are still initialized.
        if entry.get('s3_backed') and s3_config is None:
            CustomLogObj.getLogger().error(
                (
                    'Unable to load s3 config for %s. Make sure to specify '
                    '--s3-config-file for s3_backed repos!'
                ),
                entry.get('repo'),
            )
            continue
        entry['s3_config'] = s3_config

        # After setting up all arguments, create respective object.
        repo = tracked_repo_factory(
            entry.get('is_local_repo', False),
            entry.get('s3_backed', False),
        )
        output.append(repo(**entry))

    return output