def _is_clean_line(line):
    # A record is clean when both its first- and last-name fields pass the token-size check.
    return valid_token_size(line['f_name']) and valid_token_size(line['l_name'])

def _filter_tokens(self, s):
    # Keep only the tokens that pass the size check and whose whitespace-split word count
    # is a valid name size.
    return {x for x in s if valid_token_size(x) and valid_name_size(x.split())}