def update_repo_detail_in_cache(repo_detail: dict, app_dir: str) -> None:
    """Refresh the cached detail entry for a single repo and keep the cached
    'imported_repositories' list in sync with it.

    :param repo_detail: detail dict for one repository; must contain 'name'
    :param app_dir: cnc application name used as the cache namespace
    :return: None
    """
    repo_name = repo_detail['name']

    # store the fresh per-repo detail (1 week TTL)
    cnc_utils.set_long_term_cached_value(app_dir, f'{repo_name}_detail', repo_detail, 604800, 'git_repo_details')

    # the cached list may be blank or None - start with an empty list in that case
    repos = cnc_utils.get_long_term_cached_value(app_dir, 'imported_repositories')
    if repos is None:
        repos = list()
    else:
        # drop the first stale entry for this repo, if one exists
        for index, existing in enumerate(repos):
            if existing.get('name', '') == repo_name:
                del repos[index]
                break

    # append the fresh detail and re-cache the whole list (1 week TTL)
    repos.append(repo_detail)
    cnc_utils.set_long_term_cached_value(app_dir, 'imported_repositories', repos, 604800, 'imported_git_repos')
def load_skillet_by_name(self, skillet_name) -> (dict, None):
    """
    Loads application specific skillet

    :param skillet_name: name of the skillet to locate
    :return: skillet dict if found, otherwise None
    """
    all_skillets = cnc_utils.get_long_term_cached_value(self.app_dir, 'all_snippets')

    if not all_skillets:
        # cache miss - load every skillet shipped with this application and
        # cache the list indefinitely (-1 ttl)
        all_skillets = list()
        application_skillets_dir = Path(os.path.join(settings.SRC_PATH, self.app_dir, 'snippets'))
        skillet_loader = SkilletLoader()
        for loaded_skillet in skillet_loader.load_all_skillets_from_dir(application_skillets_dir):
            all_skillets.append(loaded_skillet.skillet_dict)

        cnc_utils.set_long_term_cached_value(self.app_dir, 'all_snippets', all_skillets, -1)

    # linear scan for the requested skillet by name
    for skillet in all_skillets:
        if skillet['name'] == skillet_name:
            return skillet

    return None
def _get_current_tag() -> (str, None):
    """Return the current panhandler tag string, reading the 'tag' file on a
    cache miss and caching the result for 4 hours.

    :return: stripped tag string, or None if the tag file cannot be located
             or read
    """
    tag_string = cnc_utils.get_long_term_cached_value('panhandler', 'current_tag')
    if tag_string:
        return str(tag_string.strip())

    print('Getting updated tag')
    panhandler_config = cnc_utils.get_app_config('panhandler')
    if 'app_dir' not in panhandler_config:
        return None

    tag_file = os.path.join(panhandler_config['app_dir'], 'tag')
    if not os.path.exists(tag_file) or not os.path.isfile(tag_file):
        return None

    try:
        with open(tag_file, 'r') as tag_fh:
            tag_string = tag_fh.read()
    except OSError as ose:
        print('Could not read tag date data file')
        print(ose)
        return None

    # cache the raw file contents for 4 hours under the 'app_update' type
    cnc_utils.set_long_term_cached_value('panhandler', 'current_tag', tag_string, 14400, 'app_update')
    return str(tag_string.strip())
def _get_panhandler_image_data():
    """Fetch tag metadata for the panhandler image from the Docker Hub API.

    Results are cached for 4 hours; an empty dict is returned on any error.
    """
    docker_hub_link = 'https://hub.docker.com/v2/repositories/paloaltonetworks/panhandler/tags/'

    try:
        # serve from the long term cache whenever possible
        details_from_cache = cnc_utils.get_long_term_cached_value('panhandler', 'docker_image_details')
        if details_from_cache is not None:
            print('Returning docker_image_details from the cache')
            return details_from_cache

        print('Getting docker details from upstream')

        # NOTE(review): verify=False disables TLS certificate checking - confirm this is intentional
        resp = requests.get(docker_hub_link, verify=False, timeout=5)
        if resp.status_code != 200:
            print('Could not fetch docker_image_details')
            print(resp.text)
            print(resp.status_code)
            return {}

        details = resp.json()
        # cache for 4 hours under the 'app_update' type
        cnc_utils.set_long_term_cached_value('panhandler', 'docker_image_details', details, 14400, 'app_update')
        return details

    except ConnectionError as ce:
        print('Could not contact docker hub API')
        print(ce)
        return {}
    except Timeout as te:
        print('Timed out waiting for docker image details')
        print(te)
        return {}
def get_redirect_url(self, *args, **kwargs):
    """Remove an imported repository directory, invalidate the related caches,
    and redirect back to the repository list page.

    :param kwargs: must contain 'repo_name', the directory name of the repo to remove
    :return: redirect target for the repositories list page
    """
    repo_name = kwargs['repo_name']

    # repositories live in the per-user pan_cnc data directory
    user_dir = os.path.expanduser('~')
    snippets_dir = os.path.join(user_dir, '.pan_cnc', 'panhandler', 'repositories')

    # fix: resolve the target path and verify it is truly contained inside
    # snippets_dir. The previous substring test ('snippets_dir in repo_dir')
    # allowed a crafted repo_name containing '..' to delete directories
    # outside the repositories dir.
    repo_dir = os.path.abspath(os.path.join(snippets_dir, repo_name))
    if repo_dir != snippets_dir and os.path.commonpath([snippets_dir, repo_dir]) == snippets_dir:
        print(f'Removing repo {repo_name}')
        if os.path.exists(repo_dir):
            shutil.rmtree(repo_dir, ignore_errors=True)
        else:
            print(f'dir {repo_dir} is already gone!')

        print('Invalidating snippet cache')
        snippet_utils.invalidate_snippet_caches(self.app_dir)

        # drop the per-repo detail entry and evict all imported repo cache items
        cnc_utils.set_long_term_cached_value(self.app_dir, f'{repo_name}_detail', None, 0, 'snippet')
        cnc_utils.evict_cache_items_of_type(self.app_dir, 'imported_git_repos')

    messages.add_message(self.request, messages.SUCCESS, 'Repo Successfully Removed')
    return '/panhandler/repos'
def _get_current_build_time() -> (datetime, None):
    """Return the panhandler image build time, reading the 'build_date' file
    on a cache miss and caching the raw string for 4 hours.

    :return: parsed datetime, or None when the build_date file is missing,
             unreadable, or cannot be parsed
    """
    build_date_string = cnc_utils.get_long_term_cached_value('panhandler', 'current_build_time')

    if not build_date_string:
        print('Getting updated build_date_string')
        panhandler_config = cnc_utils.get_app_config('panhandler')
        if 'app_dir' not in panhandler_config:
            return None

        build_file = os.path.join(panhandler_config['app_dir'], 'build_date')
        if not os.path.exists(build_file) or not os.path.isfile(build_file):
            return None

        try:
            with open(build_file, 'r') as bf:
                build_date_string = str(bf.readline()).strip()

            # cache the raw string for 4 hours under the 'app_update' type
            cnc_utils.set_long_term_cached_value('panhandler', 'current_build_time', build_date_string, 14400,
                                                 'app_update')
        except OSError as ose:
            print('Could not read build date data file')
            print(ose)
            return None

    print(build_date_string)

    try:
        return datetime.strptime(build_date_string, '%Y-%m-%dT%H:%M:%S')
    except ValueError as ve:
        # fix: a malformed build_date file previously crashed the caller with
        # an unhandled ValueError; every other failure path returns None
        print('Could not parse build date data file contents')
        print(ve)
        return None
def get_context_data(self, **kwargs):
    """Build template context containing details for all imported repositories.

    The repo list is cached for one week under 'imported_repositories'; that
    cache is cleared elsewhere whenever repositories are imported or removed.
    """
    context = super().get_context_data(**kwargs)

    snippets_dir = Path(os.path.join(os.path.expanduser('~/.pan_cnc'), 'bootstrapper', 'repositories'))

    repos = cnc_utils.get_long_term_cached_value(self.app_dir, 'imported_repositories')
    if repos is not None:
        context['repos'] = repos
        return context

    repos = list()

    # fix: guard against a missing or unreadable repositories directory so we
    # return an empty list instead of crashing - consistent with the
    # panhandler repository list view which performs the same checks
    try:
        if not snippets_dir.exists():
            context['repos'] = repos
            return context

        for d in snippets_dir.iterdir():
            git_dir = d.joinpath('.git')
            # only directories that are actual git checkouts are repositories
            if git_dir.exists() and git_dir.is_dir():
                repo_detail = git_utils.get_repo_details(d.name, d, self.app_dir)
                repos.append(repo_detail)
    except OSError as oe:
        # PermissionError is a subclass of OSError and is covered here too
        print(oe)
        context['repos'] = repos
        return context

    # cache the repos list for 1 week. this will be cleared when we import a new repository or otherwise
    # change the repo list somehow
    cnc_utils.set_long_term_cached_value(self.app_dir, 'imported_repositories', repos, 604800,
                                         'imported_git_repos')
    context['repos'] = repos
    return context
def get_context_data(self, **kwargs):
    """Build template context listing all skillet collections plus per-collection
    metadata (skillet count and related collections).

    Both lists are cached for 24 hours under 'cached_collections' /
    'cached_collections_info' with cache type 'snippet'.
    """
    context = super().get_context_data(**kwargs)

    # serve both values from the cache when available
    cached_collections = cnc_utils.get_long_term_cached_value(self.app_dir, 'cached_collections')
    cached_collections_info = cnc_utils.get_long_term_cached_value(self.app_dir, 'cached_collections_info')
    if cached_collections is not None:
        context['collections'] = cached_collections
        context['collections_info'] = cached_collections_info
        return context

    # return a list of all defined collections
    collections = snippet_utils.load_all_label_values(self.app_dir, 'collection')

    # build dict of collections related to other collections (if any)
    # and a count of how many skillets are in the collection
    collections_info = dict()

    # manually create a collection called 'All'
    all_skillets = 'All Skillets'

    # get the full list of all snippets
    all_snippets = snippet_utils.load_all_snippets(self.app_dir)
    collections_info[all_skillets] = dict()
    collections_info[all_skillets]['count'] = len(all_snippets)
    # NOTE(review): 'related' is a plain list here but a JSON string for the
    # other collections below - confirm the template handles both forms
    collections_info[all_skillets]['related'] = list()

    # iterate over the list of collections
    for c in collections:
        if c not in collections_info:
            collections_info[c] = dict()
            collections_info[c]['count'] = 0

        skillets = snippet_utils.load_snippets_by_label('collection', c, self.app_dir)
        collections_info[c]['count'] = len(skillets)

        # collect the other collections referenced by skillets in this one
        related = list()
        for skillet in skillets:
            if 'labels' in skillet and 'collection' in skillet['labels']:
                # a skillet may belong to several collections at once
                if type(skillet['labels']['collection']) is list:
                    for related_collection in skillet['labels']['collection']:
                        if related_collection != c and related_collection not in related:
                            related.append(related_collection)

        collections_info[c]['related'] = json.dumps(related)

    # synthetic entries appended after counting; 'Kitchen Sink' has no
    # collections_info record of its own
    collections.append('Kitchen Sink')
    collections.append(all_skillets)

    context['collections'] = collections
    context['collections_info'] = collections_info

    # cache both for 24 hours
    cnc_utils.set_long_term_cached_value(self.app_dir, 'cached_collections', collections, 86400, 'snippet')
    cnc_utils.set_long_term_cached_value(self.app_dir, 'cached_collections_info',
                                         collections_info, 86400, 'snippet')
    return context
def get_redirect_url(self, *args, **kwargs):
    """Pull the latest changes for every imported repository, then redirect
    back to the repository list page.

    Posts success/error messages on the request and evicts the repo and
    snippet caches after the update pass.
    """
    user_dir = os.path.expanduser('~')
    base_dir = os.path.join(user_dir, '.pan_cnc', 'panhandler', 'repositories')
    base_path = Path(base_dir)

    # verify we can actually access the repositories directory before iterating
    try:
        base_path.stat()
    except PermissionError:
        messages.add_message(self.request, messages.ERROR, 'Could not update, Permission Denied')
        return '/panhandler/repos'
    except OSError:
        messages.add_message(self.request, messages.ERROR, 'Could not update, Access Error for repository directory')
        return '/panhandler/repos'

    if not base_path.exists():
        messages.add_message(self.request, messages.ERROR, 'Could not update, repositories directory does not exist')
        return '/panhandler/repos'

    err_condition = False
    updates = list()

    for repo_path in base_path.iterdir():
        # only consider directories that are actual git checkouts
        git_dir = repo_path.joinpath('.git')
        if not (git_dir.exists() and git_dir.is_dir()):
            continue

        msg = git_utils.update_repo(str(repo_path))

        if 'Error' in msg:
            print(f'Error updating Repository: {repo_path.name}')
            print(msg)
            messages.add_message(self.request, messages.ERROR, f'Could not update repository {repo_path.name}')
            err_condition = True
        elif 'updated' in msg or 'Checked out new' in msg:
            print(f'Updated Repository: {repo_path.name}')
            updates.append(repo_path.name)
            # force the per-repo detail to be rebuilt on next view
            cnc_utils.set_long_term_cached_value(self.app_dir, f'{repo_path.name}_detail', None, 0,
                                                 'git_repo_details')
            # remove all python3 init touch files if there is an update
            task_utils.python3_reset_init(str(repo_path))

    if not err_condition:
        updated_names = ", ".join(updates)
        messages.add_message(self.request, messages.SUCCESS, f'Successfully Updated repositories: {updated_names}')

    cnc_utils.evict_cache_items_of_type(self.app_dir, 'imported_git_repos')
    snippet_utils.invalidate_snippet_caches(self.app_dir)
    return '/panhandler/repos'
def get_context_data(self, **kwargs):
    """Build template context with details for every imported panhandler
    repository, serving from the one-week cache when possible."""
    context = super().get_context_data(**kwargs)

    snippets_dir = Path(os.path.join(os.path.expanduser('~/.pan_cnc'), 'panhandler', 'repositories'))

    # bail out early with an empty list if the repositories directory is
    # missing or unreadable
    try:
        if not snippets_dir.exists():
            messages.add_message(self.request, messages.ERROR,
                                 'Could not load repositories from directory as it does not exists')
            context['repos'] = list()
            return context
    except PermissionError as pe:
        print(pe)
        context['repos'] = list()
        return context
    except OSError as oe:
        print(oe)
        context['repos'] = list()
        return context

    cached_repos = cnc_utils.get_long_term_cached_value(self.app_dir, 'imported_repositories')
    if cached_repos is not None:
        print('Returning cached repos')
        context['repos'] = cached_repos
        return context

    repos = list()
    for repo_path in snippets_dir.iterdir():
        # only directories that are actual git checkouts count as repositories
        git_dir = repo_path.joinpath('.git')
        if git_dir.exists() and git_dir.is_dir():
            repos.append(git_utils.get_repo_details(repo_path.name, repo_path, self.app_dir))

    # cache the repos list for 1 week. this will be cleared when we import a new repository or otherwise
    # change the repo list somehow
    cnc_utils.set_long_term_cached_value(self.app_dir, 'imported_repositories', repos, 604800,
                                         'imported_git_repos')
    context['repos'] = repos
    return context
def update_skillet_cache() -> None:
    """
    Updates the 'all_snippets' key in the cnc cache. This gets called whenever a repository
    is initialized or updated to ensure the legacy cache is always kept up to date

    :return: None
    """
    # refresh=True forces a rebuild from the database rather than the cache
    all_skillets = load_all_skillets(refresh=True)

    # FIXME - this can and will break if every more than one app tries to do this...
    app_name = get_default_app_name()

    # ensure everything gets removed!
    cnc_utils.clear_long_term_cache(app_name)

    # -1 ttl caches the value indefinitely
    cnc_utils.set_long_term_cached_value(app_name, 'all_snippets', all_skillets, -1)

    # db_utils.load_add_skillets saves all_skillets under 'cnc' app name, ensure this is updated here as well...
    cnc_utils.set_long_term_cached_value('cnc', 'all_snippets', all_skillets, -1)
def load_all_skillets(refresh=False) -> list:
    """
    Returns a list of skillet dictionaries

    :param refresh: Boolean flag whether to use the cache or force a cache refresh
    :return: skillet dictionaries
    """
    if refresh is False:
        cached_skillets = cnc_utils.get_long_term_cached_value('cnc', 'all_snippets')
        if cached_skillets is not None:
            return cached_skillets

    # rebuild the list from the database records
    skillet_dicts = [json.loads(record.skillet_json) for record in Skillet.objects.all()]

    # cache indefinitely (-1 ttl) under the 'cnc' namespace
    cnc_utils.set_long_term_cached_value('cnc', 'all_snippets', skillet_dicts, -1)
    return skillet_dicts
def get_repo_upstream_details(repo_name: str, repo_url: str, app_name: str) -> dict:
    """
    Attempt to get the details from a git repository. Details are found via specific APIs for each type of git repo.
    Currently only Github is supported.

    :param repo_name: name of the repository (spaces are normalized for the cache key)
    :param repo_url: origin URL of the repository
    :param app_name: cnc application name
    :return: dict of upstream details; empty dict when unavailable or throttled
    """
    details = dict()

    # never make external calls from the test environment
    if cnc_utils.is_testing():
        return details

    cache_repo_name = repo_name.replace(' ', '_')
    cached_details = cnc_utils.get_long_term_cached_value(app_name, f'git_utils_upstream_{cache_repo_name}')

    # fix for issue #70, details will be None after cache miss, use a new var name to keep details as a dict
    if cached_details is not None:
        return cached_details

    # a previous failure sets this throttle flag to avoid hammering the API
    api_throttle_active = cnc_utils.get_long_term_cached_value(app_name, 'git_utils_api_throttle')

    if api_throttle_active:
        print('Skipping get_repo_upstream_details due to availability')
        return details

    print('Not found in cache, loading from upstream')

    url_details = parse_repo_origin_url(repo_url)
    owner = url_details.get('owner', '')
    repo = url_details.get('repo', '')

    try:
        api_url = f'https://api.github.com/repos/{owner}/{repo}'
        # fix for issue #70, increase timeout to 30 seconds
        detail_response = requests.get(api_url, verify=False, timeout=30)
        if detail_response.status_code != 200:
            # non-200 (e.g. rate limited) disables upstream queries for ~1 hour
            print(f'response was {detail_response.status_code}, disabling upstream api queries')
            cnc_utils.set_long_term_cached_value(app_name, 'git_utils_api_throttle', True, 3601,
                                                 'git_repo_details')
            return details

        details = detail_response.json()

        # fix for issue #70, cache this value for 3 days instead of 1
        cnc_utils.set_long_term_cached_value(app_name, f'git_utils_upstream_{cache_repo_name}', details, 259200,
                                             'git_repo_details')
    except ConnectionResetError as cre:
        print('Could not get github details due to ConnectionResetError')
        print(cre)
        api_throttle_active = True
    except RequestException as ce:
        print('Could not get github details due to RequestException')
        print(ce)
        api_throttle_active = True
    except Exception as e:
        # broad boundary catch: any unexpected failure also triggers throttling
        print(type(e))
        print(e)
        api_throttle_active = True

    if api_throttle_active:
        print('Disabling upstream api queries for 1 hour')
        cnc_utils.set_long_term_cached_value(app_name, 'git_utils_api_throttle', True, 3601, 'git_repo_details')

    return details
def get_repo_details(repo_name, repo_dir, app_name='cnc'):
    """
    Fetch the details for a given repo name and directory

    :param repo_name: display name of the repository (also used in the cache key)
    :param repo_dir: filesystem path of the repository checkout
    :param app_name: name of the CNC application
    :return: dict of repository details; contains an 'error' key when any git
             operation fails
    """
    # serve from the one-week cache when available
    repo_detail = cnc_utils.get_long_term_cached_value(app_name, f'{repo_name}_detail')
    if repo_detail:
        return repo_detail

    repo_detail = dict()
    repo_detail['name'] = repo_name

    try:
        repo = Repo(repo_dir)
    except NoSuchPathError as nspe:
        print(f'Repository directory {repo_dir} does not actually exist!')
        print(nspe)
        repo_detail['error'] = 'Repository directory could not be found!'
        return repo_detail
    except GitError as ge:
        print(ge)
        repo_detail['error'] = 'Git Repository Error!'
        return repo_detail

    # Fix for PH #172
    if not hasattr(repo.remotes, 'origin'):
        repo_detail['error'] = 'Git Repository Error! No origin set!'
        return repo_detail

    url = str(repo.remotes.origin.url)
    url_details = parse_repo_origin_url(url)

    # build browse / commit links per hosting provider
    is_github = False
    if 'github' in url:
        link = f"https://github.com/{url_details['owner']}/{url_details['repo']}"
        commits_url = f"https://github.com/{url_details['owner']}/{url_details['repo']}/commit/"
        is_github = True
    elif 'spring.palo' in url:
        link = f"https://spring.paloaltonetworks.com/{url_details['owner']}/{url_details['repo']}"
        commits_url = f"https://spring.paloaltonetworks.com/{url_details['owner']}/{url_details['repo']}/commit/"
    elif 'gitlab' in url:
        link = f"https://gitlab.com/{url_details['owner']}/{url_details['repo']}"
        commits_url = f"https://gitlab.com/{url_details['owner']}/{url_details['repo']}/-/commit/"
    else:
        link = ''
        commits_url = ''

    # fall back to the given repo name when the origin URL could not be parsed
    if 'repo' not in url_details or url_details['repo'] is None or url_details[
            'repo'] == '':
        url_details['repo'] = repo_name

    # defaults used until (and unless) git history can be read below
    branch = 'master'
    commit_log = list()
    last_updated = 0
    last_updated_str = ''

    repo_detail['label'] = url_details['repo']
    repo_detail['link'] = link
    repo_detail['dir'] = repo_name
    repo_detail['url'] = url
    repo_detail['branch'] = branch
    repo_detail['commits_url'] = commits_url
    repo_detail['is_github'] = is_github

    try:
        branch = repo.active_branch.name
        commits = repo.iter_commits(branch, max_count=5)
        # fix for #182 - do not lose track of current branch
        repo_detail['branch'] = branch

        for c in commits:
            commit_detail = dict()
            timestamp = datetime.datetime.fromtimestamp(c.committed_date)
            commit_detail['time'] = timestamp.strftime('%Y-%m-%d %H:%M')
            commit_detail['author'] = c.author.name
            commit_detail['message'] = c.message
            commit_detail['id'] = str(c)
            commit_log.append(commit_detail)

            # track the newest commit for the 'last updated' fields
            if c.committed_date > last_updated:
                last_updated = c.committed_date
                last_updated_str = commit_detail['time']

    except GitCommandError as gce:
        print('Could not get commits from repo')
        print(gce)
        # partial fix for PH #171 - bail out when issues getting git details here to avoid hang
        repo_detail['error'] = 'Could not fetch commit history for repo!'
        return repo_detail
    except GitError as ge:
        print('Unknown GitError')
        print(ge)
        repo_detail['error'] = 'Unknown Git error getting history for repo!'
        return repo_detail

    branches = __get_repo_branches(repo)

    repo_detail['branches'] = branches
    repo_detail['commits'] = commit_log
    repo_detail['last_updated'] = last_updated_str
    repo_detail['last_updated_time'] = last_updated

    upstream_details = dict()

    # upstream queries are only supported for github
    if 'github' in url.lower():
        upstream_details = get_repo_upstream_details(repo_name, url, app_name)

    if 'description' in upstream_details:
        # upstream may return a literal None or the string 'None'
        if upstream_details['description'] is None or upstream_details[
                'description'] == 'None':
            repo_detail['description'] = f"{url} {branch}"
        else:
            repo_detail['description'] = upstream_details['description']
    else:
        repo_detail['description'] = branch

    # cache the assembled details for one week
    cnc_utils.set_long_term_cached_value(app_name, f'{repo_name}_detail', repo_detail, 604800,
                                         'git_repo_details')
    return repo_detail
def get_recommended_links() -> list:
    """Fetch the list of recommended repository links configured for panhandler.

    The link list URL comes from application_data.recommended_repos_link in
    .pan-cnc.yaml; the fetched list is cached for 2 hours. Returns an empty
    list on any configuration, network, or validation problem.
    """
    app_name = 'panhandler'
    app_config = cnc_utils.get_app_config('panhandler')
    recommended_links = list()

    # do not make external calls if we are testing
    if cnc_utils.is_testing():
        print('Returning blank recommended links due to testing env')
        return recommended_links

    # validate the configuration before attempting any network calls
    if 'application_data' not in app_config:
        print('Could not find application_data in .pan-cnc.yaml')
        return recommended_links

    if type(app_config['application_data']) is not dict:
        print('malformed application_data in .pan-cnc.yaml')
        return recommended_links

    if 'recommended_repos_link' not in app_config['application_data']:
        print('Could not find value recommended_repos_link key in application_data')
        return recommended_links

    recommend_url = app_config['application_data']['recommended_repos_link']

    if not str(recommend_url).startswith('http'):
        print('recommended_repos_link does not appear to be a valid link')
        return recommended_links

    try:
        # try to pull from cache is possible
        recommends_from_cache = cnc_utils.get_long_term_cached_value(app_name, 'recommended_links')
        if recommends_from_cache is not None:
            print('Returning recommended_links from the cache')
            return recommends_from_cache

        # NOTE(review): verify=False disables TLS certificate checking - confirm intentional
        resp = requests.get(recommend_url, verify=False, timeout=5)
        if resp.status_code != 200:
            print('Could not fetch recommended_repos_link')
            print(resp.text)
            print(resp.status_code)
            return recommended_links

        data_object = oyaml.safe_load(resp.text)
        if _validate_recommended_data(data_object):
            # save for later (2 hour ttl)
            cnc_utils.set_long_term_cached_value(app_name, 'recommended_links', data_object['links'], 7200,
                                                 'recommended_links')
            return data_object['links']
        else:
            # FIXME - return a default list here
            return recommended_links
    except ValueError as ve:
        print('Could not load response')
        print(ve)
        return recommended_links
    except ConnectionError as ce:
        print('Could not fetch recommended links url')
        print(ce)
        return recommended_links
    except Timeout as te:
        print('Timed out waiting for recommended links to load')
        print(te)
        return recommended_links
def form_valid(self, form):
    """Clone a git repository into the panhandler repositories directory and
    import its skillets.

    Validates the repository name, creates the target directory, clones the
    repo, refreshes the repo cache, then reports skillet debug errors and
    unresolved skillet dependencies via request messages. Always redirects
    to the 'repos' page.
    """
    workflow = self.get_workflow()

    # get the values from the user submitted form here
    url = workflow.get('url')
    repo_name = workflow.get('repo_name')

    # repo_name becomes a directory name, so restrict the allowed characters
    if not re.match(r'^[a-zA-Z0-9-_ \.]*$', repo_name):
        print('Repository name is invalid!')
        messages.add_message(self.request, messages.ERROR, 'Invalid Repository Name')
        return HttpResponseRedirect('repos')

    user_dir = os.path.expanduser('~/.pan_cnc')
    snippets_dir = os.path.join(user_dir, 'panhandler/repositories')
    repo_dir = os.path.join(snippets_dir, repo_name)

    if os.path.exists(repo_dir):
        # an existing but empty directory is acceptable to clone into
        if os.path.isdir(repo_dir) and len(os.listdir(repo_dir)) == 0:
            print('Reusing existing repository directory')
        else:
            messages.add_message(self.request, messages.ERROR, 'A Repository with this name already exists')
            return HttpResponseRedirect('repos')
    else:
        try:
            # owner-only permissions on the new repository directory
            os.makedirs(repo_dir, mode=0o700)
        except PermissionError:
            messages.add_message(self.request, messages.ERROR,
                                 'Could not create repository directory, Permission Denied')
            return HttpResponseRedirect('repos')
        except OSError:
            messages.add_message(self.request, messages.ERROR, 'Could not create repository directory')
            return HttpResponseRedirect('repos')

    # where to clone from
    clone_url = url.strip()

    # for github repos, prefer the canonical clone_url reported upstream
    if 'github' in url.lower():
        details = git_utils.get_repo_upstream_details(repo_name, url, self.app_dir)
        if 'clone_url' in details:
            clone_url = details['clone_url']

    try:
        message = git_utils.clone_repository(repo_dir, repo_name, clone_url)
        print(message)
    except ImportRepositoryException as ire:
        messages.add_message(self.request, messages.ERROR, f'Could not Import Repository: {ire}')
    else:
        print('Invalidating snippet cache')
        snippet_utils.invalidate_snippet_caches(self.app_dir)

        # no need to evict all these items, just grab the new repo details and append it to list and re-cache
        # cnc_utils.evict_cache_items_of_type(self.app_dir, 'imported_git_repos')
        repos = cnc_utils.get_long_term_cached_value(self.app_dir, 'imported_repositories')

        # FIX for #148
        if repos is None:
            repos = list()

        repo_detail = git_utils.get_repo_details(repo_name, repo_dir, self.app_dir)
        repos.append(repo_detail)
        cnc_utils.set_long_term_cached_value(self.app_dir, 'imported_repositories', repos, 604800,
                                             'imported_git_repos')

        debug_errors = snippet_utils.debug_snippets_in_repo(Path(repo_dir), list())

        # check each snippet found for dependencies
        loaded_skillets = snippet_utils.load_snippets_of_type_from_dir(self.app_dir, repo_dir)
        for skillet in loaded_skillets:
            for depends in skillet['depends']:
                url = depends.get('url', None)
                branch = depends.get('branch', 'master')

                # now check each repo to see if we already have it, add an error if not
                found = False
                for repo in repos:
                    if repo['url'] == url and repo['branch'] == branch:
                        found = True
                        break

                if not found:
                    # NOTE(review): the adjacent f-strings below concatenate
                    # without a space ('followingrepository') - confirm intended
                    messages.add_message(self.request, messages.ERROR,
                                         f'Unresolved Dependency found!! Please ensure you import the following'
                                         f'repository: {url} with branch: {branch}')

        if debug_errors:
            messages.add_message(self.request, messages.ERROR,
                                 'Found Skillets with errors! Please open an issue on '
                                 'this repository to help resolve this issue')
            for d in debug_errors:
                if 'err_list' in d and 'path' in d and 'severity' in d:
                    for e in d['err_list']:
                        if d['severity'] == 'warn':
                            level = messages.WARNING
                        else:
                            level = messages.ERROR

                        messages.add_message(self.request, level, f'Skillet: {d["path"]}\n\nError: {e}')
        else:
            messages.add_message(self.request, messages.INFO, 'Imported Repository Successfully')

    # return render(self.request, 'pan_cnc/results.html', context)
    return HttpResponseRedirect('repos')
def get_repo_details(repo_name, repo_dir, app_name='cnc'):
    """
    Fetch the details for a given repo name and directory

    :param repo_name: display name of the repository (also used in the cache key)
    :param repo_dir: filesystem path of the repository checkout
    :param app_name: name of the CNC application
    :return: dict of repository details
    """
    # serve from the one-week cache when available
    repo_detail = cnc_utils.get_long_term_cached_value(app_name, f'{repo_name}_detail')
    if repo_detail:
        return repo_detail

    try:
        repo = Repo(repo_dir)
    except NoSuchPathError as nspe:
        print(f'Repository directory {repo_dir} does not actually exist!')
        print(nspe)
        repo_detail = dict()
        repo_detail['name'] = 'Repository directory could not be found!'
        return repo_detail
    except GitError as ge:
        print(ge)
        repo_detail = dict()
        repo_detail['name'] = 'Git Repository Error!'
        return repo_detail

    url = str(repo.remotes.origin.url)
    url_details = parse_repo_origin_url(url)

    if 'github' in url:
        link = f"https://github.com/{url_details['owner']}/{url_details['repo']}"
    else:
        link = url

    # fall back to the given repo name when the origin URL could not be parsed
    if 'repo' not in url_details or url_details['repo'] is None or url_details[
            'repo'] == '':
        url_details['repo'] = repo_name

    # fix: initialize these before the try block. Previously they were only
    # assigned inside it, so a GitCommandError / GitError fell through and
    # raised a NameError when repo_detail['branch'] / ['commits'] were built
    branch = 'master'
    commit_log = list()

    try:
        branch = repo.active_branch.name
        commits = repo.iter_commits(branch, max_count=5)

        for c in commits:
            commit_detail = dict()
            commit_detail['time'] = str(c.committed_datetime)
            commit_detail['author'] = c.author.name
            commit_detail['message'] = c.message
            commit_detail['id'] = str(c)
            commit_log.append(commit_detail)

    except GitCommandError as gce:
        print('Could not get commits from repo')
        print(gce)
    except GitError as ge:
        print('Unknown GitError')
        print(ge)

    repo_detail = dict()
    repo_detail['name'] = repo_name
    repo_detail['label'] = url_details['repo']
    repo_detail['link'] = link
    repo_detail['dir'] = repo_name
    repo_detail['url'] = url
    repo_detail['branch'] = branch
    repo_detail['commits'] = commit_log
    repo_detail['commits_url'] = get_repo_commits_url(url)

    upstream_details = get_repo_upstream_details(repo_name, url, app_name)

    if 'description' in upstream_details:
        # upstream may return a literal None or the string 'None'
        if upstream_details['description'] is None or upstream_details[
                'description'] == 'None':
            repo_detail['description'] = f"{url} {branch}"
        else:
            repo_detail['description'] = upstream_details['description']
    else:
        repo_detail['description'] = branch

    # cache the assembled details for one week
    cnc_utils.set_long_term_cached_value(app_name, f'{repo_name}_detail', repo_detail, 604800,
                                         'git_repo_details')
    return repo_detail