def urlify_issues(newtext, repo_name):
    """Urlify issue references according to .ini configuration"""
    global _urlify_issues_f
    if _urlify_issues_f is None:
        from kallithea import CONFIG
        from kallithea.model.db import URL_SEP
        assert CONFIG['sqlalchemy.url'] # make sure config has been loaded

        # Build chain of urlify functions, starting with not doing any transformation
        tmp_urlify_issues_f = lambda s: s

        issue_pat_re = re.compile(r'issue_pat(.*)')
        for k in CONFIG.keys():
            # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
            m = issue_pat_re.match(k)
            if m is None:
                continue
            suffix = m.group(1)
            issue_pat = CONFIG.get(k)
            issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
            issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
            if issue_pat and issue_server_link and issue_prefix:
                log.debug('issue pattern %r: %r -> %r %r', suffix,
                          issue_pat, issue_server_link, issue_prefix)
            else:
                log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix,
                          issue_pat, issue_server_link, issue_prefix)
                continue

            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making
            # sure all loop variables (and compiled regexps) are bound
            issue_re = re.compile(issue_pat)

            def issues_replace(match_obj,
                               issue_server_link=issue_server_link, issue_prefix=issue_prefix):
                leadingspace = ' ' if match_obj.group().startswith(' ') else ''
                issue_id = ''.join(match_obj.groups())
                issue_url = issue_server_link.replace('{id}', issue_id)
                issue_url = issue_url.replace('{repo}', repo_name)
                issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
                return (
                    '%(leadingspace)s<a class="issue-tracker-link" href="%(url)s">'
                    '%(issue-prefix)s%(id-repr)s'
                    '</a>'
                    ) % {
                        'leadingspace': leadingspace,
                        'url': issue_url,
                        'id-repr': issue_id,
                        'issue-prefix': issue_prefix,
                        'serv': issue_server_link,
                    }
            tmp_urlify_issues_f = (lambda s,
                                          issue_re=issue_re,
                                          issues_replace=issues_replace,
                                          chain_f=tmp_urlify_issues_f:
                                   issue_re.sub(issues_replace, chain_f(s)))

        # Set tmp function globally - atomically
        _urlify_issues_f = tmp_urlify_issues_f

    return _urlify_issues_f(newtext)

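# A minimal, self-contained sketch (not part of Kallithea) of the
# closure-chaining technique used above: each configured pattern wraps the
# previous function, and default arguments bind the loop variables so every
# layer keeps its own compiled regex instead of sharing the last loop value.
import re

def _build_chain(patterns_and_repls):
    chain_f = lambda s: s  # start with the identity transformation
    for pat, repl in patterns_and_repls:
        pat_re = re.compile(pat)
        # bind pat_re, repl and the previous chain_f as defaults; without
        # these defaults every layer would see the *last* loop values
        chain_f = (lambda s, pat_re=pat_re, repl=repl, chain_f=chain_f:
                   pat_re.sub(repl, chain_f(s)))
    return chain_f

f = _build_chain([(r'#(\d+)', r'issue \1'), (r'\bWIP\b', 'work in progress')])
print(f('WIP: fix #42'))  # -> 'work in progress: fix issue 42'
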
def show_id(cs):
    """
    Configurable function that shows ID;
    by default it's r123:fffeeefffeee

    :param cs: changeset instance
    """
    from kallithea import CONFIG
    def_len = safe_int(CONFIG.get('show_sha_length', 12))
    show_rev = str2bool(CONFIG.get('show_revision_number', False))

    raw_id = cs.raw_id[:def_len]
    if show_rev:
        return 'r%s:%s' % (cs.revision, raw_id)
    else:
        return raw_id

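# Quick illustration of the two show_id output forms, using a stand-in
# changeset object (hypothetical, just for demonstration):
class _FakeChangeset(object):
    revision = 123
    raw_id = 'fffeeefffeeeabcdef0123456789abcdef012345'

cs = _FakeChangeset()
# with show_revision_number = True and show_sha_length = 12:
print('r%s:%s' % (cs.revision, cs.raw_id[:12]))  # -> 'r123:fffeeefffeee'
# with show_revision_number = False (the default):
print(cs.raw_id[:12])                            # -> 'fffeeefffeee'
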
def __get_desc(self, cs):
    desc_msg = [(_('%s committed on %s') %
                 (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
    # branches, tags, bookmarks
    if cs.branch:
        desc_msg.append('branch: %s<br/>' % cs.branch)
    if h.is_hg(c.db_repo_scm_instance):
        for book in cs.bookmarks:
            desc_msg.append('bookmark: %s<br/>' % book)
    for tag in cs.tags:
        desc_msg.append('tag: %s<br/>' % tag)
    diff_processor, changes = self.__changes(cs)
    # rev link
    _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                           revision=cs.raw_id)
    desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

    desc_msg.append('<pre>')
    desc_msg.append(h.urlify_text(cs.message))
    desc_msg.append('\n')
    desc_msg.extend(changes)
    if str2bool(CONFIG.get('rss_include_diff', False)):
        desc_msg.append('\n\n')
        desc_msg.append(diff_processor.as_raw())
    desc_msg.append('</pre>')
    return map(safe_unicode, desc_msg)

def __init__(self, reponame, username, *args, **kwargs):
    from kallithea import CONFIG
    from kallithea.lib.utils2 import safe_int
    _code = CONFIG.get('lock_ret_code')
    self.code = safe_int(_code, self.code)
    self.title = self.explanation = ('Repository `%s` locked by '
                                     'user `%s`' % (reponame, username))
    super(HTTPLockedRC, self).__init__(*args, **kwargs)

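# safe_int (from kallithea.lib.utils2) returns the value as an int, or the
# given default when the value is missing or not a number, so an unset or
# malformed lock_ret_code keeps the class default. A rough stand-in sketch
# (423, HTTP "Locked", is just an illustrative default here):
def _safe_int_sketch(val, default=None):
    try:
        return int(val)
    except (TypeError, ValueError):
        return default

print(_safe_int_sketch(None, 423))   # lock_ret_code unset -> keep 423
print(_safe_int_sketch('402', 423))  # lock_ret_code = 402 -> 402
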
def canonical_hostname():
    '''Return canonical hostname of system'''
    from kallithea import CONFIG
    try:
        parts = CONFIG.get('canonical_url', '').split('://', 1)
        return parts[1].split('/', 1)[0]
    except IndexError:
        parts = url('home', qualified=True).split('://', 1)
        return parts[1].split('/', 1)[0]

def canonical_url(*args, **kargs):
    '''Like url(x, qualified=True), but returns a url that not only is
    qualified but also canonical, as configured in canonical_url'''
    from kallithea import CONFIG
    try:
        parts = CONFIG.get('canonical_url', '').split('://', 1)
        kargs['host'] = parts[1].split('/', 1)[0]
        kargs['protocol'] = parts[0]
    except IndexError:
        kargs['qualified'] = True
    return url(*args, **kargs)

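# How the canonical_url setting is taken apart by the two helpers above
# (illustrative value):
setting = 'https://kallithea.example.com/prefix'
parts = setting.split('://', 1)
print(parts[0])                   # -> 'https' (protocol)
print(parts[1].split('/', 1)[0])  # -> 'kallithea.example.com' (host only)
# An empty/unset setting gives a single-element list, so parts[1] raises
# IndexError - the except branch both helpers rely on as their fallback:
print(''.split('://', 1))         # -> ['']
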
def __changes(self, cs):
    changes = []
    rss_cut_off_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
    diff_processor = DiffProcessor(cs.diff(),
                                   diff_limit=rss_cut_off_limit)
    _parsed = diff_processor.prepare(inline_diff=False)
    limited_diff = False
    if isinstance(_parsed, LimitedDiffContainer):
        limited_diff = True

    for st in _parsed:
        st.update({'added': st['stats']['added'],
                   'removed': st['stats']['deleted']})
        changes.append('\n %(operation)s %(filename)s '
                       '(%(added)s lines added, %(removed)s lines removed)'
                       % st)
    if limited_diff:
        changes = changes + ['\n ' +
                             _('Changeset was too big and was cut off...')]
    return diff_processor, changes

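# The per-file summary line is plain dict-based %-formatting; with an
# illustrative entry from the parsed diff it renders like this:
st = {'operation': 'M', 'filename': 'setup.py', 'added': 3, 'removed': 1}
print('\n %(operation)s %(filename)s '
      '(%(added)s lines added, %(removed)s lines removed)' % st)
# ->  M setup.py (3 lines added, 1 lines removed)
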
def allowed_api_access(controller_name, whitelist=None, api_key=None):
    """
    Check if given controller_name is in whitelist API access
    """
    if not whitelist:
        from kallithea import CONFIG
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
                           sep=',')
        log.debug('whitelist of API access is: %s', whitelist)
    api_access_valid = controller_name in whitelist
    if api_access_valid:
        log.debug('controller:%s is in API whitelist', controller_name)
    else:
        msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
        if api_key:
            # if we use API key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)
    return api_access_valid

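# The whitelist comes from a comma-separated .ini value such as
# api_access_controllers_whitelist = ChangesetController:changeset_raw,FilesController:raw
# and aslist splits it into a list; a rough stand-in of that helper:
def _aslist_sketch(obj, sep=None):
    if obj is None:
        return []
    if isinstance(obj, str):
        return [v.strip() for v in obj.split(sep) if v.strip()]
    return list(obj)

whitelist = _aslist_sketch('ChangesetController:changeset_raw, FilesController:raw', sep=',')
print('FilesController:raw' in whitelist)    # -> True
print('UsersController:index' in whitelist)  # -> False
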
def _get_feed_from_cache(key, kind):
    feed = Rss201rev2Feed(
        title=_('%s %s feed') % (c.site_name, repo_name),
        link=h.canonical_url('summary_home', repo_name=repo_name),
        description=_('Changes on %s repository') % repo_name,
        language=language,
        ttl=ttl
    )

    rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
    for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
        feed.add_item(title=self._get_title(cs),
                      link=h.canonical_url('changeset_home', repo_name=repo_name,
                                           revision=cs.raw_id),
                      author_name=cs.author,
                      description=''.join(self.__get_desc(cs)),
                      pubdate=cs.date,
                      )

    response.content_type = feed.mime_type
    return feed.writeString('utf-8')

def archivefile(self, repo_name, fname):
    fileformat = None
    revision = None
    ext = None
    subrepos = request.GET.get('subrepos') == 'true'

    for a_type, ext_data in settings.ARCHIVE_SPECS.items():
        archive_spec = fname.split(ext_data[1])
        if len(archive_spec) == 2 and archive_spec[1] == '':
            fileformat = a_type or ext_data[1]
            revision = archive_spec[0]
            ext = ext_data[1]

    try:
        dbrepo = RepoModel().get_by_repo_name(repo_name)
        if not dbrepo.enable_downloads:
            return _('Downloads disabled')

        if c.db_repo_scm_instance.alias == 'hg':
            # patch and reset hooks section of UI config to not run any
            # hooks on fetching archives with subrepos
            for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
                c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)

        cs = c.db_repo_scm_instance.get_changeset(revision)
        content_type = settings.ARCHIVE_SPECS[fileformat][0]
    except ChangesetDoesNotExistError:
        return _('Unknown revision %s') % revision
    except EmptyRepositoryError:
        return _('Empty repository')
    except (ImproperArchiveTypeError, KeyError):
        return _('Unknown archive type')

    # archive cache
    from kallithea import CONFIG
    rev_name = cs.raw_id[:12]
    archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
                                safe_str(rev_name), ext)

    use_cached_archive = False  # defines if we use cached version of archive
    archive_cache_enabled = CONFIG.get('archive_cache_dir')
    if not subrepos and archive_cache_enabled:
        # check if it's ok to write
        if not os.path.isdir(CONFIG['archive_cache_dir']):
            os.makedirs(CONFIG['archive_cache_dir'])
        cached_archive_path = os.path.join(CONFIG['archive_cache_dir'], archive_name)
        if os.path.isfile(cached_archive_path):
            log.debug('Found cached archive in %s' % cached_archive_path)
            fd, archive = None, cached_archive_path
            use_cached_archive = True
        else:
            log.debug('Archive %s is not yet cached' % (archive_name))

    if not use_cached_archive:
        # generate new archive
        fd, archive = tempfile.mkstemp()
        temp_stream = open(archive, 'wb')
        log.debug('Creating new temp archive in %s' % archive)
        cs.fill_archive(stream=temp_stream, kind=fileformat, subrepos=subrepos)
        temp_stream.close()
        if not subrepos and archive_cache_enabled:
            # if we generated the archive and use cache rename that
            log.debug('Storing new archive in %s' % cached_archive_path)
            shutil.move(archive, cached_archive_path)
            archive = cached_archive_path

    def get_chunked_archive(archive):
        stream = open(archive, 'rb')
        while True:
            data = stream.read(16 * 1024)
            if not data:
                stream.close()
                if fd:  # fd means we used temporary file
                    os.close(fd)
                if not archive_cache_enabled:
                    log.debug('Destroying temp archive %s' % archive)
                    os.remove(archive)
                break
            yield data

    # store download action
    action_logger(user=c.authuser,
                  action='user_downloaded_archive:%s' % (archive_name),
                  repo=repo_name, ipaddr=self.ip_addr, commit=True)
    response.content_disposition = str('attachment; filename=%s' % (archive_name))
    response.content_type = str(content_type)
    return get_chunked_archive(archive)

def urlify_issues(newtext, repo_name):
    """Urlify issue references according to .ini configuration"""
    global _urlify_issues_f
    if _urlify_issues_f is None:
        from kallithea import CONFIG
        from kallithea.model.db import URL_SEP
        assert CONFIG['sqlalchemy.url'] # make sure config has been loaded

        # Build chain of urlify functions, starting with not doing any transformation
        def tmp_urlify_issues_f(s):
            return s

        issue_pat_re = re.compile(r'issue_pat(.*)')
        for k in CONFIG:
            # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
            m = issue_pat_re.match(k)
            if m is None:
                continue
            suffix = m.group(1)
            issue_pat = CONFIG.get(k)
            issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
            issue_sub = CONFIG.get('issue_sub%s' % suffix)
            issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
            if issue_prefix:
                log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
            if not issue_pat:
                log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
                continue
            if not issue_server_link:
                log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
                continue
            if issue_sub is None: # issue_sub can be empty but should be present
                log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
                continue

            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making
            # sure all loop variables (and compiled regexps) are bound
            try:
                issue_re = re.compile(issue_pat)
            except re.error as e:
                log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s',
                          k, issue_pat, issue_server_link, issue_sub, str(e))
                continue

            log.debug('issue pattern %r: %r -> %r %r', k,
                      issue_pat, issue_server_link, issue_sub)

            def issues_replace(match_obj,
                               issue_server_link=issue_server_link, issue_sub=issue_sub):
                try:
                    issue_url = match_obj.expand(issue_server_link)
                except (IndexError, re.error) as e:
                    log.error('invalid issue_url setting %r -> %r %r. Error: %s',
                              issue_pat, issue_server_link, issue_sub, str(e))
                    issue_url = issue_server_link
                issue_url = issue_url.replace('{repo}', repo_name)
                issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
                # if issue_sub is empty use the matched issue reference verbatim
                if not issue_sub:
                    issue_text = match_obj.group()
                else:
                    try:
                        issue_text = match_obj.expand(issue_sub)
                    except (IndexError, re.error) as e:
                        log.error('invalid issue_sub setting %r -> %r %r. Error: %s',
                                  issue_pat, issue_server_link, issue_sub, str(e))
                        issue_text = match_obj.group()

                return (
                    '<a class="issue-tracker-link" href="%(url)s">'
                    '%(text)s'
                    '</a>'
                    ) % {
                        'url': issue_url,
                        'text': issue_text,
                    }
            def tmp_urlify_issues_f(s, issue_re=issue_re,
                                    issues_replace=issues_replace,
                                    chain_f=tmp_urlify_issues_f):
                return issue_re.sub(issues_replace, chain_f(s))

        # Set tmp function globally - atomically
        _urlify_issues_f = tmp_urlify_issues_f

    return _urlify_issues_f(newtext)

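# A hedged example of the newer issue_pat/issue_server_link/issue_sub style:
# both the link target and the displayed text are regex expansions of the
# match, so backreferences like \1 carry the issue number through. The
# pattern and URLs below are illustrative, not shipped defaults:
import re

issue_pat = r'#(\d+)'
issue_server_link = r'https://issues.example.com/{repo}/issue/\1'
issue_sub = r'#\1'

def _link(match_obj):
    url = match_obj.expand(issue_server_link).replace('{repo}', 'myrepo')
    text = match_obj.expand(issue_sub)
    return '<a class="issue-tracker-link" href="%s">%s</a>' % (url, text)

print(re.sub(issue_pat, _link, 'fixes #42'))
# -> fixes <a class="issue-tracker-link" href="https://issues.example.com/myrepo/issue/42">#42</a>
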
def _handle_request(self, environ, start_response):
    if not is_git(environ):
        return self.application(environ, start_response)
    if not self._check_ssl(environ):
        return HTTPNotAcceptable('SSL REQUIRED !')(environ, start_response)

    ip_addr = self._get_ip_addr(environ)
    username = None
    self._git_first_op = False
    # skip passing error to error controller
    environ['pylons.status_code_redirect'] = True

    #======================================================================
    # EXTRACT REPOSITORY NAME FROM ENV
    #======================================================================
    try:
        str_repo_name = self.__get_repository(environ)
        repo_name = safe_unicode(str_repo_name)
        log.debug('Extracted repo name is %s', repo_name)
    except Exception as e:
        log.error('error extracting repo_name: %r', e)
        return HTTPInternalServerError()(environ, start_response)

    # quick check if that dir exists...
    if not is_valid_repo(repo_name, self.basepath, 'git'):
        return HTTPNotFound()(environ, start_response)

    #======================================================================
    # GET ACTION PULL or PUSH
    #======================================================================
    action = self.__get_action(environ)

    #======================================================================
    # CHECK ANONYMOUS PERMISSION
    #======================================================================
    if action in ['pull', 'push']:
        anonymous_user = User.get_default_user(cache=True)
        username = anonymous_user.username
        if anonymous_user.active:
            # ONLY check permissions if the user is activated
            anonymous_perm = self._check_permission(action, anonymous_user,
                                                    repo_name, ip_addr)
        else:
            anonymous_perm = False

        if not anonymous_user.active or not anonymous_perm:
            if not anonymous_user.active:
                log.debug('Anonymous access is disabled, running '
                          'authentication')

            if not anonymous_perm:
                log.debug('Not enough credentials to access this '
                          'repository as anonymous user')

            username = None
            #==============================================================
            # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
            # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
            #==============================================================

            # try to auth based on environ, container auth methods
            log.debug('Running PRE-AUTH for container based authentication')
            pre_auth = auth_modules.authenticate('', '', environ)
            if pre_auth is not None and pre_auth.get('username'):
                username = pre_auth['username']
            log.debug('PRE-AUTH got %s as username', username)

            # If not authenticated by the container, running basic auth
            if not username:
                self.authenticate.realm = safe_str(self.config['realm'])
                result = self.authenticate(environ)
                if isinstance(result, str):
                    AUTH_TYPE.update(environ, 'basic')
                    REMOTE_USER.update(environ, result)
                    username = result
                else:
                    return result.wsgi_application(environ, start_response)

            #==============================================================
            # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
            #==============================================================
            try:
                user = User.get_by_username_or_email(username)
                if user is None or not user.active:
                    return HTTPForbidden()(environ, start_response)
                username = user.username
            except Exception:
                log.error(traceback.format_exc())
                return HTTPInternalServerError()(environ, start_response)

            # check permissions for this repository
            perm = self._check_permission(action, user, repo_name, ip_addr)
            if not perm:
                return HTTPForbidden()(environ, start_response)

    # extras are injected into UI object and later available
    # in hooks executed by kallithea
    from kallithea import CONFIG
    server_url = get_server_url(environ)
    extras = {
        'ip': ip_addr,
        'username': username,
        'action': action,
        'repository': repo_name,
        'scm': 'git',
        'config': CONFIG['__file__'],
        'server_url': server_url,
        'make_lock': None,
        'locked_by': [None, None]
    }

    #===================================================================
    # GIT REQUEST HANDLING
    #===================================================================
    repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
    log.debug('Repository path is %s', repo_path)

    # CHECK LOCKING only if it's not ANONYMOUS USER
    if username != User.DEFAULT_USER:
        log.debug('Checking locking on repository')
        (make_lock,
         locked,
         locked_by) = self._check_locking_state(
            environ=environ, action=action,
            repo=repo_name, user_id=user.user_id
        )
        # store the make_lock for later evaluation in hooks
        extras.update({'make_lock': make_lock,
                       'locked_by': locked_by})

    fix_PATH()
    log.debug('HOOKS extras is %s', extras)
    baseui = make_ui('db')
    self.__inject_extras(repo_path, baseui, extras)

    try:
        self._handle_githooks(repo_name, action, baseui, environ)
        log.info('%s action on Git repo "%s" by "%s" from %s',
                 action, str_repo_name, safe_str(username), ip_addr)
        app = self.__make_app(repo_name, repo_path, extras)
        result = app(environ, start_response)
        if action == 'push':
            result = WSGIResultCloseCallback(result,
                lambda: self._invalidate_cache(repo_name))
        return result
    except HTTPLockedRC as e:
        _code = CONFIG.get('lock_ret_code')
        log.debug('Repository LOCKED ret code %s!', _code)
        return e(environ, start_response)
    except Exception:
        log.error(traceback.format_exc())
        return HTTPInternalServerError()(environ, start_response)

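# The push path above wraps the WSGI result so that cache invalidation runs
# only after the whole response has been streamed to the client. A minimal
# sketch of such a wrapper, assuming WSGIResultCloseCallback works along
# these lines (the real class ships with Kallithea):
class _ResultCloseCallbackSketch(object):
    def __init__(self, result, callback):
        self._result = result
        self._callback = callback

    def __iter__(self):
        # pass the wrapped response body through unchanged
        for chunk in self._result:
            yield chunk

    def close(self):
        # WSGI servers call close() on the result when the response is done
        if hasattr(self._result, 'close'):
            self._result.close()
        self._callback()
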
def archivefile(self, repo_name, fname):
    fileformat = None
    revision = None
    ext = None
    subrepos = request.GET.get('subrepos') == 'true'

    for a_type, ext_data in settings.ARCHIVE_SPECS.items():
        archive_spec = fname.split(ext_data[1])
        if len(archive_spec) == 2 and archive_spec[1] == '':
            fileformat = a_type or ext_data[1]
            revision = archive_spec[0]
            ext = ext_data[1]

    try:
        dbrepo = RepoModel().get_by_repo_name(repo_name)
        if not dbrepo.enable_downloads:
            return _('Downloads disabled') # TODO: do something else?

        if c.db_repo_scm_instance.alias == 'hg':
            # patch and reset hooks section of UI config to not run any
            # hooks on fetching archives with subrepos
            for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
                c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)

        cs = c.db_repo_scm_instance.get_changeset(revision)
        content_type = settings.ARCHIVE_SPECS[fileformat][0]
    except ChangesetDoesNotExistError:
        return _('Unknown revision %s') % revision
    except EmptyRepositoryError:
        return _('Empty repository')
    except (ImproperArchiveTypeError, KeyError):
        return _('Unknown archive type')

    from kallithea import CONFIG
    rev_name = cs.raw_id[:12]
    archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)

    archive_path = None
    cached_archive_path = None
    archive_cache_dir = CONFIG.get('archive_cache_dir')
    if archive_cache_dir and not subrepos: # TODO: subrepo caching?
        if not os.path.isdir(archive_cache_dir):
            os.makedirs(archive_cache_dir)
        cached_archive_path = os.path.join(archive_cache_dir, archive_name)
        if os.path.isfile(cached_archive_path):
            log.debug('Found cached archive in %s', cached_archive_path)
            archive_path = cached_archive_path
        else:
            log.debug('Archive %s is not yet cached', archive_name)

    if archive_path is None:
        # generate new archive
        fd, archive_path = tempfile.mkstemp()
        log.debug('Creating new temp archive in %s', archive_path)
        with os.fdopen(fd, 'wb') as stream:
            cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
            # stream (and thus fd) has been closed by cs.fill_archive
        if cached_archive_path is not None:
            # we generated the archive - move it to cache
            log.debug('Storing new archive in %s', cached_archive_path)
            shutil.move(archive_path, cached_archive_path)
            archive_path = cached_archive_path

    def get_chunked_archive(archive_path):
        stream = open(archive_path, 'rb')
        while True:
            data = stream.read(16 * 1024)
            if not data:
                break
            yield data
        stream.close()
        if archive_path != cached_archive_path:
            log.debug('Destroying temp archive %s', archive_path)
            os.remove(archive_path)

    action_logger(user=request.authuser,
                  action='user_downloaded_archive:%s' % (archive_name),
                  repo=repo_name, ipaddr=request.ip_addr, commit=True)
    response.content_disposition = str('attachment; filename=%s' % (archive_name))
    response.content_type = str(content_type)
    return get_chunked_archive(archive_path)

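# How fname is decomposed into revision and extension: splitting on a known
# extension yields ['<revision>', ''] exactly when fname ends with it
# (the ARCHIVE_SPECS extension shown here is illustrative):
fname = 'deadbeef0123.tar.gz'
ext = '.tar.gz'
archive_spec = fname.split(ext)
print(archive_spec)  # -> ['deadbeef0123', '']
if len(archive_spec) == 2 and archive_spec[1] == '':
    revision = archive_spec[0]
    print(revision)  # -> 'deadbeef0123'
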
def _handle_request(self, environ, start_response):
    if not is_git(environ):
        return self.application(environ, start_response)
    if not self._check_ssl(environ):
        return HTTPNotAcceptable('SSL REQUIRED !')(environ, start_response)

    ip_addr = self._get_ip_addr(environ)
    username = None
    self._git_first_op = False
    # skip passing error to error controller
    environ['pylons.status_code_redirect'] = True

    #======================================================================
    # EXTRACT REPOSITORY NAME FROM ENV
    #======================================================================
    try:
        repo_name = self.__get_repository(environ)
        log.debug('Extracted repo name is %s' % repo_name)
    except Exception:
        return HTTPInternalServerError()(environ, start_response)

    # quick check if that dir exists...
    if not is_valid_repo(repo_name, self.basepath, 'git'):
        return HTTPNotFound()(environ, start_response)

    #======================================================================
    # GET ACTION PULL or PUSH
    #======================================================================
    action = self.__get_action(environ)

    #======================================================================
    # CHECK ANONYMOUS PERMISSION
    #======================================================================
    if action in ['pull', 'push']:
        anonymous_user = self.__get_user('default')
        username = anonymous_user.username
        if anonymous_user.active:
            # ONLY check permissions if the user is activated
            anonymous_perm = self._check_permission(action, anonymous_user,
                                                    repo_name, ip_addr)
        else:
            anonymous_perm = False

        if not anonymous_user.active or not anonymous_perm:
            if not anonymous_user.active:
                log.debug('Anonymous access is disabled, running '
                          'authentication')

            if not anonymous_perm:
                log.debug('Not enough credentials to access this '
                          'repository as anonymous user')

            username = None
            #==============================================================
            # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
            # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
            #==============================================================

            # try to auth based on environ, container auth methods
            log.debug('Running PRE-AUTH for container based authentication')
            pre_auth = auth_modules.authenticate('', '', environ)
            if pre_auth and pre_auth.get('username'):
                username = pre_auth['username']
            log.debug('PRE-AUTH got %s as username' % username)

            # If not authenticated by the container, running basic auth
            if not username:
                self.authenticate.realm = safe_str(self.config['realm'])
                result = self.authenticate(environ)
                if isinstance(result, str):
                    AUTH_TYPE.update(environ, 'basic')
                    REMOTE_USER.update(environ, result)
                    username = result
                else:
                    return result.wsgi_application(environ, start_response)

            #==============================================================
            # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
            #==============================================================
            try:
                user = self.__get_user(username)
                if user is None or not user.active:
                    return HTTPForbidden()(environ, start_response)
                username = user.username
            except Exception:
                log.error(traceback.format_exc())
                return HTTPInternalServerError()(environ, start_response)

            # check permissions for this repository
            perm = self._check_permission(action, user, repo_name, ip_addr)
            if not perm:
                return HTTPForbidden()(environ, start_response)

    # extras are injected into UI object and later available
    # in hooks executed by kallithea
    from kallithea import CONFIG
    server_url = get_server_url(environ)
    extras = {
        'ip': ip_addr,
        'username': username,
        'action': action,
        'repository': repo_name,
        'scm': 'git',
        'config': CONFIG['__file__'],
        'server_url': server_url,
        'make_lock': None,
        'locked_by': [None, None]
    }

    #===================================================================
    # GIT REQUEST HANDLING
    #===================================================================
    str_repo_name = safe_str(repo_name)
    repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
    log.debug('Repository path is %s' % repo_path)

    # CHECK LOCKING only if it's not ANONYMOUS USER
    if username != User.DEFAULT_USER:
        log.debug('Checking locking on repository')
        (make_lock,
         locked,
         locked_by) = self._check_locking_state(environ=environ,
                                                action=action,
                                                repo=repo_name,
                                                user_id=user.user_id)
        # store the make_lock for later evaluation in hooks
        extras.update({'make_lock': make_lock,
                       'locked_by': locked_by})

    fix_PATH()
    log.debug('HOOKS extras is %s' % extras)
    baseui = make_ui('db')
    self.__inject_extras(repo_path, baseui, extras)

    try:
        self._handle_githooks(repo_name, action, baseui, environ)
        log.info('%s action on Git repo "%s" by "%s" from %s' %
                 (action, str_repo_name, safe_str(username), ip_addr))
        app = self.__make_app(repo_name, repo_path, extras)
        return app(environ, start_response)
    except HTTPLockedRC as e:
        _code = CONFIG.get('lock_ret_code')
        log.debug('Repository LOCKED ret code %s!' % (_code))
        return e(environ, start_response)

def archivefile(self, repo_name, fname):
    fileformat = None
    revision = None
    ext = None
    subrepos = request.GET.get('subrepos') == 'true'

    for a_type, ext_data in settings.ARCHIVE_SPECS.items():
        archive_spec = fname.split(ext_data[1])
        if len(archive_spec) == 2 and archive_spec[1] == '':
            fileformat = a_type or ext_data[1]
            revision = archive_spec[0]
            ext = ext_data[1]

    try:
        dbrepo = RepoModel().get_by_repo_name(repo_name)
        if not dbrepo.enable_downloads:
            return _('Downloads disabled') # TODO: do something else?

        if c.db_repo_scm_instance.alias == 'hg':
            # patch and reset hooks section of UI config to not run any
            # hooks on fetching archives with subrepos
            for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
                c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)

        cs = c.db_repo_scm_instance.get_changeset(revision)
        content_type = settings.ARCHIVE_SPECS[fileformat][0]
    except ChangesetDoesNotExistError:
        return _('Unknown revision %s') % revision
    except EmptyRepositoryError:
        return _('Empty repository')
    except (ImproperArchiveTypeError, KeyError):
        return _('Unknown archive type')

    from kallithea import CONFIG
    rev_name = cs.raw_id[:12]
    archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
                                safe_str(rev_name), ext)

    archive_path = None
    cached_archive_path = None
    archive_cache_dir = CONFIG.get('archive_cache_dir')
    if archive_cache_dir and not subrepos: # TODO: subrepo caching?
        if not os.path.isdir(archive_cache_dir):
            os.makedirs(archive_cache_dir)
        cached_archive_path = os.path.join(archive_cache_dir, archive_name)
        if os.path.isfile(cached_archive_path):
            log.debug('Found cached archive in %s', cached_archive_path)
            archive_path = cached_archive_path
        else:
            log.debug('Archive %s is not yet cached', archive_name)

    if archive_path is None:
        # generate new archive
        fd, archive_path = tempfile.mkstemp()
        log.debug('Creating new temp archive in %s', archive_path)
        with os.fdopen(fd, 'wb') as stream:
            cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
            # stream (and thus fd) has been closed by cs.fill_archive
        if cached_archive_path is not None:
            # we generated the archive - move it to cache
            log.debug('Storing new archive in %s', cached_archive_path)
            shutil.move(archive_path, cached_archive_path)
            archive_path = cached_archive_path

    def get_chunked_archive(archive_path):
        stream = open(archive_path, 'rb')
        while True:
            data = stream.read(16 * 1024)
            if not data:
                break
            yield data
        stream.close()
        if archive_path != cached_archive_path:
            log.debug('Destroying temp archive %s', archive_path)
            os.remove(archive_path)

    action_logger(user=c.authuser,
                  action='user_downloaded_archive:%s' % (archive_name),
                  repo=repo_name, ipaddr=self.ip_addr, commit=True)
    response.content_disposition = str('attachment; filename=%s' % (archive_name))
    response.content_type = str(content_type)
    return get_chunked_archive(archive_path)