def commit_change(self, repo, repo_name, cs, user, author, message, content,
                  f_path):
    """
    Commits a change to a single file and returns the new tip changeset.

    :param repo: SCM instance
    :param repo_name: repository name, used for action log and invalidation
    :param cs: parent changeset the commit is based on
    :param user: user performing the action (recorded in the action log)
    :param author: commit author, coerced to unicode for the backend
    :param message: commit message, coerced to unicode for the backend
    :param content: new file content, coerced to str
    :param f_path: path of the file being changed
    :returns: the newly created tip changeset
    :raises Exception: if the repository backend alias is unsupported
    """
    if repo.alias == 'hg':
        from rhodecode.lib.vcs.backends.hg import \
            MercurialInMemoryChangeset as IMC
    elif repo.alias == 'git':
        from rhodecode.lib.vcs.backends.git import \
            GitInMemoryChangeset as IMC
    else:
        # previously an unsupported alias fell through and crashed later
        # with a NameError on IMC; fail with an explicit error instead
        raise Exception('Unsupported backend alias %s' % repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    m = IMC(repo)
    m.change(FileNode(path, content))
    tip = m.commit(message=message, author=author,
                   parents=[cs], branch=cs.branch)

    action = 'push_local:%s' % tip.raw_id
    action_logger(user, action, repo_name)

    self.mark_for_invalidation(repo_name)
    return tip
def __init__(self, server, base_dn, port=389, bind_dn='', bind_pass='',
             tls_kind='PLAIN', tls_reqcert='DEMAND', ldap_version=3,
             ldap_filter='(&(objectClass=user)(!(objectClass=computer)))',
             search_scope='SUBTREE', attr_login='******'):
    """
    Store LDAP connection settings and build the server URI.

    :param server: LDAP server host
    :param base_dn: base DN used for searches
    :param port: server port; defaults to 389 (plain LDAP)
    :param bind_dn: DN used for the read-only bind
    :param bind_pass: password for the read-only bind
    :param tls_kind: one of PLAIN / LDAPS / START_TLS
    :param tls_reqcert: certificate requirement, mapped to an
        ``ldap.OPT_X_TLS_*`` constant
    :param ldap_version: LDAP protocol version to use
    :param ldap_filter: search filter applied to user lookups
    :param search_scope: mapped to an ``ldap.SCOPE_*`` constant
    :param attr_login: attribute holding the login name
    """
    self.ldap_version = ldap_version
    ldap_server_type = 'ldap'

    self.TLS_KIND = tls_kind

    if self.TLS_KIND == 'LDAPS':
        # fix: the standard LDAPS port is 636, not 689
        port = port or 636
        ldap_server_type = ldap_server_type + 's'

    # fall back to DEMAND when the ldap module lacks the requested constant
    OPT_X_TLS_DEMAND = 2
    self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert,
                               OPT_X_TLS_DEMAND)
    self.LDAP_SERVER_ADDRESS = server
    self.LDAP_SERVER_PORT = port

    # USE FOR READ ONLY BIND TO LDAP SERVER
    self.LDAP_BIND_DN = safe_str(bind_dn)
    self.LDAP_BIND_PASS = safe_str(bind_pass)

    self.LDAP_SERVER = "%s://%s:%s" % (ldap_server_type,
                                       self.LDAP_SERVER_ADDRESS,
                                       self.LDAP_SERVER_PORT)

    self.BASE_DN = safe_str(base_dn)
    self.LDAP_FILTER = safe_str(ldap_filter)
    self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
    self.attr_login = attr_login
def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group, False otherwise.

    A path only counts as a group when it is a directory that is neither a
    repository itself nor located *inside* a bare git repository.

    :param repos_group_name: group name relative to ``base_path``
    :param base_path: base location of all groups/repositories
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        # parent is not a repository — candidate may still be a group
        pass

    # check if it's a valid path
    if os.path.isdir(full_path):
        return True

    return False
def commit_change(self, repo, repo_name, cs, user, author, message, content,
                  f_path):
    """
    Commits a change to a single existing file and returns the new tip.

    :param repo: SCM instance
    :param repo_name: repository name, used for invalidation and push log
    :param cs: parent changeset the commit is based on
    :param user: acting user, resolved via ``_get_user``
    :param author: commit author, coerced to unicode
    :param message: commit message, coerced to unicode
    :param content: new file content, coerced to str
    :param f_path: path of the file being changed
    :returns: the newly created tip changeset
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = IMC(repo)
    # preserve the file's original mode (e.g. executable bit)
    imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
    tip = imc.commit(message=message, author=author,
                     parents=[cs], branch=cs.branch)

    self.mark_for_invalidation(repo_name)
    self._handle_push(repo,
                      username=user.username,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
def commit_change(self, repo, repo_name, cs, user, author, message, content,
                  f_path):
    """
    Commits a change to a single existing file.

    :param repo: SCM instance
    :param repo_name: repository name (unused in the visible span beyond
        the signature — presumably used by callers/logging; TODO confirm)
    :param cs: parent changeset the commit is based on
    :param user: acting user, resolved via ``_get_user``
    :param author: commit author, coerced to unicode
    :param message: commit message, coerced to unicode
    :param content: new file content, coerced to str
    :param f_path: path of the file being changed
    :raises IMCCommitError: when the backend commit fails
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = IMC(repo)
    imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
    try:
        tip = imc.commit(message=message, author=author,
                         parents=[cs], branch=cs.branch)
    except Exception, e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
    # NOTE(review): no explicit return on success — callers receive None;
    # sibling variants return `tip`. Verify whether that was intended.
def get_file_history(self, path, limit=None):
    """
    Returns history of file as reversed list of ``Changeset`` objects for
    which file at given ``path`` has been modified.

    TODO: This function now uses os underlying 'git' and 'grep' commands
    which is generally not good. Should be replaced with algorithm
    iterating commits.

    :param path: file path inside the repository
    :param limit: optional maximum number of changesets to return
    """
    # presumably validates that `path` exists in this changeset
    # (return value unused — side effect / early error only)
    self._get_filectx(path)
    cs_id = safe_str(self.id)
    f_path = safe_str(path)

    # NOTE(review): f_path is interpolated into a shell command string with
    # only double-quote wrapping — confirm run_git_command escapes safely.
    if limit:
        cmd = 'log -n %s --pretty="format: %%H" -s -p %s -- "%s"' % (
            safe_int(limit, 0), cs_id, f_path
        )
    else:
        cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % (
            cs_id, f_path
        )
    so, se = self.repository.run_git_command(cmd)
    # extract the 40-hex commit ids from the raw git output
    ids = re.findall(r'[0-9a-fA-F]{40}', so)
    return [self.repository.get_changeset(id) for id in ids]
def differ(org_repo, org_ref, other_repo, other_ref, context=3,
           ignore_whitespace=False):
    """
    General differ between branches, bookmarks, revisions of two remote or
    local but related repositories.

    :param org_repo: origin repository (db object with a scm_instance)
    :param org_ref: origin ref as a (type, name) pair — only name is used
    :param other_repo: other repository (db object with a scm_instance)
    :param other_ref: other ref as a (type, name) pair — only name is used
    :param context: number of context lines in the diff
    :param ignore_whitespace: whether whitespace changes are ignored
    :returns: raw diff text, or '' when the repos differ (see FIXME below)
    """
    org_repo_scm = org_repo.scm_instance
    other_repo_scm = other_repo.scm_instance

    # unwrap the low-level vcs repository objects
    org_repo = org_repo_scm._repo
    other_repo = other_repo_scm._repo
    # refs arrive as (type, name) pairs; only the name is needed here
    org_ref = safe_str(org_ref[1])
    other_ref = safe_str(other_ref[1])

    if org_repo_scm == other_repo_scm:
        log.debug('running diff between %s@%s and %s@%s'
                  % (org_repo.path, org_ref, other_repo.path, other_ref))
        _diff = org_repo_scm.get_diff(rev1=org_ref, rev2=other_ref,
            ignore_whitespace=ignore_whitespace, context=context)
        return _diff

    return '' # FIXME: when is it ever relevant to return nothing?
def command(self):
    """
    Permanently delete repository directories previously soft-deleted by
    RhodeCode (names matching ``REMOVED_REPO_PAT``), optionally filtered
    to those older than the ``--older-than`` option.
    """
    # get SqlAlchemy session
    self._init_session()

    repos_location = RhodeCodeUi.get_repos_location()
    to_remove = []
    for dn, dirs, f in os.walk(safe_str(repos_location)):
        # take a snapshot, then prune `dirs` in place to control descent
        alldirs = list(dirs)
        del dirs[:]
        # stop descending once inside a repository. Precedence note:
        # ".hg" in alldirs OR ("objects" in alldirs AND
        # ("refs" in alldirs or "packed-refs" in f)) — the latter
        # combination identifies a bare git repository.
        if ".hg" in alldirs or \
           "objects" in alldirs and ("refs" in alldirs or "packed-refs" in f):
            continue
        for loc in alldirs:
            if REMOVED_REPO_PAT.match(loc):
                to_remove.append([os.path.join(dn, loc),
                                  self._extract_date(loc)])
            else:
                # keep walking into non-deleted directories (groups)
                dirs.append(loc)

    # filter older than (if present)!
    now = datetime.datetime.now()
    older_than = self.options.older_than
    if older_than:
        to_remove_filtered = []
        older_than_date = self._parse_older_than(older_than)
        for name, date_ in to_remove:
            repo_age = now - date_
            if repo_age > older_than_date:
                to_remove_filtered.append([name, date_])

        to_remove = to_remove_filtered
        print >> sys.stdout, "removing %s deleted repos older than %s (%s)" % (
            len(to_remove), older_than, older_than_date,
        )
    else:
        print >> sys.stdout, "removing all [%s] deleted repos" % len(to_remove)

    if self.options.dont_ask or not to_remove:
        # don't ask just remove !
        remove = True
    else:
        remove = ask_ok(
            "the following repositories will be deleted completely:\n%s\n"
            "are you sure you want to remove them [y/n]?" % ", \n".join(
                ["%s removed on %s" % (safe_str(x[0]), safe_str(x[1]))
                 for x in to_remove])
        )

    if remove:
        for path, date_ in to_remove:
            print >> sys.stdout, "removing repository %s" % path
            shutil.rmtree(path)
    else:
        print "nothing done exiting..."
        sys.exit(0)
def get_node(self, repo, path):
    """
    Fetch a FileNode from the index changeset for the given full path.

    Works on str values for hg/git compatability.

    :param repo: scm repo instance
    :param path: full path including root location
    :return: FileNode
    """
    root = safe_str(repo.path) + '/'
    # strip the repository root prefix, keeping only the in-repo path
    relative = safe_str(path).partition(root)[-1]
    changeset = self._get_index_changeset(repo)
    return changeset.get_node(relative)
def command(self):
    """
    Older cleanup variant: bootstrap the app config/DB engine, then
    permanently delete repository directories previously soft-deleted by
    RhodeCode (names matching ``REMOVED_REPO_PAT``), optionally filtered
    by the ``--older-than`` option.
    """
    logging.config.fileConfig(self.path_to_ini_file)
    from pylons import config

    #get to remove repos !!
    add_cache(config)
    engine = engine_from_config(config, 'sqlalchemy.db1.')
    init_model(engine)

    repos_location = RhodeCodeUi.get_repos_location()
    to_remove = []
    for dn, dirs, f in os.walk(safe_str(repos_location)):
        for loc in dirs:
            if REMOVED_REPO_PAT.match(loc):
                to_remove.append([loc, self._extract_date(loc)])

    #filter older than (if present)!
    now = datetime.datetime.now()
    older_than = self.options.older_than
    if older_than:
        to_remove_filtered = []
        older_than_date = self._parse_older_than(older_than)
        for name, date_ in to_remove:
            repo_age = now - date_
            if repo_age > older_than_date:
                to_remove_filtered.append([name, date_])

        to_remove = to_remove_filtered
        print >> sys.stdout, 'removing [%s] deleted repos older than %s[%s]' \
            % (len(to_remove), older_than, older_than_date)
    else:
        print >> sys.stdout, 'removing all [%s] deleted repos' \
            % len(to_remove)

    if self.options.dont_ask or not to_remove:
        # don't ask just remove !
        remove = True
    else:
        remove = ask_ok('are you sure to remove listed repos \n%s [y/n]?'
                        % ', \n'.join(['%s removed on %s'
                    % (safe_str(x[0]), safe_str(x[1])) for x in to_remove]))

    if remove:
        for name, date_ in to_remove:
            print >> sys.stdout, 'removing repository %s' % name
            shutil.rmtree(os.path.join(repos_location, name))
    else:
        print 'nothing done exiting...'
        sys.exit(0)
def is_valid_repo(repo_name, base_path):
    """
    Returns True if given path is a valid repository, False otherwise.

    :param repo_name: repository name relative to ``base_path``
    :param base_path: base location of repositories

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        # probe the path; get_scm raises VCSError for non-repositories
        get_scm(full_path)
        return True
    except VCSError:
        return False
def _get_cache_parameters(query):
    """For a query with cache_region and cache_namespace configured,
    return the corresponding Cache instance and cache key, based
    on this query's current criterion and parameter values.

    :param query: Query object carrying ``_cache_parameters``
    :returns: (cache, cache_key) tuple
    :raises ValueError: when the query has no caching parameters configured
    """
    if not hasattr(query, '_cache_parameters'):
        raise ValueError("This Query does not have caching "
                         "parameters configured.")

    region, namespace, cache_key = query._cache_parameters

    namespace = _namespace_from_query(namespace, query)

    if cache_key is None:
        # cache key - the value arguments from this query's parameters.
        args = [safe_str(x) for x in _params_from_query(query)]
        args.extend(filter(lambda k: k not in ['None', None, u'None'],
                           [str(query._limit), str(query._offset)]))
        cache_key = " ".join(args)
    # NOTE: a second `if cache_key is None: raise` guard used to follow
    # here; it was unreachable (the branch above always produces a string)
    # and has been removed.

    # get cache
    #cache = query.cache_manager.get_cache_region(namespace, region)
    cache = get_cache_region(namespace, region)
    # optional - hash the cache_key too for consistent length
    # import uuid
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))

    return cache, cache_key
def gravatar_url(email_address, size=30, ssl_enabled=True): from pylons import url # doh, we need to re-import url to mock it later from rhodecode import CONFIG _def = '*****@*****.**' # default gravatar use_gravatar = str2bool(CONFIG.get('use_gravatar')) alternative_gravatar_url = CONFIG.get('alternative_gravatar_url', '') email_address = email_address or _def if not use_gravatar or not email_address or email_address == _def: f = lambda a, l: min(l, key=lambda x: abs(x - a)) return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30])) if use_gravatar and alternative_gravatar_url: tmpl = alternative_gravatar_url parsed_url = urlparse.urlparse(url.current(qualified=True)) tmpl = tmpl.replace('{email}', email_address)\ .replace('{md5email}', hashlib.md5(email_address.lower()).hexdigest()) \ .replace('{netloc}', parsed_url.netloc)\ .replace('{scheme}', parsed_url.scheme)\ .replace('{size}', str(size)) return tmpl default = 'identicon' baseurl_nossl = "http://www.gravatar.com/avatar/" baseurl_ssl = "https://secure.gravatar.com/avatar/" baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl if isinstance(email_address, unicode): #hashlib crashes on unicode items email_address = safe_str(email_address) # construct the url gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?" gravatar_url += urllib.urlencode({'d': default, 's': str(size)}) return gravatar_url
def get_paths(self, repo):
    """
    Walk the repository from its root and return the set of absolute
    filesystem paths of every file it contains (based on the repository
    walk function of the index changeset).
    """
    found = set()
    try:
        changeset = self._get_index_changeset(repo)
        repo_root = safe_str(repo.path)
        for _topnode, _dirs, files in changeset.walk('/'):
            found.update(jn(repo_root, safe_str(f.path)) for f in files)
    except RepositoryError:
        # tolerate broken/empty repositories; just log and return what we have
        log.debug(traceback.format_exc())
    return found
def gravatar_url(email_address, size=30): from pylons import url ## doh, we need to re-import url to mock it later if(str2bool(config['app_conf'].get('use_gravatar')) and config['app_conf'].get('alternative_gravatar_url')): tmpl = config['app_conf'].get('alternative_gravatar_url', '') parsed_url = urlparse.urlparse(url.current(qualified=True)) tmpl = tmpl.replace('{email}', email_address)\ .replace('{md5email}', hashlib.md5(email_address.lower()).hexdigest()) \ .replace('{netloc}', parsed_url.netloc)\ .replace('{scheme}', parsed_url.scheme)\ .replace('{size}', str(size)) return tmpl if (not str2bool(config['app_conf'].get('use_gravatar')) or not email_address or email_address == '*****@*****.**'): f = lambda a, l: min(l, key=lambda x: abs(x - a)) return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30])) ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme') default = 'identicon' baseurl_nossl = "http://www.gravatar.com/avatar/" baseurl_ssl = "https://secure.gravatar.com/avatar/" baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl if isinstance(email_address, unicode): #hashlib crashes on unicode items email_address = safe_str(email_address) # construct the url gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?" gravatar_url += urllib.urlencode({'d': default, 's': str(size)}) return gravatar_url
def _index(self, revision, method):
    """
    Resolve ``revision`` (a single rev or a ``start...end`` range) into
    ``c.cs_ranges`` and populate the template context for changeset views.

    :param revision: revision string, optionally ``rev1...rev2``
    :param method: unused in the visible span — presumably consumed later
        in this controller; TODO confirm
    :raises HTTPNotFound: when the revision(s) cannot be resolved
    """
    c.anchor_url = anchor_url
    c.ignorews_url = _ignorews_url
    c.context_url = _context_url
    c.fulldiff = fulldiff = request.GET.get('fulldiff')
    #get ranges of revisions if preset
    rev_range = revision.split('...')[:2]
    enable_comments = True
    try:
        if len(rev_range) == 2:
            # range view: commenting is disabled for multi-changeset pages
            enable_comments = False
            rev_start = rev_range[0]
            rev_end = rev_range[1]
            rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start,
                                                         end=rev_end)
        else:
            rev_ranges = [c.rhodecode_repo.get_changeset(revision)]

        c.cs_ranges = list(rev_ranges)
        if not c.cs_ranges:
            raise RepositoryError('Changeset range returned empty result')

    except (RepositoryError, ChangesetDoesNotExistError, Exception), e:
        log.error(traceback.format_exc())
        h.flash(safe_str(e), category='error')
        raise HTTPNotFound()
def _report_keys(self):
    """Log the current LRU cache keys together with a fill counter."""
    usage = '%s/%s' % (len(self.keys()), self.size)
    # build a newline-separated listing so pformat/log output reads nicely
    listing = '\n'
    for idx, key in enumerate(self.keys()):
        listing += '%s - %s\n' % (idx + 1, safe_str(key))
    log.debug('current LRU keys (%s):%s' % (usage, listing))
def _get_scm_size(alias, root_path):
    """
    Walk ``root_path`` and compute separate on-disk sizes for the SCM
    control directory (e.g. ``.hg``) and the working files.

    :param alias: scm alias; normalized to a dot-prefixed form
    :param root_path: repository root directory to walk
    :returns: tuple of human readable sizes
        (scm_size, root_size, total_size)
    """
    if not alias.startswith('.'):
        alias += '.'

    size_scm, size_root = 0, 0
    for path, _dirs, files in os.walk(safe_str(root_path)):
        # files under a control-dir path count towards the scm size,
        # everything else towards the working-copy size
        # (idiom fix: `alias in path` instead of `path.find(alias) != -1`,
        # and the two duplicated inner loops are merged)
        is_scm_path = alias in path
        for f in files:
            try:
                size = os.path.getsize(os.path.join(path, f))
            except OSError:
                # file vanished or is unreadable — skip it, as before
                continue
            if is_scm_path:
                size_scm += size
            else:
                size_root += size

    size_scm_f = h.format_byte_size(size_scm)
    size_root_f = h.format_byte_size(size_root)
    size_total_f = h.format_byte_size(size_root + size_scm)

    return size_scm_f, size_root_f, size_total_f
def __create_repo(self, repo_name, alias, new_parent_id, clone_uri=False):
    """
    makes repository on filesystem. It's group aware means it'll create
    a repository within a group, and alter the paths accordingly of
    group location

    :param repo_name: name of the repository to create
    :param alias: scm backend alias (e.g. 'hg', 'git')
    :param new_parent_id: id of the parent RepoGroup, or falsy for top level
    :param clone_uri: optional source url to clone from
    :raises Exception: when the target path is already a repo or a group
    """
    from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group

    if new_parent_id:
        paths = RepoGroup.get(new_parent_id).full_path.split(RepoGroup.url_sep())
        new_parent_path = os.sep.join(paths)
    else:
        new_parent_path = ""

    # we need to make it str for mercurial
    repo_path = os.path.join(*map(lambda x: safe_str(x),
                            [self.repos_path, new_parent_path, repo_name]))

    # check if this path is not a repository
    if is_valid_repo(repo_path, self.repos_path):
        raise Exception("This path %s is a valid repository" % repo_path)

    # check if this path is a group
    if is_valid_repos_group(repo_path, self.repos_path):
        raise Exception("This path %s is a valid group" % repo_path)

    log.info("creating repo %s in %s @ %s" % (
        repo_name, safe_unicode(repo_path), clone_uri))
    backend = get_backend(alias)

    backend(repo_path, create=True, src_url=clone_uri)
def gravatar_url(email_address, size=30): from pylons import url # doh, we need to re-import url to mock it later _def = '*****@*****.**' use_gravatar = str2bool(config['app_conf'].get('use_gravatar')) email_address = email_address or _def if (not use_gravatar or not email_address or email_address == _def): f = lambda a, l: min(l, key=lambda x: abs(x - a)) return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30])) if use_gravatar and config['app_conf'].get('alternative_gravatar_url'): tmpl = config['app_conf'].get('alternative_gravatar_url', '') parsed_url = urlparse.urlparse(url.current(qualified=True)) tmpl = tmpl.replace('{email}', email_address)\ .replace('{md5email}', hashlib.md5(email_address.lower()).hexdigest()) \ .replace('{netloc}', parsed_url.netloc)\ .replace('{scheme}', parsed_url.scheme)\ .replace('{size}', str(size)) return tmpl ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme') default = 'identicon' baseurl_nossl = "http://www.gravatar.com/avatar/" baseurl_ssl = "https://secure.gravatar.com/avatar/" baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl if isinstance(email_address, unicode): #hashlib crashes on unicode items email_address = safe_str(email_address) # construct the url gravatar_url = baseurl + hashlib.md5( email_address.lower()).hexdigest() + "?" gravatar_url += urllib.urlencode({'d': default, 's': str(size)}) return gravatar_url
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Deprecated shim: the real implementation lives in
    :mod:`rhodecode.lib.utils2`; this wrapper only delegates to it.

    :param unicode_: unicode to encode
    :param to_encoding: optional target encoding
    :rtype: str
    :returns: str object
    """
    # NOTE: the original body kept a full legacy implementation (chardet
    # fallback etc.) *after* this return — all of it unreachable, including
    # a malformed `raise UnicodeEncodeError()` and a trailing bare
    # `return safe_str`. The dead code has been removed.
    from rhodecode.lib.utils2 import safe_str
    return safe_str(unicode_, to_encoding)
def __get_instance(self):
    """
    Instantiate the vcs backend object for this repository's path.

    :returns: backend repository instance, or None when the path is not a
        recognizable repository or (mercurial only) the repo is hidden
    """
    repo_full_path = self.repo_full_path
    try:
        alias = get_scm(repo_full_path)[0]
        log.debug("Creating instance of %s repository" % alias)
        backend = get_backend(alias)
    except VCSError:
        # db row exists but the on-disk repository is missing/broken
        log.error(traceback.format_exc())
        log.error(
            "Perhaps this repository is in db and not in "
            "filesystem run rescan repositories with "
            '"destroy old data " option from admin panel'
        )
        return

    if alias == "hg":
        # mercurial needs a str path and the shared baseui configuration
        repo = backend(safe_str(repo_full_path), create=False,
                       baseui=self._ui)
        # skip hidden web repository
        if repo._get_hidden():
            return
    else:
        repo = backend(repo_full_path, create=False)

    return repo
def index(self, repo_name, revision, f_path, annotate=False): # redirect to given revision from form if given post_revision = request.POST.get('at_rev', None) if post_revision: cs = self.__get_cs_or_redirect(post_revision, repo_name) c.changeset = self.__get_cs_or_redirect(revision, repo_name) c.branch = request.GET.get('branch', None) c.f_path = f_path c.annotate = annotate c.changeset = self.__get_cs_or_redirect(revision, repo_name) cur_rev = c.changeset.revision # prev link try: prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch) c.url_prev = url('files_home', repo_name=c.repo_name, revision=prev_rev.raw_id, f_path=f_path) if c.branch: c.url_prev += '?branch=%s' % c.branch except (ChangesetDoesNotExistError, VCSError): c.url_prev = '#' # next link try: next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch) c.url_next = url('files_home', repo_name=c.repo_name, revision=next_rev.raw_id, f_path=f_path) if c.branch: c.url_next += '?branch=%s' % c.branch except (ChangesetDoesNotExistError, VCSError): c.url_next = '#' # files or dirs try: c.file = c.changeset.get_node(f_path) if c.file.is_file(): c.load_full_history = False file_last_cs = c.file.last_changeset c.file_changeset = (c.changeset if c.changeset.revision < file_last_cs.revision else file_last_cs) #determine if we're on branch head _branches = c.rhodecode_repo.branches c.on_branch_head = revision in _branches.keys() + _branches.values() _hist = [] c.file_history = [] if c.load_full_history: c.file_history, _hist = self._get_node_history(c.changeset, f_path) c.authors = [] for a in set([x.author for x in _hist]): c.authors.append((h.email(a), h.person(a))) else: c.authors = c.file_history = [] except RepositoryError, e: h.flash(safe_str(e), category='error') raise HTTPNotFound()
def rawfile(self, repo_name, revision, f_path):
    """Serve the raw file content, forcing a download attachment."""
    cs = self.__get_cs_or_redirect(revision, repo_name)
    file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
    # suggested download filename is the last path segment
    filename = safe_str(f_path.split(Repository.url_sep())[-1])
    response.content_disposition = "attachment; filename=%s" % filename
    response.content_type = file_node.mimetype
    return file_node.content
def checkSessionFlash(self, response, msg):
    """Assert the response carries a session flash containing ``msg``."""
    self.assertTrue('flash' in response.session,
                    msg='Response session:%r have no flash'
                    % response.session)
    flash_text = response.session['flash'][0][1]
    if msg not in flash_text:
        msg = u'msg `%s` not found in session flash: got `%s` instead' % (
            msg, flash_text)
        self.fail(safe_str(msg))
def create_or_update_repo_hg_settings(self, data):
    """
    Persist per-repository mercurial ui settings (largefiles, phases)
    from the validated ``data`` dict.

    :param data: form data dict keyed by the names returned from
        ``_get_hg_settings``
    """
    largefiles, phases = self.HG_SETTINGS
    largefiles_key, phases_key = self._get_hg_settings(
        self.HG_SETTINGS, data)
    # largefiles is an on/off toggle: value stays empty, only `active` flips
    self._create_or_update_ui(
        self.repo_settings, *largefiles, value='',
        active=data[largefiles_key])
    # phases stores its configured value as a string
    self._create_or_update_ui(
        self.repo_settings, *phases, value=safe_str(data[phases_key]))
def assert_not_in_session_flash(response, msg, category=None):
    """Fail when ``msg`` appears in (or ``category`` mismatches) the first
    session flash message."""
    assert 'flash' in response.session, 'Response session has no flash key'
    message_category, message_text = response.session['flash'][0]
    if msg in message_text:
        failure = u'msg `%s` found in session flash: got `%s` instead' % (
            msg, message_text)
        pytest.fail(safe_str(failure))
    if category:
        assert category == message_category
def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group, False otherwise.

    A group is a directory under ``base_path`` that is not itself a
    repository.

    :param repos_group_name: group name relative to ``base_path``
    :param base_path: base location of all groups/repositories
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    # check if it's a valid path
    if os.path.isdir(full_path):
        return True

    return False
def _generate_repo_name_hashes(models, op, session):
    """Backfill ``repo_name_hash`` with the sha1 of each repository name."""
    for repository in models.Repository.get_all():
        digest = hashlib.sha1(safe_str(repository.repo_name)).hexdigest()
        statement = text(
            'UPDATE repositories SET repo_name_hash = :hash'
            ' WHERE repo_id = :id'
        ).bindparams(hash=digest, id=repository.repo_id)
        op.execute(statement)
    session().commit()
def get_ui(self, section=None, key=None):
    """
    Return ``UiSetting`` tuples for matching ui rows, optionally narrowed
    to a section and/or a key.
    """
    query = self.UiDbModel.query()
    query = self._filter_by_repo(RepoRhodeCodeUi, query)
    if section:
        query = query.filter(self.UiDbModel.ui_section == section)
    if key:
        query = query.filter(self.UiDbModel.ui_key == key)

    # TODO: mikhail: add caching
    return [
        UiSetting(
            section=safe_str(row.ui_section),
            key=safe_str(row.ui_key),
            value=safe_str(row.ui_value),
            active=row.ui_active,
        )
        for row in query.all()
    ]
def rawfile(self, repo_name, revision, f_path):
    """Return raw file content with headers forcing a download."""
    cs = self.__get_cs_or_redirect(revision, repo_name)
    file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
    # the last segment of the path becomes the suggested filename
    download_name = safe_str(f_path.split(Repository.url_sep())[-1])
    response.content_disposition = 'attachment; filename=%s' % download_name
    response.content_type = file_node.mimetype
    return file_node.content
def repo_size(extras):
    """Present size of repository after push."""
    repo = Repository.get_by_repo_name(extras.repository)
    # e.g. '.hg' / '.git' — the control-dir suffix for this repo type
    vcs_part = safe_str(u'.%s' % repo.repo_type)
    size_vcs, size_root, size_total = _get_scm_size(
        vcs_part, repo.repo_full_path)
    msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
           % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
    return HookResponse(0, msg)
def FID(raw_id, path):
    """
    Creates a uniqe ID for filenode based on it's hash of path and revision
    it's safe to use in urls

    :param raw_id: changeset raw id
    :param path: file path inside the repository
    """
    path_digest = md5(safe_str(path)).hexdigest()[:12]
    return 'C-%s-%s' % (short_id(raw_id), path_digest)
def _migrate_hash_to_bcrypt(self, username, password, new_hash): new_hash_cypher = _RhodeCodeCryptoBCrypt() # extra checks, so make sure new hash is correct. password_encoded = safe_str(password) if new_hash and new_hash_cypher.hash_check(password_encoded, new_hash): cur_user = User.get_by_username(username) cur_user.password = new_hash Session().add(cur_user) Session().flush() log.info('Migrated user %s hash to bcrypt', cur_user)
def __init__(self, key, hmac=False, strict_verification=True):
    """
    Initialize the cipher helper.

    :param key: secret material; the working key is its SHA256 digest
    :param hmac: when True, enable HMAC authentication of payloads
    :param strict_verification: presumably raise on failed verification
        rather than returning a soft failure — TODO confirm in verify path
    :raises ValueError: when ``key`` is empty
    """
    if not key:
        raise ValueError('passed key variable is empty')
    self.strict_verification = strict_verification
    self.block_size = 32
    self.hmac_size = 32
    self.hmac = hmac
    # derive fixed-length keys: key = SHA256(key),
    # hmac_key = SHA256(key digest) — keeps the two keys distinct
    self.key = SHA256.new(safe_str(key)).digest()
    self.hmac_key = SHA256.new(self.key).digest()
def __create_repo(self, repo_name, alias, parent, clone_uri=False,
                  repo_store_location=None):
    """
    makes repository on filesystem. It's group aware means it'll create
    a repository within a group, and alter the paths accordingly of
    group location

    :param repo_name: name of the repository to create
    :param alias: scm backend alias ('hg' or 'git')
    :param parent: parent RepoGroup object, or falsy for top level
    :param clone_uri: optional source url to clone from
    :param repo_store_location: optional explicit store path override
    :raises Exception: when the target path is already a repo/group, or
        the alias is not supported
    """
    from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group
    from rhodecode.model.scm import ScmModel

    if parent:
        new_parent_path = os.sep.join(parent.full_path_splitted)
    else:
        new_parent_path = ''
    if repo_store_location:
        _paths = [repo_store_location]
    else:
        _paths = [self.repos_path, new_parent_path, repo_name]
    # we need to make it str for mercurial
    repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

    # check if this path is not a repository
    if is_valid_repo(repo_path, self.repos_path):
        raise Exception('This path %s is a valid repository' % repo_path)

    # check if this path is a group
    if is_valid_repos_group(repo_path, self.repos_path):
        raise Exception('This path %s is a valid group' % repo_path)

    log.info('creating repo %s in %s @ %s' % (
        repo_name, safe_unicode(repo_path),
        obfuscate_url_pw(clone_uri)))
    backend = get_backend(alias)
    if alias == 'hg':
        repo = backend(repo_path, create=True, src_url=clone_uri)
    elif alias == 'git':
        # git repositories are created bare on the server side
        repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
        # add rhodecode hook into this repo
        ScmModel().install_git_hook(repo=repo)
    else:
        raise Exception('Undefined alias %s' % alias)
    return repo
def auth(self, userobj, username, password, settings, **kwargs):
    """
    Authenticate ``username``/``password`` against the local database user.

    :param userobj: user object looked up for this plugin, or None
    :param username: login name supplied by the client
    :param password: plaintext password supplied by the client
    :param settings: plugin settings (unused in the visible span)
    :returns: dict of user attributes on success, otherwise None
    """
    if not userobj:
        log.debug('userobj was:%s skipping' % (userobj, ))
        return None

    if userobj.extern_type != self.name:
        # user belongs to a different auth plugin — not ours to handle
        log.warning(
            "userobj:%s extern_type mismatch got:`%s` expected:`%s`"
            % (userobj, userobj.extern_type, self.name))
        return None

    user_attrs = {
        "username": userobj.username,
        "firstname": userobj.firstname,
        "lastname": userobj.lastname,
        "groups": [],
        "email": userobj.email,
        "admin": userobj.admin,
        "active": userobj.active,
        "active_from_extern": userobj.active,
        "extern_name": userobj.user_id,
        "extern_type": userobj.extern_type,
    }

    log.debug("User attributes:%s" % (user_attrs, ))
    if userobj.active:
        from rhodecode.lib import auth
        crypto_backend = auth.crypto_backend()
        password_encoded = safe_str(password)
        password_match, new_hash = crypto_backend.hash_check_with_upgrade(
            password_encoded, userobj.password)

        if password_match and new_hash:
            log.debug(
                'user %s properly authenticated, but '
                'requires hash change to bcrypt', userobj)
            # if password match, and we use OLD deprecated hash,
            # we should migrate this user hash password to the new hash
            # we store the new returned by hash_check_with_upgrade function
            user_attrs['_hash_migrate'] = new_hash

        if userobj.username == User.DEFAULT_USER and userobj.active:
            # anonymous access needs no password check
            log.info('user %s authenticated correctly as anonymous user',
                     userobj)
            return user_attrs

        elif userobj.username == username and password_match:
            log.info('user %s authenticated correctly', userobj)
            return user_attrs
        log.info(
            "user %s had a bad password when "
            "authenticating on this plugin", userobj)
        return None
    else:
        log.warning('user %s tried auth but is disabled', userobj)
        return None
def create_node(self, repo, repo_name, cs, user, author, message, content,
                f_path):
    """
    Create a new file node in the repository via an in-memory changeset.

    :param repo: SCM instance
    :param repo_name: repository name, used for action log / invalidation
    :param cs: parent changeset, or an EmptyChangeset for empty repos
    :param user: user performing the action (recorded in the action log)
    :param author: commit author, coerced to unicode
    :param message: commit message, coerced to unicode
    :param content: new file content — a string or a readable file-like
    :param f_path: path of the file to create
    :returns: the newly created tip changeset
    :raises Exception: on unsupported backend alias or content type
    """
    if repo.alias == 'hg':
        from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
    elif repo.alias == 'git':
        from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
    else:
        # previously an unsupported alias fell through and crashed later
        # with a NameError on IMC; fail with an explicit error instead
        raise Exception('Unsupported backend alias %s' % repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    if isinstance(content, (basestring,)):
        content = safe_str(content)
    elif isinstance(content, (file, cStringIO.OutputType,)):
        content = content.read()
    else:
        raise Exception('Content is of unrecognized type %s' % (
            type(content)))

    message = safe_unicode(message)
    author = safe_unicode(author)
    path = safe_str(f_path)
    m = IMC(repo)

    if isinstance(cs, EmptyChangeset):
        # EmptyChangeset means we we're editing empty repository
        parents = None
    else:
        parents = [cs]

    m.add(FileNode(path, content=content))
    tip = m.commit(message=message,
                   author=author,
                   parents=parents, branch=cs.branch)
    new_cs = tip.short_id
    action = 'push_local:%s' % new_cs
    action_logger(user, action, repo_name)

    self.mark_for_invalidation(repo_name)
    # return the new tip, for parity with the newer create_node variant
    return tip
def create_node(self, repo, repo_name, cs, user, author, message, content,
                f_path):
    """
    Create a new file node in the repository via an in-memory changeset.

    :param repo: SCM instance
    :param repo_name: repository name, used for invalidation and push log
    :param cs: parent changeset, or an EmptyChangeset for empty repos
    :param user: acting user, resolved via ``_get_user``
    :param author: commit author, coerced to unicode
    :param message: commit message, coerced to unicode
    :param content: new file content — a string or a readable file-like
    :param f_path: path of the file to create
    :returns: the newly created tip changeset
    :raises Exception: when content is of an unrecognized type
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    if isinstance(content, (basestring,)):
        content = safe_str(content)
    elif isinstance(content, (file, cStringIO.OutputType,)):
        content = content.read()
    else:
        raise Exception('Content is of unrecognized type %s' % (
            type(content)))

    message = safe_unicode(message)
    author = safe_unicode(author)
    path = safe_str(f_path)
    m = IMC(repo)

    if isinstance(cs, EmptyChangeset):
        # EmptyChangeset means we we're editing empty repository
        parents = None
    else:
        parents = [cs]

    m.add(FileNode(path, content=content))
    tip = m.commit(message=message,
                   author=author,
                   parents=parents, branch=cs.branch)

    self.mark_for_invalidation(repo_name)
    self._handle_push(repo,
                      username=user.username,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
def __get_repo(self, instance):
    """
    Coerce ``instance`` into a Repository: accepts a Repository object,
    a numeric id (int or digit-string), or a repository name.

    Falsy non-matching inputs fall through and yield None.
    """
    cls = Repository
    if isinstance(instance, cls):
        return instance
    if isinstance(instance, int) or safe_str(instance).isdigit():
        return cls.get(instance)
    if isinstance(instance, basestring):
        return cls.get_by_repo_name(instance)
    if instance:
        raise Exception('given object must be int, basestr or Instance'
                        ' of %s got %s' % (type(cls), type(instance)))
def create_or_update_global_hg_settings(self, data):
    """
    Persist global mercurial ui settings (largefiles, phases, subversion
    compatibility) from the validated ``data`` dict.

    :param data: form data dict keyed by the names returned from
        ``_get_hg_settings``
    """
    largefiles, phases, subversion = self.GLOBAL_HG_SETTINGS
    largefiles_key, phases_key, subversion_key = self._get_hg_settings(
        self.GLOBAL_HG_SETTINGS, data)
    # largefiles is an on/off toggle: value stays empty, only `active` flips
    self._create_or_update_ui(
        self.global_settings, *largefiles, value='',
        active=data[largefiles_key])
    # phases stores its configured value as a string
    self._create_or_update_ui(
        self.global_settings, *phases, value=safe_str(data[phases_key]))
    # subversion is also an on/off toggle
    self._create_or_update_ui(
        self.global_settings, *subversion, active=data[subversion_key])
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param read_from: read settings from 'file' or 'db'
    :param path: path to mercurial config file (file mode only)
    :param checkpaths: check the path (unused in the visible span —
        presumably historical; TODO confirm)
    :param clear_session: remove the db session after reading (db mode)
    :returns: configured mercurial baseui, or False when the hgrc file
        is missing in file mode
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s'
                          % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s',
                          ui_.ui_section, ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section),
                                 safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section),
                                 safe_str(ui_.ui_key), False)
        if clear_session:
            meta.Session.remove()
    return baseui
def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same
    as expected from scm parameter

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        detected = get_scm(full_path)
    except VCSError:
        return False
    # get_scm returns a tuple whose first element is the scm alias;
    # only compare it when an expected alias was supplied
    return detected[0] == scm if scm else True
def test_setting_is_returned_when_name_is_found(self, settings_util, name,
                                                value, type_, expected_value):
    """A stored rhodecode setting is retrievable by name with the
    expected type and value."""
    settings_util.create_rhodecode_setting(name, value, type_)

    setting = SettingsModel().get_setting_by_name(name)
    assert setting.app_settings_type == type_

    actual_value = setting.app_settings_value
    if type_ == 'unicode':
        # normalize unicode values to str before comparing
        actual_value = safe_str(actual_value)
    assert actual_value == expected_value
def __call__(self, request):
    """
    Time the wrapped handler and log the request duration.

    The log line is emitted even when the handler raises, thanks to
    the ``finally`` block.

    :param request: incoming request object passed to ``self.handler``
    :return: the handler's response
    """
    start = time.time()
    try:
        response = self.handler(request)
    finally:
        end = time.time()
        # lazy %-args: the message is only formatted when INFO logging
        # is actually enabled (the original formatted eagerly with %)
        log.info('IP: %s Request to %s time: %.3fs',
                 get_ip_addr(request.environ),
                 safe_str(get_access_path(request.environ)),
                 end - start)
    return response
def _validate_archive_prefix(self, prefix): if prefix is None: prefix = self._ARCHIVE_PREFIX_TEMPLATE.format( repo_name=safe_str(self.repository.name), short_id=self.short_id) elif not isinstance(prefix, str): raise ValueError("prefix not a bytes object: %s" % repr(prefix)) elif prefix.startswith('/'): raise VCSError("Prefix cannot start with leading slash") elif prefix.strip() == '': raise VCSError("Prefix cannot be empty") return prefix
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return
    configuration tuples.
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)
    ui_settings = settings_model.get_ui_settings()

    config = []
    for setting in ui_settings:
        if not setting.active:
            continue
        log.debug('settings ui from db: [%s] %s=%s',
                  setting.section, setting.key, setting.value)
        config.append((safe_str(setting.section),
                       safe_str(setting.key),
                       safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force set push_ssl requirement to False, rhodecode
            # handles that
            config.append((safe_str(setting.section),
                           safe_str(setting.key), False))

    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))

    return [entry for entry in config if entry[:2] not in skip_entries]
def __init__(self, server, base_dn, port=389, bind_dn='', bind_pass='',
             tls_kind='PLAIN', tls_reqcert='DEMAND', ldap_version=3,
             search_scope='SUBTREE', attr_login='******',
             ldap_filter='(&(objectClass=user)(!(objectClass=computer)))'):
    """
    Configure an LDAP connection helper.

    :param server: comma separated list of LDAP server addresses
    :param base_dn: base DN used for searches
    :param port: server port (389 by default; 636 when LDAPS is
        requested and no explicit port is given)
    :param bind_dn: DN used for the read-only bind
    :param bind_pass: password for the read-only bind
    :param tls_kind: 'PLAIN', 'LDAPS', ... transport security mode
    :param tls_reqcert: certificate requirement, mapped onto an
        ``ldap.OPT_X_TLS_*`` constant (DEMAND when unknown)
    :param ldap_version: LDAP protocol version
    :param search_scope: mapped onto an ``ldap.SCOPE_*`` constant
    :param attr_login: attribute used as the login name
    :param ldap_filter: search filter applied to user lookups
    :raises LdapImportError: when the ldap library is not available
    """
    if ldap == Missing:
        raise LdapImportError("Missing or incompatible ldap library")

    self.ldap_version = ldap_version
    self.ldap_server_type = 'ldap'
    self.TLS_KIND = tls_kind

    if self.TLS_KIND == 'LDAPS':
        # well-known LDAPS port is 636; the previous fallback of 689
        # was a typo (689 is not an LDAP-related port). Only applies
        # when the caller passed a falsy port explicitly.
        port = port or 636
        self.ldap_server_type += 's'

    OPT_X_TLS_DEMAND = 2
    self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert,
                               OPT_X_TLS_DEMAND)
    # split server into list
    self.SERVER_ADDRESSES = server.split(',')
    self.LDAP_SERVER_PORT = port

    # USE FOR READ ONLY BIND TO LDAP SERVER
    self.attr_login = attr_login

    self.LDAP_BIND_DN = safe_str(bind_dn)
    self.LDAP_BIND_PASS = safe_str(bind_pass)
    self.LDAP_SERVER = self._build_servers()
    self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
    self.BASE_DN = safe_str(base_dn)
    self.LDAP_FILTER = safe_str(ldap_filter)
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file (used with 'file')
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    :param clear_session: remove the SQLAlchemy session after a db read
    :return: configured ``ui.ui`` instance, or False when the hgrc
        file at *path* does not exist
    """
    baseui = ui.ui()

    # clean the baseui object so no ambient hgrc state leaks in
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            # lazy %-args: message rendered only when DEBUG is enabled
            log.debug('hgrc file is not present at %s, skipping...', path)
            return False
        log.debug('reading hgrc from %s', path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file[%s]%s:%s', section, k, v)
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s',
                          ui_.ui_section, ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section),
                                 safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section),
                                 safe_str(ui_.ui_key), False)
        if clear_session:
            meta.Session.remove()
    return baseui
def commit_change(self, repo, repo_name, commit, user, author, message,
                  content, f_path):
    """
    Commits changes

    :param repo: SCM instance
    :param repo_name: repository name, used for cache invalidation
        and the post-push hook
    :param commit: parent commit the change is based on
    :param user: user performing the commit (resolved via _get_user)
    :param author: commit author string
    :param message: commit message
    :param content: new content of the file
    :param f_path: path of the file being changed
    :raises IMCCommitError: when the in-memory commit fails
    """
    user = self._get_user(user)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = repo.in_memory_commit
    # preserve the file's original mode on the changed node
    imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
    try:
        # TODO: handle pre-push action !
        tip = imc.commit(
            message=message, author=author,
            parents=[commit], branch=commit.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo_name)

    # We trigger the post-push action
    hooks_utils.trigger_post_push_hook(
        username=user.username, action='push_local', repo_name=repo_name,
        repo_alias=repo.alias, commit_ids=[tip.raw_id])
    return tip
def raw(self, repo_name, revision, f_path):
    """
    Action for show as raw, some mimetypes are "rendered",
    those include images, icons.

    :param repo_name: repository name
    :param revision: commit identifier to resolve the file at
    :param f_path: path of the file inside the repository
    :return: the raw file content, with content-type and
        content-disposition headers set on the response
    """
    commit = self.__get_commit_or_redirect(revision, repo_name)
    file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)

    raw_mimetype_mapping = {
        # map original mimetype to a mimetype used for "show as raw"
        # you can also provide a content-disposition to override the
        # default "attachment" disposition.
        # orig_type: (new_type, new_dispo)

        # show images inline:
        # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
        # for example render an SVG with javascript inside or even render
        # HTML.
        'image/x-icon': ('image/x-icon', 'inline'),
        'image/png': ('image/png', 'inline'),
        'image/gif': ('image/gif', 'inline'),
        'image/jpeg': ('image/jpeg', 'inline'),
    }

    mimetype = file_node.mimetype
    try:
        mimetype, dispo = raw_mimetype_mapping[mimetype]
    except KeyError:
        # we don't know anything special about this, handle it safely
        if file_node.is_binary:
            # do same as download raw for binary files
            mimetype, dispo = 'application/octet-stream', 'attachment'
        else:
            # do not just use the original mimetype, but force text/plain,
            # otherwise it would serve text/html and that might be unsafe.
            # Note: underlying vcs library fakes text/plain mimetype if the
            # mimetype can not be determined and it thinks it is not
            # binary.This might lead to erroneous text display in some
            # cases, but helps in other cases, like with text files
            # without extension.
            mimetype, dispo = 'text/plain', 'inline'

    if dispo == 'attachment':
        # download disposition: expose only the file's basename
        dispo = 'attachment; filename=%s' % safe_str(
            f_path.split(os.sep)[-1])

    response.content_disposition = dispo
    response.content_type = mimetype
    charset = self._get_default_encoding()
    if charset:
        response.charset = charset
    return file_node.content
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using interval VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel

    log = get_logger(create_repo_fork)
    DBS = get_session()
    base_path = Repository.base_path()

    # create the db record first; the on-disk clone follows below
    fork_repo = RepoModel(DBS).create(form_data, cur_user,
                                      just_db=True, fork=True)

    alias = form_data['repo_type']
    org_repo_name = form_data['org_path']
    fork_name = form_data['repo_name_full']
    update_after_clone = form_data['update_after_clone']

    src_path = os.path.join(base_path, org_repo_name)
    fork_path = os.path.join(base_path, fork_name)
    log.info('creating fork of %s as %s', src_path, fork_path)

    # materialize the fork on disk by cloning the source repository
    backend = get_backend(alias)
    backend(safe_str(fork_path), create=True,
            src_url=safe_str(src_path),
            update_after_clone=update_after_clone)

    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  org_repo_name, '', DBS)
    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()
def __make_app(self, repo_name, repo_path, extras):
    """
    Make an wsgi application using dulserver

    :param repo_name: name of the repository
    :param repo_path: full path to the repository
    """
    from rhodecode.lib.middleware.pygrack import make_wsgi_app

    app = make_wsgi_app(
        repo_root=safe_str(self.basepath),
        repo_name=repo_name,
        extras=extras,
    )
    # wrap with gzip decoding and request-size limiting middleware
    return GunzipFilter(LimitedInputFilter(app))