Example #1
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects.

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using '/',
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
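
A minimal usage sketch of the scan above (hedged: the store path '/srv/repos' is hypothetical; ScmModel is assumed to be the class defining repo_scan, as it is used in load_environment further below):

# Sketch: scan a hypothetical repository store and list what was found.
from rhodecode.model.scm import ScmModel

repos = ScmModel().repo_scan('/srv/repos')
for name, vcs_repo in repos.items():
    print('%s -> %s' % (name, vcs_repo))
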
def test_make_db_config_hg_hooks(pylonsapp, request, disabled_hooks,
                                 expected_hooks):
    disable_hooks(request, disabled_hooks)

    config = utils.make_db_config()
    hooks = extract_hooks(config)

    assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
 def test_data_from_config_data_from_db_returned(self):
     test_data = [
         ('section1', 'option1', 'value1'),
         ('section2', 'option2', 'value2'),
         ('section3', 'option3', 'value3'),
     ]
     with mock.patch.object(utils, 'config_data_from_db') as config_mock:
         config_mock.return_value = test_data
         kwargs = {'clear_session': False, 'repo': 'test_repo'}
         result = utils.make_db_config(**kwargs)
     config_mock.assert_called_once_with(**kwargs)
     for section, option, expected_value in test_data:
         value = result.get(section, option)
         assert value == expected_value
 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
         self, tmpdir, backend, use_global_config, repo_name_passed):
     repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
     config = make_db_config()
     model = RepoModel()
     with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
         config_mock.return_value = config
         model._create_filesystem_repo(repo_name,
                                       backend.alias,
                                       repo_group='',
                                       clone_uri=None,
                                       use_global_config=use_global_config)
     expected_repo_name = repo_name if repo_name_passed else None
     expected_call = mock.call(clear_session=False, repo=expected_repo_name)
     assert expected_call in config_mock.call_args_list
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem; the removal is actually made by adding
        a rm__ prefix to the dir and renaming the internal .hg/.git dirs, so
        this repository is no longer valid for rhodecode. It can be undeleted
        later on by reverting the renames on this repository.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internals so that it doesn't get detected as a repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
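
A small worked example of the soft-delete directory name built above (a hedged sketch: the repository name 'my-repo', the store path '/srv/repos' and the fixed timestamp are hypothetical; the real code uses datetime.now()):

# Sketch: the repository directory is only renamed, never deleted, so the
# operation can be reverted by removing the rm__ prefix again.
import os
from datetime import datetime

_now = datetime(2016, 1, 15, 12, 30, 45, 123)
_ms = str(_now.microsecond).rjust(6, '0')
_d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'my-repo')
print(os.path.join('/srv/repos', _d))
# prints: /srv/repos/rm__20160115_123045_000123__my-repo
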
    def url_handler(repo_type, url):
        config = make_db_config(clear_session=False)
        if repo_type == 'hg':
            allowed_prefixes = ('http', 'svn+http', 'git+http')

            if 'http' in url[:4]:
                # initially check if it's at least a proper URL
                # or whether it passes basic auth
                MercurialRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->hg import
                SubversionRepository.check_url(url, config)
            elif 'git+http' in url[:8]:  # git->hg import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed url must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc

        elif repo_type == 'git':
            allowed_prefixes = ('http', 'svn+http', 'hg+http')
            if 'http' in url[:4]:
                # initially check if it's at least a proper URL
                # or whether it passes basic auth
                GitRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->git import
                raise NotImplementedError()
            elif 'hg+http' in url[:8]:  # hg->git import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed url must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc
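
A minimal sketch of using the handler above to validate a clone URI (hedged: url_handler is a local helper and is assumed to be in scope here; the example URL is hypothetical, and the import path of InvalidCloneUrl is an assumption):

# Sketch: an ssh:// URL matches none of the allowed prefixes for 'hg', so
# url_handler raises InvalidCloneUrl with the allowed prefixes attached.
from rhodecode.lib.exceptions import InvalidCloneUrl  # assumed import path

try:
    url_handler('hg', 'ssh://example.com/repo')
except InvalidCloneUrl as exc:
    print('rejected: %s (allowed: %s)' % (exc, ', '.join(exc.allowed_prefixes)))
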
    def _create_filesystem_repo(self,
                                repo_name,
                                repo_type,
                                repo_group,
                                clone_uri=None,
                                repo_store_location=None,
                                use_global_config=False):
        """
        makes a repository on the filesystem. It is group aware, meaning it
        will create a repository within a group and alter the paths
        accordingly to the group location

        :param repo_name:
        :param repo_type:
        :param repo_group:
        :param clone_uri:
        :param repo_store_location:
        :param use_global_config:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError('repo_name must not contain groups got `%s`' %
                             repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(safe_str, _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`', repo_name,
                 safe_unicode(repo_path), obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset the hooks section of the UI config so that no
        # hooks run while creating the remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(repo_path,
                           config=config,
                           create=True,
                           src_url=clone_uri,
                           bare=True)
        else:
            repo = backend(repo_path,
                           config=config,
                           create=True,
                           src_url=clone_uri)

        ScmModel().install_hooks(repo, repo_type=repo_type)

        log.debug('Created repo %s with %s backend', safe_unicode(repo_name),
                  safe_unicode(repo_type))
        return repo
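
The per-repo config preparation done in the method above can be summarized in a short sketch (hedged: 'docs/manual' stands for a hypothetical group-qualified repository name):

# Sketch: a db-backed config scoped to the repository, with largefiles
# disabled and the hooks section cleared so nothing runs while the
# remote repo is being created.
config = make_db_config(clear_session=False, repo='docs/manual')
config.set('extensions', 'largefiles', '')
config.clear_section('hooks')
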
Example #8
 def _config(self):
     """
     Returns a db-based config object.
     """
     from rhodecode.lib.utils import make_db_config
     return make_db_config(clear_session=False)
    def _create_config(self, extras, repo_name):
        config = utils.make_db_config(repo=repo_name)
        config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras))

        return config.serialize()
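
A minimal sketch of calling the helper above (hedged: hooks_handler stands for a hypothetical instance of the class defining _create_config, and the extras dict is made up):

# Sketch: build a serialized per-repo config that carries the request
# extras under ('rhodecode', 'RC_SCM_DATA').
extras = {'username': 'admin', 'action': 'push'}
serialized_config = hooks_handler._create_config(extras, repo_name='my-repo')
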
def load_environment(global_conf,
                     app_conf,
                     initial=False,
                     test_env=None,
                     test_index=None):
    """
    Configure the Pylons environment via the ``pylons.config``
    object
    """
    config = PylonsConfig()

    # Pylons paths
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    paths = {
        'root': root,
        'controllers': os.path.join(root, 'controllers'),
        'static_files': os.path.join(root, 'public'),
        'templates': [os.path.join(root, 'templates')],
    }

    # Initialize config with the basic options
    config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)

    # store some globals into rhodecode
    rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
    rhodecode.CELERY_EAGER = str2bool(
        config['app_conf'].get('celery.always.eager'))

    config['routes.map'] = make_map(config)

    if asbool(config.get('generate_js_files', 'false')):
        jsroutes = config['routes.map'].jsroutes()
        jsroutes_file_content = generate_jsroutes_content(jsroutes)
        jsroutes_file_path = os.path.join(paths['static_files'], 'js',
                                          'rhodecode', 'routes.js')

        with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
            f.write(jsroutes_file_content)

    config['pylons.app_globals'] = app_globals.Globals(config)
    config['pylons.h'] = helpers
    rhodecode.CONFIG = config

    load_rcextensions(root_path=config['here'])

    # Setup cache object as early as possible
    import pylons
    pylons.cache._push_object(config['pylons.app_globals'].cache)

    # Create the Mako TemplateLookup, with the default auto-escaping
    config['pylons.app_globals'].mako_lookup = TemplateLookup(
        directories=paths['templates'],
        error_handler=handle_mako_error,
        module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
        input_encoding='utf-8',
        default_filters=['escape'],
        imports=['from webhelpers.html import escape'])

    # make the template context `c` strict: accessing a non-existing
    # attribute raises an error instead of returning an empty value
    config['pylons.strict_tmpl_context'] = True

    # Limit backends to "vcs.backends" from configuration
    backends = config['vcs.backends'] = aslist(
        config.get('vcs.backends', 'hg,git'), sep=',')
    for alias in rhodecode.BACKENDS.keys():
        if alias not in backends:
            del rhodecode.BACKENDS[alias]
    log.info("Enabled backends: %s", backends)

    # initialize vcs client and optionally run the server if enabled
    vcs_server_uri = config.get('vcs.server', '')
    vcs_server_enabled = str2bool(config.get('vcs.server.enable', 'true'))
    start_server = (str2bool(config.get('vcs.start_server', 'false')) and
                    not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
    if vcs_server_enabled and start_server:
        log.info("Starting vcsserver")
        start_vcs_server(server_and_port=vcs_server_uri,
                         protocol=utils.get_vcs_server_protocol(config),
                         log_level=config['vcs.server.log_level'])

    set_available_permissions(config)
    db_cfg = make_db_config(clear_session=True)

    repos_path = list(db_cfg.items('paths'))[0][1]
    config['base_path'] = repos_path

    config['vcs.hooks.direct_calls'] = _use_direct_hook_calls(config)
    config['vcs.hooks.protocol'] = _get_vcs_hooks_protocol(config)

    # store db config also in main global CONFIG
    set_rhodecode_config(config)

    # configure instance id
    utils.set_instance_id(config)

    # CONFIGURATION OPTIONS HERE (note: all config options will override
    # any Pylons config options)

    # store config reference into our module to skip import magic of pylons
    rhodecode.CONFIG.update(config)

    utils.configure_pyro4(config)
    utils.configure_vcs(config)
    if vcs_server_enabled:
        connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(config))

    import_on_startup = str2bool(config.get('startup.import_repos', False))
    if vcs_server_enabled and import_on_startup:
        repo2db_mapper(ScmModel().repo_scan(repos_path), remove_obsolete=False)
    return config