Example #1
def _describe_annex():
    # imports needed by the body below (module-level in the original source)
    import os

    from datalad.cmd import (
        GitWitlessRunner,
        StdOutErrCapture,
    )
    from datalad.dochelpers import exc_str
    from datalad.support.exceptions import CommandError

    runner = GitWitlessRunner()
    try:
        out = runner.run(['git', 'annex', 'version'],
                         protocol=StdOutErrCapture)
    except CommandError as e:
        return dict(
            version='not available',
            message=exc_str(e),
        )
    info = {}
    for line in out['stdout'].split(os.linesep):
        key = line.split(':')[0]
        if not key:
            continue
        value = line[len(key) + 2:].strip()
        key = key.replace('git-annex ', '')
        if key.endswith('s'):
            value = value.split()
        info[key] = value
    return info
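
A hedged usage sketch for the helper above (assumes git-annex is installed; the printed keys are example fields of `git annex version` output):

info = _describe_annex()
print(info.get('version'))             # e.g. '8.20210223'
print(info.get('key/value backends'))  # keys ending in 's' are split into lists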
Example #2
def _parse_git_diff(dspath, diff_thingie=None, paths=None,
                    ignore_submodules='none', staged=False):
    # use '--work-tree=.' to get direct mode repos to cooperate
    cmd = ['git', '--work-tree=.', 'diff', '--raw',
           # file names NULL terminated
           '-z',
           # how to treat submodules (see git diff docs)
           '--ignore-submodules={}'.format(ignore_submodules),
           # never abbreviate sha sums
           '--abbrev=40']
    if staged:
        cmd.append('--staged')
    if diff_thingie:
        cmd.append(diff_thingie)
    if paths:
        cmd.append('--')
        cmd.extend(ap['path'] for ap in paths if ap.get('raw_input', False))

    try:
        stdout = GitWitlessRunner(cwd=dspath).run(
            cmd, protocol=StdOutErrCapture)['stdout']
    except CommandError as e:
        if 'bad revision' in e.stderr:
            yield dict(
                path=dspath,
                type='dataset',
                status='impossible',
                message=e.stderr.strip())
            return
        raise

    ap = None
    for line in stdout.split('\0'):
        if not line:
            continue
        if line.startswith(':'):
            # a new path
            # yield any existing one
            if ap:
                yield ap
                ap = None
            # start new record
            m_src, m_dst, sha_src, sha_dst, status = \
                line[1:].split()
            ap = dict(
                mode_src=int(m_src, base=8),
                mode=int(m_dst, base=8),
                revision_src=sha_src if sha_src != '0' * 40 else None,
                revision=sha_dst if sha_dst != '0' * 40 else None,
                parentds=dspath)
            _translate_status(status, ap)
            _translate_type(ap['mode'], ap, 'type')
            _translate_type(ap['mode_src'], ap, 'type_src')
        else:
            # a filename
            if 'path' in ap:
                ap['path_src'] = ap['path']
            ap['path'] = opj(dspath, line)
    if ap:
        yield ap
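
A small illustration (not from the original module) of the `git diff --raw -z` metadata record format that the loop above parses:

record = ':100644 100755 ' + '1' * 40 + ' ' + '2' * 40 + ' M'
m_src, m_dst, sha_src, sha_dst, status = record[1:].split()
assert int(m_src, base=8) == 0o100644  # regular file, permissions 644
assert status == 'M'                   # content modified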
Example #3
    def __init__(self, path):
        # A lock to prevent multiple threads performing write operations in parallel
        self._write_lock = threading.Lock()

        # Note that the following three path objects are used often and
        # therefore are stored for performance. Path object creation comes
        # with a cost. Most notably, this is used for validity checking of
        # the repository.
        self.pathobj = Path(path)
        self.dot_git = _get_dot_git(self.pathobj, ok_missing=True)
        self._valid_git_test_path = self.dot_git / 'HEAD'

        self._cfg = None
        self._git_runner = GitWitlessRunner(cwd=self.pathobj)

        self.__fake_dates_enabled = None

        self._line_splitter = None

        # Finally, register a finalizer (instead of having a __del__ method).
        # This will be called by garbage collection as well as "atexit". By
        # keeping the reference here, we can also call it explicitly.
        # Note that we can pass required attributes to the finalizer, but not
        # `self` itself. That would create an additional reference to the
        # object and thereby prevent it from being collected at all.
        self._finalizer = finalize(self, GitRepo._cleanup, self.pathobj)
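
A minimal, self-contained demonstration of the weakref.finalize pattern used above (the class and names are illustrative, not part of DataLad):

from weakref import finalize

class Resource:
    def __init__(self, name):
        # register a finalizer instead of defining __del__; it runs on
        # garbage collection and at interpreter exit, and can also be
        # invoked explicitly (it is idempotent)
        self._finalizer = finalize(self, print, 'cleaned up', name)

r = Resource('demo')
r._finalizer()  # explicit cleanup; later GC/atexit calls are no-ops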
Example #4
def _get_untracked_content(dspath, report_untracked, paths=None):
    cmd = [
        'git',
        '--work-tree=.',
        'status',
        '--porcelain',
        # file names NULL terminated
        '-z',
        # we never want to touch submodules, they cannot be untracked
        '--ignore-submodules=all',
        # fully untracked dirs as such, the rest as files
        '--untracked={}'.format(report_untracked)
    ]
    try:
        stdout = GitWitlessRunner(cwd=dspath).run(
            cmd, protocol=StdOutErrCapture)['stdout']
    except CommandError:
        # TODO should we catch any and handle them in here?
        raise

    if paths:
        paths = [r['path'] for r in paths]
        if len(paths) == 1 and paths[0] == dspath:
            # nothing to filter
            paths = None

    from datalad.utils import ensure_unicode

    for line in stdout.split('\0'):
        if not line:
            continue
        line = ensure_unicode(line)
        if not line.startswith('?? '):
            # nothing untracked, ignore, task of `diff`
            continue
        apath = opj(
            dspath,
            # strip state marker
            line[3:])
        norm_apath = normpath(apath)
        if paths and not any(norm_apath == p or path_startswith(apath, p)
                             for p in paths):
            # we got a whitelist for paths, don't report any other
            continue
        ap = dict(path=norm_apath,
                  parentds=dspath,
                  state='untracked',
                  type='directory' if isdir(apath) else 'file')
        yield ap
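
An illustrative parse (assumed input) of the NUL-terminated `git status --porcelain -z` output that the loop above filters for untracked entries:

sample_stdout = '?? untracked.txt\0?? newdir/\0 M tracked.txt\0'
untracked = [line[3:] for line in sample_stdout.split('\0')
             if line.startswith('?? ')]
assert untracked == ['untracked.txt', 'newdir/']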
Example #5
class witlessrunner(SuprocBenchmarks):
    """Some rudimentary benchmarks to check that there are no major slowdowns
    of WitlessRunner
    """
    def setup(self):
        self.runner = WitlessRunner()
        self.git_runner = GitWitlessRunner()

    def time_echo(self):
        self.runner.run(["echo"])

    def time_echo_gitrunner(self):
        self.git_runner.run(["echo"])

    def time_echo_gitrunner_fullcapture(self):
        self.git_runner.run(["echo"], protocol=StdOutErrCapture)
Example #6
def test_magic_number():
    # we hard code the magic SHA1 that represents the state of a Git repo
    # prior to the first commit -- used to diff from scratch to a specific
    # commit
    # given the level of dark magic, we better test whether this stays
    # constant across Git versions (it should!)
    # a string command is executed via the platform shell; `cd ./` produces
    # no output, so the pipeline feeds empty stdin to `git hash-object`,
    # which hashes an empty tree object
    out = GitWitlessRunner().run('cd ./ | git hash-object --stdin -t tree',
                                 protocol=StdOutCapture)
    eq_(out['stdout'].strip(), PRE_INIT_COMMIT_SHA)
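
The magic value can be derived independently: Git hashes an object as sha1(b"<type> <size>\0" + content), so the empty tree has a fixed, well-known object ID (assumed here to equal PRE_INIT_COMMIT_SHA):

import hashlib
assert hashlib.sha1(b'tree 0\x00').hexdigest() == \
    '4b825dc642cb6eb9a060e54bf8d69288fbee4904'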
Example #7
def test_get_git_environ_adjusted():
    gitrunner = GitWitlessRunner()
    env = {
        "GIT_DIR": "../../.git",
        "GIT_WORK_TREE": "../../",
        "TEST_VAR": "Exists"
    }

    # test conversion of relevant env vars from relative to absolute paths
    adj_env = gitrunner.get_git_environ_adjusted(env)
    assert_equal(adj_env["GIT_DIR"], abspath(env["GIT_DIR"]))
    assert_equal(adj_env["GIT_WORK_TREE"], abspath(env["GIT_WORK_TREE"]))

    # test that other environment variables passed to the function are returned unaltered
    assert_equal(adj_env["TEST_VAR"], env["TEST_VAR"])

    # test import of sys_env if no environment passed to function
    sys_env = gitrunner.get_git_environ_adjusted()
    assert_equal(sys_env["PWD"], os.environ.get("PWD"))
Example #8
def test_gitannex(osf_id, dspath):
    from datalad.cmd import GitWitlessRunner
    dspath = Path(dspath)

    ds = Dataset(dspath).create()

    # add remote parameters here
    init_remote_opts = ["node={}".format(osf_id)]

    # add special remote
    init_opts = common_init_opts + init_remote_opts
    ds.repo.init_remote('osfproject', options=init_opts)

    # run git-annex-testremote
    # Note that we don't want to capture output. If something goes wrong, we
    # want to see it in the test build's output log.
    # TODO use AnnexRepo._call_annex(..., protocol=None) with 0.14+
    GitWitlessRunner(cwd=dspath,
                     env=GitWitlessRunner.get_git_environ_adjusted()).run([
                         'git', 'annex', 'testremote', 'osfproject', "--fast"
                     ])
Example #9
def test_publish_no_fetch_refspec_configured(path):

    path = Path(path)
    GitWitlessRunner(cwd=str(path)).run(
        ["git", "init", "--bare", "empty-remote"])
    ds = Dataset(path / "ds").create()
    ds.repo.add_remote("origin", str(ds.pathobj.parent / "empty-remote"))
    # Mimic a situation that can happen with an LFS remote. See gh-4199.
    ds.repo.config.unset("remote.origin.fetch", where="local")
    (ds.repo.pathobj / "foo").write_text("a")
    ds.save()
    ds.publish(to="origin")
Example #10
def _test_gitannex(host, store, dspath):
    dspath = Path(dspath)
    store = Path(store)

    ds = Dataset(dspath).create()

    if ds.repo.is_managed_branch():
        # git-annex-testremote is way too slow on crippled FS.
        # Use is_managed_branch() as a proxy and skip only here
        # instead of in a decorator
        raise SkipTest("Test too slow on crippled FS")

    populate_dataset(ds)
    ds.save()
    assert_repo_status(ds.path)

    # set up store:
    io = SSHRemoteIO(host) if host else LocalIO()
    if host:
        store_url = "ria+ssh://{host}{path}".format(host=host, path=store)
    else:
        store_url = "ria+{}".format(store.as_uri())

    create_store(io, store, '1')

    # TODO: Re-establish test for version 1
    # version 2: dirhash
    create_ds_in_store(io, store, ds.id, '2', '1')

    # add special remote
    init_opts = common_init_opts + ['url={}'.format(store_url)]
    ds.repo.init_remote('store', options=init_opts)

    from datalad.support.external_versions import external_versions
    if '8.20200330' < external_versions['cmd:annex'] < '8.20200624':
        # https://git-annex.branchable.com/bugs/testremote_breeds_way_too_many_instances_of_the_externals_remote/?updated
        raise SkipTest(
            "git-annex might lead to overwhelming number of external "
            "special remote instances")

    # run git-annex-testremote
    # Note that we don't want to capture output. If something goes wrong, we
    # want to see it in the test build's output log.
    GitWitlessRunner(cwd=dspath).run(['git', 'annex', 'testremote', 'store'])
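
An illustration of the two RIA store URL forms constructed above (POSIX paths assumed):

from pathlib import Path
store = Path('/tmp/store')
assert 'ria+{}'.format(store.as_uri()) == 'ria+file:///tmp/store'
assert 'ria+ssh://{host}{path}'.format(host='example.com', path=store) == \
    'ria+ssh://example.com/tmp/store'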
Example #11
class ConfigManager(object):
    """Thin wrapper around `git-config` with support for a dataset configuration.

    The general idea is to have an object that is primarily used to read/query
    configuration options.  Upon creation, the current configuration is read
    via one (or at most two, if dataset-specific configuration is present)
    calls to `git config`.  If this class is initialized with a Dataset
    instance, it supports reading and writing configuration from
    ``.datalad/config`` inside a dataset too. This file is committed to Git and
    hence useful to ship certain configuration items with a dataset.

    The API aims to provide the most significant read-access API of a
    dictionary, the Python ConfigParser, and GitPython's config parser
    implementations.

    This class is presently not capable of efficiently writing multiple
    configuration items at once.  Instead, each modification results in a
    dedicated call to `git config`. This author thinks this is OK, as he
    cannot think of a situation where a large number of items need to be
    written during normal operation.

    Each instance carries a public `overrides` attribute. This dictionary
    contains variables that override any setting read from a file. The overrides
    are persistent across reloads.

    Any DATALAD_* environment variable is also presented as a configuration
    item. Settings read from environment variables are not stored in any of the
    configuration files, but are read dynamically from the environment at each
    `reload()` call. Their values take precedence over any specification in
    configuration files, and even overrides.

    Parameters
    ----------
    dataset : Dataset, optional
      If provided, all `git config` calls are executed in this dataset's
      directory. Moreover, any modifications are, by default, directed to
      this dataset's configuration file (which will be created on demand)
    overrides : dict, optional
      Variable overrides, see general class documentation for details.
    source : {'any', 'local', 'dataset', 'dataset-local'}, optional
      Which sources of configuration setting to consider. If 'dataset',
      configuration items are only read from a dataset's persistent
      configuration file, if any is present (the one in ``.datalad/config``, not
      ``.git/config``); if 'local', any non-committed source is considered
      (local and global configuration in Git config's terminology);
      if 'dataset-local', persistent dataset configuration and local, but
      not global or system configuration are considered; if 'any'
      all possible sources of configuration are considered.
    """

    _checked_git_identity = False

    # Lock for running modifying operations across multiple threads.
    # Since the config at the same path could potentially be written
    # independently by multiple threads, and we might be modifying the global
    # config as well, a static (class-level) lock ensures that no more than
    # one thread writes at a time, even if to different repositories.
    _run_lock = threading.Lock()

    def __init__(self, dataset=None, overrides=None, source='any'):
        if source not in ('any', 'local', 'dataset', 'dataset-local'):
            raise ValueError(
                'Unknown ConfigManager(source=) setting: {}'.format(source))
        store = dict(
            # store in a simple dict
            # no subclassing, because we want to be largely read-only, and implement
            # config writing separately
            cfg={},
            # track the files that jointly make up the config in this store
            files=set(),
            # and their modification times to be able to avoid needless unforced reloads
            mtimes=None,
        )
        self._stores = dict(
            # populated with info from git
            git=store,
            # only populated with info from committed dataset config
            dataset=store.copy(),
        )
        # merged representation (the only one that existed pre datalad 0.14)
        # will be built on initial reload
        self._merged_store = {}

        self._repo_dot_git = None
        self._repo_pathobj = None
        if dataset:
            if hasattr(dataset, 'dot_git'):
                self._repo_dot_git = dataset.dot_git
                self._repo_pathobj = dataset.pathobj
            elif dataset.repo:
                self._repo_dot_git = dataset.repo.dot_git
                self._repo_pathobj = dataset.repo.pathobj

        self._config_cmd = ['git', 'config']
        # public dict to store variables that always override any setting
        # read from a file
        # `hasattr()` is needed because `datalad.cfg` is generated upon first module
        # import, hence when this code runs first, there cannot be any config manager
        # to inherit from
        self.overrides = datalad.cfg.overrides.copy() if hasattr(
            datalad, 'cfg') else {}
        if overrides is not None:
            self.overrides.update(overrides)
        if dataset is None:
            if source in ('dataset', 'dataset-local'):
                raise ValueError(
                    'ConfigManager configured to read dataset only, '
                    'but no dataset given')
            # The caller didn't specify a repository. Unset the git directory
            # when calling 'git config' to prevent a repository in the current
            # working directory from leaking configuration into the output.
            self._config_cmd = ['git', '--git-dir=', 'config']

        self._src_mode = source
        run_kwargs = dict()
        self._runner = None
        if dataset is not None:
            if hasattr(dataset, '_git_runner'):
                self._runner = dataset._git_runner
            elif dataset.repo:
                self._runner = dataset.repo._git_runner
            else:
                # make sure we run the git config calls in the dataset
                # to pick up the right config files
                run_kwargs['cwd'] = dataset.path
        if self._runner is None:
            self._runner = GitWitlessRunner(**run_kwargs)

        self.reload(force=True)

        if not ConfigManager._checked_git_identity:
            for cfg, envs in (('user.name', ('GIT_AUTHOR_NAME',
                                             'GIT_COMMITTER_NAME')),
                              ('user.email', ('GIT_AUTHOR_EMAIL',
                                              'GIT_COMMITTER_EMAIL'))):
                if cfg not in self \
                        and not any(e in os.environ for e in envs):
                    lgr.warning(
                        "It is highly recommended to configure Git before using "
                        "DataLad. Set both 'user.name' and 'user.email' "
                        "configuration variables.")
            ConfigManager._checked_git_identity = True

    def reload(self, force=False):
        """Reload all configuration items from the configured sources

        If `force` is False, all files the configuration was previously read
        from are checked for differences in their modification times. If no
        difference is found for any file, no reload is performed. This
        mechanism will not detect newly created global configuration files;
        use `force` in that case.
        """
        run_args = ['-z', '-l', '--show-origin']

        # update from desired config sources only
        # 2-step strategy:
        #   - load datalad dataset config from dataset
        #   - load git config from all supported by git sources
        # in doing so we always stay compatible with where Git gets its
        # config from, but also allow to override persistent information
        # from dataset locally or globally

        # figure out what needs to be reloaded at all
        to_run = {}
        # committed dataset config
        dataset_cfgfile = self._repo_pathobj / DATASET_CONFIG_FILE \
            if self._repo_pathobj else None
        if (self._src_mode != 'local' and dataset_cfgfile
                and dataset_cfgfile.exists()) and (force or self._need_reload(
                    self._stores['dataset'])):
            to_run['dataset'] = run_args + ['--file', str(dataset_cfgfile)]

        if self._src_mode != 'dataset' and (force or self._need_reload(
                self._stores['git'])):
            to_run['git'] = run_args + ['--local'] \
                if self._src_mode == 'dataset-local' \
                else run_args

        # reload everything that was flagged for reload
        while to_run:
            store_id, runargs = to_run.popitem()
            self._stores[store_id] = self._reload(runargs)

        # always update the merged representation, even if we did not reload
        # anything from a file. ENV or overrides could change independently
        # start with the committed dataset config
        merged = self._stores['dataset']['cfg'].copy()
        # local config always takes precedence
        merged.update(self._stores['git']['cfg'])
        # superimpose overrides
        merged.update(self.overrides)
        # override with environment variables, unless we only want to read the
        # dataset's commit config
        if self._src_mode != 'dataset':
            _update_from_env(merged)
        self._merged_store = merged

    def _need_reload(self, store):
        if not store['mtimes']:
            return True
        # we have read files before
        # check if any file we read from has changed
        current_time = time()
        curmtimes = {
            c: c.stat().st_mtime
            for c in store['files'] if c.exists()
        }
        if all(curmtimes[c] == store['mtimes'].get(c) and
               # protect against low-res mtimes (FAT32 has 2s, EXT3 has 1s!)
               # if mtime age is less than worst resolution assume modified
               (current_time - curmtimes[c]) > 2.0 for c in curmtimes):
            return False
        return True

    def _reload(self, run_args):
        # query git-config
        stdout, stderr = self._run(
            run_args,
            protocol=StdOutErrCapture,
            # always expect git-config to output utf-8
            encoding='utf-8',
        )
        store = {}
        store['cfg'], store['files'] = _parse_gitconfig_dump(
            stdout, cwd=self._runner.cwd)

        # update mtimes of config files, they have just been discovered
        # and should still exist
        store['mtimes'] = {c: c.stat().st_mtime for c in store['files']}
        return store

    @_where_reload
    def obtain(self,
               var,
               default=None,
               dialog_type=None,
               valtype=None,
               store=False,
               where=None,
               reload=True,
               **kwargs):
        """
        Convenience method to obtain settings interactively, if needed

        A UI will be used to ask for user input in interactive sessions.
        Questions to ask, and additional explanations can be passed directly
        as arguments, or retrieved from a list of pre-configured items.

        Additionally, this method allows for type conversion and storage
        of obtained settings. Both aspects can also be pre-configured.

        Parameters
        ----------
        var : str
          Variable name including any section like `git config` expects them,
          e.g. 'core.editor'
        default : any type
          In interactive sessions and if `store` is True, this default value
          will be presented to the user for confirmation (or modification).
          In all other cases, this value will be silently assigned unless
          there is an existing configuration setting.
        dialog_type : {'question', 'yesno', None}
          Which dialog type to use in interactive sessions. If `None`,
          pre-configured UI options are used.
        store : bool
          Whether to store the obtained value (or default)
        %s
        `**kwargs`
          Additional arguments for the UI function call, such as a question
          `text`.
        """
        # do a local import, as this module is imported prominently, and the
        # type conversion could theoretically import all kinds of weird
        # things
        from datalad.interface.common_cfg import definitions as cfg_defs
        # fetch what we know about this variable
        cdef = cfg_defs.get(var, {})
        # type conversion setup
        if valtype is None and 'type' in cdef:
            valtype = cdef['type']
        if valtype is None:
            valtype = lambda x: x

        # any default?
        if default is None and 'default' in cdef:
            default = cdef['default']

        _value = None
        if var in self:
            # nothing needs to be obtained, it is all here already
            _value = self[var]
        elif store is False and default is not None:
            # nothing will be stored, and we have a default -> no user confirmation
            # we cannot use logging, because we want to use the config to configure
            # the logging
            #lgr.debug('using default {} for config setting {}'.format(default, var))
            _value = default

        if _value is not None:
            # we got everything we need and can exit early
            try:
                return valtype(_value)
            except Exception as e:
                raise ValueError(
                    "value '{}' of existing configuration for '{}' cannot be "
                    "converted to the desired type '{}' ({})".format(
                        _value, var, valtype, exc_str(e)))

        # now we need to try to obtain something from the user
        from datalad.ui import ui

        # configure UI
        dialog_opts = kwargs
        if dialog_type is None:  # no override
            # check for common knowledge on how to obtain a value
            if 'ui' in cdef:
                dialog_type = cdef['ui'][0]
                # pull standard dialog settings
                dialog_opts = cdef['ui'][1]
                # update with input
                dialog_opts.update(kwargs)

        if (not ui.is_interactive or dialog_type is None) and default is None:
            raise RuntimeError(
                "cannot obtain value for configuration item '{}', "
                "not preconfigured, no default, no UI available".format(var))

        if not hasattr(ui, dialog_type):
            raise ValueError(
                "UI '{}' does not support dialog type '{}'".format(
                    ui, dialog_type))

        # configure storage destination, if needed
        if store:
            if where is None and 'destination' in cdef:
                where = cdef['destination']
            if where is None:
                raise ValueError(
                    "request to store configuration item '{}', but no "
                    "storage destination specified".format(var))

        # obtain via UI
        dialog = getattr(ui, dialog_type)
        _value = dialog(default=default, **dialog_opts)

        if _value is None:
            # we got nothing
            if default is None:
                raise RuntimeError(
                    "could not obtain value for configuration item '{}', "
                    "not preconfigured, no default".format(var))
            # XXX maybe we should return default here, even if it was returned
            # from the UI -- if that is even possible

        # execute type conversion before storing to check that we got
        # something that looks like what we want
        try:
            value = valtype(_value)
        except Exception as e:
            raise ValueError(
                "cannot convert user input `{}` to desired type ({})".format(
                    _value, exc_str(e)))
            # XXX we could consider "looping" until we have a value of proper
            # type in case of a user typo...

        if store:
            # store value as it was before any conversion, needs to be str
            # anyway
            # needs string conversion nevertheless, because default could come
            # in as something else
            self.add(var, '{}'.format(_value), where=where, reload=reload)
        return value

    def __repr__(self):
        # give full list of all tracked config files, plus overrides
        return "ConfigManager({}{})".format(
            [
                str(p) for p in self._stores['dataset']['files'].union(
                    self._stores['git']['files'])
            ],
            ', overrides={!r}'.format(self.overrides)
            if self.overrides else '',
        )

    def __str__(self):
        # give path of dataset, if there is any, plus overrides
        return "ConfigManager({}{})".format(
            self._repo_pathobj if self._repo_pathobj else '',
            'with overrides' if self.overrides else '',
        )

    #
    # Compatibility with dict API
    #
    def __len__(self):
        return len(self._merged_store)

    def __getitem__(self, key):
        return self._merged_store.__getitem__(key)

    def __contains__(self, key):
        return self._merged_store.__contains__(key)

    def keys(self):
        """Returns list of configuration item names"""
        return self._merged_store.keys()

    # XXX should this be *args?
    def get(self, key, default=None, get_all=False):
        """D.get(k[,d]) -> D[k] if k in D, else d.  d defaults to None.

        Parameters
        ----------
        default : optional
          Value to return when key is not present. `None` by default.
        get_all : bool, optional
          If True, return all values of multiple identical configuration keys.
          By default only the last specified value is returned.
        """
        try:
            val = self[key]
            if get_all or not isinstance(val, tuple):
                return val
            else:
                return val[-1]
        except KeyError:
            # return as-is, default could be a tuple, hence do not subject to
            # get_all processing
            return default

    def get_from_source(self, source, key, default=None):
        """Like get(), but a source can be specific.

        If `source` is 'dataset', only the commited configuration is queried,
        overrides are applied. In the case of 'local', the committed
        configuration is ignored, but overrides and configuration from
        environment variables are applied as usual.
        """
        if source not in ('dataset', 'local'):
            raise ValueError("source must be 'dataset' or 'local'")
        if source == 'dataset':
            return self.overrides.get(
                key, self._stores['dataset']['cfg'].get(key, default))
        else:
            if key not in self._stores['dataset']['cfg']:
                # the key is not in the committed config, hence we can
                # just report based on the merged representation
                return self.get(key, default)
            else:
                # expensive case, rebuild a config without the committed
                # dataset config contributing
                env = {}
                _update_from_env(env)
                return env.get(
                    key,
                    self.overrides.get(
                        key, self._stores['git']['cfg'].get(key, default)))

    #
    # Compatibility with ConfigParser API
    #
    def sections(self):
        """Returns a list of the sections available"""
        return list(
            set([
                cfg_section_regex.match(k).group(1) for k in self._merged_store
            ]))

    def options(self, section):
        """Returns a list of options available in the specified section."""
        opts = []
        for k in self._merged_store:
            sec, opt = cfg_sectionoption_regex.match(k).groups()
            if sec == section:
                opts.append(opt)
        return opts

    def has_section(self, section):
        """Indicates whether a section is present in the configuration"""
        for k in self._merged_store:
            if k.startswith(section):
                return True
        return False

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""
        for k in self._merged_store:
            sec, opt = cfg_sectionoption_regex.match(k).groups()
            if sec == section and opt == option:
                return True
        return False

    def _get_type(self, typefn, section, option):
        key = '.'.join([section, option])
        # Mimic the handling of get_value(..., default=None), while still going
        # through get() in order to get its default tuple handling.
        if key not in self:
            raise KeyError(key)
        return typefn(self.get(key))

    def getint(self, section, option):
        """A convenience method which coerces the option value to an integer"""
        return self._get_type(int, section, option)

    def getfloat(self, section, option):
        """A convenience method which coerces the option value to a float"""
        return self._get_type(float, section, option)

    def getbool(self, section, option, default=None):
        """A convenience method which coerces the option value to a bool

        Values "on", "yes", "true" and any int!=0 are considered True
        Values which evaluate to bool False, "off", "no", "false" are considered
        False
        TypeError is raised for other values.
        """
        key = '.'.join([section, option])
        # Mimic the handling of get_value(..., default=None), while still going
        # through get() in order to get its default tuple handling.
        if default is None and key not in self:
            raise KeyError(key)
        val = self.get(key, default=default)
        if val is None:  # no value at all, git treats it as True
            return True
        return anything2bool(val)

    # this is a hybrid of ConfigParser and dict API
    def items(self, section=None):
        """Return a list of (name, value) pairs for each option

        Optionally limited to a given section.
        """
        if section is None:
            return self._merged_store.items()
        return [(k, v) for k, v in self._merged_store.items()
                if cfg_section_regex.match(k).group(1) == section]

    #
    # Compatibility with GitPython's ConfigParser
    #
    def get_value(self, section, option, default=None):
        """Like `get()`, but with an optional default value

        If the default is not None, the given default value will be returned in
        case the option did not exist. This behavior imitates GitPython's
        config parser.
        """
        try:
            return self['.'.join((section, option))]
        except KeyError as e:
            # this strange dance is needed because gitpython does it this way
            if default is not None:
                return default
            else:
                raise e

    #
    # Modify configuration (proxy respective git-config call)
    #
    @_where_reload
    def _run(self, args, where=None, reload=False, **kwargs):
        """Centralized helper to run "git config" calls

        Parameters
        ----------
        args : list
          Arguments to pass for git config
        %s
        **kwargs
          Keywords arguments for Runner's call
        """
        if where:
            args = self._get_location_args(where) + args
        if '-l' in args:
            # we are just reading, no need to reload, no need to lock
            out = self._runner.run(self._config_cmd + args, **kwargs)
            return out['stdout'], out['stderr']

        # all other calls are modifications
        if '--file' in args:
            # all paths we are passing are absolute
            custom_file = Path(args[args.index('--file') + 1])
            custom_file.parent.mkdir(exist_ok=True)
        lockfile = None
        if self._repo_dot_git and ('--local' in args or '--file' in args):
            # modification of config in a dataset
            lockfile = self._repo_dot_git / 'config.dataladlock'
        else:
            # follow pattern in downloaders for lockfile location
            lockfile = Path(self.obtain('datalad.locations.cache')) \
                / 'locks' / 'gitconfig.lck'

        with ConfigManager._run_lock, InterProcessLock(lockfile, logger=lgr):
            out = self._runner.run(self._config_cmd + args, **kwargs)

        if reload:
            self.reload()
        return out['stdout'], out['stderr']

    def _get_location_args(self, where, args=None):
        if args is None:
            args = []
        cfg_labels = ('dataset', 'local', 'global', 'override')
        if where not in cfg_labels:
            raise ValueError(
                "unknown configuration label '{}' (not in {})".format(
                    where, cfg_labels))
        if where == 'dataset':
            if not self._repo_pathobj:
                raise ValueError(
                    'ConfigManager cannot store configuration to dataset, '
                    'none specified')
            dataset_cfgfile = self._repo_pathobj / DATASET_CONFIG_FILE
            args.extend(['--file', str(dataset_cfgfile)])
        elif where == 'global':
            args.append('--global')
        elif where == 'local':
            args.append('--local')
        return args

    @_where_reload
    def add(self, var, value, where='dataset', reload=True):
        """Add a configuration variable and value

        Parameters
        ----------
        var : str
          Variable name including any section like `git config` expects them, e.g.
          'core.editor'
        value : str
          Variable value
        %s"""
        if where == 'override':
            from datalad.utils import ensure_list
            val = ensure_list(self.overrides.pop(var, None))
            val.append(value)
            self.overrides[var] = val[0] if len(val) == 1 else val
            if reload:
                self.reload(force=True)
            return

        self._run(['--add', var, value],
                  where=where,
                  reload=reload,
                  protocol=StdOutErrCapture)

    @_where_reload
    def set(self, var, value, where='dataset', reload=True, force=False):
        """Set a variable to a value.

        In opposition to `add`, this replaces the value of `var` if there is
        one already.

        Parameters
        ----------
        var : str
          Variable name including any section like `git config` expects them, e.g.
          'core.editor'
        value : str
          Variable value
        force: bool
          if set, replaces all occurrences of `var` by a single one with the
          given `value`. Otherwise raise if multiple entries for `var` exist
          already
        %s"""
        if where == 'override':
            self.overrides[var] = value
            if reload:
                self.reload(force=True)
            return

        from datalad.support.gitrepo import to_options

        self._run(to_options(replace_all=force) + [var, value],
                  where=where,
                  reload=reload,
                  protocol=StdOutErrCapture)

    @_where_reload
    def rename_section(self, old, new, where='dataset', reload=True):
        """Rename a configuration section

        Parameters
        ----------
        old : str
          Name of the section to rename.
        new : str
          Name of the section to rename to.
        %s"""
        if where == 'override':
            self.overrides = {
                (new + k[len(old):]) if k.startswith(old + '.') else k: v
                for k, v in self.overrides.items()
            }
            if reload:
                self.reload(force=True)
            return

        self._run(['--rename-section', old, new], where=where, reload=reload)

    @_where_reload
    def remove_section(self, sec, where='dataset', reload=True):
        """Rename a configuration section

        Parameters
        ----------
        sec : str
          Name of the section to remove.
        %s"""
        if where == 'override':
            self.overrides = {
                k: v
                for k, v in self.overrides.items()
                if not k.startswith(sec + '.')
            }
            if reload:
                self.reload(force=True)
            return

        self._run(['--remove-section', sec], where=where, reload=reload)

    @_where_reload
    def unset(self, var, where='dataset', reload=True):
        """Remove all occurrences of a variable

        Parameters
        ----------
        var : str
          Name of the variable to remove
        %s"""
        if where == 'override':
            self.overrides.pop(var, None)
            if reload:
                self.reload(force=True)
            return

        # use unset all as it is simpler for now
        self._run(['--unset-all', var], where=where, reload=reload)
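
A hedged usage sketch for ConfigManager, assuming a DataLad installation and an illustrative throwaway dataset path:

from datalad.api import Dataset

ds = Dataset('/tmp/demo-ds').create()  # hypothetical scratch dataset
cfg = ds.config                        # a ConfigManager instance
print(cfg.get('user.name'))            # read-through to `git config`
# write to the committed dataset config (.datalad/config)
cfg.set('datalad.demo.flag', 'yes', where='dataset')
print(cfg.getbool('datalad.demo', 'flag'))  # -> True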
Example #12
    def setup(self):
        self.runner = WitlessRunner()
        self.git_runner = GitWitlessRunner()
Example #13
def test_ephemeral(origin_path, bare_path,
                   clone1_path, clone2_path, clone3_path):

    file_test = Path('ds') / 'test.txt'
    file_testsub = Path('ds') / 'subdir' / 'testsub.txt'

    origin = Dataset(origin_path).create(force=True)
    if origin.repo.is_managed_branch():
        raise SkipTest('Ephemeral clones cannot use adjusted mode repos')

    origin.save()
    # 1. clone via path
    clone1 = clone(origin_path, clone1_path, reckless='ephemeral')

    can_symlink = has_symlink_capability()

    if can_symlink:
        clone1_annex = (clone1.repo.dot_git / 'annex')
        ok_(clone1_annex.is_symlink())
        ok_(clone1_annex.resolve().samefile(origin.repo.dot_git / 'annex'))
        if not clone1.repo.is_managed_branch():
            # TODO: We can't properly handle adjusted branch yet
            eq_((clone1.pathobj / file_test).read_text(), 'some')
            eq_((clone1.pathobj / file_testsub).read_text(), 'somemore')

    # 2. clone via file-scheme URL
    clone2 = clone('file://' + Path(origin_path).as_posix(), clone2_path,
                   reckless='ephemeral')

    if can_symlink:
        clone2_annex = (clone2.repo.dot_git / 'annex')
        ok_(clone2_annex.is_symlink())
        ok_(clone2_annex.resolve().samefile(origin.repo.dot_git / 'annex'))
        if not clone2.repo.is_managed_branch():
            # TODO: We can't properly handle adjusted branch yet
            eq_((clone2.pathobj / file_test).read_text(), 'some')
            eq_((clone2.pathobj / file_testsub).read_text(), 'somemore')

    # 3. add something to clone1 and push back to origin; availability from
    # clone1 should not be propagated (we declared 'here' dead to that end)

    (clone1.pathobj / 'addition.txt').write_text("even more")
    clone1.save()
    origin.config.set("receive.denyCurrentBranch", "updateInstead",
                      where="local")
    # Note that the only thing to test is git-annex-dead here,
    # if we couldn't symlink:
    clone1.publish(to='origin', transfer_data='none' if can_symlink else 'auto')
    if not origin.repo.is_managed_branch():
        # test logic cannot handle adjusted branches
        eq_(origin.repo.get_hexsha(), clone1.repo.get_hexsha())
    res = origin.repo.whereis("addition.txt")
    if can_symlink:
        # obv. present in origin, but this is not yet known to origin:
        eq_(res, [])
        res = origin.repo.fsck()
        assert_result_count(res, 3, success=True)
        # TODO: Double check whether annex reports POSIX paths on Windows!
        eq_({str(file_test), str(file_testsub), "addition.txt"},
            {r['file'] for r in res})
        # now origin knows:
    res = origin.repo.whereis("addition.txt")
    eq_(res, [origin.config.get("annex.uuid")])

    # 4. ephemeral clone from a bare repo
    runner = GitWitlessRunner()
    runner.run(['git', 'clone', '--bare', origin_path, bare_path])
    runner.run(['git', 'annex', 'init'], cwd=bare_path)

    eph_from_bare = clone(bare_path, clone3_path, reckless='ephemeral')
    can_symlink = has_symlink_capability()

    if can_symlink:
        # Bare repo uses dirhashlower by default, while a standard repo uses
        # dirhashmixed. Symlinking different object trees doesn't really work.
        # Don't test that here, since this is not a matter of the "ephemeral"
        # option alone. We should have such a setup in the RIA tests and test
        # for data access there.
        # Here we only test for the correct linking.
        eph_annex = eph_from_bare.repo.dot_git / 'annex'
        ok_(eph_annex.is_symlink())
        ok_(eph_annex.resolve().samefile(Path(bare_path) / 'annex'))
Example #14
def _push_data(ds,
               target,
               content,
               data,
               force,
               jobs,
               res_kwargs,
               got_path_arg=False):
    if ds.config.getbool('remote.{}'.format(target), 'annex-ignore', False):
        lgr.debug(
            "Target '%s' is set to annex-ignore, exclude from data-push.",
            target,
        )
        return
    res_kwargs['target'] = target
    if not ds.config.get('.'.join(('remote', target, 'annex-uuid')), None):
        # this remote either isn't an annex,
        # or hasn't been properly initialized
        # rather than barfing tons of messages for each file, do one
        # for the entire dataset
        yield dict(
            res_kwargs,
            action='copy',
            status='impossible'
            if force in ('all', 'checkdatapresent') else 'notneeded',
            message=("Target '%s' does not appear to be an annex remote",
                     target))
        return

    # it really looks like we will transfer files, get info on what annex
    # has in store
    ds_repo = ds.repo
    # paths must be recoded to the dataset REPO root (in case of a symlinked
    # location)
    annex_info_init = \
        {ds_repo.pathobj / Path(c['path']).relative_to(ds.pathobj): c
         for c in content} if ds.pathobj != ds_repo.pathobj else \
        {Path(c['path']): c for c in content}
    content = ds.repo.get_content_annexinfo(
        # paths are taken from `annex_info_init`
        paths=None,
        init=annex_info_init,
        ref='HEAD',
        # this is an expensive operation that is only needed
        # to perform a warning below, and for more accurate
        # progress reporting (exclude unavailable content).
        # limit to cases with explicit paths provided
        eval_availability=True if got_path_arg else False,
    )
    # figure out which of the reported content (after evaluating
    # `since` and `path` arguments) needs transport
    to_transfer = [
        c for c in content.values()
        # by force
        if ((
            force in ('all', 'checkdatapresent') or
            # or by modification report
            c.get('state', None) not in ('clean', 'deleted'))
            # only consider annex'ed files
            and 'key' in c)
    ]
    if got_path_arg:
        for c in [c for c in to_transfer if not c.get('has_content', False)]:
            yield dict(
                res_kwargs,
                type=c['type'],
                path=c['path'],
                action='copy',
                status='impossible',
                message='Slated for transport, but no content present',
            )

    cmd = [
        'git', 'annex', 'copy', '--batch', '-z', '--to', target, '--json',
        '--json-error-messages', '--json-progress'
    ]

    if jobs:
        cmd.extend(['--jobs', str(jobs)])

    # Since we got here, we already have some data != "nothing"
    if (data == 'auto') or \
        (
            (data == 'auto-if-wanted') and
            ds_repo.get_preferred_content('wanted', target)
        ):
        lgr.debug("Invoking copy --auto")
        cmd.append('--auto')

    if force not in ('all', 'checkdatapresent'):
        # unless we force, we trust the local location log and use --fast;
        # when forcing, we do not trust local knowledge and let annex do the
        # presence checks
        cmd.append('--fast')

    lgr.debug("Push data from %s to '%s'", ds, target)

    # input has type=dataset, but now it is about files
    res_kwargs.pop('type', None)

    # produce final path list. use knowledge that annex command will
    # run in the root of the dataset and compact paths to be relative
    # to this location
    # XXX must not be a SpooledTemporaryFile -- dunno why, but doesn't work
    # otherwise
    with TemporaryFile() as file_list:
        nbytes = 0
        for c in to_transfer:
            file_list.write(bytes(Path(c['path']).relative_to(ds.pathobj)))
            file_list.write(b'\0')
            nbytes += c['bytesize']

        # rewind stdin buffer
        file_list.seek(0)

        # tailor the progress protocol with the total number of files
        # to be transferred
        class TailoredPushAnnexJsonProtocol(AnnexJsonProtocol):
            total_nbytes = nbytes

        # and go
        # TODO try-except and yield what was captured before the crash
        res = GitWitlessRunner(cwd=ds.path).run(
            cmd,
            # TODO report how many in total, and give global progress too
            protocol=TailoredPushAnnexJsonProtocol,
            stdin=file_list)
        for c in ('stdout', 'stderr'):
            if res[c]:
                lgr.debug('Received unexpected %s from `annex copy`: %s', c,
                          res[c])
        for j in res['stdout_json']:
            yield annexjson2result(j, ds, type='file', **res_kwargs)
    return
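
An illustration of the NUL-delimited batch input fed to `git annex copy --batch -z` above (paths are examples):

batch = b''.join(p.encode() + b'\0' for p in ('file1.dat', 'sub/file2.dat'))
assert batch == b'file1.dat\x00sub/file2.dat\x00'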
Example #15
    def _get_runner(self):
        # lazily create and cache a runner bound to the repo's location
        runner = self._runner or GitWitlessRunner(
            cwd=self._repo.path if self._repo else None)
        self._runner = runner
        return runner
Example #16
def postclonecfg_annexdataset(ds, reckless, description=None):
    """If ds "knows annex" -- annex init it, set into reckless etc

    Provides additional tune up to a possibly an annex repo, e.g.
    "enables" reckless mode, sets up description
    """
    # in any case check whether we need to annex-init the installed thing:
    if not knows_annex(ds.path):
        # not for us
        return

    # init annex when traces of a remote annex can be detected
    if reckless == 'auto':
        lgr.debug(
            "Instruct annex to hardlink content in %s from local "
            "sources, if possible (reckless)", ds.path)
        ds.config.set(
            'annex.hardlink', 'true', where='local', reload=True)

    lgr.debug("Initializing annex repo at %s", ds.path)
    # Note that we cannot enforce annex-init via AnnexRepo().
    # If such an instance already exists, its __init__ will not be executed.
    # Therefore do a quick test once we have an object and decide whether to
    # call its _init().
    #
    # Additionally, call init if we need to add a description (see #1403),
    # since AnnexRepo.__init__ can only do it with create=True
    repo = AnnexRepo(ds.path, init=True)
    if not repo.is_initialized() or description:
        repo._init(description=description)
    if reckless == 'auto' or (reckless and reckless.startswith('shared-')):
        repo._run_annex_command('untrust', annex_options=['here'])

    elif reckless == 'ephemeral':
        # with ephemeral we declare 'here' as 'dead' right away, whenever
        # we symlink origin's annex, since availability from 'here' should
        # not be propagated for an ephemeral clone when we publish back to
        # origin.
        # This will cause stuff like this for a locally present annexed file:
        # % git annex whereis d1
        # whereis d1 (0 copies) failed
        # BUT this works:
        # % git annex find . --not --in here
        # % git annex find . --in here
        # d1

        # we don't want annex copy-to origin
        ds.config.set(
            'remote.origin.annex-ignore', 'true',
            where='local')

        ds.repo.set_remote_dead('here')

        if check_symlink_capability(ds.repo.dot_git / 'dl_link_test',
                                    ds.repo.dot_git / 'dl_target_test'):
            # symlink the annex to avoid needless copies in an ephemeral clone
            annex_dir = ds.repo.dot_git / 'annex'
            origin_annex_url = ds.config.get("remote.origin.url", None)
            origin_git_path = None
            if origin_annex_url:
                try:
                    # Deal with file:// scheme URLs as well as plain paths.
                    # If origin isn't local, we have nothing to do.
                    origin_git_path = Path(RI(origin_annex_url).localpath)

                    # we are local; check for a bare repo first to not mess w/
                    # the path
                    # Note that w/o support for bare repos in GitRepo we also
                    # can't use ConfigManager ATM.
                    gc_response = GitWitlessRunner(
                        cwd=origin_git_path,
                    ).run(['git', 'config', '--local', '--get', 'core.bare'],
                          protocol=StdOutErrCapture)
                    if gc_response['stdout'].lower().strip() == 'true':
                        # origin is a bare repo -> use path as is
                        pass
                    elif origin_git_path.name != '.git':
                        origin_git_path /= '.git'
                except ValueError:
                    # Note that accessing localpath on a non-local RI throws
                    # ValueError rather than resulting in an AttributeError.
                    # TODO: Warning level okay or is info level sufficient?
                    # Note that setting annex-dead is independent of
                    # symlinking .git/annex. It might still make sense to
                    # have an ephemeral clone that doesn't propagate its
                    # availability info. Therefore don't fail altogether.
                    lgr.warning("reckless=ephemeral mode: origin doesn't seem "
                                "local: %s\nno symlinks being used",
                                origin_annex_url)
            if origin_git_path:
                # TODO make sure that we do not delete any unique data
                rmtree(str(annex_dir)) \
                    if not annex_dir.is_symlink() else annex_dir.unlink()
                annex_dir.symlink_to(origin_git_path / 'annex',
                                     target_is_directory=True)
        else:
            # TODO: What level? Also note that annex-dead is independent of
            # symlinking.
            lgr.warning("reckless=ephemeral mode: Unable to create symlinks on "
                        "this file system.")

    srs = {True: [], False: []}  # special remotes by "autoenable" key
    remote_uuids = None  # might be necessary to discover known UUIDs

    repo_config = repo.config
    # Note: The purpose of this function is to inform the user. So if something
    # looks misconfigured, we'll warn and move on to the next item.
    for uuid, config in repo.get_special_remotes().items():
        sr_name = config.get('name', None)
        if sr_name is None:
            lgr.warning(
                'Ignoring special remote %s because it does not have a name. '
                'Known information: %s',
                uuid, config)
            continue
        sr_autoenable = config.get('autoenable', False)
        try:
            sr_autoenable = ensure_bool(sr_autoenable)
        except ValueError:
            lgr.warning(
                'Failed to process "autoenable" value %r for sibling %s in '
                'dataset %s as bool. '
                'You might need to enable it later manually and/or fix it up to'
                ' avoid this message in the future.',
                sr_autoenable, sr_name, ds.path)
            continue

        # If it looks like a type=git special remote, make sure we have up to
        # date information. See gh-2897.
        if sr_autoenable and repo_config.get("remote.{}.fetch".format(sr_name)):
            try:
                repo.fetch(remote=sr_name)
            except CommandError as exc:
                lgr.warning("Failed to fetch type=git special remote %s: %s",
                            sr_name, exc_str(exc))

        # determine whether there is a registered remote with matching UUID
        if uuid:
            if remote_uuids is None:
                remote_uuids = {
                    # Check annex-config-uuid first. For sameas annex remotes,
                    # this will point to the UUID for the configuration (i.e.
                    # the key returned by get_special_remotes) rather than the
                    # shared UUID.
                    (repo_config.get('remote.%s.annex-config-uuid' % r) or
                     repo_config.get('remote.%s.annex-uuid' % r))
                    for r in repo.get_remotes()
                }
            if uuid not in remote_uuids:
                srs[sr_autoenable].append(sr_name)

    if srs[True]:
        lgr.debug(
            "configuration for %s %s added because of autoenable,"
            " but no UUIDs for them yet known for dataset %s",
            # since we are only at debug level, we could call things their
            # proper names
            single_or_plural("special remote",
                             "special remotes", len(srs[True]), True),
            ", ".join(srs[True]),
            ds.path
        )

    if srs[False]:
        # if has no auto-enable special remotes
        lgr.info(
            'access to %s %s not auto-enabled, enable with:\n'
            '\t\tdatalad siblings -d "%s" enable -s %s',
            # but since humans might read it, we better confuse them with our
            # own terms!
            single_or_plural("dataset sibling",
                             "dataset siblings", len(srs[False]), True),
            ", ".join(srs[False]),
            ds.path,
            srs[False][0] if len(srs[False]) == 1 else "SIBLING",
        )

    # we have just cloned the repo, so it has 'origin', configure any
    # reachable origin of origins
    yield from configure_origins(ds, ds)
Пример #17
def get_git_version(runner=None):
    """Return version of available git"""
    runner = runner or GitWitlessRunner()
    return runner.run('git version'.split(),
                      protocol=StdOutErrCapture)['stdout'].split()[2]
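
A hedged usage sketch for get_git_version(): comparing the reported version
against a required minimum. The tuple comparison below is an illustrative
assumption, not DataLad's own version handling:

def git_at_least(required='2.19.1'):
    """Return True if the installed git is at least `required` (sketch)."""
    # compare the leading numeric fields, e.g. '2.39.2' -> (2, 39, 2)
    numeric = lambda v: tuple(int(p) for p in v.split('.')[:3])
    return numeric(get_git_version()) >= numeric(required)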
Example #18
class GitRepo(RepoInterface, metaclass=PathBasedFlyweight):
    """Representation of a Git repository

    """
    # Could be used to e.g. disable automatic garbage and autopacking
    # ['-c', 'receive.autogc=0', '-c', 'gc.auto=0']
    _GIT_COMMON_OPTIONS = ["-c", "diff.ignoreSubmodules=none"]
    _git_cmd_prefix = ["git"] + _GIT_COMMON_OPTIONS

    # Begin Flyweight:

    _unique_instances = WeakValueDictionary()

    def _flyweight_invalid(self):
        return not self.is_valid()

    @classmethod
    def _flyweight_reject(cls, id_, *args, **kwargs):
        pass

    @classmethod
    def _cleanup(cls, path):
        # Ben: I think in case of GitRepo there's nothing to do ATM. Statements
        #      like the one in the commented-out __del__ above don't make sense
        #      with Python's GC, IMO, except for manually resolving cyclic
        #      references (not the case w/ ConfigManager ATM).
        lgr.log(1, "Finalizer called on: GitRepo(%s)", path)

    def __hash__(self):
        # the flyweight key is already determining unique instances
        # add the class name to distinguish from strings of a path
        return hash((self.__class__.__name__, self.__weakref__.key))

    # End Flyweight

    def __init__(self, path):
        # A lock to prevent multiple threads performing write operations in parallel
        self._write_lock = threading.Lock()

        # Note, that the following three path objects are used often and
        # therefore are stored for performance. Path object creation comes with
        # a cost. Most notably, this is used for validity checking of the
        # repository.
        self.pathobj = Path(path)
        self.dot_git = _get_dot_git(self.pathobj, ok_missing=True)
        self._valid_git_test_path = self.dot_git / 'HEAD'

        self._cfg = None
        self._git_runner = GitWitlessRunner(cwd=self.pathobj)

        self.__fake_dates_enabled = None

        self._line_splitter = None

        # Finally, register a finalizer (instead of having a __del__ method).
        # This will be called by garbage collection as well as "atexit". By
        # keeping the reference here, we can also call it explicitly.
        # Note, that we can pass required attributes to the finalizer, but not
        # `self` itself. This would create an additional reference to the object
        # and thereby preventing it from being collected at all.
        self._finalizer = finalize(self, GitRepo._cleanup, self.pathobj)

    def __eq__(self, obj):
        """Decides whether or not two instances of this class are equal.

        This is done by comparing the base repository path.
        """
        return self.pathobj == obj.pathobj

    def is_valid(self_or_path):
        """Returns whether the underlying repository appears to be still valid

        This method can be used as an instance method or a class method.
        """
        # preserving notes from the original implementations in GitRepo
        #
        # Note that this is almost identical to the classmethod is_valid_repo().
        # However, if we are testing an existing instance, we can save Path object
        # creations. Since this testing is done a lot, this is relevant. Creation
        # of the Path objects in is_valid_repo() takes nearly half the time of the
        # entire function.

        # Also note that this method is bound to an instance but still
        # class-dependent, meaning that a subclass cannot simply overwrite it.
        # This is particularly important for the call from within __init__(),
        # which in turn is called by the subclasses' __init__. Using an overwrite
        # would lead to the wrong thing being called.
        if not isinstance(self_or_path, GitRepo):
            # called like a classmethod, perform test without requiring
            # a repo instance
            if not isinstance(self_or_path, Path):
                self_or_path = Path(self_or_path)
            dot_git_path = self_or_path / '.git'
            return (dot_git_path.exists() and
                    (not dot_git_path.is_dir() or
                     (dot_git_path / 'HEAD').exists())) or (self_or_path /
                                                            'HEAD').exists()
        else:
            # called as a method of a repo instance
            return self_or_path.dot_git.exists() and (
                not self_or_path.dot_git.is_dir()
                or self_or_path._valid_git_test_path.exists())
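
    # Illustrative dual use (comment sketch, not part of the class): both of
    # the following are valid invocations, assuming '/tmp/repo' exists:
    #
    #   GitRepo.is_valid('/tmp/repo')    # path-based, no instance required
    #   GitRepo('/tmp/repo').is_valid()  # instance-based, reuses cached Paths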

    @property
    def cfg(self):
        """Get a ConfigManager instance for this repository

        Returns
        -------
        ConfigManager
        """
        if self._cfg is None:
            # associate with this dataset and read the entire config hierarchy
            self._cfg = ConfigManager(dataset=self, source='any')
        return self._cfg

    @property
    def _fake_dates_enabled(self):
        """Is the repository configured to use fake dates?

        This is an internal query performance helper for the datalad.fake-dates
        config option.
        """
        if self.__fake_dates_enabled is None:
            self.__fake_dates_enabled = \
                self.cfg.getbool('datalad', 'fake-dates', default=False)
        return self.__fake_dates_enabled

    def add_fake_dates_to_env(self, env=None):
        """Add fake dates to `env`.

        Parameters
        ----------
        env : dict, optional
            Environment variables.

        Returns
        -------
        A dict (copied from env), with date-related environment
        variables for git and git-annex set.
        """
        env = (env if env is not None else environ).copy()
        # Note: Use _git_custom_command here rather than repo.git.for_each_ref
        # so that we use annex-proxy in direct mode.
        last_date = list(
            self.for_each_ref_(
                fields='committerdate:raw',
                count=1,
                pattern='refs/heads',
                sort="-committerdate",
            ))

        if last_date:
            # Drop the "contextual" timezone, leaving the unix timestamp.  We
            # avoid :unix above because it wasn't introduced until Git v2.9.4.
            last_date = last_date[0]['committerdate:raw'].split()[0]
            seconds = int(last_date)
        else:
            seconds = self.cfg.obtain("datalad.fake-dates-start")
        seconds_new = seconds + 1
        date = "@{} +0000".format(seconds_new)

        lgr.debug(
            "Setting date to %s",
            time.strftime("%a %d %b %Y %H:%M:%S +0000",
                          time.gmtime(seconds_new)))

        env["GIT_AUTHOR_DATE"] = date
        env["GIT_COMMITTER_DATE"] = date
        env["GIT_ANNEX_VECTOR_CLOCK"] = str(seconds_new)

        return env
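
    # A hedged sketch of the intended use (hypothetical call site, not part of
    # this listing): the augmented environment is handed to the runner so that
    # git and git-annex record monotonically increasing fake timestamps.
    #
    #   env = repo.add_fake_dates_to_env()
    #   repo._git_runner.run(['git', 'commit', '-m', 'msg'], env=env)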

    def _generator_call_git(self, args, files=None, env=None, sep=None):
        """
        Call git, yield stdout and stderr lines when available. Output lines
        are split at line ends or `sep` if `sep` is not None.

        Parameters
        ----------
        sep : str, optional
          Use `sep` as line separator. Does not create an empty last line if
          the input ends on sep.

        All other parameters match those described for `call_git`.

        Returns
        -------
        Generator that yields tuples of `(file_no, line)`, where `file_no` is
        either:

         - `datalad.runner.nonasyncrunner.STDOUT_FILENO` for stdout, or
         - `datalad.runner.nonasyncrunner.STDERR_FILENO` for stderr,

        and `line` is the next result line, split on `sep`, or on standard line
        ends.

        Raises
        ------
        CommandError if the call exits with a non-zero status.
        """
        class GeneratorStdOutErrCapture(GeneratorMixIn, AssemblingDecoderMixIn,
                                        StdOutErrCapture):
            """
            Generator-runner protocol that yields stdout and captures stderr
            in the provided stderr_buffer.
            """
            def __init__(self):
                GeneratorMixIn.__init__(self)
                AssemblingDecoderMixIn.__init__(self)
                StdOutErrCapture.__init__(self)

            def pipe_data_received(self, fd, data):
                if fd in (1, 2):
                    self.send_result((fd, self.decode(fd, data,
                                                      self.encoding)))
                else:
                    StdOutErrCapture.pipe_data_received(self, fd, data)

        cmd = self._git_cmd_prefix + args

        if files:
            # only call the wrapper if needed (adds distraction logs
            # otherwise, and also maintains the possibility to connect
            # stdin in the future)
            generator = self._git_runner.run_on_filelist_chunks_items_(
                cmd, files, protocol=GeneratorStdOutErrCapture, env=env)
        else:
            generator = self._git_runner.run(
                cmd, protocol=GeneratorStdOutErrCapture, env=env)

        line_splitter = {
            STDOUT_FILENO: LineSplitter(sep),
            STDERR_FILENO: LineSplitter(sep)
        }

        for file_no, content in generator:
            if file_no in (STDOUT_FILENO, STDERR_FILENO):
                for line in line_splitter[file_no].process(content):
                    yield file_no, line + "\n"
            else:
                raise ValueError(f"unknown file number: {file_no}")

        for file_no in (STDOUT_FILENO, STDERR_FILENO):
            remaining_content = line_splitter[file_no].finish_processing()
            if remaining_content is not None:
                yield file_no, remaining_content

    def _call_git(self,
                  args,
                  files=None,
                  expect_stderr=False,
                  expect_fail=False,
                  env=None,
                  read_only=False):
        """Allows for calling arbitrary commands.

        Internal helper to the call_git*() methods.

        The parameters, return value, and raised exceptions match those
        documented for `call_git`.
        """
        runner = self._git_runner
        stderr_log_level = {True: 5, False: 11}[expect_stderr]

        read_write = not read_only
        if read_write and self._fake_dates_enabled:
            env = self.add_fake_dates_to_env(env if env else runner.env)

        output = {
            STDOUT_FILENO: [],
            STDERR_FILENO: [],
        }

        with lock_if_required(read_write, self._write_lock), \
             git_ignore_check(expect_fail, output[STDOUT_FILENO], output[STDERR_FILENO]):

            for file_no, line in self._generator_call_git(args,
                                                          files=files,
                                                          env=env):
                output[file_no].append(line)

        for line in output[STDERR_FILENO]:
            lgr.log(stderr_log_level, "stderr| " + line.rstrip("\n"))
        return ("".join(output[STDOUT_FILENO]), "".join(output[STDERR_FILENO]))

    def call_git(self,
                 args,
                 files=None,
                 expect_stderr=False,
                 expect_fail=False,
                 read_only=False):
        """Call git and return standard output.

        Parameters
        ----------
        args : list of str
          Arguments to pass to `git`.
        files : list of str, optional
          File arguments to pass to `git`. The advantage of passing these here
          rather than as part of `args` is that the call will be split into
          multiple calls to avoid exceeding the maximum command line length.
        expect_stderr : bool, optional
          Standard error is expected and should not be elevated above the DEBUG
          level.
        expect_fail : bool, optional
          A non-zero exit is expected and should not be elevated above the
          DEBUG level.
        read_only : bool, optional
          By setting this to True, the caller indicates that the command does
          not write to the repository, which lets this function skip some
          operations that are necessary only for commands that modify the
          repository. Beware that even commands that are conceptually
          read-only, such as `git-status` and `git-diff`, may refresh and write
          the index.

        Returns
        -------
        standard output (str)

        Raises
        ------
        CommandError if the call exits with a non-zero status.
        """
        return "\n".join(
            self.call_git_items_(args,
                                 files,
                                 expect_stderr=expect_stderr,
                                 expect_fail=expect_fail,
                                 read_only=read_only))
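
    # Usage sketch (hypothetical repo instance): read-only queries should be
    # flagged as such so that the write lock and fake-date setup are skipped.
    #
    #   head = repo.call_git(['rev-parse', 'HEAD'], read_only=True).strip()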

    def call_git_items_(self,
                        args,
                        files=None,
                        expect_stderr=False,
                        expect_fail=False,
                        env=None,
                        read_only=False,
                        sep=None):
        """
        Call git, yield output lines when available. Output lines are split
        at line ends or `sep` if `sep` is not None.

        Parameters
        ----------
        sep : str, optional
          Use sep as line separator. Does not create an empty last line if
          the input ends on sep.

        All other parameters match those described for `call_git`.

        Returns
        -------
        Generator that yields stdout items.

        Raises
        ------
        CommandError if the call exits with a non-zero status.
        """

        read_write = not read_only
        if read_write and self._fake_dates_enabled:
            env = self.add_fake_dates_to_env(
                env if env else self._git_runner.env)

        stderr_lines = []

        with lock_if_required(read_write, self._write_lock), \
             git_ignore_check(expect_fail, None, stderr_lines):

            for file_no, line in self._generator_call_git(args,
                                                          files=files,
                                                          env=env,
                                                          sep=sep):
                if file_no == STDOUT_FILENO:
                    yield line.rstrip("\n")
                else:
                    stderr_lines.append(line)

        stderr_log_level = {True: 5, False: 11}[expect_stderr]
        for line in stderr_lines:
            lgr.log(stderr_log_level, "stderr| " + line.strip("\n"))

    def call_git_oneline(self,
                         args,
                         files=None,
                         expect_stderr=False,
                         read_only=False):
        """Call git for a single line of output.

        All other parameters match those described for `call_git`.

        Raises
        ------
        CommandError if the call exits with a non-zero status.
        AssertionError if there is not exactly one line of output.
        """
        lines = list(
            self.call_git_items_(args,
                                 files=files,
                                 expect_stderr=expect_stderr,
                                 read_only=read_only))
        if len(lines) != 1:
            raise AssertionError(
                "Expected {} to return a single line, but it returned {}".format(
                    ["git"] + args, lines))
        return lines[0]

    def call_git_success(self,
                         args,
                         files=None,
                         expect_stderr=False,
                         read_only=False):
        """Call git and return true if the call exit code of 0.

        All parameters match those described for `call_git`.

        Returns
        -------
        bool
        """
        try:
            self._call_git(args,
                           files,
                           expect_fail=True,
                           expect_stderr=expect_stderr,
                           read_only=read_only)

        except CommandError:
            return False
        return True
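
    # Usage sketch (hypothetical branch name): probe for a ref without
    # raising on a non-zero exit, e.g.
    #
    #   has_main = repo.call_git_success(
    #       ['show-ref', '--verify', '--quiet', 'refs/heads/main'],
    #       read_only=True)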

    def init(self, sanity_checks=True, init_options=None):
        """Initializes the Git repository.

        Parameters
        ----------
        sanity_checks: bool, optional
          Whether to perform sanity checks during initialization if the target
          path already exists, such as verifying that the new repository is not
          created in a directory where git already tracks some files.
        init_options: list, optional
          Additional options to be appended to the `git-init` call.
        """
        pathobj = self.pathobj
        path = str(pathobj)

        if not lexists(path):
            pathobj.mkdir(parents=True)
        elif sanity_checks:
            # Verify that we are not trying to initialize a new git repository
            # under a directory some files of which are already tracked by git
            # use case: https://github.com/datalad/datalad/issues/3068
            try:
                stdout, _ = self._call_git(
                    ['-C', path, 'ls-files'],
                    expect_fail=True,
                    read_only=True,
                )
                if stdout:
                    raise PathKnownToRepositoryError(
                        "Failed to initialize new repository under %s because "
                        "the following files are already known to a repository "
                        "above it: %s" % (path, stdout))
            except CommandError:
                # assume that all is good -- we are not under any repo
                pass

        cmd = ['-C', path, 'init']
        cmd.extend(ensure_list(init_options))
        lgr.debug("Initializing empty Git repository at '%s'%s", path,
                  ' %s' % cmd[3:] if cmd[3:] else '')

        stdout, stderr = self._call_git(
            cmd,
            # we don't want it to scream on stdout
            expect_fail=True,
            # there is no commit, and none will be made
            read_only=True)

        # after creation we need to reconsider .git path
        self.dot_git = _get_dot_git(self.pathobj, ok_missing=True)

        return self

    def for_each_ref_(self,
                      fields=('objectname', 'objecttype', 'refname'),
                      pattern=None,
                      points_at=None,
                      sort=None,
                      count=None,
                      contains=None):
        """Wrapper for `git for-each-ref`

        Please see manual page git-for-each-ref(1) for a complete overview
        of its functionality. Only a subset of it is supported by this
        wrapper.

        Parameters
        ----------
        fields : iterable or str
          Used to compose a NULL-delimited specification for for-each-ref's
          --format option. The default field list reflects the standard
          behavior of for-each-ref when the --format option is not given.
        pattern : list or str, optional
          If provided, report only refs that match at least one of the given
          patterns.
        points_at : str, optional
          Only list refs which point at the given object.
        sort : list or str, optional
          Field name(s) to sort-by. If multiple fields are given, the last one
          becomes the primary key. Prefix any field name with '-' to sort in
          descending order.
        count : int, optional
          Stop iteration after the given number of matches.
        contains : str, optional
          Only list refs which contain the specified commit.

        Yields
        ------
        dict with items matching the given `fields`

        Raises
        ------
        ValueError
          if no `fields` are given

        RuntimeError
          if `git for-each-ref` returns a record where the number of
          properties does not match the number of `fields`
        """
        if not fields:
            raise ValueError('no `fields` provided, refuse to proceed')
        fields = ensure_list(fields)
        cmd = [
            "for-each-ref",
            "--format={}".format('%00'.join('%({})'.format(f)
                                            for f in fields)),
        ]
        if points_at:
            cmd.append('--points-at={}'.format(points_at))
        if contains:
            cmd.append('--contains={}'.format(contains))
        if sort:
            for k in ensure_list(sort):
                cmd.append('--sort={}'.format(k))
        if pattern:
            cmd += ensure_list(pattern)
        if count:
            cmd.append('--count={:d}'.format(count))

        for line in self.call_git_items_(cmd, read_only=True):
            props = line.split('\0')
            if len(fields) != len(props):
                raise RuntimeError(
                    'expected fields {} from git-for-each-ref, but got: {}'.
                    format(fields, props))
            yield dict(zip(fields, props))
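
A hedged usage sketch for for_each_ref_() with a hypothetical repo instance:
list the five most recently modified branches, mirroring the query that
add_fake_dates_to_env() issues above:

for ref in repo.for_each_ref_(
        fields=('refname:short', 'committerdate:raw'),
        pattern='refs/heads',
        sort='-committerdate',
        count=5):
    print(ref['refname:short'], ref['committerdate:raw'])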
Example #19

#
# Custom handlers
#
from datalad.cmd import (
    WitlessRunner,
    GitWitlessRunner,
    StdOutErrCapture,
)
from datalad.support.exceptions import (
    CommandError,
    MissingExternalDependency,
    OutdatedExternalDependency,
)
_runner = WitlessRunner()
_git_runner = GitWitlessRunner()


def _get_annex_version():
    """Return version of available git-annex"""
    try:
        return _runner.run(
            'git annex version --raw'.split(),
            protocol=StdOutErrCapture)['stdout']
    except CommandError:
        # fall back on method that could work with older installations
        out = _runner.run(
            ['git', 'annex', 'version'],
            protocol=StdOutErrCapture)
        return out['stdout'].splitlines()[0].split(':')[1].strip()
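
A sketch of how the returned string might feed a dependency check. The
threshold, the numeric comparison, and the exception signatures below are
assumptions made for illustration:

def _check_annex_version(min_version='8.20200309'):
    """Raise if git-annex is absent or older than min_version (sketch)."""
    try:
        version = _get_annex_version()
    except (CommandError, OSError):
        raise MissingExternalDependency('git-annex')
    # git-annex versions are date-like, e.g. '8.20200226-g2d4a4e8';
    # compare only the leading numeric fields
    numeric = lambda v: tuple(int(p) for p in v.split('-')[0].split('.')[:2])
    if numeric(version) < numeric(min_version):
        raise OutdatedExternalDependency('git-annex', ver=min_version)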
Example #20
def postclonecfg_ria(ds, props):
    """Configure a dataset freshly cloned from a RIA store"""
    repo = ds.repo
    # RIA uses hashdir mixed, copying data to it via git-annex (if cloned via
    # ssh) would make it see a bare repo and establish a hashdir lower annex
    # object tree.
    # Moreover, we want the ORA remote to receive all data for the store, so its
    # objects could be moved into archives (the main point of a RIA store).
    RIA_REMOTE_NAME = 'origin'  # don't hardcode everywhere
    ds.config.set(
        'remote.{}.annex-ignore'.format(RIA_REMOTE_NAME), 'true',
        where='local')

    # chances are that if this dataset came from a RIA store, its subdatasets
    # may live there too. Place a subdataset source candidate config that makes
    # get probe this RIA store when obtaining subdatasets
    ds.config.set(
        # we use the label 'origin' for this candidate in order to not have to
        # generate a complicated name from the actual source specification.
        # we pick a cost of 200 to sort it before datalad's default candidates
        # for non-RIA URLs, because they prioritize hierarchical layouts that
        # cannot be found in a RIA store
        'datalad.get.subdataset-source-candidate-200origin',
        # use the entire original URL up to the fragment, plus the dataset ID
        # placeholder; this should make things work with any store setup we
        # support (paths, ports, ...)
        props['source'].split('#', maxsplit=1)[0] + '#{id}',
        where='local')

    # setup publication dependency, if a corresponding special remote exists
    # and was enabled (there could be RIA stores that actually only have repos)
    # make this function be a generator
    ora_remotes = [s for s in ds.siblings('query', result_renderer='disabled')
                   if s.get('annex-externaltype') == 'ora']
    if not ora_remotes and any(
            r.get('externaltype') == 'ora'
            for r in (repo.get_special_remotes().values()
                      if hasattr(repo, 'get_special_remotes')
                      else [])):
        # No ORA remote was autoenabled, but the configuration knows about at
        # least one. Let's check origin's config for datalad.ora-remote.uuid as
        # stored by create-sibling-ria and try enabling that one.
        lgr.debug("Found no autoenabled ORA special remote. Trying to look it "
                  "up in source config ...")

        # First figure out whether we cloned via SSH, HTTP or a local path, and
        # then get that config file the same way:
        config_content = None
        scheme = props['giturl'].split(':', 1)[0]
        if scheme in ['http', 'https']:
            try:
                config_content = download_url(
                    "{}{}config".format(
                        props['giturl'],
                        '/' if not props['giturl'].endswith('/') else ''))
            except DownloadError as e:
                lgr.debug("Failed to get config file from source:\n%s",
                          exc_str(e))
        elif scheme == 'ssh':
            # TODO: switch the following to proper command abstraction:
            # SSHRemoteIO ignores the path part ATM. No remote CWD! (To be
            # changed with command abstractions). So we need to get that part to
            # have a valid path to origin's config file:
            cfg_path = PurePosixPath(URL(props['giturl']).path) / 'config'
            op = SSHRemoteIO(props['giturl'])
            try:
                config_content = op.read_file(cfg_path)
            except RIARemoteError as e:
                lgr.debug("Failed to get config file from source: %s",
                          exc_str(e))

        elif scheme == 'file':
            # TODO: switch the following to proper command abstraction:
            op = LocalIO()
            cfg_path = Path(URL(props['giturl']).localpath) / 'config'
            try:
                config_content = op.read_file(cfg_path)
            except (RIARemoteError, OSError) as e:
                lgr.debug("Failed to get config file from source: %s",
                          exc_str(e))
        else:
            lgr.debug("Unknown URL-Scheme %s in %s. Can handle SSH, HTTP or "
                      "FILE scheme URLs.", scheme, props['source'])

        # Now read the retrieved config content
        org_uuid = None
        if config_content:
            # TODO: We might be able to spare the saving to a file.
            #       "git config -f -" is not explicitly documented but happens
            #       to work and would read from stdin. Make sure we know this
            #       works for required git versions and on all platforms.
            with make_tempfile(content=config_content) as cfg_file:
                runner = GitWitlessRunner()
                try:
                    result = runner.run(
                        ['git', 'config', '-f', cfg_file,
                         'datalad.ora-remote.uuid'],
                        protocol=StdOutCapture
                    )
                    org_uuid = result['stdout'].strip()
                except CommandError as e:
                    # doesn't contain what we are looking for
                    lgr.debug("Found no UUID for ORA special remote at "
                              "'%s' (%s)", RIA_REMOTE_NAME, exc_str(e))

        # Now, enable it. If annex-init didn't fail to enable it as stored, we
        # wouldn't end up here, so enable with store URL as suggested by the URL
        # we cloned from.
        if org_uuid:
            srs = repo.get_special_remotes()
            if org_uuid in srs.keys():
                # TODO: - Double-check autoenable value and only do this when
                #         true?
                #       - What if still fails? -> Annex shouldn't change config
                #         in that case

                # we only need the store:
                new_url = props['source'].split('#')[0]
                try:
                    repo.enable_remote(srs[org_uuid]['name'],
                                       options=['url={}'.format(new_url)]
                                       )
                    lgr.info("Reconfigured %s for %s",
                             srs[org_uuid]['name'], new_url)
                    # update ora_remotes for considering publication dependency
                    # below
                    ora_remotes = [s for s in
                                   ds.siblings('query',
                                               result_renderer='disabled')
                                   if s.get('annex-externaltype', None) ==
                                   'ora']
                except CommandError as e:
                    lgr.debug("Failed to reconfigure ORA special remote: %s",
                              exc_str(e))
            else:
                lgr.debug("Unknown ORA special remote uuid at '%s': %s",
                          RIA_REMOTE_NAME, org_uuid)
    if ora_remotes:
        if len(ora_remotes) == 1:
            yield from ds.siblings('configure',
                                   name=RIA_REMOTE_NAME,
                                   publish_depends=ora_remotes[0]['name'],
                                   result_filter=None,
                                   result_renderer='disabled')
        else:
            lgr.warning("Found multiple ORA remotes. Couldn't decide which "
                        "publishing to 'origin' should depend on: %s. Consider "
                        "running 'datalad siblings configure -s origin "
                        "--publish-depends ORAREMOTENAME' to set publication "
                        "dependency manually.",
                        [r['name'] for r in ora_remotes])
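
The store URL is repeatedly derived above by splitting the original source on
'#', since RIA URLs carry the dataset ID in the URL fragment. A small
standalone illustration of that convention (the store URL and UUID below are
made up):

source = 'ria+ssh://store.example.org/path/to/store#6d69ca68-7e85-11e6-904c-002590f97d84'
store_url = source.split('#', maxsplit=1)[0]
candidate = store_url + '#{id}'  # subdataset source candidate template
# store_url -> 'ria+ssh://store.example.org/path/to/store'
# candidate -> 'ria+ssh://store.example.org/path/to/store#{id}'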
Example #21
    def __init__(self, dataset=None, overrides=None, source='any'):
        if source not in ('any', 'local', 'dataset', 'dataset-local'):
            raise ValueError(
                'Unknown ConfigManager(source=) setting: {}'.format(source))
        store = dict(
            # store in a simple dict
            # no subclassing, because we want to be largely read-only, and implement
            # config writing separately
            cfg={},
            # track the files that jointly make up the config in this store
            files=set(),
            # and their modification times to be able to avoid needless unforced reloads
            mtimes=None,
        )
        self._stores = dict(
            # populated with info from git
            git=store,
            # only populated with info from committed dataset config
            dataset=store.copy(),
        )
        # merged representation (the only one that existed pre datalad 0.14)
        # will be built on initial reload
        self._merged_store = {}

        self._repo_dot_git = None
        self._repo_pathobj = None
        if dataset:
            if hasattr(dataset, 'dot_git'):
                self._repo_dot_git = dataset.dot_git
                self._repo_pathobj = dataset.pathobj
            elif dataset.repo:
                self._repo_dot_git = dataset.repo.dot_git
                self._repo_pathobj = dataset.repo.pathobj

        self._config_cmd = ['git', 'config']
        # public dict to store variables that always override any setting
        # read from a file
        # `hasattr()` is needed because `datalad.cfg` is generated upon first module
        # import, hence when this code runs first, there cannot be any config manager
        # to inherit from
        self.overrides = datalad.cfg.overrides.copy() if hasattr(
            datalad, 'cfg') else {}
        if overrides is not None:
            self.overrides.update(overrides)
        if dataset is None:
            if source in ('dataset', 'dataset-local'):
                raise ValueError(
                    'ConfigManager configured to read dataset only, '
                    'but no dataset given')
            # The caller didn't specify a repository. Unset the git directory
            # when calling 'git config' to prevent a repository in the current
            # working directory from leaking configuration into the output.
            self._config_cmd = ['git', '--git-dir=', 'config']

        self._src_mode = source
        run_kwargs = dict()
        self._runner = None
        if dataset is not None:
            if hasattr(dataset, '_git_runner'):
                self._runner = dataset._git_runner
            elif dataset.repo:
                self._runner = dataset.repo._git_runner
            else:
                # make sure we run the git config calls in the dataset
                # to pick up the right config files
                run_kwargs['cwd'] = dataset.path
        if self._runner is None:
            self._runner = GitWitlessRunner(**run_kwargs)

        self.reload(force=True)

        if not ConfigManager._checked_git_identity:
            for cfg, envs in (('user.name', ('GIT_AUTHOR_NAME',
                                             'GIT_COMMITTER_NAME')),
                              ('user.email', ('GIT_AUTHOR_EMAIL',
                                              'GIT_COMMITTER_EMAIL'))):
                if cfg not in self \
                        and not any(e in os.environ for e in envs):
                    lgr.warning(
                        "It is highly recommended to configure Git before using "
                        "DataLad. Set both 'user.name' and 'user.email' "
                        "configuration variables.")
            ConfigManager._checked_git_identity = True
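
A hedged sketch of the identity check performed at the end of the constructor,
reduced to plain git calls (hypothetical standalone helper, not part of the
ConfigManager API):

import os
import subprocess

def git_identity_configured():
    """True if a git user identity is resolvable from config or env (sketch)."""
    for cfg, envs in (('user.name', ('GIT_AUTHOR_NAME', 'GIT_COMMITTER_NAME')),
                      ('user.email', ('GIT_AUTHOR_EMAIL', 'GIT_COMMITTER_EMAIL'))):
        res = subprocess.run(['git', 'config', '--get', cfg],
                             capture_output=True, text=True)
        if res.returncode != 0 and not any(e in os.environ for e in envs):
            return False
    return True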