Example #1
def _describe_annex():
    from datalad.cmd import (
        GitWitlessRunner,
        StdOutErrCapture,
    )

    runner = GitWitlessRunner()
    try:
        out = runner.run(['git', 'annex', 'version'],
                         protocol=StdOutErrCapture)
    except CommandError as e:
        ce = CapturedException(e)
        return dict(
            version='not available',
            message=ce.format_short(),
        )
    info = {}
    for line in out['stdout'].split(os.linesep):
        key = line.split(':')[0]
        if not key:
            continue
        value = line[len(key) + 2:].strip()
        key = key.replace('git-annex ', '')
        if key.endswith('s'):
            value = value.split()
        info[key] = value
    return info
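
For orientation, this is the mapping the parsing loop produces. The sample output below is abbreviated and hypothetical; real `git annex version` output has more fields:

    sample_stdout = (
        "git-annex version: 10.20230126\n"
        "build flags: Assistant Webapp Pairing\n"
        "key/value backends: SHA256E SHA256\n"
    )
    # The loop strips the 'git-annex ' prefix from each key and splits
    # plural-looking keys (those ending in 's') into lists, yielding roughly:
    # {'version': '10.20230126',
    #  'build flags': ['Assistant', 'Webapp', 'Pairing'],
    #  'key/value backends': ['SHA256E', 'SHA256']}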
Example #2
    def ensure_initialized(self):
        """Assures that manager is initialized - knows socket_dir, previous connections
        """
        if self._socket_dir is not None:
            return
        from datalad import cfg
        self._socket_dir = Path(cfg.obtain('datalad.locations.sockets'))
        self._socket_dir.mkdir(exist_ok=True, parents=True)
        try:
            os.chmod(str(self._socket_dir), 0o700)
        except OSError as exc:
            lgr.warning(
                "Failed to (re)set permissions on the %s. "
                "Most likely future communications would be impaired or fail. "
                "Original exception: %s", self._socket_dir,
                CapturedException(exc))

        try:
            self._prev_connections = [
                p for p in self.socket_dir.iterdir() if not p.is_dir()
            ]
        except OSError as exc:
            self._prev_connections = []
            lgr.warning(
                "Failed to list %s for existing sockets. "
                "Most likely future communications would be impaired or fail. "
                "Original exception: %s", self._socket_dir,
                CapturedException(exc))

        lgr.log(5, "Found %d previous connections",
                len(self._prev_connections))
Example #3
 def checkpresent(self, key):
     resp = None
     for url in self.gen_URLS(key):
         # somewhat duplicate of CHECKURL
         try:
             status = self._providers.get_status(url)
             if status:  # TODO:  anything specific to check???
                 return True
             # TODO:  for CHECKPRESENT-FAILURE we somehow need to figure out
             # that we can connect to that server but that specific url is
             # N/A, probably check the connection etc
         except TargetFileAbsent as exc:
             ce = CapturedException(exc)
             self.message("Target url %s file seems to be missing: %s" %
                          (url, ce))
             if not resp:
                 # if it is already marked as UNKNOWN -- let it stay that
                 # way but if not -- we might as well say that we can no
                 # longer access it
                 return False
         except Exception as exc:
             ce = CapturedException(exc)
             self.message("Failed to check status of url %s: %s" %
                          (url, ce))
     if resp is None:
         raise RemoteError(f'Could not determine presence of key {key}')
     else:
         return False
Example #4
    def init_remote_repo(path, ssh, shared, dataset, description=None):
        cmd = "git -C {} init{}".format(
            sh_quote(path),
            " --shared='{}'".format(sh_quote(shared)) if shared else '')
        try:
            ssh(cmd)
        except CommandError as e:
            ce = CapturedException(e)
            lgr.error(
                "Initialization of remote git repository failed at %s."
                "\nError: %s\nSkipping ...", path, ce)
            return False

        if isinstance(dataset.repo, AnnexRepo):
            # init remote git annex repo (part fix of #463)
            try:
                ssh("git -C {} annex init {}".format(
                    sh_quote(path),
                    sh_quote(description) if description else ''))
            except CommandError as e:
                ce = CapturedException(e)
                lgr.error(
                    "Initialization of remote git annex repository failed at %s."
                    "\nError: %s\nSkipping ...", path, ce)
                return False
        return True
Example #5
def _describe_metadata_elements(group):
    infos = {}
    from datalad.support.entrypoints import iter_entrypoints
    from importlib import import_module
    if sys.version_info < (3, 10):
        # 3.10 is when it was no longer provisional
        from importlib_metadata import distribution
    else:
        from importlib.metadata import distribution

    for ename, emod, eload in iter_entrypoints(group):
        info = {}
        infos[ename] = info
        try:
            info['module'] = emod
            dist = distribution(emod.split('.', maxsplit=1)[0])
            info['distribution'] = f'{dist.name} {dist.version}'
            mod = import_module(emod, package='datalad')
            version = getattr(mod, '__version__', None)
            if version:
                # do not clutter the report with an absent version
                info['version'] = version
            eload()
            info['load_error'] = None
        except Exception as e:
            ce = CapturedException(e)
            info['load_error'] = ce.format_short()
            continue
    return infos
Example #6
def get_bucket(conn, bucket_name):
    """A helper to get a bucket

    Parameters
    ----------
    conn: boto S3 connection
        Established connection to use for the bucket lookup
    bucket_name: str
        Name of the bucket to connect to
    """
    try:
        return try_multiple_dec_s3(conn.get_bucket)(bucket_name)
    except S3ResponseError as e:
        ce = CapturedException(e)
        # access to the specific bucket by name could initially be denied or
        # error out, in which case we would need to list which buckets are
        # available under the given credentials:
        lgr.debug("Cannot access bucket %s by name with validation: %s",
                  bucket_name, ce)
        if conn.anon:
            raise AnonymousAccessDeniedError(
                "Access to the bucket %s did not succeed.  Requesting "
                "'all buckets' for anonymous S3 connection makes "
                "little sense and thus not supported." % bucket_name,
                supported_types=['aws-s3'])

        if e.reason == "Forbidden":
            # It could be that just the HEAD call boto issues is not allowed,
            # so we should not try to verify that the bucket is "reachable".
            # Just carry on
            try:
                return try_multiple_dec_s3(conn.get_bucket)(bucket_name,
                                                            validate=False)
            except S3ResponseError as e2:
                lgr.debug(
                    "Cannot access bucket %s even without validation: %s",
                    bucket_name, CapturedException(e2))
                _handle_exception(e, bucket_name)

        try:
            all_buckets = try_multiple_dec_s3(conn.get_all_buckets)()
            all_bucket_names = [b.name for b in all_buckets]
            lgr.debug("Found following buckets %s",
                      ', '.join(all_bucket_names))
            if bucket_name in all_bucket_names:
                return all_buckets[all_bucket_names.index(bucket_name)]
        except S3ResponseError as e2:
            lgr.debug("Cannot access all buckets: %s", CapturedException(e2))
            _handle_exception(e, 'any (originally requested %s)' % bucket_name)
        else:
            _handle_exception(e, bucket_name)
Example #7
def _describe_system():
    import platform as pl
    from datalad import get_encoding_info
    from datalad.utils import get_linux_distribution
    try:
        dist = get_linux_distribution()
    except Exception as exc:
        ce = CapturedException(exc)
        lgr.warning("Failed to get distribution information: %s", ce)
        dist = tuple()

    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join([
            _t2s(dist),
            _t2s(pl.mac_ver()),
            _t2s(pl.win32_ver()),
        ]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
        'filesystem': {
            l: _get_fs_type(l, p)
            for l, p in [
                ('CWD', Path.cwd()),
                ('TMP', Path(tempfile.gettempdir())),
                ('HOME', Path.home()),
            ]
        },
    }
Example #8
 def __getitem__(self, url):
     try:
         return self.cookies_db[self._get_provider(url)]
     except Exception as exc:
         lgr.warning("Failed to get a cookie for %s: %s", url,
                     CapturedException(exc))
         return None
Example #9
    def _load(self):
        if self._cookies_db is not None:
            return
        if self._filename:
            filename = self._filename
            cookies_dir = os.path.dirname(filename)
        else:
            cookies_dir = os.path.join(
                platformdirs.user_config_dir(),
                'datalad')  # FIXME probably shouldn't hardcode 'datalad'
            filename = os.path.join(cookies_dir, 'cookies')

        # TODO: guarantee restricted permissions

        if not os.path.exists(cookies_dir):
            os.makedirs(cookies_dir)

        lgr.debug("Opening cookies DB %s", filename)
        try:
            self._cookies_db = shelve.open(filename,
                                           writeback=True,
                                           protocol=2)
        except Exception as exc:
            lgr.warning("Failed to open cookies DB %s: %s", filename,
                        CapturedException(exc))
Example #10
    def _has_active_postupdate(ds, name, ssh):
        """Figure out either has active post-update hook

        Returns
        -------
        bool or None
          None if something went wrong and we could not figure it out
        """
        has_active_post_update = None
        try:
            # TODO -- we might need to expanduser taking .user into account
            # but then it must be done also on remote side
            out = CreateSibling._run_on_ds_ssh_remote(
                ds, name, ssh,
                'cd {path} && [ -x .git/hooks/post-update ] && echo yes || echo no'
            )
            out = out.strip()
            assert out in ('yes', 'no')
            has_active_post_update = out == "yes"
        except CommandError as e:
            ce = CapturedException(e)
            lgr.debug(
                "Could not figure out either %s on remote %s has active "
                "post_update hook due to %s", ds, name, ce)
        return has_active_post_update
Example #11
def load_extensions():
    """Load entrypoint for any configured extension package

    Log a warning in case a requested extension is not available, or if
    a requested extension fails on load.

    Extensions to load are taken from the 'datalad.extensions.load'
    configuration item.
    """
    from datalad import cfg
    load_extensions = cfg.get('datalad.extensions.load', get_all=True)
    if load_extensions:
        from datalad.utils import ensure_list
        exts = {
            ename: eload
            for ename, _, eload in iter_entrypoints('datalad.extensions')
        }
        for el in ensure_list(load_extensions):
            if el not in exts:
                lgr.warning('Requested extension %r is not available', el)
                continue
            try:
                exts[el]()
            except Exception as e:
                ce = CapturedException(e)
                lgr.warning('Could not load extension %r: %s', el, ce)
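
The extension names come from the regular DataLad/Git configuration. A minimal round-trip sketch (the extension name 'next' is purely illustrative and must correspond to an installed extension package):

    # enable an extension, e.g. via:
    #   git config --global --add datalad.extensions.load next
    from datalad import cfg
    print(cfg.get('datalad.extensions.load', get_all=True))
    load_extensions()  # warns about unknown or broken entries, loads the rest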
Example #12
 def __contains__(self, url):
     try:
         return self._get_provider(url) in self.cookies_db
     except Exception as exc:
         lgr.warning("Failed to check for having a cookie for %s: %s", url,
                     CapturedException(exc))
         return None
Example #13
    def close(self, allow_fail=True, ctrl_path=None):
        """Closes all connections, known to this instance.

        Parameters
        ----------
        allow_fail: bool, optional
          If True, swallow exceptions which might be thrown during
          connection.close, and just log them at DEBUG level
        ctrl_path: str, Path, or list of str or Path, optional
          If specified, only the path(s) provided would be considered
        """
        if self._connections:
            ctrl_paths = [Path(p) for p in ensure_list(ctrl_path)]
            to_close = [
                c for c in self._connections
                # don't close if connection wasn't opened by SSHManager
                if self._connections[c].ctrl_path not in self._prev_connections
                and self._connections[c].ctrl_path.exists() and
                (not ctrl_paths or self._connections[c].ctrl_path in ctrl_paths
                 )
            ]
            if to_close:
                lgr.debug("Closing %d SSH connections...", len(to_close))
            for cnct in to_close:
                f = self._connections[cnct].close
                if not allow_fail:
                    f()
                else:
                    try:
                        f()
                    except Exception as exc:
                        ce = CapturedException(exc)
                        lgr.debug("Failed to close a connection: "
                                  "%s", ce.message)
            self._connections = dict()
Example #14
    def transfer_retrieve(self, key, file):
        urls = []
        error_causes = []
        # TODO: priorities etc depending on previous experience or settings
        for url in self.gen_URLS(key):
            urls.append(url)
            try:
                downloaded_path = self._providers.download(url,
                                                           path=file,
                                                           overwrite=True)
                assert downloaded_path == file
                return
            except Exception as exc:
                ce = CapturedException(exc)
                cause = getattr(exc, '__cause__', None)
                debug_msg = f"Failed to download {url} for key {key}: {ce}"
                if cause:
                    debug_msg += f' [{cause}]'
                self.message(debug_msg)
                error_causes.append(cause)

        error_msg = f"Failed to download from any of {len(urls)} locations"
        if error_causes:
            error_msg += f' {unique(error_causes)}'
        raise RemoteError(error_msg)
Example #15
def get_max_path_length(top_path=None, maxl=1000):
    """Deduce the maximal length of the filename in a given path
    """
    if not top_path:
        top_path = getpwd()
    import random
    from datalad import lgr
    from datalad.support import path
    prefix = path.join(top_path, "dl%d" % random.randint(1, 100000))
    # some smart folks could implement binary search for this
    max_path_length = None
    for i in range(maxl - len(prefix)):
        filename = prefix + '_' * i
        path_length = len(filename)
        try:
            with open(filename, 'w') as f:
                max_path_length = path_length
        except Exception as exc:
            ce = CapturedException(exc)
            lgr.debug(
                "Failed to create sample file for length %d. Last succeeded was %s. Exception: %s",
                path_length, max_path_length, ce)
            break
        unlink(filename)
    return max_path_length
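
The loop above probes lengths linearly and may create up to maxl files. The binary search alluded to in the comment needs only O(log maxl) probes; a sketch (not DataLad code), assuming creatability is monotone in the filename length:

    import os

    def max_path_length_bisect(prefix, maxl=1000):
        # bisect for the longest filename length that can still be created
        lo, hi = len(prefix), maxl
        best = None
        while lo <= hi:
            mid = (lo + hi) // 2
            filename = prefix + '_' * (mid - len(prefix))
            try:
                with open(filename, 'w'):
                    pass
            except OSError:
                hi = mid - 1   # too long: search shorter lengths
            else:
                os.unlink(filename)
                best = mid     # works: try longer lengths
                lo = mid + 1
        return best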
Example #16
def exc_str(exc=None, limit=None, include_str=True):
    """Temporary adapter

    The CapturedException should be available and be used directly instead.
    """

    return str(CapturedException(exc))
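
Since exc_str only survives as an adapter, the replacement pattern is to use CapturedException directly. A minimal sketch (risky_operation is a hypothetical stand-in for any failing call; CapturedException is importable from datalad.support.exceptions):

    import logging
    from datalad.support.exceptions import CapturedException

    lgr = logging.getLogger('datalad.example')

    try:
        risky_operation()
    except Exception as exc:
        ce = CapturedException(exc)
        lgr.warning("Operation failed: %s", ce)  # concise one-line rendering
        short = ce.format_short()                # e.g. for a result record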
Example #17
def exc_str(exc=None, limit=None, include_str=True):
    """Temporary test shim, for finding issues re refactoring for
    using CapturedException instead.

    See gh-5716
    """

    return str(CapturedException(exc))
Example #18
    def cleanup_cachedrepo(self):
        # TODO this could also be the place to stop lingering batch processes
        if not self._tmpdir:
            return

        try:
            self._tmpdir.rmdir()
        except OSError as e:
            ce = CapturedException(e)
            lgr.warning('Failed to clean up temporary directory: %s', ce)
Example #19
 def checkurl(self, url):
     try:
         status = self._providers.get_status(url)
         props = dict(filename=status.filename, url=url)
         if status.size is not None:
             props['size'] = status.size
         return [props]
     except Exception as exc:
         ce = CapturedException(exc)
         self.message("Failed to check url %s: %s" % (url, ce))
         return False
Example #20
 def get_git_version(self):
     key = 'cmd:git'
     if key in self._remote_props:
         return self._remote_props[key]
     git_version = None
     try:
         git_version = self('git version')[0].split()[2]
     except CommandError as e:
         lgr.debug('Failed to determine Git version: %s',
                   CapturedException(e))
     self._remote_props[key] = git_version
     return git_version
Example #21
def _describe_extensions():
    infos = {}
    from datalad.support.entrypoints import iter_entrypoints
    from importlib import import_module

    for ename, emod, eload in iter_entrypoints('datalad.extensions'):
        info = {}
        infos[ename] = info
        try:
            ext = eload()
            info['load_error'] = None
            info['description'] = ext[0]
            info['module'] = emod
            mod = import_module(emod, package='datalad')
            info['version'] = getattr(mod, '__version__', None)
        except Exception as e:
            ce = CapturedException(e)
            info['load_error'] = ce.format_short()
            continue
        info['entrypoints'] = entry_points = {}
        for ep in ext[1]:
            ep_info = {
                'module': ep[0],
                'class': ep[1],
                'names': ep[2:],
            }
            entry_points['{}.{}'.format(*ep[:2])] = ep_info
            try:
                import_module(ep[0], package='datalad')
                ep_info['load_error'] = None
            except Exception as e:
                ce = CapturedException(e)
                ep_info['load_error'] = ce.format_short()
                continue
    return infos
Example #22
 def get_pids_msg():
     try:
         pids = get_open_files(lock_path)
         if pids:
             proc = pids[lock_path]
             return f'Check following process: PID={proc.pid} CWD={proc.cwd()} CMDLINE={proc.cmdline()}.'
         else:
             return 'Stale lock? I found no processes using it.'
     except Exception as exc:
         lgr.debug(
             "Failed to get a list of processes which 'posses' the file %s: %s",
             lock_path, CapturedException(exc))
         return 'Another process is using it (failed to determine one)?'
Example #23
def keep_result(res, rfilter, **kwargs):
    if not rfilter:
        return True
    try:
        if not rfilter(res, **kwargs):
            # give the slightest indication which filter was employed
            raise ValueError('excluded by filter {} with arguments {}'.format(
                rfilter, kwargs))
    except ValueError as e:
        # make sure to report the excluded result to massively improve
        # debugging experience
        lgr.debug('Not reporting result (%s): %s', CapturedException(e), res)
        return False
    return True
Example #24
def get_jsonhooks_from_config(cfg):
    """Parse out hook definitions given a ConfigManager instance

    Returns
    -------
    dict
      where keys are hook names/labels, and each value is a dict with
      three keys: 'cmd' contains the name of the to-be-executed DataLad
      command; 'args' has a JSON-encoded string with a dict of keyword
      arguments for the command (format()-language based placeholders
      can be present); 'match' holds a JSON-encoded string representing
      a dict with key/value pairs that need to match a result in order
      for a hook to be triggered.
    """
    hooks = {}
    for h in cfg.keys():
        if not (h.startswith('datalad.result-hook.')
                and h.endswith('.match-json')):
            continue
        hook_basevar = h[:-11]
        hook_name = hook_basevar[20:]
        # do not use a normal `get()` here, because it reads the committed dataset
        # config too. That means a datalad update can silently bring in new
        # procedure definitions from the outside, and in some sense enable
        # remote code execution by a 3rd-party
        call = cfg.get_from_source('local',
                                   '{}.call-json'.format(hook_basevar), None)
        if not call:
            lgr.warning('Incomplete result hook configuration %s in %s',
                        hook_basevar, cfg)
            continue
        # split command from any args
        call = call.split(maxsplit=1)
        # get the match specification in JSON format
        try:
            match = json.loads(cfg.get(h))
        except Exception as e:
            ce = CapturedException(e)
            lgr.warning(
                'Invalid match specification in %s: %s [%s], '
                'hook will be skipped', h, cfg.get(h), ce)
            continue

        hooks[hook_name] = dict(
            cmd=call[0],
            # support no-arg calls too
            args=call[1] if len(call) > 1 else '{{}}',
            match=match,
        )
    return hooks
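
For reference, a hook this parser picks up consists of two sibling configuration items, where the call specification must come from local (uncommitted) configuration. A hypothetical example (hook name, command, and JSON values are illustrative):

    # Given local configuration like:
    #   datalad.result-hook.myhook.match-json = {"action": "get", "status": "notneeded"}
    #   datalad.result-hook.myhook.call-json  = unlock {"dataset": "{dsarg}", "path": "{path}"}
    # the parser returns:
    # {'myhook': {
    #     'cmd': 'unlock',
    #     'args': '{"dataset": "{dsarg}", "path": "{path}"}',
    #     'match': {'action': 'get', 'status': 'notneeded'},
    # }}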
Example #25
    def __call__(paths,
                 *,
                 reference_date="@1514764800",
                 revs=None,
                 annex="all",
                 no_tags=False,
                 older=False):
        from datalad.support.repodates import check_dates

        which = "older" if older else "newer"

        try:
            ref_ts = _parse_date(reference_date)
        except ValueError as exc:
            lgr.error("Could not parse '%s' as a date", reference_date)
            ce = CapturedException(exc)
            yield get_status_dict("check_dates",
                                  status="error",
                                  message=str(ce),
                                  exception=ce)
            return

        lgr.info("Searching for dates %s than %s",
                 which,
                 time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime(ref_ts)))

        for repo in _git_repos(paths or ["."]):
            fullpath = os.path.abspath(repo)
            lgr.debug("Checking %s", fullpath)

            try:
                report = check_dates(repo,
                                     ref_ts,
                                     which=which,
                                     revs=revs or ["--all"],
                                     annex={"all": True,
                                            "none": False,
                                            "tree": "tree"}[annex],
                                     tags=not no_tags)
            except InvalidGitRepositoryError as exc:
                lgr.warning("Skipping invalid Git repo: %s", repo)
                continue

            yield get_status_dict(
                "check_dates",
                status="ok",
                path=fullpath,
                message=("Found {} dates" if report["objects"]
                         else "No {} dates found").format(which),
                report=report)
Example #26
    def get_metadata(self, dataset, content):
        if not content:
            return {}, []
        contentmeta = []
        log_progress(
            lgr.info,
            'extractorimage',
            'Start image metadata extraction from %s',
            self.ds,
            total=len(self.paths),
            label='image metadata extraction',
            unit=' Files',
        )
        for f in self.paths:
            absfp = opj(self.ds.path, f)
            log_progress(lgr.info,
                         'extractorimage',
                         'Extract image metadata from %s',
                         absfp,
                         update=1,
                         increment=True)
            try:
                img = Image.open(absfp)
            except Exception as e:
                lgr.debug("Image metadata extractor failed to load %s: %s",
                          absfp, CapturedException(e))
                continue
            meta = {
                'type': 'dctype:Image',
            }

            # run all extractors
            meta.update({k: v(img) for k, v in self._extractors.items()})
            # filter useless fields (empty strings and NaNs)
            meta = {
                k: v
                for k, v in meta.items()
                if not (hasattr(v, '__len__') and not len(v))
            }
            contentmeta.append((f, meta))

        log_progress(lgr.info, 'extractorimage',
                     'Finished image metadata extraction from %s', self.ds)
        return {'@context': vocabulary}, contentmeta
Example #27
def iter_entrypoints(group, load=False):
    """Iterate over all entrypoints of a given group

    Parameters
    ----------
    group: str
      Name of the entry point group to iterate over, such as
      'datalad.extensions'.
    load: bool, optional
      Whether to execute the entry point loader internally in a
      protected manner that only logs a possible exception and emits
      a warning, but otherwise skips over "broken" entrypoints.
      If False, the loader callable is returned unexecuted.

    Yields
    -------
    (name, module, loade(r|d))
      The first item in each yielded tuple is the entry point name (str).
      The second is the name of the module that contains the entry point
      (str). The type of the third item depends on the load parameter.
      It is either a callable that can be used to load the entrypoint
      (this is the default behavior), or the outcome of executing the
      entry point loader.
    """
    lgr.debug("Processing entrypoints")

    if sys.version_info < (3, 10):
        # 3.10 is when it was no longer provisional
        from importlib_metadata import entry_points
    else:
        from importlib.metadata import entry_points
    for ep in entry_points(group=group):
        if not load:
            yield ep.name, ep.module, ep.load
            continue

        try:
            lgr.debug('Loading entrypoint %s from %s', ep.name, group)
            yield ep.name, ep.module, ep.load()
            lgr.debug('Loaded entrypoint %s from %s', ep.name, group)
        except Exception as e:
            ce = CapturedException(e)
            lgr.warning(
                'Failed to load entrypoint %s from %s: %s',
                ep.name, group, ce)
            continue
    lgr.debug("Done processing entrypoints")
Example #28
def get_cached_url_content(url, name=None, fetcher=None, maxage=None):
    """Loader of a document from a url, which caches loaded instance on disk

    Does not do anything smart about HTTP headers etc., which could tell
    caches and proxy servers how long to retain the content.

    TODO: theoretically this is not network-specific at all -- just a memoize
    pattern -- but at some point we may make it treat headers etc. correctly.
    ATM it supports any URL we support via providers/downloaders

    Parameters
    ----------
    fetcher: callable, optional
       Function to call with url if needed to be refetched
    maxage: float, optional
       Age in days for which the cached copy remains valid.  <0 retains it
       forever; None consults the config; 0 forces a reload
    """
    doc_fname = get_url_cache_filename(url, name)
    if maxage is None:
        maxage = float(cfg.get('datalad.locations.cache-maxage'))

    doc = None
    if os.path.exists(doc_fname) and maxage != 0:

        fage = (time.time() - os.stat(doc_fname).st_mtime)/(24. * 3600)
        if maxage < 0 or fage < maxage:
            try:
                lgr.debug("use cached request result to '%s' from %s", url, doc_fname)
                doc = pickle.load(open(doc_fname, 'rb'))
            except Exception as e:  # it is OK to ignore any error and fall back on the true source
                lgr.warning(
                    "cannot load cache from '%s', fall back to download: %s",
                    doc_fname, CapturedException(e))

    if doc is None:
        if fetcher is None:
            from datalad.downloaders.providers import Providers
            providers = Providers.from_config_files()
            fetcher = providers.fetch

        doc = fetcher(url)
        ensure_dir(dirname(doc_fname))
        # use pickle to store the entire request result dict
        pickle.dump(doc, open(doc_fname, 'wb'))
        lgr.debug("stored result of request to '{}' in {}".format(url, doc_fname))
    return doc
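
A usage sketch of the maxage semantics (URL and cache name are hypothetical):

    url = 'https://example.com/catalog.json'
    doc = get_cached_url_content(url, name='catalog', maxage=1)   # reuse if younger than a day
    doc = get_cached_url_content(url, name='catalog', maxage=0)   # force a re-download
    doc = get_cached_url_content(url, name='catalog', maxage=-1)  # never expire once fetched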
Example #29
 def _get_ds_remote_shared_setting(ds, name, ssh):
     """Figure out setting of sharedrepository for dataset's `name` remote"""
     shared = None
     try:
         # TODO -- we might need to expanduser taking .user into account
         # but then it must be done also on remote side
         out = CreateSibling._run_on_ds_ssh_remote(
             ds, name, ssh,
             'git -C {path} config --get core.sharedrepository')
         shared = out.strip()
     except CommandError as e:
         ce = CapturedException(e)
         lgr.debug(
             "Could not figure out remote shared setting of %s for %s due "
             "to %s", ds, name, ce)
         # could well be ok if e.g. not shared
          # TODO: maybe a more detailed analysis?
     return shared
Example #30
 def get_annex_version(self):
     key = 'cmd:annex'
     if key in self._remote_props:
         return self._remote_props[key]
     try:
         # modern annex versions
         version = self('git annex version --raw')[0]
     except CommandError:
         # either no annex, or old version
         try:
             # fall back on method that could work with older installations
             out, err = self('git annex version')
             version = out.split('\n')[0].split(':')[1].strip()
         except CommandError as e:
             lgr.debug('Failed to determine remote git-annex version: %s',
                       CapturedException(e))
             version = None
     self._remote_props[key] = version
     return version