def _describe_system():
    """Collect a description of the host system for reporting.

    Returns
    -------
    dict
      OS type/name/release/version, a combined distribution string,
      the maximum path length at the current directory, and encoding
      information.
    """
    import platform as pl
    from datalad import get_encoding_info
    from datalad.utils import get_linux_distribution

    # best-effort: a failure to detect the distribution must not break
    # the report
    linux_dist = tuple()
    try:
        linux_dist = get_linux_distribution()
    except Exception as exc:
        lgr.warning("Failed to get distribution information: %s", exc_str(exc))

    distribution = ' '.join(
        _t2s(part) for part in (linux_dist, pl.mac_ver(), pl.win32_ver())
    ).rstrip()
    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': distribution,
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
def _describe_system():
    """Describe the host system as a dict for reporting purposes."""
    import platform as pl
    from datalad import get_encoding_info

    def _distribution_info():
        # pre-3.8 Pythons still ship platform.dist()
        if hasattr(pl, 'dist'):
            return pl.dist()
        # Python 3.8 removed .dist but recommended "distro" is slow, so we
        # try it only if needed
        try:
            import distro
            return distro.linux_distribution(full_distribution_name=False)
        except ImportError:
            lgr.info(
                "Please install 'distro' package to obtain distribution information"
            )
        except Exception as exc:
            lgr.warning(
                "No distribution information will be provided since 'distro' "
                "fails to import/run: %s", exc_str(exc)
            )
        return tuple()

    dist = _distribution_info()
    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join(
            [_t2s(dist), _t2s(pl.mac_ver()), _t2s(pl.win32_ver())]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
def _describe_system():
    """Collect a description of the host system for the report.

    Returns a dict with the OS type/name/release/version, a combined
    distribution string, the maximum path length at the current working
    directory, encoding information, and the filesystem type of a few
    key locations (CWD, TMP, HOME).
    """
    import platform as pl
    from datalad import get_encoding_info
    from datalad.utils import get_linux_distribution
    try:
        dist = get_linux_distribution()
    except Exception as exc:
        # best-effort: log and continue without distribution details
        ce = CapturedException(exc)
        lgr.warning("Failed to get distribution information: %s", ce)
        dist = tuple()
    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        # _t2s flattens the per-platform version tuples; non-matching
        # platforms yield empty strings which rstrip() trims away
        'distribution': ' '.join([_t2s(dist),
                                  _t2s(pl.mac_ver()),
                                  _t2s(pl.win32_ver())]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
        # probe filesystem type for locations datalad commonly writes to
        'filesystem': {l: _get_fs_type(l, p)
                       for l, p in [('CWD', Path.cwd()),
                                    ('TMP', Path(tempfile.gettempdir())),
                                    ('HOME', Path.home())]}
    }
def test_gitattributes(path):
    """Test setting and querying .gitattributes via GitRepo.

    Covers tag vs. key/value attributes, unsetting, index-only queries,
    absolute-path patterns written to non-root attribute files, obscure
    (non-ASCII) patterns, and whole-file replacement with mode='w'.
    """
    gr = GitRepo(path, create=True)
    # starts without any attributes file
    ok_(not op.exists(op.join(gr.path, '.gitattributes')))
    eq_(gr.get_gitattributes('.')['.'], {})
    # bool is a tag or unsets, anything else is key/value
    gr.set_gitattributes([('*', {'tag': True}), ('*', {'sec.key': 'val'})])
    ok_(op.exists(op.join(gr.path, '.gitattributes')))
    eq_(gr.get_gitattributes('.')['.'], {'tag': True, 'sec.key': 'val'})
    # unset by amending the record, but does not remove notion of the
    # tag entirely
    gr.set_gitattributes([('*', {'tag': False})])
    eq_(gr.get_gitattributes('.')['.'], {'tag': False, 'sec.key': 'val'})
    # attributes file is not added or commited, we can ignore such
    # attributes
    eq_(gr.get_gitattributes('.', index_only=True)['.'], {})
    # we can send absolute path patterns and write to any file, and
    # the patterns will be translated relative to the target file
    gr.set_gitattributes([(op.join(gr.path, 'relative', 'ikethemike/**'), {
        'bang': True
    })], attrfile=op.join('relative', '.gitattributes'))
    # directory and file get created
    ok_(op.exists(op.join(gr.path, 'relative', '.gitattributes')))
    eq_(
        gr.get_gitattributes(
            op.join(gr.path, 'relative', 'ikethemike', 'probe')),
        # always comes out relative to the repo root, even if abs goes in
        {
            op.join('relative', 'ikethemike', 'probe'): {
                'tag': False,
                'sec.key': 'val',
                'bang': True
            }
        })
    if get_encoding_info()['default'] != 'ascii':
        # do not perform this on obscure systems without anything like UTF
        # it is not relevant whether a path actually exists, and paths
        # with spaces and other funky stuff are just fine
        funky = u'{} {}'.format(get_most_obscure_supported_name(),
                                get_most_obscure_supported_name())
        gr.set_gitattributes([(funky, {'this': 'that'})])
        eq_(
            gr.get_gitattributes(funky)[funky], {
                'this': 'that',
                'tag': False,
                'sec.key': 'val',
            })
    # mode='w' should replace the entire file:
    gr.set_gitattributes([('**', {'some': 'nonsense'})], mode='w')
    eq_(gr.get_gitattributes('.')['.'], {'some': 'nonsense'})
def _describe_system():
    """Return a dict describing the OS/platform for the report.

    Returns
    -------
    dict
      Keys: 'type' (os.name), 'name', 'release', 'version',
      'distribution' (joined linux/mac/win version info),
      'max_path_length', and 'encoding'.
    """
    import platform as pl
    from datalad import get_encoding_info

    # BUG FIX: platform.dist() was deprecated in Python 3.5 and removed in
    # Python 3.8, so calling it unconditionally crashes on modern Pythons.
    # Guard it and fall back to the 'distro' package, mirroring the other
    # _describe_system variants in this file.
    if hasattr(pl, 'dist'):
        dist = pl.dist()
    else:
        try:
            import distro
            dist = distro.linux_distribution(full_distribution_name=False)
        except ImportError:
            lgr.info(
                "Please install 'distro' package to obtain distribution information"
            )
            dist = tuple()
        except Exception as exc:
            lgr.warning(
                "No distribution information will be provided since 'distro' "
                "fails to import/run: %s", exc_str(exc))
            dist = tuple()
    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join([_t2s(dist),
                                  _t2s(pl.mac_ver()),
                                  _t2s(pl.win32_ver())]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
def test_gitattributes(path):
    """Exercise reading and writing of gitattributes through GitRepo."""
    repo = GitRepo(path, create=True)
    root_attrs = op.join(repo.path, '.gitattributes')
    # a fresh repo has no attributes file and reports no attributes
    ok_(not op.exists(root_attrs))
    eq_(repo.get_gitattributes('.')['.'], {})
    # booleans (un)set a tag, anything else is a key/value pair
    repo.set_gitattributes([('*', {'tag': True}), ('*', {'sec.key': 'val'})])
    ok_(op.exists(root_attrs))
    eq_(repo.get_gitattributes('.')['.'], {'tag': True, 'sec.key': 'val'})
    # amending with False records an unset; the entry itself remains known
    repo.set_gitattributes([('*', {'tag': False})])
    eq_(repo.get_gitattributes('.')['.'], {'tag': False, 'sec.key': 'val'})
    # nothing was added or committed, so an index-only query sees nothing
    eq_(repo.get_gitattributes('.', index_only=True)['.'], {})
    # absolute path patterns may target any attributes file; they are
    # rewritten relative to that file's location
    repo.set_gitattributes(
        [(op.join(repo.path, 'relative', 'ikethemike/**'), {'bang': True})],
        attrfile=op.join('relative', '.gitattributes'))
    # both the directory and the attributes file spring into existence
    ok_(op.exists(op.join(repo.path, 'relative', '.gitattributes')))
    probe = op.join(repo.path, 'relative', 'ikethemike', 'probe')
    # query results always come back relative to the repo root,
    # even when an absolute path goes in
    eq_(repo.get_gitattributes(probe),
        {op.join('relative', 'ikethemike', 'probe'):
         {'tag': False, 'sec.key': 'val', 'bang': True}})
    if get_encoding_info()['default'] != 'ascii':
        # skip on bare-ASCII systems; the path need not exist, and
        # spaces/unicode in patterns must be handled just fine
        obscure = u'{} {}'.format(
            get_most_obscure_supported_name(),
            get_most_obscure_supported_name())
        repo.set_gitattributes([(obscure, {'this': 'that'})])
        eq_(repo.get_gitattributes(obscure)[obscure],
            {'this': 'that', 'tag': False, 'sec.key': 'val'})
    # mode='w' truncates and rewrites the whole attributes file
    repo.set_gitattributes([('**', {'some': 'nonsense'})], mode='w')
    eq_(repo.get_gitattributes('.')['.'], {'some': 'nonsense'})
def _describe_system():
    """Assemble a dict describing the operating system and encoding."""
    import platform as pl
    from datalad import get_encoding_info

    if hasattr(pl, 'dist'):
        distribution = pl.dist()
    else:
        # Python 3.8 removed .dist but recommended "distro" is slow, so we
        # try it only if needed
        try:
            import distro
            distribution = distro.linux_distribution(
                full_distribution_name=False)
        except ImportError:
            lgr.info(
                "Please install 'distro' package to obtain distribution information"
            )
            distribution = tuple()
        except Exception as exc:
            lgr.warning(
                "No distribution information will be provided since 'distro' "
                "fails to import/run: %s", exc_str(exc))
            distribution = tuple()

    # collapse all per-platform version tuples into one display string
    dist_str = ' '.join(
        [_t2s(distribution), _t2s(pl.mac_ver()), _t2s(pl.win32_ver())]
    ).rstrip()
    info = {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': dist_str,
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
    return info
def __call__(dataset=None, sensitive=None, clipboard=None):
    """Generate a "WTF" debug report about DataLad and its environment.

    Parameters
    ----------
    dataset : path or Dataset, optional
      Dataset to report on; absent or uninstalled datasets are ignored.
    sensitive : {None, 'some', 'all'}, optional
      If falsy, configuration and metadata are hidden. Unless 'all',
      config entries containing user/token/passwd are masked.
    clipboard : bool, optional
      If set, copy the report to the clipboard (requires pyperclip)
      instead of displaying it.
    """
    from datalad import get_encoding_info
    from datalad import get_envvars_info
    from datalad.distribution.dataset import require_dataset
    from datalad.support.exceptions import NoDatasetArgumentFound
    ds = None
    try:
        ds = require_dataset(dataset, check_installed=False,
                             purpose='reporting')
    except NoDatasetArgumentFound:
        # failure is already logged
        pass
    if ds and not ds.is_installed():
        # we don't deal with absent datasets
        ds = None
    if sensitive:
        if ds is None:
            from datalad import cfg
        else:
            cfg = ds.config
    else:
        cfg = None
    from pkg_resources import iter_entry_points
    from datalad.ui import ui
    from datalad.api import metadata
    from datalad.support.external_versions import external_versions
    from datalad.dochelpers import exc_str
    from datalad.interface.results import success_status_map
    import os
    import platform as pl
    import json
    # probe every known metadata extractor and record whether it loads
    extractors = {}
    for ep in iter_entry_points('datalad.metadata.extractors'):
        try:
            ep.load()
            status = 'OK'
        except Exception as e:
            status = 'BROKEN ({})'.format(exc_str(e))
        extractors[ep.name] = status

    # formatting helper: flatten (possibly nested) version tuples into
    # a '/'-joined string, dropping empty components
    def _t2s(t):
        res = []
        for e in t:
            if isinstance(e, tuple):
                es = _t2s(e)
                if es != '':
                    res += ['(%s)' % es]
            elif e != '':
                res += [e]
        return '/'.join(res)

    report_template = """\
DataLad
=======
{datalad}

System
======
{system}

Locale/Encoding
===============
{loc}

Environment
===========
{env}

Externals
=========
{externals}
Installed extensions
====================
{extensions}

Known metadata extractors
=========================
{metaextractors}

Configuration
=============
{cfg}
{dataset}
"""

    dataset_template = """\
Dataset information
===================
{basic}

Metadata
--------
{meta}
"""
    ds_meta = None
    if not sensitive:
        ds_meta = _HIDDEN
    elif ds and ds.is_installed() and ds.id:
        ds_meta = metadata(
            dataset=ds, reporton='datasets', return_type='list',
            result_filter=lambda x: x['action'] == 'metadata' and
            success_status_map[x['status']] == 'success',
            result_renderer='disabled', on_failure='ignore')
        if ds_meta:
            ds_meta = [dm['metadata'] for dm in ds_meta]
            if len(ds_meta) == 1:
                ds_meta = ds_meta.pop()

    if cfg is not None:
        # make it into a dict to be able to reassign
        cfg = dict(cfg.items())
    if sensitive != 'all' and cfg:
        # filter out some of the entries which known to be highly sensitive
        for k in cfg.keys():
            if 'user' in k or 'token' in k or 'passwd' in k:
                cfg[k] = _HIDDEN

    # BUG FIX: platform.dist() was removed in Python 3.8 -- calling it
    # unconditionally crashed the entire report on modern interpreters.
    # Use it where available, otherwise try the 'distro' package; this is
    # a debug report, so silently degrade rather than fail.
    if hasattr(pl, 'dist'):
        dist = pl.dist()
    else:
        try:
            import distro
            dist = distro.linux_distribution(full_distribution_name=False)
        except Exception:
            dist = tuple()

    from datalad.version import __version__, __full_version__
    text = report_template.format(
        datalad=_format_dict([
            ('Version', __version__),
            ('Full version', __full_version__)
        ], indent=True),
        system=_format_dict([
            ('OS', ' '.join([
                os.name, pl.system(), pl.release(), pl.version()]).rstrip()),
            ('Distribution', ' '.join([_t2s(dist),
                                       _t2s(pl.mac_ver()),
                                       _t2s(pl.win32_ver())]).rstrip())
        ], indent=True),
        loc=_format_dict(get_encoding_info(), indent=True),  # , fmt="{}={!r}"),
        env=_format_dict(get_envvars_info(), fmt="{}={!r}"),
        dataset='' if not ds else dataset_template.format(
            basic=_format_dict([
                ('path', ds.path),
                ('repo', ds.repo.__class__.__name__ if ds.repo else '[NONE]'),
            ]),
            meta=_HIDDEN if not sensitive
            else json.dumps(ds_meta, indent=1)
            if ds_meta else '[no metadata]'
        ),
        externals=external_versions.dumps(preamble=None, indent='',
                                          query=True),
        extensions='\n'.join(ep.name
                             for ep in iter_entry_points('datalad.extensions')),
        metaextractors=_format_dict(extractors),
        cfg=_format_dict(sorted(cfg.items(), key=lambda x: x[0]))
        if cfg else _HIDDEN,
    )
    if clipboard:
        from datalad.support.external_versions import external_versions
        external_versions.check(
            'pyperclip', msg="It is needed to be able to use clipboard")
        import pyperclip
        pyperclip.copy(text)
        ui.message("WTF information of length %s copied to clipboard"
                   % len(text))
    else:
        ui.message(text)
    yield