Example No. 1
0
    def setup_class(cls):
        """Create an HTTP-served temp tree and a JSON URL-spec file.

        Class-level fixtures set up here:

        - ``cls._hpath`` / ``cls.url``: a started ``HTTPPath`` server rooted
          at a fresh temp directory containing ``udir/{a,b,c,d}.dat`` (plus
          ``.v1`` variants), each file holding ``"<letter> content"``.
        - ``cls.json_file``: path of a JSON file listing three of those URLs
          with their target ``name`` and ``subdir``.
        """
        import os

        mktmp_kws = get_tempfile_kwargs()
        path = tempfile.mkdtemp(**mktmp_kws)
        create_tree(
            path, {
                "udir": {
                    x + ".dat" + ver: x + " content"
                    for x in "abcd" for ver in ["", ".v1"]
                }
            })

        cls._hpath = HTTPPath(path)
        cls._hpath.start()
        cls.url = cls._hpath.url

        # tempfile.mktemp() is deprecated and race-prone (the returned name
        # could be claimed by another process before we open it); mkstemp()
        # atomically creates the file and returns an open descriptor.
        fd, cls.json_file = tempfile.mkstemp(suffix=".json", **mktmp_kws)
        with os.fdopen(fd, "w") as jfh:
            json.dump([{
                "url": cls.url + "udir/a.dat",
                "name": "a",
                "subdir": "foo"
            }, {
                "url": cls.url + "udir/b.dat",
                "name": "b",
                "subdir": "bar"
            }, {
                "url": cls.url + "udir/c.dat",
                "name": "c",
                "subdir": "foo"
            }], jfh)
Example No. 2
0
    def setup_class(cls):
        """Serve a temp directory tree over HTTP and record fixture metadata.

        Creates ``srv/udir`` with ``{a,b,c,d}.dat`` files (plus ``.v1``
        variants, each holding ``"<letter> content"``), starts an
        ``HTTPPath`` server on it, and records url/name/subdir/md5sum/size
        entries for three of the files both in ``cls.data`` and in a JSON
        file at ``cls.json_file``.  ``cls.temp_dir`` keeps the temp root
        around for teardown.
        """
        tmp_kwargs = get_tempfile_kwargs()
        root = tempfile.mkdtemp(**tmp_kwargs)
        http_root = op.join(root, "srv")

        tree = {}
        for letter in "abcd":
            for suffix in ("", ".v1"):
                tree[letter + ".dat" + suffix] = letter + " content"
        create_tree(http_root, {"udir": tree})

        cls._hpath = HTTPPath(http_root)
        cls._hpath.start()
        cls.url = cls._hpath.url

        # (name, subdir, md5sum, size) for each file the tests reference
        specs = [
            ("a", "foo", "3fb7c40c70b0ed19da713bd69ee12014", "9"),
            ("b", "bar", "", ""),
            ("c", "foo", "9b72648021b70b8c522642e4490d7ac3", "9"),
        ]
        cls.data = [
            {
                "url": cls.url + "udir/" + name + ".dat",
                "name": name,
                "subdir": subdir,
                "md5sum": md5sum,
                "size": size,
            }
            for name, subdir, md5sum, size in specs
        ]

        cls.json_file = op.join(root, "test_addurls.json")
        with open(cls.json_file, "w") as jfh:
            json.dump(cls.data, jfh)

        cls.temp_dir = root
Example No. 3
0
def setup_package():
    """One-time, package-level test setup hook (nose convention).

    Mutates global state for the duration of the test run: temp-dir
    handling, env vars, DATASETS_TOPURL, an isolated $HOME with its own
    .gitconfig, log level, a non-interactive UI backend, nose StringIO
    monkey-patching, and a shared local HTTP server.  Original values are
    memoized in ``_test_states`` so a matching teardown can restore them.
    Statement order matters throughout (env vars must be set before the
    config reload, etc.).
    """
    import os
    from datalad.utils import on_osx
    from datalad.tests import _TEMP_PATHS_GENERATED

    if on_osx:
        # enforce honoring TMPDIR (see gh-5307)
        import tempfile
        tempfile.tempdir = os.environ.get('TMPDIR', tempfile.gettempdir())

    from datalad import consts

    _test_states['env'] = {}

    def set_envvar(v, val):
        """Memoize original value (or None) and then set env var"""
        _test_states['env'][v] = os.environ.get(v, None)
        os.environ[v] = val

    # Point dataset installs at the test mirror rather than production
    _test_states['DATASETS_TOPURL'] = consts.DATASETS_TOPURL
    consts.DATASETS_TOPURL = 'http://datasets-tests.datalad.org/'
    set_envvar('DATALAD_DATASETS_TOPURL', consts.DATASETS_TOPURL)

    from datalad.tests.utils import (
        DEFAULT_BRANCH,
        DEFAULT_REMOTE,
    )
    # Pin git's default branch/remote names so tests do not depend on the
    # host's git configuration
    set_envvar(
        "GIT_CONFIG_PARAMETERS",
        "'init.defaultBranch={}' 'clone.defaultRemoteName={}'".format(
            DEFAULT_BRANCH, DEFAULT_REMOTE))

    # To overcome pybuild overriding HOME but us possibly wanting our
    # own HOME where we pre-setup git for testing (name, email)
    if 'GIT_HOME' in os.environ:
        set_envvar('HOME', os.environ['GIT_HOME'])
        set_envvar('DATALAD_LOG_EXC', "1")
    else:
        # we setup our own new HOME, the BEST and HUGE one
        from datalad.utils import make_tempfile
        # TODO: split into a function + context manager
        # NOTE(review): make_tempfile removes the directory on context exit;
        # only the generated *name* is reused here — the directory is
        # re-created a few lines below if needed.
        with make_tempfile(mkdir=True) as new_home:
            pass
        for v, val in get_home_envvars(new_home).items():
            set_envvar(v, val)
        if not os.path.exists(new_home):
            os.makedirs(new_home)
        # Minimal git identity + datalad log config for the fake HOME
        with open(os.path.join(new_home, '.gitconfig'), 'w') as f:
            f.write("""\
[user]
	name = DataLad Tester
	email = [email protected]
[datalad "log"]
	exc = 1
""")
        _TEMP_PATHS_GENERATED.append(new_home)

    # Re-load ConfigManager, since otherwise it won't consider global config
    # from new $HOME (see gh-4153)
    cfg.reload(force=True)

    from datalad.interface.common_cfg import compute_cfg_defaults
    compute_cfg_defaults()
    # datalad.locations.sockets has likely changed. Discard any cached values.
    ssh_manager._socket_dir = None

    # To overcome pybuild by default defining http{,s}_proxy we would need
    # to define them to e.g. empty value so it wouldn't bother touching them.
    # But then haskell libraries do not digest empty value nicely, so we just
    # pop them out from the environment
    for ev in ('http_proxy', 'https_proxy'):
        if ev in os.environ and not (os.environ[ev]):
            lgr.debug("Removing %s from the environment since it is empty", ev)
            os.environ.pop(ev)

    # During tests we allow for "insecure" access to local file:// and
    # http://localhost URLs since all of them either generated as tests
    # fixtures or cloned from trusted sources
    from datalad.support.annexrepo import AnnexRepo
    AnnexRepo._ALLOW_LOCAL_URLS = True

    DATALAD_LOG_LEVEL = os.environ.get('DATALAD_LOG_LEVEL', None)
    if DATALAD_LOG_LEVEL is None:
        # very very silent.  Tests introspecting logs should use
        # swallow_logs(new_level=...)
        _test_states['loglevel'] = lgr.getEffectiveLevel()
        lgr.setLevel(100)

        # And we should also set it within environ so underlying commands also stay silent
        set_envvar('DATALAD_LOG_LEVEL', '100')
    else:
        # We are not overriding them, since explicitly were asked to have some log level
        # (None here signals teardown not to restore anything)
        _test_states['loglevel'] = None

    # Set to non-interactive UI
    from datalad.ui import ui
    _test_states['ui_backend'] = ui.backend
    # obtain() since that one consults for the default value
    ui.set_backend(cfg.obtain('datalad.tests.ui.backend'))

    # Monkey patch nose so it does not ERROR out whenever code asks for fileno
    # of the output. See https://github.com/nose-devs/nose/issues/6
    from io import StringIO as OrigStringIO

    class StringIO(OrigStringIO):
        # pretend to be a real stream: stdout's fileno, no declared encoding
        fileno = lambda self: 1
        encoding = None

    from nose.ext import dtcompat
    from nose.plugins import capture, multiprocess, plugintest
    dtcompat.StringIO = StringIO
    capture.StringIO = StringIO
    multiprocess.StringIO = StringIO
    plugintest.StringIO = StringIO

    # in order to avoid having to fiddle with rather uncommon
    # file:// URLs in the tests, have a standard HTTP server
    # that serves an 'httpserve' directory in the test HOME
    # the URL will be available from datalad.test_http_server.url
    from datalad.tests.utils import HTTPPath
    import tempfile
    global test_http_server
    serve_path = tempfile.mkdtemp(
        dir=cfg.get("datalad.tests.temp.dir"),
        prefix='httpserve',
    )
    test_http_server = HTTPPath(serve_path)
    test_http_server.start()
    _TEMP_PATHS_GENERATED.append(serve_path)

    if cfg.obtain('datalad.tests.setup.testrepos'):
        lgr.debug("Pre-populating testrepos")
        from datalad.tests.utils import with_testrepos
        # invoking the decorator with a no-op forces fixture repos to be built
        with_testrepos()(lambda repo: 1)()