def wrapper():
    if LOGIN_ACTION_NAME in interaction:
        globals.login_failed = False
    if globals.login_failed:
        pytest.skip("login failed")
    start = time.time()
    error_msg = 'Success'
    full_exception = ''
    try:
        func()
        success = True
    except Exception:
        success = False
        # https://docs.python.org/2/library/sys.html#sys.exc_info
        exc_type, full_exception = sys.exc_info()[:2]
        error_msg = f"Failed measure: {interaction} - {exc_type.__name__}"
    end = time.time()
    timing = str(int((end - start) * 1000))

    lockfile = f'{selenium_results_file}.lock'

    with filelock.SoftFileLock(lockfile):
        with open(selenium_results_file, "a+") as jtl_file:
            timestamp = round(time.time() * 1000)
            jtl_file.write(
                f"{timestamp},{timing},{interaction},,{error_msg},,{success},0,0,0,0,,0\n"
            )

    print(f"{timestamp},{timing},{interaction},{error_msg},{success}")

    if not success:
        if LOGIN_ACTION_NAME in interaction:
            globals.login_failed = True
        raise Exception(error_msg, full_exception)
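A wrapper like this is the inner function of a timing decorator: func, interaction, and selenium_results_file are free variables supplied by the enclosing scopes. A minimal sketch of that enclosing shape, with hypothetical names (print_timing and the measured function are assumptions, not part of the original source):

import functools
import time

def print_timing(interaction):
    # hypothetical decorator factory mirroring the free variables
    # that the wrapper above closes over
    def deco(func):
        @functools.wraps(func)
        def wrapper():
            start = time.time()
            func()
            print(f"{interaction}: {int((time.time() - start) * 1000)} ms")
        return wrapper
    return deco

@print_timing("view_dashboard")
def measure_view_dashboard():
    pass  # the actual Selenium interaction would go here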
Example #2
def merge_tree(src, dst, symlinks=False, timeout=90):
    """
    Merge src into dst recursively by copying all files from src into dst.
    Return a list of all files copied.

    Like copytree(src, dst), but raises an error if merging the two trees
    would overwrite any files.
    """
    assert src not in dst, ("Can't merge/copy source into subdirectory of itself.  Please create "
                            "separate spaces for these things.")

    new_files = copytree(src, dst, symlinks=symlinks, dry_run=True)
    # do not copy lock files
    new_files = [f for f in new_files if not f.endswith('.conda_lock')]
    existing = [f for f in new_files if isfile(f)]

    if existing:
        raise IOError("Can't merge {0} into {1}: file exists: "
                      "{2}".format(src, dst, existing[0]))

    lock = filelock.SoftFileLock(join(src, ".conda_lock"))
    lock.acquire(timeout=timeout)
    try:
        copytree(src, dst, symlinks=symlinks)
    finally:
        lock.release()
        rm_rf(os.path.join(dst, '.conda_lock'))
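merge_tree leans on SoftFileLock's defining property: it takes no OS-level lock and only creates and checks for the lock file itself, so it behaves the same on network filesystems where flock/fcntl can be unreliable. The basic acquire/release semantics in isolation (the path is a placeholder):

import filelock

lock = filelock.SoftFileLock("/tmp/demo.lock", timeout=10)
with lock:  # blocks up to 10 s for the lock, then raises filelock.Timeout
    pass    # critical section; the lock file is removed again on exit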
Example #3
def create_new_location(source, upload, move=False):
    base_dir = current_app.config['BOOKS_BASE_DIR']
    if isinstance(upload, model.Upload):
        new_file = os.path.join(current_app.config['UPLOAD_DIR'], upload.file)
    else:
        new_file = upload
    new_location = os.path.join(source.ebook.base_dir,
                                os.path.basename(norm_file_name(source)))
    #if source.ebook.base_dir else norm_file_name(source) #TODO: Remove this WA
    ebook_dir = os.path.join(base_dir, os.path.split(new_location)[0])
    os.makedirs(ebook_dir, exist_ok=True)
    lock_file = os.path.join(ebook_dir, '.lock_this_dir')
    index = 1
    with filelock.SoftFileLock(lock_file, timeout=5):
        while os.path.exists(os.path.join(base_dir, new_location)):
            name, ext = os.path.splitext(new_location)
            new_location = name + '(%d)' % index + ext
            index += 1
        if move:
            shutil.move(new_file, os.path.join(base_dir, new_location))
        else:
            shutil.copy(new_file, os.path.join(base_dir, new_location))

    return new_location
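The directory lock is what makes the while-loop above safe: the existence check and the copy/move must happen under the same lock, or two concurrent uploads could settle on the same free name. The race-free check-then-act pattern in isolation (all paths are placeholders):

import os
import shutil
import filelock

with filelock.SoftFileLock('/library/.lock_this_dir', timeout=5):
    target, index = '/library/book.epub', 1
    while os.path.exists(target):          # check...
        name, ext = os.path.splitext('/library/book.epub')
        target = '%s(%d)%s' % (name, index, ext)
        index += 1
    shutil.copy('/tmp/upload123', target)  # ...and act, still under the lock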
Example #4
def get_download_lock(lock_dir):
    mkdir(lock_dir)
    lockfile = os.path.join(lock_dir, 'download.lock')
    try:
        return filelock.FileLock(lockfile).acquire()
    except Exception:
        # OS-level (hard) locking can fail on some filesystems;
        # fall back to the existence-based soft lock
        return filelock.SoftFileLock(lockfile).acquire()
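filelock's acquire() returns a proxy object that also works in a with statement and releases the lock on exit, so the helper's return value can be used directly as a context manager. A usage sketch (the lock directory and the guarded step are assumptions):

with get_download_lock('/tmp/locks'):
    pass  # e.g., a download that must not run concurrently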
Example #5
def copy_into(src, dst, timeout=90, symlinks=False):
    "Copy all the files and directories in src to the directory dst"
    lock = None
    if isdir(dst):
        lock = filelock.SoftFileLock(join(dst, ".conda_lock"))
        lock.acquire(timeout=timeout)

    try:
        if isdir(src):
            merge_tree(src, dst, symlinks)

        else:
            if isdir(dst):
                dst_fn = os.path.join(dst, os.path.basename(src))
            else:
                dst_fn = dst

            try:
                if os.path.sep in dst_fn and not os.path.isdir(
                        os.path.dirname(dst_fn)):
                    os.makedirs(os.path.dirname(dst_fn))
                shutil.copy2(src, dst_fn)
            except shutil.Error:
                log.debug("skipping %s - already exists in %s",
                          os.path.basename(src), dst)
    finally:
        if lock:
            lock.release()
Example #6
def copy_into(src, dst, timeout=90, symlinks=False):
    "Copy all the files and directories in src to the directory dst"
    if isdir(src):
        merge_tree(src, dst, symlinks, timeout=timeout)

    else:
        if isdir(dst):
            dst_fn = os.path.join(dst, os.path.basename(src))
        else:
            dst_fn = dst

        lock = None
        if os.path.isabs(src):
            src_folder = os.path.dirname(src)
            lock = filelock.SoftFileLock(join(src_folder, ".conda_lock"))
        try:
            if os.path.sep in dst_fn and not os.path.isdir(os.path.dirname(dst_fn)):
                os.makedirs(os.path.dirname(dst_fn))
            if lock:
                lock.acquire(timeout=timeout)
            # with each of these, we are copying less metadata.  This seems to be necessary
            #   to cope with some shared filesystems with some virtual machine setups.
            #  See https://github.com/conda/conda-build/issues/1426
            try:
                shutil.copy2(src, dst_fn)
            except OSError:
                try:
                    shutil.copy(src, dst_fn)
                except OSError:
                    shutil.copyfile(src, dst_fn)
        except shutil.Error:
            log.debug("skipping %s - already exists in %s", os.path.basename(src), dst)
        finally:
            if lock:
                lock.release()
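The nested fallback above deliberately degrades what gets copied: shutil.copy2 preserves data plus metadata (timestamps, permissions), shutil.copy drops the timestamps, and shutil.copyfile copies data only, which some shared or virtual-machine filesystems require (see the linked issue). The same chain as a standalone helper (a sketch; the function name is not from the original):

import shutil

def copy_with_fallback(src, dst):
    try:
        shutil.copy2(src, dst)         # data + metadata (mtime, permissions)
    except OSError:
        try:
            shutil.copy(src, dst)      # data + permission bits
        except OSError:
            shutil.copyfile(src, dst)  # data only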
Example #7
def get_lock(folder, timeout=90, filename=".conda_lock"):
    global _locations
    location = os.path.abspath(os.path.normpath(folder))
    if not os.path.isdir(location):
        os.makedirs(location)
    if location not in _locations:
        _locations[location] = filelock.SoftFileLock(os.path.join(location, filename),
                                                     timeout)
    return _locations[location]
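Caching one SoftFileLock per normalized directory matters because filelock locks are reentrant per object: repeated or nested calls within the same process re-acquire the cached lock instead of timing out against a second lock object on the same file. A usage sketch (the folder path is a placeholder):

with get_lock('/tmp/pkgs', timeout=5):
    with get_lock('/tmp/pkgs', timeout=5):  # same cached object; the lock
        pass                                # counter goes to 2, no deadlock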
Example #8
def clean_pkg_cache(dist, timeout):
    cc.pkgs_dirs = cc.pkgs_dirs[:1]
    locks = []
    for folder in cc.pkgs_dirs:
        locks.append(filelock.SoftFileLock(join(folder, ".conda_lock")))

    for lock in locks:
        lock.acquire(timeout=timeout)

    try:
        rmplan = [
            'RM_EXTRACTED {0} local::{0}'.format(dist),
            'RM_FETCHED {0} local::{0}'.format(dist),
        ]
        plan.execute_plan(rmplan)

        # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
        #   Conda's cleanup is still necessary - it keeps track of its own in-memory
        #   list of downloaded things.
        for folder in cc.pkgs_dirs:
            try:
                assert not os.path.exists(os.path.join(folder, dist))
                assert not os.path.exists(
                    os.path.join(folder, dist + '.tar.bz2'))
                for pkg_id in [dist, 'local::' + dist]:
                    assert pkg_id not in package_cache()
            except AssertionError:
                log.debug(
                    "Conda caching error: %s package remains in cache after removal",
                    dist)
                log.debug("Clearing package cache to compensate")
                cache = package_cache()
                keys = [key for key in cache.keys() if dist in key]
                for pkg_id in keys:
                    if pkg_id in cache:
                        del cache[pkg_id]
                for entry in glob(os.path.join(folder, dist + '*')):
                    rm_rf(entry)
    finally:
        for lock in locks:
            lock.release()
            if os.path.isfile(lock._lock_file):
                os.remove(lock._lock_file)
Example #9
    def __init__(self, name: str, path: str = None, logger=None):
        if logger is not None:
            self._logger = logger
        else:
            self._logger = logs.get_logger('LocalQueue')
            self._logger.setLevel(get_storage_verbose_level())

        self.name = name
        if path is None:
            self.path = self._get_queue_directory()
        else:
            self.path = path
        self.path = os.path.join(self.path, name)
        os.makedirs(self.path, exist_ok=True)

        # Local queue is considered active iff its directory exists.
        self._lock_path = os.path.join(self.path, LOCK_FILE_NAME)
        self._lock = filelock.SoftFileLock(self._lock_path)
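A hypothetical companion method on the same class, showing how self._lock would serialize producers against the queue directory (the method name and on-disk item format are assumptions; os and time come from the module's imports):

    def enqueue(self, data: bytes) -> None:
        # hypothetical: hold the soft lock so concurrent producers
        # cannot race on filenames inside self.path
        with self._lock:
            item = os.path.join(self.path, '%d.msg' % time.time_ns())
            with open(item, 'wb') as fobj:
                fobj.write(data)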
Example #10
def write_repodata(repodata, dir_path, config=None, lock=None):
    """ Write updated repodata.json and repodata.json.bz2 """
    if not config:
        import conda_build.config
        config = conda_build.config.config
    if not lock:
        lock = filelock.SoftFileLock(join(dir_path, ".conda_lock"))
    lock.acquire(timeout=config.timeout)
    try:
        data = json.dumps(repodata, indent=2, sort_keys=True)
        # strip trailing whitespace
        data = '\n'.join(line.rstrip() for line in data.splitlines())
        # make sure we have a newline at the end
        if not data.endswith('\n'):
            data += '\n'
        with open(join(dir_path, 'repodata.json'), 'w') as fo:
            fo.write(data)
        with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo:
            fo.write(bz2.compress(data.encode('utf-8')))
    finally:
        # release even if writing fails, so the lock is never leaked
        lock.release()
Example #11
def read_index_tar(tar_path, config, lock=None):
    """ Returns the index.json dict inside the given package tarball. """

    if not lock:
        lock = filelock.SoftFileLock(join(os.path.dirname(tar_path), ".conda_lock"))
    lock.acquire(timeout=config.timeout)
    try:
        with tarfile.open(tar_path) as t:
            try:
                return json.loads(t.extractfile('info/index.json').read().decode('utf-8'))
            except EOFError:
                raise RuntimeError("Could not extract %s. File probably corrupt."
                    % tar_path)
            except OSError as e:
                raise RuntimeError("Could not extract %s (%s)" % (tar_path, e))
    except tarfile.ReadError:
        raise RuntimeError("Could not extract metadata from %s. "
                            "File probably corrupt." % tar_path)
    finally:
        lock.release()
Example #12
def test(m, config, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''

    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)

    clean_pkg_cache(m.dist(), config.timeout)

    with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                               timeout=config.timeout):
        tmp_dir = config.test_dir
        if not isdir(tmp_dir):
            os.makedirs(tmp_dir)
        create_files(tmp_dir, m, config)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m, config)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())

        get_build_metadata(m, config=config)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver(config)]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver(config)]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver(config)]

        create_env(config.test_prefix, specs, config=config)

        with path_prepended(config.test_prefix):
            env = dict(os.environ.copy())
            env.update(
                environ.get_dict(config=config, m=m,
                                 prefix=config.test_prefix))
            env["CONDA_BUILD_STATE"] = "TEST"

        if not config.activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env,
                                   config.test_prefix,
                                   prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')

        # Python 2 on Windows requires environment variables to be str, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir,
                           "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if config.activate:
                ext = ".bat" if on_win else ""
                tf.write(
                    "{source} {conda_root}activate{ext} {test_env} {squelch}\n"
                    .format(conda_root=root_script_dir + os.path.sep,
                            source="call" if on_win else "source",
                            ext=ext,
                            test_env=config.test_prefix,
                            squelch=">nul 2>&1" if on_win else "&> /dev/null"))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                tf.write("{perl} {test_file}\n".format(perl=config.test_perl,
                                                       test_file=join(
                                                           tmp_dir,
                                                           'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                tf.write("{lua} {test_file}\n".format(lua=config.test_lua,
                                                      test_file=join(
                                                          tmp_dir,
                                                          'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))

        if on_win:
            cmd = ['cmd.exe', "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m,
                         move_broken=move_broken,
                         broken_dir=config.broken_dir,
                         config=config)

    print("TEST END:", m.dist())
Example #13
def build(m,
          config,
          post=None,
          need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param post: None means run the whole build; True means run post only;
        False means stop just before the post.
    :type post: bool or None
    :param need_source_download: if rendering failed to download source
        (due to missing tools), retry here after the build env is populated
    :type need_source_download: bool
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(
                "    (actual version deferred until further download or env creation)"
            )

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix)
                or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or not any(
                        spec.startswith('python') and "3." in spec
                        for spec in specs)):
                    specs.append(vcs_source)

                    log.warn(
                        "Your recipe depends on %s at build time (for templates), "
                        "but you have not listed it as a build dependency.  Doing "
                        "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError(
                        "Your recipe uses mercurial in build, but mercurial"
                        " does not yet support Python 3.  Please handle all of "
                        "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just the work dir, or it might be one
            #    level deeper, depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files",
                        pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config),
                                         'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(
                                        conda_root=root_script_dir +
                                        os.path.sep,
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file,
                                          config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(
                f for f in files2 - files1
                if config.meta_dir in join(config.build_prefix, f)), )
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m,
                   sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m,
                          sorted(files2 - files1),
                          config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1),
                                    config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
Example #14
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    update_index(folder, config=config, lock=lock)
                    locks.append(lock)
                for lock in locks:
                    lock.acquire(timeout=config.timeout)

                index = get_build_index(config=config, clear_cache=True)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('pip-')
                    ]  # noqa
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('setuptools-')
                    ]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                if (("too short in" in str(exc)
                     or 'post-link failed for: openssl' in str(exc)
                     or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d",
                             config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn(
                        "One or more of your package dependencies needs to be rebuilt "
                        "with a longer prefix length.")
                    log.warn(
                        "Falling back to legacy prefix length of 80 characters."
                    )
                    log.warn(
                        "Your package will not install into prefixes > 80 characters."
                    )
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    #   Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix

                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix,
                               specs,
                               config=config,
                               clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example #15
    def get_dataset_downloading_lock(self, name: str) -> filelock.SoftFileLock:
        return filelock.SoftFileLock("./bogus.lock", 0)
Example #16
    def get_dataset_downloading_lock(self, ds_name: str) -> filelock.SoftFileLock:
        'Returns a lock. Use in a with statement'
        f_lock = self._get_filename('download_lock', ds_name, ext='lock')
        return filelock.SoftFileLock(f_lock, 0)
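Passing 0 as the timeout makes acquisition non-blocking: if another process already holds the lock, filelock.Timeout is raised immediately instead of waiting. A usage sketch (store and the download step are assumptions):

import filelock

try:
    with store.get_dataset_downloading_lock('mnist'):
        pass  # perform the download while holding the lock
except filelock.Timeout:
    pass  # another process is already downloading this dataset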
Example #17
def update_index(dir_path,
                 config,
                 force=False,
                 check_md5=False,
                 remove=True,
                 lock=None):
    """
    Update all index files in dir_path with changed packages.

    :param verbose: Should detailed status messages be output?
    :type verbose: bool
    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for determining
                      if a package changed.
    :type check_md5: bool
    """

    if config.verbose:
        print("updating index in:", dir_path)
    index_path = join(dir_path, '.index.json')
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)

    if not lock:
        lock = filelock.SoftFileLock(join(dir_path, ".conda_lock"))
    lock.acquire(timeout=config.timeout)

    if force:
        index = {}
    else:
        try:
            mode_dict = {
                'mode': 'r',
                'encoding': 'utf-8'
            } if PY3 else {
                'mode': 'rb'
            }
            with open(index_path, **mode_dict) as fi:
                index = json.load(fi)
        except (IOError, ValueError):
            index = {}

    files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
    if any(fn.startswith('_license-') for fn in files):
        sys.exit("""\
Error:
    Indexing a copy of the Anaconda conda package channel is neither
    necessary nor supported.  If you wish to add your own packages,
    you can do so by adding them to a separate channel.
""")
    for fn in files:
        path = join(dir_path, fn)
        if fn in index:
            if check_md5:
                if index[fn]['md5'] == md5_file(path):
                    continue
            elif index[fn]['mtime'] == getmtime(path):
                continue
        if config.verbose:
            print('updating:', fn)
        d = read_index_tar(path, config, lock=lock)
        d.update(file_info(path))
        index[fn] = d

    for fn in files:
        index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None

    if remove:
        # remove files from the index which are not on disk
        for fn in set(index) - files:
            if config.verbose:
                print("removing:", fn)
            del index[fn]

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(index_path, **mode_dict) as fo:
        json.dump(index, fo, indent=2, sort_keys=True, default=str)

    # --- new repodata
    for fn in index:
        info = index[fn]
        for varname in 'arch', 'platform', 'mtime', 'ucs':
            try:
                del info[varname]
            except KeyError:
                pass

        if 'requires' in info and 'depends' not in info:
            info['depends'] = info['requires']

    repodata = {'packages': index, 'info': {}}
    write_repodata(repodata, dir_path, config, lock=lock)
    lock.release()
    if os.path.isfile(join(dir_path, ".conda_lock")):
        os.remove(join(dir_path, ".conda_lock"))
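Handing the already-held lock down into read_index_tar and write_repodata works because a filelock lock object counts acquisitions: a nested acquire() bumps the counter and the matching release() decrements it, so the outer holder keeps the lock until its own release. The counting behavior in isolation:

import filelock

lock = filelock.SoftFileLock("/tmp/index.lock")
lock.acquire()
lock.acquire()         # nested: counter -> 2, returns immediately
lock.release()         # counter -> 1, still held
print(lock.is_locked)  # True
lock.release()         # counter -> 0, lock file removed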
Example #18
def prep_conversion(sid, dicoms, outdir, heuristic, converter, anon_sid,
                    anon_outdir, with_prov, ses, bids_options, seqinfo,
                    min_meta, overwrite, dcmconfig, grouping):
    if dicoms:
        lgr.info("Processing %d dicoms", len(dicoms))
    elif seqinfo:
        lgr.info("Processing %d pre-sorted seqinfo entries", len(seqinfo))
    else:
        raise ValueError("neither dicoms nor seqinfo dict was provided")

    if bids_options is not None:
        if not sid:
            raise ValueError(
                "BIDS requires alphanumeric subject ID. Got an empty value")
        if not sid.isalnum():  # alphanumeric only
            sid, old_sid = convert_sid_bids(sid)

    if not anon_sid:
        anon_sid = sid
    if not anon_outdir:
        anon_outdir = outdir

    # Generate heudiconv info folder
    idir = op.join(outdir, '.heudiconv', anon_sid)
    if bids_options is not None and ses:
        idir = op.join(idir, 'ses-%s' % str(ses))
    if anon_outdir == outdir:
        idir = op.join(idir, 'info')
    if not op.exists(idir):
        os.makedirs(idir)

    ses_suffix = "_ses-%s" % ses if ses is not None else ""
    info_file = op.join(idir, '%s%s.auto.txt' % (sid, ses_suffix))
    edit_file = op.join(idir, '%s%s.edit.txt' % (sid, ses_suffix))
    filegroup_file = op.join(idir, 'filegroup%s.json' % ses_suffix)

    # if conversion table(s) do not exist -- we need to prepare them
    # (the *prepare* stage in https://github.com/nipy/heudiconv/issues/134)
    # if overwrite - recalculate this anyways
    reuse_conversion_table = op.exists(edit_file)
    # We also might need to redo it if changes in the heuristic file
    # detected
    # ref: https://github.com/nipy/heudiconv/issues/84#issuecomment-330048609
    # for more automagical wishes
    target_heuristic_filename = op.join(idir, 'heuristic.py')
    # facilitates change - TODO: remove in 1.0
    old_heuristic_filename = op.join(idir, op.basename(heuristic.filename))
    if op.exists(old_heuristic_filename):
        assure_no_file_exists(target_heuristic_filename)
        safe_copyfile(old_heuristic_filename, target_heuristic_filename)
        assure_no_file_exists(old_heuristic_filename)
    # TODO:
    #  1. add a test
    #  2. possibly extract into a dedicated function for easier logic flow here
    #     and a dedicated unittest
    if (op.exists(target_heuristic_filename)
            and file_md5sum(target_heuristic_filename) != file_md5sum(
                heuristic.filename)):
        # remake conversion table
        reuse_conversion_table = False
        lgr.info(
            "Will not reuse existing conversion table files because heuristic "
            "has changed")

    if reuse_conversion_table:
        lgr.info("Reloading existing filegroup.json "
                 "because %s exists", edit_file)
        info = read_config(edit_file)
        filegroup = load_json(filegroup_file)
        # XXX Yarik finally understood why basedir was dragged along!
        # So we could reuse the same PATHs definitions possibly consistent
        # across re-runs... BUT that wouldn't work anyways if e.g.
        # DICOMs dumped with SOP UUIDs thus differing across runs etc
        # So either it would need to be brought back or reconsidered altogether
        # (since no sample data to test on etc)
    else:
        assure_no_file_exists(target_heuristic_filename)
        safe_copyfile(heuristic.filename, target_heuristic_filename)
        if dicoms:
            seqinfo = group_dicoms_into_seqinfos(
                dicoms,
                grouping,
                file_filter=getattr(heuristic, 'filter_files', None),
                dcmfilter=getattr(heuristic, 'filter_dicom', None),
                flatten=True,
                custom_grouping=getattr(heuristic, 'grouping', None))

        seqinfo_list = list(seqinfo.keys())
        filegroup = {si.series_id: x for si, x in seqinfo.items()}
        dicominfo_file = op.join(idir, 'dicominfo%s.tsv' % ses_suffix)
        # allow to overwrite even if was present under git-annex already
        assure_no_file_exists(dicominfo_file)
        with open(dicominfo_file, 'wt') as fp:
            fp.write('\t'.join([val for val in seqinfo_fields]) + '\n')
            for seq in seqinfo_list:
                fp.write('\t'.join([str(val) for val in seq]) + '\n')
        lgr.debug("Calling out to %s.infodict", heuristic)
        info = heuristic.infotodict(seqinfo_list)
        lgr.debug("Writing to {}, {}, {}".format(info_file, edit_file,
                                                 filegroup_file))
        assure_no_file_exists(info_file)
        write_config(info_file, info)
        assure_no_file_exists(edit_file)
        write_config(edit_file, info)
        save_json(filegroup_file, filegroup)

    if bids_options is not None:
        # the other portion of the path would mimic BIDS layout
        # so we don't need to worry here about sub, ses at all
        tdir = anon_outdir
    else:
        tdir = op.join(anon_outdir, anon_sid)

    if converter.lower() != 'none':
        lgr.info("Doing conversion using %s", converter)
        cinfo = conversion_info(anon_sid, tdir, info, filegroup, ses)
        convert(
            cinfo,
            converter=converter,
            scaninfo_suffix=getattr(heuristic, 'scaninfo_suffix', '.json'),
            custom_callable=getattr(heuristic, 'custom_callable', None),
            with_prov=with_prov,
            bids_options=bids_options,
            outdir=tdir,
            min_meta=min_meta,
            overwrite=overwrite,
            dcmconfig=dcmconfig,
        )

    for item_dicoms in filegroup.values():
        clear_temp_dicoms(item_dicoms)

    if bids_options is not None and 'notop' not in bids_options:
        lockfile = op.join(anon_outdir, LOCKFILE)
        if op.exists(lockfile):
            lgr.warning(
                "Existing lockfile found in {0} - waiting for the "
                "lock to be released. To set a timeout limit, set "
                "the HEUDICONV_FILELOCK_TIMEOUT environment variable "
                "to a value in seconds. If this process hangs, it may "
                "require manual deletion of {0}.".format(lockfile))
        timeout = os.getenv("HEUDICONV_FILELOCK_TIMEOUT", -1)
            if seqinfo:
                keys = list(seqinfo)
                add_participant_record(anon_outdir, anon_sid,
                                       keys[0].patient_age,
                                       keys[0].patient_sex)
            populate_bids_templates(anon_outdir,
                                    getattr(heuristic, 'DEFAULT_FIELDS', {}))