Example #1
0
File: buck.py Project: E-LLP/buck-1
 def exclusion(path):
     """Return True if *path* matches any configured exclusion rule."""
     rel_search = path.relative_to(search_base)
     # Exact-path excludes first: cheapest check.
     if rel_search.as_posix() in non_special_excludes:
         return True
     # Pattern excludes relative to the search base.
     if any(rel_search.match(pattern, match_entire=True)
            for pattern in match_excludes):
         return True
     # Pattern excludes relative to the project root.
     rel_project = path.relative_to(project_root)
     return any(rel_project.match(pattern, match_entire=True)
                for pattern in project_root_relative_excludes)
Example #2
0
File: buck.py Project: E-LLP/buck-1
 def exclusion(path):
     """Tell whether *path* is excluded by any of the configured rules."""
     relative = path.relative_to(search_base)
     if relative.as_posix() in non_special_excludes:
         return True
     for pattern in match_excludes:
         if relative.match(pattern, match_entire=True):
             return True
     # Some excludes are expressed relative to the project root instead.
     project_relative = path.relative_to(project_root)
     for pattern in project_root_relative_excludes:
         if project_relative.match(pattern, match_entire=True):
             return True
     return False
Example #3
0
def template_url(context, template_name):
    '''
    Return a GitHub URL to the source of the given template.
    '''
    template_path = get_template_path(template_name)
    relative = template_path.relative_to(ROOT_DIR)
    return github_url_for_path(relative)
Example #4
0
    def run(self):
        """Build the project with CMake (when invoked standalone), run the
        standard build, then copy extra package_data files into build_lib.
        """
        global CMAKE_BUILD_DIR
        self.jobs = multiprocessing.cpu_count()
        plat_specifier = '.{0}-{1}.{2}'.format(self.plat_name, *sys.version_info[:2])
        self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier, self.config)

        # if setup.py is directly called use CMake to build product
        if CMAKE_BUILD_DIR == '.':
            # set path to the root of OpenVINO CMakeList file
            openvino_root_dir = Path(__file__).resolve().parents[4]
            self.announce(f'Configuring cmake project: {openvino_root_dir}', level=3)
            self.spawn(['cmake', '-H' + str(openvino_root_dir), '-B' + self.build_temp,
                        '-DCMAKE_BUILD_TYPE={type}'.format(type=self.config),
                        '-DENABLE_PYTHON=ON',
                        '-DENABLE_OV_ONNX_FRONTEND=ON'])

            self.announce('Building binaries', level=3)
            self.spawn(['cmake', '--build', self.build_temp,
                        '--config', self.config, '-j', str(self.jobs)])
            # Record where the binaries ended up for later build steps.
            CMAKE_BUILD_DIR = self.build_temp
        self.run_command('build_clib')

        build.run(self)
        # Copy extra package_data content filtered by find_packages
        dst = Path(self.build_lib)
        src = Path(get_package_dir(PY_INSTALL_CFG))
        exclude = ignore_patterns('*ez_setup*', '*__pycache__*', '*.egg-info*')
        for path in src.glob('**/*'):
            if path.is_dir() or exclude(str(path)):
                continue
            path_rel = path.relative_to(src)
            # Mirror the relative layout under dst before copying.
            (dst / path_rel.parent).mkdir(exist_ok=True, parents=True)
            copyfile(path, dst / path_rel)
Example #5
0
    def _matches_recursive_blob(path, pattern):
        """Return True when *path* matches a glob *pattern* containing '**'.

        NOTE(review): relies on os_utils.path_sep() and recursion via
        SingleFileMatcher._split_first_parent; their exact semantics are
        assumed from usage here.
        """
        # split[0] is the literal prefix before the first '**'.
        split = pattern.split('**', maxsplit=1)

        try:
            remaining_path = path.relative_to(split[0])
        except ValueError:
            # path does not start with the literal prefix.
            return False

        if len(split) == 1:
            # No '**' in the pattern at all.
            return False

        remaining_pattern = split[1]
        if remaining_pattern.startswith(os_utils.path_sep()):
            remaining_pattern = remaining_pattern[len(os_utils.path_sep()):]

        if remaining_pattern == '':  # this can happen if pattern ends with **
            return True

        if remaining_path.match(remaining_pattern):
            return True
        elif '**' not in remaining_pattern:
            return False

        # The tail still contains '**': try anchoring its first component at
        # each ancestor of the remaining path and recurse on the rest.
        (remaining_head, remaining_tail
         ) = SingleFileMatcher._split_first_parent(remaining_pattern)
        for parent in remaining_path.parents:
            if parent.name == '':
                continue

            if fnmatch(parent.name, remaining_head):
                if SingleFileMatcher._matches_recursive_blob(
                        remaining_path.relative_to(parent), remaining_tail):
                    return True

        return False
Example #6
0
def _naive_relative_to(path: PosixPath, root: PosixPath) -> PurePosixPath:
    """
    Compute relative PurePosixPath, result may include '..' parts.

    Both arguments must be absolute PurePosixPath's and lack '..' parts.

    Possibility of symlinks is ignored, i. e. arguments are interpreted
    as resolved paths.
    """
    if not path.is_absolute():
        raise ValueError(path)
    if not root.is_absolute():
        raise ValueError(root)
    if '..' in path.parts:
        raise ValueError(path)
    if '..' in root.parts:
        raise ValueError(root)
    upstairs = 0
    while root not in path.parents:
        parent = root.parent
        assert parent != root
        root = parent
        upstairs += 1
    return PurePosixPath(
        * (('..',) * upstairs),
        path.relative_to(root) )
Example #7
0
 def includes_iterator():
     """Yield matched files as paths relative to the search base."""
     for pattern in includes:
         for candidate in search_base.glob(pattern):
             # TODO(bhamiltoncx): Handle hidden files on Windows.
             hidden = candidate.name.startswith('.')
             if candidate.is_file() and (include_dotfiles or not hidden):
                 yield candidate.relative_to(search_base)
Example #8
0
def template_map(source: Path, target: Path, maps: dict):
    """Render or copy every file under *source* into *target*.

    Files whose relative path has a context entry in *maps* are rendered as
    Jinja2 templates (or delegated to a Validators/Bootnode writer); all
    other files are copied verbatim.  'host_vars' and 'custom_role' trees
    are skipped.
    """
    loader = FileSystemLoader(str(source))
    environment = Environment(loader=loader, undefined=DebugUndefined)

    for path in [x for x in source.rglob('*') if not x.is_dir()]:
        relative = path.relative_to(source)
        if str(relative).startswith('host_vars'):
            continue
        if str(relative).startswith('custom_role'):
            continue

        context = maps.get(str(relative))
        dest = target / relative
        if not dest.parent.exists():
            dest.parent.mkdir(parents=True)

        if context:
            if isinstance(context, Validators) or isinstance(
                    context, Bootnode):
                # These context objects know how to write themselves.
                context.write(environment, target, relative)
            else:
                template = environment.get_template(str(relative))
                dest.write_text(template.render(context))
        else:
            # No context: plain copy.
            copy(str(path), str(dest))
Example #9
0
def get_outputfolder_given_path(
        path: Path, dervo_root: Path, output_root: Path):
    """Create output folder, create symlink to it """
    # Folder name encodes the path relative to the dervo root ('/' -> '.').
    rel = path.relative_to(dervo_root)
    folder_name = str(rel).replace('/', '.')
    return vst.mkdir(output_root / folder_name)
Example #10
0
def template_url(context, template_name):
    '''
    Return a GitHub URL to the source of the given template.
    '''
    source_path = get_template_path(template_name)
    return github_url_for_path(source_path.relative_to(ROOT_DIR))
Example #11
0
def find_prebuilt_extensions(search_dirs):
    """Collect prebuilt python extensions.

    Scans each directory in *search_dirs* for platform-specific extension
    binaries (.so / .pyd) and wraps each as a PrebuiltExtension whose name
    is the dotted package path derived from its location.
    """
    extensions = []
    ext_pattern = ""
    if sys.platform == "linux":
        ext_pattern = "**/*.so"
    elif sys.platform == "win32":
        ext_pattern = "**/*.pyd"
    elif sys.platform == "darwin":
        ext_pattern = "**/*.so"
    for base_dir in search_dirs:
        for path in Path(base_dir).glob(ext_pattern):
            # Skip bundled runtime libraries; they are not python extensions.
            if path.match("openvino/libs/*"):
                continue
            relpath = path.relative_to(base_dir)
            # BUG FIX: relpath.parent is a Path and never equals the str ".",
            # so the old comparison was always True and top-level files got a
            # bogus leading "." package component. Compare string forms.
            if str(relpath.parent) != ".":
                package_names = str(relpath.parent).split(os.path.sep)
            else:
                package_names = []
            package_names.append(path.name.split(".", 1)[0])
            name = ".".join(package_names)
            extensions.append(PrebuiltExtension(name, sources=[str(path)]))
    if not extensions:
        # Fall back to a dummy extension so setup() always has one.
        extensions.append(
            PrebuiltExtension("openvino", sources=[str("setup.py")]))
    return extensions
Example #12
0
def find_prebuilt_extensions(search_dirs):
    """collect prebuilt python extensions

    Scans each directory in *search_dirs* for platform-specific extension
    binaries (.so / .pyd); each becomes a PrebuiltExtension named by its
    dotted package path.
    """
    extensions = []
    ext_pattern = ''
    if sys.platform == 'linux':
        ext_pattern = '**/*.so'
    elif sys.platform == 'win32':
        ext_pattern = '**/*.pyd'
    elif sys.platform == 'darwin':
        ext_pattern = '**/*.so'
    for base_dir in search_dirs:
        for path in Path(base_dir).glob(ext_pattern):
            # Bundled runtime libraries are not python extensions.
            if path.match('openvino/libs/*'):
                continue
            relpath = path.relative_to(base_dir)
            # BUG FIX: relpath.parent is a Path, so comparing it to the str
            # '.' was always True; top-level files then produced a spurious
            # leading '.' package component. Compare string forms instead.
            if str(relpath.parent) != '.':
                package_names = str(relpath.parent).split(os.path.sep)
            else:
                package_names = []
            package_names.append(path.name.split('.', 1)[0])
            name = '.'.join(package_names)
            extensions.append(PrebuiltExtension(name, sources=[str(path)]))
    if not extensions:
        # Guarantee at least one extension for setup().
        extensions.append(
            PrebuiltExtension('openvino', sources=[str('setup.py')]))
    return extensions
Example #13
0
def _try_relativize_path(path):
    """Make the path relative to current directory if it's under that directory."""
    path = pathlib.Path(path)
    try:
        path = path.relative_to(os.getcwd())
    except ValueError:
        pass
    return str(path)
Example #14
0
def find_dandiset_and_subpath(path: Path) -> tuple[Dandiset, Path]:
    """
    Find the Dandiset rooted at ``path`` or one of its parents, and return the
    Dandiset along with ``path`` made relative to the Dandiset root
    """
    abspath = path.absolute()
    dandiset = Dandiset.find(abspath)
    if dandiset is None:
        raise ValueError(f"{abspath}: not a Dandiset")
    return (dandiset, abspath.relative_to(dandiset.path))
Example #15
0
    def html_zip(self, tmp_path, html_path):
        """Fixture: zip the html directory and yield a zipfile.Path rooted
        at 'qutebrowser' inside the archive.

        Archive entry names are made relative to tmp_path, so entries keep
        their path prefix under tmp_path (presumably html_path lives inside
        tmp_path — verify against the fixture definitions).
        """
        if not hasattr(zipfile, 'Path'):
            pytest.skip("Needs zipfile.Path")

        zip_path = tmp_path / 'qutebrowser.zip'
        with zipfile.ZipFile(zip_path, 'w') as zf:
            # Only direct children of html_path; no recursion here.
            for path in html_path.iterdir():
                zf.write(path, path.relative_to(tmp_path))

        yield zipfile.Path(zip_path) / 'qutebrowser'
Example #16
0
 def as_route(dirpath, filepath):
     """Map a file on disk to a route entry, or None for Python sources.

     Returns ((b"GET", url_bytes), handler) where handler serves the file
     via get().
     """
     path = (Path(dirpath) / filepath).absolute()
     # NOTE(review): substring test, so suffixes like '.pyc'/'.pyi' are
     # also skipped — confirm this is intentional.
     if ".py" in path.suffix:
         return
     else:
         url_path = Path(base_url_path) / path.relative_to(topdir)
         # When the stem equals strip_stem, serve at the parent URL instead.
         target = (url_path.parent if strip_stem is not None
                   and strip_stem == url_path.stem else url_path)
         handler = lambda eg: get(eg, path=path)
         return ((b"GET", str(target).encode("utf-8")),
                 update_wrapper(handler, get))
Example #17
0
def find_prebuilt_extensions(base_dir, ext_pattern):
    """Collect prebuilt extension binaries under *base_dir* matching
    *ext_pattern*, naming each by its dotted package path."""
    extensions = []
    for path in Path(base_dir).glob(ext_pattern):
        relpath = path.relative_to(base_dir)
        # BUG FIX: relpath.parent is a Path and never equals the str ".",
        # so the old comparison was always True and top-level files got a
        # spurious leading "." package component. Compare string forms.
        if str(relpath.parent) != ".":
            package_names = str(relpath.parent).split(os.path.sep)
        else:
            package_names = []
        package_names.append(path.name.split(".", 1)[0])
        name = ".".join(package_names)
        extensions.append(PrebuiltExtension(name, sources=[str(path)]))
    return extensions
Example #18
0
 def run(self):
     """Build C libraries, run the standard build, then copy extra
     package_data files from the install config directory into build_lib."""
     self.run_command('build_clib')
     build.run(self)
     # Copy extra package_data content filtered by find_packages
     dst = Path(self.build_lib)
     src = Path(get_package_dir(PY_INSTALL_CFG))
     exclude = ignore_patterns('*ez_setup*', '*__pycache__*', '*.egg-info*')
     for path in src.glob('**/*'):
         if path.is_dir() or exclude(str(path)):
             continue
         path_rel = path.relative_to(src)
         # Mirror the relative layout under dst before copying.
         (dst / path_rel.parent).mkdir(exist_ok=True, parents=True)
         copyfile(path, dst / path_rel)
Example #19
0
def _build_sitemap(site_dir: str) -> None:
    """Builds a minimal sitemap.xml for drake.mit.edu.

    This helps Google, Bing, and other search engines decide which pages on the
    generated drake.mit.edu site should be crawled, and helps determine the
    canonical version of each page.

    https://developers.google.com/search/docs/advanced/sitemaps/build-sitemap

    Args:
        site_dir: The absolute path to the root directory of the generated
          website and the directory to which the built sitemap.xml will be
          written.

    Raises:
        OSError: If the directory to which site_dir refers is not readable or
          writable.
    """

    print("Building sitemap.xml...")
    root_path = Path(site_dir)
    assert root_path.is_absolute(), \
        "Path to generated website is not an absolute path"
    paths = root_path.glob("**/*.html")

    XML_NAMESPACE = "http://www.sitemaps.org/schemas/sitemap/0.9"
    ROOT_URL = "https://drake.mit.edu"

    urlset = ET.Element("urlset", xmlns=XML_NAMESPACE)
    for path in sorted(paths):
        relative_path = path.relative_to(root_path)
        url = ET.SubElement(urlset, "url")
        if relative_path.name == "index.html":
            # sitemap.xml should only include canonical urls.
            location = relative_path.parent.as_posix() + "/"
        else:
            location = relative_path.as_posix()
        # BUG FIX: previously only the non-index branch was joined with
        # ROOT_URL, so index pages were emitted as relative locations;
        # sitemap <loc> entries must be absolute URLs.
        location = urllib.parse.urljoin(ROOT_URL,
                                        urllib.parse.quote(location))
        loc = ET.SubElement(url, "loc")
        loc.text = location
    sitemap = ET.ElementTree(urlset)
    # NOTE: pretty_print is lxml-specific; ET is assumed to be lxml.etree.
    sitemap.write(os.path.join(site_dir, "sitemap.xml"),
                  encoding="utf-8",
                  pretty_print=True,
                  xml_declaration=True)
def addAllFiles(targetDir, sourceDir):
    """Recursively mirror sourceDir into targetDir using hard links,
    recording each created link in the link manifest."""
    rootpath = Path(sourceDir)
    print(targetDir)
    print(sourceDir)
    for path in rootpath.glob("**/*"):
        rel = str(path.relative_to(sourceDir))
        dest = targetDir + slash + rel
        if path.is_dir():
            ensureFolder(dest)
            continue
        # Make sure the parent folder of the file exists.
        ensureFolder(dest.rpartition(slash)[0])
        if not Path(dest).exists():
            # in dst, src order
            makeHardLink(dest, str(path.absolute()))
            addToLinkManifest(dest, str(path.absolute()))
Example #21
0
    def html_zip(self, tmp_path, html_path):
        """Fixture: recursively zip html_path (entry names relative to
        tmp_path), sanity-check the archive contents, and yield a
        zipfile.Path rooted at 'qutebrowser' inside the archive."""
        if not hasattr(zipfile, 'Path'):
            pytest.skip("Needs zipfile.Path")

        zip_path = tmp_path / 'qutebrowser.zip'
        with zipfile.ZipFile(zip_path, 'w') as zf:
            for path in html_path.rglob('*'):
                zf.write(path, path.relative_to(tmp_path))

            # Guard against fixture drift: the exact expected entries.
            assert sorted(zf.namelist()) == [
                'qutebrowser/html/README',
                'qutebrowser/html/subdir/',
                'qutebrowser/html/subdir/subdir-file.html',
                'qutebrowser/html/test1.html',
                'qutebrowser/html/test2.html',
                'qutebrowser/html/unrelatedhtml',
            ]

        yield zipfile.Path(zip_path) / 'qutebrowser'
Example #22
0
def get_md_links(folder):
    """Collect markdown links from all .md files under IDF_PATH/folder.

    Files whose path relative to IDF_PATH matches an entry in
    EXCLUDE_DOCS_LIST are skipped; purely local '#anchor' links are ignored.

    Returns a list of Link objects (file path, url).
    """
    MD_LINK_RE = r'\[.+?\]\((.+?)(#.+)?\)'

    idf_path = Path(os.getenv('IDF_PATH'))
    links = []

    for path in (idf_path / folder).rglob('*.md'):
        # Idiom: compute the relative path once and feed any() a generator
        # instead of building a throwaway list.
        rel = path.relative_to(idf_path)
        if any(rel.match(exclude_doc) for exclude_doc in EXCLUDE_DOCS_LIST):
            print('{} - excluded'.format(path))
            continue

        with path.open(encoding='utf8') as f:
            content = f.read()

        for url in re.findall(MD_LINK_RE, content):
            link = Link(path, url[0].lstrip())
            # Ignore "local" links
            if not link.url.startswith('#'):
                links.append(link)

    return links
Example #23
0
def get_job_files(uuid):
    """Get the list of output files. ``GET /task/{task_id}/files``

    :param uuid: task identifier
    :return: JSON response with list of files produced by the task.
    """
    job = documents.JobMetadata.find_one(mongo.slivkadb, uuid=uuid)
    if job is None:
        raise abort(404)
    # A pending job has produced nothing yet; return an empty list.
    if job.status == JobStatus.PENDING:
        return JsonResponse({'statuscode': 200, 'files': []}, status=200)

    service_conf = slivka.settings.get_service_configuration(job.service)
    work_dir = pathlib.Path(job.work_dir)
    # One OutputFile per glob match of each configured output pattern;
    # location is the path relative to TASKS_DIR, in posix form.
    output_files = [
        OutputFile(uuid='%s/%s' % (job.uuid, path.name),
                   title=path.name,
                   label=key,
                   location=path.relative_to(
                       slivka.settings.TASKS_DIR).as_posix(),
                   media_type=val.get('media-type'))
        for key, val in service_conf.execution_config['outputs'].items()
        for path in work_dir.glob(val['path'])
    ]

    return JsonResponse(
        {
            'statuscode':
            200,
            'files':
            [{
                'uuid': file.uuid,
                'title': file.title,
                'label': file.label,
                'mimetype': file.media_type,
                'URI': url_for('.get_file_metadata', uid=file.uuid),
                'contentURI': url_for('root.outputs', location=file.location)
            } for file in output_files]
        },
        status=200)
Example #24
0
def relative_to(path1: Path, path2: Path) -> Path:
    """Express path2 relative to path1, prefixing '../' for each level
    that must be climbed; falls back to the resolved path2 itself."""
    target = path2.resolve()
    anchor = path1
    prefix = ''
    fallback = target

    while True:
        try:
            tail = target.relative_to(anchor)
        except ValueError:
            # Not under this anchor: climb one level and note it.
            next_anchor = anchor.parent
            prefix += '../'
        else:
            return Path((prefix if prefix else './') + str(tail))

        if next_anchor == anchor:
            # Reached the filesystem root without a match.
            break

        anchor = next_anchor

    return fallback
Example #25
0
def refresh_last_update_metadata(logger, session, meta):
    """Update the ``last_update`` and ``status`` meta data value."""
    _ = logger  # NOTE(review): discards logger yet it is used below — leftover?
    # Get path in irods that corresponds to root and update the meta data there.
    path = pathlib.Path(meta["path"])
    root = pathlib.Path(meta["root"])
    target = pathlib.Path(meta["target"])
    logger.info("meta = %s" % meta)
    print("meta = %s" % meta)
    rel_root_path = path.relative_to(root)  # relative to root
    print("... %s" % rel_root_path)
    rel_folder_path = "/".join(
        str(rel_root_path).split("/")[1:])  # relative to run folder
    print("... %s" % rel_folder_path)
    # Strip the run-folder-relative suffix (plus separator) off the target
    # to get the collection that corresponds to the run folder root.
    root_target = str(target)[:-(len(str(rel_folder_path)) + 1)]
    print("... %s" % root_target)
    with cleanuping(session) as wrapped_session:
        coll = wrapped_session.collections.get(root_target)
        # Replace ``last_update`` and ``status`` meta data.
        coll.metadata[KEY_LAST_UPDATE] = iRODSMeta(
            KEY_LAST_UPDATE,
            datetime.datetime.now().isoformat(), "")
        coll.metadata[KEY_STATUS] = iRODSMeta(KEY_STATUS, "running", "")
Example #26
0
def shortPath(path):
    '''Returns a 'prettier' path: relative to the cwd rather than root'''
    try:
        return path.relative_to(Path.cwd())
    except ValueError:
        # relative_to raises ValueError when path is not under the cwd;
        # the old broad `except Exception` also hid unrelated errors.
        return path
Example #27
0
 def resolvePath(self, path):
     '''
     Returns the equivalent path, but in the temporary copy of the nuclide
     root.
     '''
     relative = path.relative_to(self.nuclidePath)
     return self._nuclideTranspilePath / relative
Example #28
0
def _group_out_path(group_num, path, out_dir, relative_to, fmt='%d'):
    path = pathlib.Path(path)
    out_dir = pathlib.Path(out_dir)
    group_name = fmt % (group_num, )
    return out_dir / group_name / path.relative_to(relative_to)
Example #29
0
 def reroot_path(self, path: Path) -> Path:
     "Submitters should translate paths to be relative to execution environment"
     local_root = Config['nanopore']['path']
     return Path(self.remote_root) / path.relative_to(local_root)
Example #30
0
 def list_templates(self):
     """Return the set of template file names (relative to their source
     base) found across all search paths."""
     names = set()
     for base, path in yield_all_sources(*self.search_paths):
         if path.is_symlink() or not path.is_file():
             continue
         if path.suffix != self.template_suffix:
             continue
         names.add(str(path.relative_to(base)))
     return names
Example #31
0
    def __init__(self, file_with_info):
        """Plan per-track conversion jobs for one input file.

        file_with_info is a (path, inputbase, args) tuple.  Reads the
        cuesheet, tags and picture, derives the output directory from the
        args (keepdirs / subdir / fatsafe), validates per-track metadata,
        and queues (self, track, outputfile) tuples in self.jobs; problems
        are collected as messages in self.badtracks.
        """
        path, inputbase, args = file_with_info
        self.path = path
        self.args = args

        self.cuesheet = cuesheet(path)
        self.tags = flactags(path)
        self.picture = picture(path)
        self.jobs = []

        if args.verbose:
            print("%s: %d tracks in file." % (path, self.cuesheet.lasttrack))

        outdir = args.outputdir
        if args.keepdirs:
            relative = path.relative_to(inputbase) if inputbase \
                       else path
            # If the fatsafe flag is specified, every component that we add
            # to the output path must be fatsafe
            if args.fatsafe:
                for part in relative.parent.parts:
                    outdir = outdir / fatsafe(part)
            else:
                outdir = outdir.joinpath(relative.parent)
        if args.subdir:
            if args.fatsafe:
                outdir = outdir / fatsafe(path.stem)
            else:
                outdir = outdir / path.stem

        self.badtracks = []
        for track in range(1, self.cuesheet.lasttrack + 1):
            if track not in self.cuesheet.tracks:
                self.badtracks.append(
                    "track %d not present" % track)
                continue
            title = self.tags.get_tag('TITLE', track)
            artist = self.tags.get_tag('ARTIST', track)
            if not title:
                self.badtracks.append("track %d is missing TITLE tag" % track)
                continue
            if not artist:
                self.badtracks.append("track %d is missing ARTIST tag" % track)
                continue
            if args.verbose:
                print("%02d: %s by %s" % (
                    track, title, artist))
            outfilename = "%02d %s (%s)" % (
                track, title, artist)
            outfilename = outfilename.replace(os.sep, '')
            if args.fatsafe:
                outfilename = fatsafe(outfilename)
            if args.max_filename_length:
                # This is somewhat clunky, but hopefully won't come up
                # too often!  The limit on filename length is on the
                # number of bytes the filename encodes to, but we can
                # only remove a whole number of characters
                while len(outfilename.encode('utf-8')) > args.max_filename_length:
                    outfilename = outfilename[:-1]
            outfilename = outfilename + ".mp3"
            outputfile = outdir / outfilename
            try:
                output_mtime = outputfile.stat().st_mtime
            # BUG FIX: a bare `except:` swallowed everything (including
            # KeyboardInterrupt); only filesystem errors are expected here.
            except OSError:
                output_mtime = 0
            if args.update:
                input_mtime = self.path.stat().st_mtime
                if output_mtime > input_mtime:
                    if args.verbose:
                        print("  - skipping %s because it is newer" % outputfile)
                    continue
            self.jobs.append((self, track, outputfile))
Example #32
0
 def list_templates(self):
     """Return the set of template names found under the search paths."""
     return {
         str(path.relative_to(base))
         for base, path in yield_all_sources(*self.search_paths)
         if not path.is_symlink() and path.is_file()
         and path.suffix == self.template_suffix}
    def cmd_make_dist(self) -> None:
        """Build distributable archives of the mod.

        Downloads dependency archives, reads ccmod.json for the mod id and
        version, collects mod files matching the configured glob patterns,
        then emits — in both tar.gz and zip form — a plain mod archive and a
        'quick-install' archive bundling the dependencies.
        """

        print("Downloading dependencies")

        def download_dependency(name: str) -> Path:
            # Download the configured archive once and cache it on disk.
            url = self.project.get_conf("dependencies", f"{name}_url")
            filename = self.project.get_conf("dependencies", f"{name}_file")
            download_path = self.project.download_dir / filename
            if not download_path.exists():
                downloaded_size = self.download_file(
                    name,
                    lambda: self.http_client.request(HTTPRequest(url)),
                    lambda: download_path.open("wb"),
                )
                print(
                    f"{name} downloaded - net {self.format_bytes(downloaded_size)}"
                )
            else:
                print(f"{name} already downloaded")
            return download_path

        localize_me_file = download_dependency("localize_me")
        ccloader_file = download_dependency("ccloader")
        ultimate_ui_file = download_dependency("ultimate_ui")

        print("Collecting metadata")

        with (self.project.root_dir / "ccmod.json").open(
                "r", encoding="utf8") as manifest_file:
            manifest = json.load(manifest_file)
            mod_id: str = manifest["id"]
            mod_version: str = manifest["version"]

        mod_files: list[Path] = []
        for pattern in self.project.get_conf("distributables",
                                             "mod_files_patterns",
                                             self.project.get_conf_list):
            mod_files.extend(
                path.relative_to(self.project.root_dir)
                for path in self.project.root_dir.glob(pattern))
        # Note that paths here are sorted as lists of their components and not as
        # strings, so the path separators will not be taken into account when
        # sorting.
        mod_files.sort()

        # Use the last commit date so archive mtimes are reproducible.
        commiter_time = int(
            subprocess.run(
                [
                    "git", "log", "--max-count=1", "--date=unix",
                    "--pretty=format:%cd"
                ],
                check=True,
                stdout=subprocess.PIPE,
                cwd=self.project.root_dir,
            ).stdout)

        print("Making packages")

        def archive_add_mod_files(archived_prefix: Path) -> None:
            print("Adding mod files")
            for file in mod_files:
                archive.add_real_file(
                    str(self.project.root_dir / file),
                    str(archived_prefix / file),
                    recursive=False,
                    mtime=commiter_time,
                )

        def archive_add_dependency(archived_prefix: Path,
                                   dependency_path: Path,
                                   strip_components: int) -> None:
            # Re-pack a dependency tar.gz into the output archive, dropping
            # the first strip_components path components of each entry.
            print(f"Adding files from {dependency_path.name}")
            with TarFile.gzopen(dependency_path) as dependency_archive:
                for file_info in dependency_archive:
                    archived_path = str(
                        Path(
                            archived_prefix,
                            *Path(file_info.name).parts[strip_components:],
                        ))
                    if file_info.isreg():
                        file_reader = dependency_archive.extractfile(file_info)
                        assert file_reader is not None
                        archive.add_file_entry(archived_path,
                                               file_reader.read(),
                                               mtime=file_info.mtime)
                    elif file_info.issym():
                        archive.add_symlink_entry(archived_path,
                                                  file_info.linkname,
                                                  mtime=file_info.mtime)
                    elif file_info.isdir():
                        # Directories are deliberately ignored because the previous setup
                        # didn't put them into resulting archives, and their entries are
                        # useless to us anyway.
                        pass
                    else:
                        # Other file types (character devices, block devices and named
                        # pipes) are UNIX-specific and can't be handled by Zip, but it's
                        # not like they are used in dependencies anyway. Correction: well,
                        # after checking APPNOTE.TXT section 4.5.7 I noticed that these
                        # exotic file types may be supported, but it's not like any modding
                        # projects use those.
                        pass

        all_archive_adapters: list[type[ArchiveAdapter]] = [
            TarGzArchiveAdapter, ZipArchiveAdapter
        ]
        for archive_cls in all_archive_adapters:

            archive_name = f"{mod_id}_v{mod_version}{archive_cls.DEFAULT_EXTENSION}"
            print(f"Making {archive_name}")
            with archive_cls.create(self.project.dist_archives_dir /
                                    archive_name) as archive:
                archive_add_mod_files(Path(mod_id))

            # TODO: Sort all files in quick install archives
            archive_name = f"{mod_id}_quick-install_v{mod_version}{archive_cls.DEFAULT_EXTENSION}"
            print(f"Making {archive_name}")
            with archive_cls.create(self.project.dist_archives_dir /
                                    archive_name) as archive:
                archive_add_mod_files(Path("assets", "mods", mod_id))
                archive_add_dependency(Path("assets", "mods", "Localize-me"),
                                       localize_me_file, 1)
                archive_add_dependency(Path("assets", "mods"),
                                       ultimate_ui_file, 0)
                archive_add_dependency(Path(), ccloader_file, 0)
Example #34
0
 def includes_iterator():
     """Yield every included file as a path relative to the search base."""
     for pattern in includes:
         for match in search_base.glob(pattern):
             # TODO(bhamiltoncx): Handle hidden files on Windows.
             if not match.is_file():
                 continue
             if include_dotfiles or not match.name.startswith('.'):
                 yield match.relative_to(search_base)
Example #35
0
 def __LoadTemplateFiles(self):
     """Collect the sorted relative paths of all template files."""
     found = [str(entry.relative_to(self.TemplateDir))
              for entry in self.TemplateDir.glob('*/**/*.*')]
     return sorted(found)