Example #1
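# This excerpt assumes module-level imports not shown here, presumably:
# import os, sys
# from importlib import metadata, resources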
def _set_environ_for_briefcase():
    package = sys.modules["__main__"].__package__
    if package and "Briefcase-Version" in metadata.metadata(package):
        if sys.platform == "linux":
            # Use bundled tesseract binary
            with resources.as_file(resources.files("normcap")) as normcap_path:
                tesseract_path = normcap_path.parent.parent / "bin" / "tesseract"
                os.environ["TESSERACT_CMD"] = str(tesseract_path.resolve())

        if sys.platform == "darwin":
            # Use bundled tesseract binary
            with resources.as_file(resources.files("normcap")) as normcap_path:
                tesseract_path = (
                    normcap_path.parent.parent / "app_packages" / "tesseract"
                )
                os.environ["TESSERACT_CMD"] = str(tesseract_path.resolve())

        elif sys.platform == "win32":
            with resources.as_file(
                resources.files("normcap.resources")
            ) as resource_path:
                # Add openssl shipped with briefcase package to path
                openssl_path = resource_path / "openssl"
                os.environ["PATH"] += os.pathsep + str(openssl_path.resolve())

                # Use bundled tesseract binary
                tesseract_path = resource_path / "tesseract" / "tesseract.exe"
                os.environ["TESSERACT_CMD"] = str(tesseract_path.resolve())
                os.environ["TESSERACT_VERSION"] = "5.0.0"
Example #2
 def test_entered_path_does_not_keep_open(self):
     # This is what certifi does on import to make its bundle
     # available for the process duration.
     c = resources.as_file(resources.files('ziptestdata') /
                           'binary.file').__enter__()
     self.zip_path.unlink()
     del c
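The comment above describes the certifi pattern: enter the context manager once at import time and keep the extracted file around for the life of the process. A minimal sketch of doing that safely, with contextlib.ExitStack plus atexit owning the cleanup (package and file names are illustrative):

import atexit
from contextlib import ExitStack
from importlib import resources

_file_manager = ExitStack()
atexit.register(_file_manager.close)  # remove any temporary extraction at exit
BUNDLE_PATH = _file_manager.enter_context(
    resources.as_file(resources.files('ziptestdata') / 'binary.file'))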
Example #3
 def test_natural_path(self):
     # Guarantee the internal implementation detail that
     # file-system-backed resources do not get the tempdir
     # treatment.
     target = resources.files(self.data) / 'utf-8.file'
     with resources.as_file(target) as path:
         assert 'data' in str(path)
Example #4
    def sortldml(self, ldml, infile, alt='', depth=15):
        with pkg_data.joinpath(infile).open(encoding='utf-8') as f:
            indata = f.readlines()
        handler = palaso.collation.tailor.LDMLHandler()
        ldmlparser = xml.sax.make_parser()
        ldmlparser.setContentHandler(handler)
        ldmlparser.setFeature(xml.sax.handler.feature_namespaces, 1)
        # ldmlparser.setFeature(xml.sax.handler.feature_namespace_prefixes, 1)
        with resources.as_file(pkg_data / ldml) as path:
            ldmlparser.parse(path)
        collation = handler.collations[0]
        for c in handler.collations:
            if c.type == alt:
                collation = c
                break

        tailor = collation.asICU()
        outdata = palaso.collation.icu.sorted(tailor,
                                              indata,
                                              level=depth,
                                              preproc=collation.reorders)
        self.assertEqual(indata, outdata,
                         "preprocessed lists do not sort equally")

        collation.flattenOrders()
        errors = collation.testPrimaryMultiple()
        self.assertFalse(
            len(errors),
            "\n".join(f"Reset had multiple elements: {f!s}" for f in errors))
        tailor = collation.asICU()
        outdata = palaso.collation.icu.sorted(tailor, indata, level=depth)
        self.assertEqual(indata, outdata, "lists do not sort equally")
Example #5
 def runtest(self, fname, keys, output):
     with resources.as_file(pkg_data / fname) as path:
         k = kmfl(path)
     res = k.run_items(keysyms_items(keys))
     self.assertEqual(res, output,
                      ("Keying difference for: {0}\n"
                       "expected:\t{1!r}\n"
                       "     got:\t{2!r}\n").format(keys, output, res))
Example #6
    def runcoverage(self, fname, testfile):
        with resources.as_file(pkg_data / fname) as path:
            x = Coverage(path)
        with (pkg_data / testfile).open(encoding='utf-8') as f:
            indata = [line.rstrip() for line in f]

        res = list(x.coverage_test(mode='all'))
        self.assertEqual(res, indata, "coverage results are not the same")
Example #7
 def test_reading(self):
     # Path should be readable.
     # Test also implicitly verifies the returned object is a pathlib.Path
     # instance.
     target = resources.files(self.data) / 'utf-8.file'
     with resources.as_file(target) as path:
         self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
         # pathlib.Path.read_text() was introduced in Python 3.5.
         with path.open('r', encoding='utf-8') as file:
             text = file.read()
         self.assertEqual('Hello, UTF-8 world!\n', text)
Example #8
 def test_compilation(self):
     assert self.reference.endswith('.tec'), \
         'Not a compiled reference file (.tec).'
     with resources.as_file(pkg_data / self.reference) as path:
         ref_map = Mapping(path)  # type: ignore
     source = (pkg_data / self.source).read_bytes()
     com_map = compile(source, self.compress)
     self.assertEqual(str(com_map), str(ref_map))
     self.assertEqual(
         com_map, ref_map, f'compiled {self.source!r}'
         f' ({"" if self.compress else "un"}compressed)'
         f' does not match reference {self.reference!r}')
     del com_map
Example #9
 def analyze_words(self, words, format=None, disambiguate=False):
     """
     Analyze a single word or a (possibly nested) list of words. Return either a list of
     analyses (all possible analyses of the word) or a nested list of lists
     of analyses with the same depth as the original list.
     If format is None, the analyses are Wordform objects.
     If format == 'xml', the analyses for each word are united into an XML string.
     If format == 'json', the analyses are JSON objects (dictionaries).
     Perform CG3 disambiguation if disambiguate == True and CG3 is installed.
     """
     if disambiguate:
         with as_file(files(self.dirName) / 'meadow_mari_disambiguation.cg3') as cgFile:
             cgFilePath = str(cgFile)
             return super().analyze_words(words, format=format, disambiguate=True,
                                          cgFile=cgFilePath)
     return super().analyze_words(words, format=format, disambiguate=False)
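A hypothetical usage sketch for the method above; the class name is an assumption, since the excerpt only shows one method of an analyzer subclass:

# Assumes the package exposes an analyzer class, here called MeadowMariAnalyzer.
analyzer = MeadowMariAnalyzer()
analyses = analyzer.analyze_words(['word'], format='json', disambiguate=True)
for word_analyses in analyses:
    print(word_analyses)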
Example #10
    def test_file_resource(self):
        """ file resource """

        rm = ResourceManager()

        # Get the filename of the 'api.py' file.
        resource = files("envisage.resource") / "api.py"
        with as_file(resource) as path:

            # Open a file resource.
            f = rm.file(f"file://{path}")
            self.assertNotEqual(f, None)
            contents = f.read()
            f.close()

            # Open the api file via the file system.
            with open(path, "rb") as g:
                self.assertEqual(g.read(), contents)
Example #11
def load_scene(scene_files, keypath_overrides=None, paths=None):
    # Avoid a shared mutable default: the list would accumulate paths across calls.
    paths = list(paths) if paths is not None else []
    # The context manager is entered by hand here and exited at the end of the
    # function, so the extracted path stays valid for the whole load.
    default_scene_path_resource = as_file(files("gwpv") / "scene_overrides")
    default_scene_path = default_scene_path_resource.__enter__()
    if default_scene_path not in paths:
        paths.append(default_scene_path)
    composition = load_composition(scene_files, paths)
    logger.debug(f"Loading scene composition: {composition}")
    scene = None
    for scene_file in composition:
        logger.debug(f"Loading scene file: '{scene_file}'")
        found_scene_file = find_scene_file(scene_file, paths)
        logger.debug(f"Found scene file: '{found_scene_file}'")
        with open(found_scene_file, "r") as scene_yaml:
            scene_from_file = yaml.safe_load(scene_yaml)
        if scene is None:
            if "Base" in scene_from_file:
                base = scene_from_file["Base"]
                del scene_from_file["Base"]
                logger.debug(f"Loading base: {base}")
                scene = load_scene(base,
                                   paths=paths +
                                   [os.path.dirname(found_scene_file)])
                logger.debug(f"Applying partial override to base {base}:"
                             f" {scene_from_file}")
                apply_partial_overrides(scene, scene_from_file)
                logger.debug(f"Overridden base {base}: {scene}")
            else:
                scene = scene_from_file
        else:
            if "Base" in scene_from_file:
                logger.debug(
                    f"Ignoring base {scene_from_file['Base']} in nested"
                    f" override '{scene_file}'.")
                del scene_from_file["Base"]
            logger.debug(f"Applying partial override: {scene_from_file}")
            apply_partial_overrides(scene, scene_from_file)
            logger.debug(f"Overriden scene: {scene}")
    if keypath_overrides is not None:
        logger.debug(f"Applying keypath overrides: {keypath_overrides}")
        apply_keypath_overrides(scene, keypath_overrides)
        logger.debug(f"Overriden scene: {scene}")
    apply_defaults(scene)
    default_scene_path_resource.__exit__(None, None, None)
    return scene
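Because load_scene enters and exits the context manager by hand, the temporary path would not be released if an exception escaped in between. A sketch of the same setup with the context manager honored (body elided):

def load_scene(scene_files, keypath_overrides=None, paths=None):
    paths = list(paths) if paths is not None else []
    with as_file(files("gwpv") / "scene_overrides") as default_scene_path:
        if default_scene_path not in paths:
            paths.append(default_scene_path)
        # ... compose, override, and return the scene while the path is valid ...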
Example #12
 def __init__(self, mode='strict', verbose_grammar=False):
     """
     Initialize the analyzer by reading the grammar files.
     If mode=='strict' (default), load the data as is.
     If mode=='nodiacritics', load the data for (possibly) diacriticless texts.
     """
     super().__init__(verbose_grammar=verbose_grammar)
     self.mode = mode
     if mode not in ('strict', 'nodiacritics'):
         return
     self.dirName = 'uniparser_erzya.data_' + mode
     with as_file(files(self.dirName) / 'paradigms.txt') as self.paradigmFile,\
          as_file(files(self.dirName) / 'lexemes.txt') as self.lexFile,\
          as_file(files(self.dirName) / 'lex_rules.txt') as self.lexRulesFile,\
          as_file(files(self.dirName) / 'derivations.txt') as self.derivFile,\
          as_file(files(self.dirName) / 'stem_conversions.txt') as self.conversionFile,\
          as_file(files(self.dirName) / 'clitics.txt') as self.cliticFile,\
          as_file(files(self.dirName) / 'bad_analyses.txt') as self.delAnaFile:
         self.load_grammar()
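Seven chained as_file calls make for a brittle with statement. A sketch of the same loading step using contextlib.ExitStack; the helper name is hypothetical, the attribute names are kept from the original:

import contextlib
from importlib.resources import as_file, files

def _load_grammar_files(self):
    names = ('paradigms.txt', 'lexemes.txt', 'lex_rules.txt', 'derivations.txt',
             'stem_conversions.txt', 'clitics.txt', 'bad_analyses.txt')
    attrs = ('paradigmFile', 'lexFile', 'lexRulesFile', 'derivFile',
             'conversionFile', 'cliticFile', 'delAnaFile')
    with contextlib.ExitStack() as stack:
        for name, attr in zip(names, attrs):
            setattr(self, attr,
                    stack.enter_context(as_file(files(self.dirName) / name)))
        self.load_grammar()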
Example #13
    def __new__(
        cls,
        package: Union[str, types.ModuleType],
        filename: str,
    ) -> '_Resource':

        # the "type: ignore" comments work around a known mypy issue
        # https://github.com/python/mypy/issues/1021
        try:
            ref = files(package) / filename
        except ModuleNotFoundError:
            rsrc = super().__new__(cls, f'{package}: {filename}')
            rsrc.module_not_found = True
            return rsrc

        file_manager = contextlib.ExitStack()
        rsrc = super().__new__(
            cls,
            file_manager.enter_context(as_file(ref)),
        )
        rsrc.file_manager = file_manager
        return rsrc
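A hypothetical usage sketch, assuming _Resource subclasses str (its calls to str.__new__ suggest so) and that the caller closes file_manager when done:

rsrc = _Resource('somepkg', 'data.bin')  # hypothetical package and file names
if getattr(rsrc, 'module_not_found', False):
    print('package not importable:', rsrc)
else:
    with open(rsrc, 'rb') as f:  # the instance doubles as a path string
        header = f.read(4)
    rsrc.file_manager.close()  # releases any temporary extraction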
Example #14
 def _fixture_on_path(self, filename):
     pkg_file = resources.files(self.root).joinpath(filename)
     file = self.resources.enter_context(resources.as_file(pkg_file))
     assert file.name.startswith('example-'), file.name
     sys.path.insert(0, str(file))
     self.resources.callback(sys.path.pop, 0)
Example #15
 def test_remove_in_context_manager(self):
     # It is not an error if the file that was temporarily stashed on the
     # file system is removed inside the `with` stanza.
     target = resources.files(self.data) / 'utf-8.file'
     with resources.as_file(target) as path:
         path.unlink()
Example #16
 def execute(self, package, path):
     with resources.as_file(resources.files(package).joinpath(path)):
         pass
Example #17
 def test_as_file_does_not_keep_open(self):  # pragma: no cover
     c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
     self.zip_path.unlink()
     del c
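Together with Example #2, this test pins down the same behavior: as_file returns a lazy context manager, and no extraction happens until it is entered. In ordinary use that looks like:

cm = resources.as_file(resources.files('ziptestdata') / 'binary.file')
# Nothing has been extracted yet.
with cm as path:
    data = path.read_bytes()  # extraction (if any) happened on __enter__
# Any temporary copy is removed again on __exit__.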
Example #18
def main(url, baseline, verbose=False):
    """Scan URL's cipher suites and compares it to the BASELINE.

    Cipher suites are retrieved with the testssl.sh shell script
    (https://github.com/drwetter/testssl.sh)

    Examples:
        sslcompare https://mytargetsite.com -b /path/to/baseline.yaml
        sslcompare 127.0.0.1:8080
    """
    strip_ansi = partial(re.compile(r"\x1b\[\d*m").sub, "")

    with open(baseline) as baseline_path:
        baseline_suites = yaml.safe_load(baseline_path)

    with resources.as_file(resources.files("sslcompare")) as sslcompare_path:
        with subprocess.Popen(
                shlex.split(
                    f"{sslcompare_path / 'testssl.sh/testssl.sh'} -E -U {url}"
                ),
                bufsize=1,
                universal_newlines=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
        ) as testssl:

            current_protocol = None
            interesting_lines = False

            for line in testssl.stdout:
                line = line.rstrip()

                if "Start" in line:
                    click.echo(line)

                elif "Done" in line:
                    click.echo(line)
                    break

                elif "Testing" in line:
                    if not (verbose or interesting_lines):
                        click.echo("")

                    click.echo(line)
                    interesting_lines = True

                elif "Cipher Suite Name (IANA/RFC)" in line:
                    click.echo(
                        f" {'Cipher Suite Name (IANA/RFC)':41} Evaluation")

                elif strip_ansi(line) in [
                        "SSLv2",
                        "SSLv3",
                        "TLS 1",
                        "TLS 1.1",
                        "TLS 1.2",
                        "TLS 1.3",
                ]:
                    current_protocol = strip_ansi(line)
                    click.echo(line)
                    continue

                elif current_protocol is not None and line != "":
                    if line == " -":
                        if current_protocol in [
                                "SSLv2",
                                "SSLv3",
                                "TLS 1",
                                "TLS 1.1",
                        ]:
                            click.echo(
                                f" {'NOT OFFERED':<41} " +
                                click.style("[RECOMMENDED]", fg="green"))
                        else:
                            click.echo(f" {'NOT OFFERED':<41}")

                    else:
                        cipher_suite = line.split()[-1]

                        try:
                            click.echo(f" {cipher_suite:<41} " + click.style(
                                **baseline_suites[current_protocol]
                                [cipher_suite]))

                        except KeyError:
                            click.echo(f" {cipher_suite: <41} " +
                                       click.style("[DEPRECATED ]", fg="red"))

                elif interesting_lines or verbose:
                    click.echo(line)
Example #19
    def _generate_files_in_dir(self, destination_dir: str) -> None:
        if self.verbose >= 3:
            print(f'  Constructing files into temp dir: {destination_dir}')

        # generate syntax file
        with pkg_resources.path(resources, 'sublime-syntax.yaml') as fp:
            with open(fp, 'r') as syntax_file:
                try:
                    syntax_dict = yaml.safe_load(syntax_file)
                except yaml.YAMLError as exc:
                    sys.exit(f'ERROR: {exc}')

        # handle instructions
        syntax_dict['file_extensions'] = [self.code_extension]
        syntax_dict['contexts']['instructions'][0][
            'match'] = self._replace_token_with_regex_list(
                syntax_dict['contexts']['instructions'][0]['match'],
                '##INSTRUCTIONS##', self.model.instruction_mnemonics)

        # handle registers
        if len(self.model.registers) > 0:
            # update the registers syntax
            syntax_dict['contexts']['registers'][0][
                'match'] = self._replace_token_with_regex_list(
                    syntax_dict['contexts']['registers'][0]['match'],
                    '##REGISTERS##', self.model.registers)
        else:
            # remove the registers syntax
            del syntax_dict['contexts']['registers']

        # handle compiler predefined labels
        predefined_labels = self.model.predefined_labels
        if len(predefined_labels) > 0:
            # update the compiler labels syntax
            syntax_dict['contexts']['compiler_labels'][0][
                'match'] = self._replace_token_with_regex_list(
                    syntax_dict['contexts']['compiler_labels'][0]['match'],
                    '##COMPILERCONSTANTS##',
                    predefined_labels,
                )
        else:
            # remove the compiler labels syntax
            del syntax_dict['contexts']['compiler_labels']

        # compiler directives
        directives_regex = '|'.join(
            ['\\.' + d for d in COMPILER_DIRECTIVES_SET])
        directives_str = syntax_dict['contexts']['compiler_directives'][0][
            'match']
        syntax_dict['contexts']['compiler_directives'][0]['match'] = \
            directives_str.replace('##DIRECTIVES##', directives_regex)

        # data types
        datatypes_regex = '|'.join(
            ['\\.' + d for d in BYTECODE_DIRECTIVES_SET])
        datatypes_str = syntax_dict['contexts']['data_types_directives'][0][
            'match']
        syntax_dict['contexts']['data_types_directives'][0][
            'match'] = datatypes_str.replace('##DATATYPES##', datatypes_regex)

        # preprocessor directives
        preprocessor_regex = '|'.join(PREPROCESSOR_DIRECTIVES_SET)
        updated = False
        for rule in syntax_dict['contexts']['preprocessor_directives'][0][
                'push']:
            if 'match' in rule and '##PREPROCESSOR##' in rule['match']:
                preprocessor_str = rule['match']
                rule['match'] = preprocessor_str.replace(
                    '##PREPROCESSOR##', preprocessor_regex)
                updated = True
                break
        if not updated:
            sys.exit(
                'ERROR - INTERNAL - did not find correct preprocessor rule for Sublime syntax file.'
            )

        # save syntax file
        syntax_fp = os.path.join(destination_dir,
                                 self.language_name + '.sublime-syntax')
        with open(syntax_fp, 'w', encoding='utf-8') as f:
            yaml.dump(syntax_dict, f)
        # now reinsert the YAML prefix; this is required due to an oddity in
        # Sublime's package loading. I don't know a better way to do this.
        with open(syntax_fp, "r") as f:
            file_txt = f.read()
        updated_file_txt = '%YAML 1.2\n---\n' + file_txt
        with open(syntax_fp, "w") as f:
            f.write(updated_file_txt)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(syntax_fp)}')

        # copy color files over
        color_scheme_fp = os.path.join(
            destination_dir, self.language_name + '.sublime-color-scheme')
        with pkg_resources.path(resources, 'sublime-color-scheme.json') as fp:
            shutil.copy(str(fp), color_scheme_fp)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(color_scheme_fp)}')

        # copy keymap files over
        keymap_fp = os.path.join(destination_dir, 'Default.sublime-keymap')
        with pkg_resources.path(resources, 'sublime-keymap.json') as fp:
            shutil.copy(str(fp), keymap_fp)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(keymap_fp)}')

        # copy all snippet, macro, and preference files
        for filename in pkg_resources.contents(resources):
            if filename.endswith('.sublime-snippet.xml'):
                file_obj = pkg_resources.files(resources).joinpath(filename)
                with pkg_resources.as_file(file_obj) as fp:
                    snippet_name = filename.partition('.')[0]
                    snippet_fp = os.path.join(
                        destination_dir, self.language_name + '__' +
                        snippet_name + '.sublime-snippet')
                    shutil.copy(fp, snippet_fp)
                    if self.verbose > 1:
                        print(f'  generated {os.path.basename(snippet_fp)}')
            elif filename.endswith('.sublime-macro.json'):
                file_obj = pkg_resources.files(resources).joinpath(filename)
                with pkg_resources.as_file(file_obj) as fp:
                    macro_filename = filename.partition(
                        '.')[0] + '.sublime-macro'
                    macro_fp = os.path.join(destination_dir, macro_filename)
                    shutil.copy(fp, macro_fp)
                    if self.verbose > 1:
                        print(f'  generated {os.path.basename(macro_fp)}')
            elif filename.endswith('.tmPreferences.xml'):
                file_obj = pkg_resources.files(resources).joinpath(filename)
                with pkg_resources.as_file(file_obj) as fp:
                    pref_filename = filename.partition(
                        '.')[0] + '.tmPreferences'
                    pref_fp = os.path.join(destination_dir, pref_filename)
                    shutil.copy(fp, pref_fp)
                    if self.verbose > 1:
                        print(f'  generated {os.path.basename(pref_fp)}')
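Example #19 still reaches for the older resources.path() helper. A minimal sketch of the files()/as_file() equivalent for the first read, reusing the names from the snippet (yaml import assumed):

with pkg_resources.as_file(
        pkg_resources.files(resources) / 'sublime-syntax.yaml') as fp:
    with open(fp, 'r', encoding='utf-8') as syntax_file:
        syntax_dict = yaml.safe_load(syntax_file)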
Example #20
# to load plugins, but then we have to get the client proxy somehow. Here's an
# attempt using ParaView-internal functions:
# WaveformDataReader = pv._create_func("WaveformDataReader", pvserver.sources)
# WaveformToVolume = pv._create_func("WaveformToVolume", pvserver.filters)
logger.info("Loading ParaView plugins...")
plugins_dir = files("gwpv.paraview_plugins")
load_plugins = [
    "WaveformDataReader.py",
    "WaveformToVolume.py",
    "TrajectoryDataReader.py",
    "FollowTrajectory.py",
    "TrajectoryTail.py",
    # 'SwshGrid.py'
]
for plugin in load_plugins:
    with as_file(plugins_dir / plugin) as plugin_path:
        pv.LoadPlugin(str(plugin_path), remote=False, ns=globals())
logger.info("ParaView plugins loaded.")


def render_frames(
    scene,
    frames_dir=None,
    frame_window=None,
    render_missing_frames=False,
    save_state_to_file=None,
    no_render=False,
    show_preview=False,
    show_progress=False,
    job_id=None,
):
    ...  # body truncated in this excerpt
Example #21
try:
    import importlib.resources as importlib_resources
    from importlib.resources import files as importlib_resources_files
except ImportError:
    # Fall back to the `importlib_resources` backport on Python < 3.9,
    # where `importlib.resources.files` is not available.
    import importlib_resources
    from importlib_resources import files as importlib_resources_files

from contextlib import ExitStack
import atexit

# We use an exit stack, registered to close at interpreter exit, to clean up
# any temporarily extracted files.
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = importlib_resources_files('pygaps.data') / 'default.db'
DATABASE = file_manager.enter_context(importlib_resources.as_file(ref))

# Lists of pygaps data
MATERIAL_LIST = []
ADSORBATE_LIST = []


def load_data():
    """Will proceed with filling the data store."""

    from ..parsing.sqlite import adsorbates_from_db
    from ..parsing.sqlite import materials_from_db

    global MATERIAL_LIST
    global ADSORBATE_LIST
Example #22
    purge_old_versions,
    set_serial,
)
from .models import EntryPointGroup, OrphanWheel, Wheel, db
from .process import process_queue
from .pypi_api import PyPIAPI
from .scan import scan_changelog, scan_pypi

if sys.version_info >= (3, 9):
    from importlib.resources import as_file, files
else:
    from importlib_resources import as_file, files

log = logging.getLogger(__name__)

with as_file(files("wheelodex") / "data" / "entry_points.ini") as ep_path:
    # Violating the context manager like this means that wheelodex can't be run
    # from within a zipfile.
    ep_path = str(ep_path)
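
The comment above concedes that pulling the path out of the with block breaks zip installs. A sketch of a zip-safe variant, using the same ExitStack-at-exit idiom as Example #21:

import atexit
from contextlib import ExitStack

_file_manager = ExitStack()
atexit.register(_file_manager.close)
ep_path = str(_file_manager.enter_context(
    as_file(files("wheelodex") / "data" / "entry_points.ini")))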


# FlaskGroup causes all commands to be run inside an application context,
# thereby letting `db` do database operations.  This does require that
# `ctx.obj` be left untouched, though.
@click.group(cls=FlaskGroup, create_app=create_app)
@click.option(
    "-l",
    "--log-level",
    type=click.Choice(["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]),
    default="INFO",
    show_default=True,