def _load_all_graphs(self, progress, trip_prog):
    '''
    Load every serialized context graph listed in the graphs index into the
    configured RDF store.

    Parameters
    ----------
    progress : progress-bar-like
        Counts contexts loaded; ``total`` is set to the number of index
        entries. Assumed tqdm-compatible (``update``/``write``/``total``)
        -- TODO confirm with callers.
    trip_prog : progress-bar-like
        Counts triples loaded.
    '''
    import transaction
    from rdflib import plugin
    from rdflib.parser import Parser, create_input_source
    idx_fname = pth_join(self.powdir, 'graphs', 'index')
    triples_read = 0
    if exists(idx_fname):
        dest = self._conf()['rdf.graph']
        with open(idx_fname) as index_file:
            # First pass: count lines so the progress bar has an accurate total
            cnt = 0
            for l in index_file:
                cnt += 1
            index_file.seek(0)
            progress.total = cnt
            # Load everything inside a single transaction
            with transaction.manager:
                for l in index_file:
                    # Each index line is '<file name> <context identifier>'
                    fname, ctx = l.strip().split(' ')
                    parser = plugin.get('nt', Parser)()
                    with open(pth_join(self.powdir, 'graphs', fname), 'rb') as f, \
                            _BatchAddGraph(dest.get_context(ctx), batchsize=4000) as g:
                        parser.parse(create_input_source(f), g)
                    progress.update(1)
                    triples_read += g.count
                    trip_prog.update(g.count)
                progress.write('Finalizing writes to database...')
    progress.write('Loaded {:,} triples'.format(triples_read))
def commit(self, message):
    """
    Write the graph to the local repository

    Serializes every context in the configured RDF graph to an N-Triples
    file under ``<powdir>/graphs``, writes an index mapping file names to
    context identifiers, and commits the result.

    Parameters
    ----------
    message : str
        commit message
    """
    from rdflib import plugin
    from rdflib.serializer import Serializer
    g = self._conf()['rdf.graph']
    repo = self.repository_provider
    repo.base = self.powdir
    graphs_base = pth_join(self.powdir, 'graphs')
    if exists(graphs_base):
        # Clear out the previous serialization so removed contexts don't linger
        repo.remove(['graphs'], recursive=True)
        shutil.rmtree(graphs_base)
    mkdir(graphs_base)
    files = []
    ctx_data = []
    for x in g.contexts():
        ident = x.identifier
        # Name each context file after the SHA-256 of its identifier
        hs = hashlib.sha256(ident.encode()).hexdigest()
        fname = pth_join(graphs_base, hs + '.nt')
        i = 1
        while exists(fname):
            # Disambiguate in the (unlikely) event of a hash collision.
            # BUG FIX: ``i`` is an int and must be converted with str()
            # before concatenation (the original raised TypeError here).
            fname = pth_join(graphs_base, hs + '-' + str(i) + '.nt')
            i += 1
        files.append(fname)
        # Serialize triples in sorted order so output is deterministic
        serializer = plugin.get('nt', Serializer)(sorted(x))
        with open(fname, 'wb') as gfile:
            serializer.serialize(gfile)
        ctx_data.append((relpath(fname, graphs_base), ident))
    index_fname = pth_join(graphs_base, 'index')
    with open(index_fname, 'w') as index_file:
        for l in sorted(ctx_data):
            print(*l, file=index_file)
    files.append(index_fname)
    if repo.is_dirty:
        # NOTE(review): if ``is_dirty`` is a method (as in GitPython), this
        # branch is always truthy -- confirm repository_provider exposes it
        # as a property
        repo.reset()
    repo.add([relpath(f, self.powdir) for f in files]
             + [relpath(self.config_file, self.powdir), 'graphs'])
    repo.commit(message)
def __call__(self, data_source):
    '''
    Load the data source. Calls `load`

    Parameters
    ----------
    data_source : .DataSource
        The data source to load files for

    Returns
    -------
    str
        A path to the loaded resource

    Raises
    ------
    LoadFailed
        If `load`:
        * throws an exception
        * doesn't return anything
        * returns a path that isn't under `base_directory`
        * returns a path that doesn't exist
    '''
    try:
        s = self.load(data_source)
    except LoadFailed:
        # Already a LoadFailed with a specific message -- propagate it
        # unchanged rather than replacing it with the generic 'Loader erred'
        raise
    except Exception as e:
        # Per the documented contract, any exception from `load` surfaces
        # as LoadFailed; keep the original as the cause for debugging
        raise LoadFailed(data_source, self, 'Loader erred') from e
    if not s:
        raise LoadFailed(data_source, self, 'Loader returned an empty string')

    # N.B.: This logic is NOT intended as a security measure against
    # directory traversal: it is only to make the interface both flexible
    # and unambiguous for implementers

    # Relative paths are allowed
    if not isabs(s):
        s = pth_join(self.base_directory, s)

    # Make sure the loader isn't doing some nonsense with symlinks or
    # non-portable paths
    rpath = realpath(s)
    if not rpath.startswith(self.base_directory):
        msg = 'Loader returned a file path, "{}",' \
            ' outside of the base directory, "{}"'.format(rpath, self.base_directory)
        raise LoadFailed(data_source, self, msg)

    if not exists(rpath):
        msg = 'Loader returned a non-existant file {}'.format(rpath)
        raise LoadFailed(data_source, self, msg)

    if not isdir(rpath):
        msg = 'Loader did not return a directory, but returned {}'.format(
            rpath)
        raise LoadFailed(data_source, self, msg)

    return rpath
def cm():
    # Yield a csv.reader over the source's CSV file, optionally skipping
    # the header row. ``source``, ``params``, and ``skipheader`` come from
    # the enclosing scope.
    data_path = pth_join(source.basedir(), source.csv_file_name.one())
    with open(data_path) as csvfile:
        rows = csv.reader(csvfile, **params)
        if skipheader:
            next(rows)
        yield rows
def _conf(self):
    # Return the cached Data configuration, rebuilding it whenever there is
    # no cache or the config file path has changed since it was cached.
    from PyOpenWorm.data import Data
    cached = getattr(self, '_dat', None)
    if cached and self._dat_file == self.config_file:
        return cached
    conf = Data.open(self.config_file)
    store_conf = conf.get('rdf.store_conf', None)
    # A relative store path is resolved against the base directory so it
    # works regardless of the current working directory
    if store_conf and not isabs(store_conf):
        conf['rdf.store_conf'] = abspath(pth_join(self.basedir, store_conf))
    conf.init_database()
    self._dat = conf
    self._dat_file = self.config_file
    return conf
def _conf(self):
    '''
    Return the `Data` configuration for this repository, creating and
    caching it on first use or when `config_file` has changed.
    '''
    from PyOpenWorm.data import Data
    dat = getattr(self, '_dat', None)
    # Rebuild when nothing is cached or the config file path has changed
    if not dat or self._dat_file != self.config_file:
        dat = Data.open(self.config_file)
        stored_conf = dat.get('rdf.store_conf', None)
        # Resolve a relative store path against the base directory so the
        # store is found regardless of the current working directory
        if stored_conf and not isabs(stored_conf):
            dat['rdf.store_conf'] = abspath(
                    pth_join(self.basedir, stored_conf))
        dat.init_database()
        self._dat = dat
        self._dat_file = self.config_file
    return dat
def __call__(self, ident):
    '''
    Load the data source

    Parameters
    ----------
    ident : str
        The identifier of the data source to load data for

    Returns
    -------
    A path to the loaded resource

    Raises
    ------
    LoadFailed
    '''
    # ``load`` implementations receive a plain string for uniformity:
    # types that tag or "enhance" a string conventionally keep the base
    # string representation as their __str__
    result = self.load(str(ident))
    if not result:
        raise LoadFailed(ident, self, 'Loader returned an empty string')

    # Relative results are resolved against the base directory. (Not a
    # security measure against directory traversal -- only disambiguation
    # for implementers.)
    candidate = result if isabs(result) else pth_join(self._basedir, result)

    # Resolve symlinks/non-portable paths before validating
    rpath = realpath(candidate)
    if not rpath.startswith(self._basedir):
        msg = 'Loader returned a file path outside of the base directory, {}'.format(
                self._basedir)
        raise LoadFailed(ident, self, msg)
    if not exists(rpath):
        msg = 'Loader returned a non-existant file {}'.format(rpath)
        raise LoadFailed(ident, self, msg)
    if not isdir(rpath):
        msg = 'Loader did not return a directory, but returned {}'.format(rpath)
        raise LoadFailed(ident, self, msg)
    return rpath
def _default_config(self): return pth_join(self._package_path(), 'default.conf')
from os.path import join as pth_join
import subprocess

# Paths are relative to the repository root, where these tests are expected
# to be run
salib_cli = "./src/SALib/scripts/salib.py"
ishigami_fp = "./src/SALib/test_functions/params/Ishigami.txt"
test_data = pth_join('tests', 'data', 'test.txt')


def test_cli_entry():
    # Asking the CLI for help should not produce an OS error in the output
    cmd = 'python {cli} -h'.format(cli=salib_cli).split()
    result = subprocess.check_output(cmd)
    assert 'Errno' not in str(result), "Error occurred when trying to use CLI!"


def test_ff():
    # Fractional-factorial sampling should complete with no output
    cmd = "python {cli} sample ff -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
    result = subprocess.check_output(cmd)
    assert len(result) == 0, "Error occurred!"


def test_fast():
    # eFAST sampling should complete with no output
    cmd = "python {cli} sample fast_sampler -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
    result = subprocess.check_output(cmd)
    assert len(result) == 0, "Error occurred!"


def test_finite_diff():
    # NOTE(review): this definition appears truncated in this snippet --
    # the command is built but the chunk ends before it is executed
    cmd = "python {cli} sample finite_diff -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
def store_name(self):
    '''
    The file name of the database store
    '''
    # A relative store name is taken to live inside the .pow directory
    name = self._store_name
    return name if isabs(name) else pth_join(self.powdir, name)
def config_file(self):
    '''
    The config file name
    '''
    # A relative config file name is taken to live inside the .pow directory
    name = self._config_file
    return name if isabs(name) else pth_join(self.powdir, name)
def powdir(self):
    # The .pow directory; a relative value is resolved against the base
    # directory.
    path = self._powdir
    return path if isabs(path) else pth_join(self.basedir, path)
# -*- coding: utf-8 -*- from os import walk from os.path import sep, join as pth_join from setuptools import setup, Extension from setuptools.command.build_ext import build_ext from platform import system root_path = 'pyslvs' bfgs_path = pth_join(root_path, 'bfgs_solver') tinycadlib_path = pth_join(root_path, 'tinycadlib') macros = [('_USE_MATH_DEFINES', None)] compile_args_msvc = ['/O2', '/std:c++17'] compile_args = ['-Wno-cpp', '-std=c++17'] link_args = [] link_args_msvc = [] link_args_static = [ '-static-libgcc', '-static-libstdc++', '-Wl,-Bstatic,--whole-archive', '-lwinpthread', '-Wl,--no-whole-archive', ] if system() == 'Windows': # Disable format warning compile_args.append('-Wno-format') # Disable NumPy warning macros.append(('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')) # Special modules mixed with C++ (language must be unified) ext_modules = [ Extension(root_path.replace(sep, '.') + '.bfgs', [
import subprocess
import sys
from SALib.test_functions import Ishigami
import numpy as np
import pandas as pd
from io import StringIO
import re
import os
from os.path import join as pth_join

# Paths relative to the repository root, where the tests are expected to run
salib_cli = "./src/SALib/scripts/salib.py"
ishigami_fp = "./src/SALib/test_functions/params/Ishigami.txt"
test_dir = 'tests/data'
input_file = 'model_input.txt'
output_file = 'model_output.txt'
input_path = pth_join(test_dir, input_file)
output_path = pth_join(test_dir, output_file)

# BUG FIX: ``sys`` was used below without being imported, which raised
# NameError at import time.
# Python 2 has no subprocess.run; fall back to subprocess.call there
if sys.version_info[0] == 2:
    subprocess.run = subprocess.call


def teardown_function(func):
    # Removes the test file if it was created.
    files = os.listdir(test_dir)
    if input_file in files:
        os.remove(input_path)
    if output_file in files:
        os.remove(output_path)
from os.path import join as pth_join
import subprocess

# Location of the CLI script and the sample problem definition, relative to
# the repository root
salib_cli = "./src/SALib/scripts/salib.py"
ishigami_fp = "./src/SALib/test_functions/params/Ishigami.txt"
test_data = pth_join('tests', 'data', 'test.txt')


def test_cli_entry():
    # Asking for help should not surface an OS error in the output
    args = 'python {cli} -h'.format(cli=salib_cli).split()
    output = subprocess.check_output(args)
    assert 'Errno' not in str(output), "Error occurred when trying to use CLI!"


def test_ff():
    # Fractional-factorial sampling should complete silently
    args = "python {cli} sample ff -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
    output = subprocess.check_output(args)
    assert len(output) == 0, "Error occurred!"


def test_fast():
    # eFAST sampling should complete silently
    args = "python {cli} sample fast_sampler -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
    output = subprocess.check_output(args)
    assert len(output) == 0, "Error occurred!"
def read(path: str):
    # Return the full text of ``path`` decoded as UTF-8
    with open(path, 'r', encoding='utf-8') as f:
        return f.read()


def find_version(path: str):
    # Extract the ``__version__`` string literal from the module at ``path``
    m = search(r"^__version__ = ['\"]([^'\"]*)['\"]", read(path), MULTILINE)
    if m:
        return m.group(1)
    raise RuntimeError("Unable to find version string.")


# Source tree layout for the native extension modules
src_path = 'pyslvs'
graph_path = pth_join(src_path, 'graph')
bfgs_path = pth_join(src_path, 'bfgs_solver')
tinycadlib_path = pth_join(src_path, 'tinycadlib')
metaheuristics_path = pth_join(src_path, 'metaheuristics')
macros = [('_USE_MATH_DEFINES', None)]
compile_args_msvc = ['/O2', '/std:c++17']  # MSVC disabled OpenMP
compile_args = ['-Wno-cpp', '-std=c++17', '-fopenmp']
link_args = ['-fopenmp']
link_args_msvc = []
# Flags for statically linking the GCC/OpenMP/winpthread runtimes
# NOTE(review): this list appears truncated in this snippet (no closing bracket)
link_args_static = [
    '-static-libgcc',
    '-static-libstdc++',
    '-Wl,-Bstatic,--whole-archive',
    '-lwinpthread',
    '-lgomp',
    '-Wl,--no-whole-archive',
import os
import logging
import uuid
from os.path import join as pth_join

LOGGER = logging.getLogger(__name__)
# Library code should not emit log records unless the application
# configures handlers
LOGGER.addHandler(logging.NullHandler())

BASE_SCHEMA_URL = 'http://schema.openworm.org/2020/07'
BASE_DATA_URL = 'http://data.openworm.org'

# The c extensions are incompatible with our code...
os.environ['WRAPT_DISABLE_EXTENSIONS'] = '1'

# NOTE(review): the '~' is not expanded here -- presumably consumers apply
# os.path.expanduser; confirm before using this value as a real path
OWMETA_PROFILE_DIR = os.environ.get('OWMETA_PROFILE_DIR', pth_join('~', '.owmeta'))
'''
Base directory in the user's profile for owmeta (e.g., shared configuration, bundle cache)
'''

from .configure import Configurable
from .context import Context, ClassContext

__all__ = [
    "get_data",
    "disconnect",
    "connect",
    "Configurable",
]

DEF_CTX = Context()
from setuptools import setup, find_packages


def read(path: str):
    # Return the full text of the file at ``path``
    # NOTE(review): no encoding is specified here, unlike sibling setup
    # scripts that read as UTF-8 -- confirm this is intended
    with open(path, 'r') as f:
        return f.read()


def find_version(path: str):
    # Extract the ``__version__`` string literal from the module at ``path``
    m = search(r"^__version__ = ['\"]([^'\"]*)['\"]", read(path), MULTILINE)
    if m:
        return m.group(1)
    raise RuntimeError("Unable to find version string.")


version = find_version(pth_join('pyslvs', 'pyslvs', '__init__.py'))

# NOTE(review): this call appears truncated in this snippet
setup(
    name='pyslvs_ui',
    version=version,
    author=__author__,
    author_email=__email__,
    license=__license__,
    description=
    "An open source planar linkage mechanism simulation and mechanical synthesis system.",
    long_description=read("README.md"),
    long_description_content_type='text/markdown',
    url="https://github.com/KmolYuan/Pyslvs-UI",
    packages=find_packages(),
    package_data={'pyslvs_ui': ['py.typed']},
    entry_points={'console_scripts': ['pyslvs=pyslvs_ui.__main__:main']},
    zip_safe=False,
def read(*parts):
    # Read and return the contents of the file located at ``here``/parts.
    file_path = pth_join(here, *parts)
    with codecs.open(file_path, 'r') as fh:
        return fh.read()
import os
from os.path import join as pth_join
import subprocess

# Location of the CLI script and the sample problem definition, relative to
# the repository root
salib_cli = "./src/SALib/scripts/salib.py"
ishigami_fp = "./src/SALib/test_functions/params/Ishigami.txt"
test_file = 'test.txt'
test_data = pth_join('./tests', 'data', test_file)


def teardown_function(func):
    # Removes the test file if it was created.
    if test_file in os.listdir('./tests/data'):
        os.remove(test_data)


def test_cli_entry():
    # Asking for help should not surface an OS error in the output
    args = 'python {cli} -h'.format(cli=salib_cli).split()
    output = subprocess.check_output(args)
    assert 'Errno' not in str(output), "Error occurred when trying to use CLI!"


def test_ff():
    # Fractional-factorial sampling should complete silently
    args = "python {cli} sample ff -p {fn} -o {test_data} -n 100".format(
        cli=salib_cli, fn=ishigami_fp, test_data=test_data).split()
    output = subprocess.check_output(args)
    assert len(output) == 0, "Error occurred!"
def read(path: str):
    # Return the full text of the file at ``path``
    with open(path, 'r') as f:
        return f.read()


def find_version(path: str):
    # Extract the ``__version__`` string literal from the module at ``path``
    m = search(r"^__version__ = ['\"]([^'\"]*)['\"]", read(path), MULTILINE)
    if m:
        return m.group(1)
    raise RuntimeError("Unable to find version string.")


# NOTE(review): this call appears truncated in this snippet
setup(
    name='apimd',
    version=find_version(pth_join('apimd', '__init__.py')),
    author=__author__,
    author_email=__email__,
    license=__license__,
    description="A Python API compiler for universal Markdown syntax.",
    long_description=read("README.md"),
    long_description_content_type='text/markdown',
    url="https://github.com/KmolYuan/apimd",
    packages=find_packages(),
    package_data={'apimd': ['py.typed']},
    entry_points={'console_scripts': ['apimd=apimd.__main__:main']},
    zip_safe=False,
    python_requires=">=3.7",
    options={'bdist_wheel': {'python_tag': 'cp37.cp38'}},
    classifiers=[
        "Programming Language :: Python :: 3.7",