import os
from ctypes import *

os.add_dll_directory(r'D:\django projects\hy\znmx_project-master\Debug')
dll = cdll.LoadLibrary('api.dll')

ret = dll.setAlarm(2, 0)
print(ret)
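
# A hedged variant of the example above (path and function names taken from it):
# the same load, but with a fallback for interpreters that lack
# os.add_dll_directory (Python < 3.8), where DLLs are still resolved via PATH.
import os
from ctypes import cdll

dll_dir = r'D:\django projects\hy\znmx_project-master\Debug'
if hasattr(os, 'add_dll_directory'):
    os.add_dll_directory(dll_dir)   # Python 3.8+ on Windows
else:
    os.environ['PATH'] = dll_dir + os.pathsep + os.environ.get('PATH', '')

dll = cdll.LoadLibrary('api.dll')
print(dll.setAlarm(2, 0))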
Example #2
#===============================================================================

import platform
if "Windows" in platform.system():
    import os
    import sys
    import site
    current_path = os.path.dirname(__file__)
    path_to_env = site.getsitepackages()[0]
    path_to_libs = os.path.join(path_to_env, "Library", "bin")
    path_to_oneapi_backend = os.path.join(current_path, "oneapi")
    if sys.version_info.minor >= 8:
        if 'DALROOT' in os.environ:
            dal_root_redist = os.path.join(os.environ['DALROOT'], "redist", "intel64")
            if os.path.exists(dal_root_redist):
                os.add_dll_directory(dal_root_redist)
                os.environ['PATH'] = dal_root_redist + os.pathsep + os.environ['PATH']
        os.add_dll_directory(path_to_libs)
        os.add_dll_directory(path_to_oneapi_backend)
    os.environ['PATH'] = path_to_libs + os.pathsep + os.environ['PATH']

try:
    from daal4py._daal4py import *
    from daal4py._daal4py import (
        _get__version__,
        _get__daal_link_version__,
        _get__daal_run_version__,
        __has_dist__)
except ImportError as e:
    s = str(e)
    if 'libfabric' in s:
Example #3
__version__ = '0.3.5'

# Windows support
import os
import sys

extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs')
if sys.platform == 'win32' and os.path.isdir(extra_dll_dir):
    if sys.version_info >= (3, 8):
        os.add_dll_directory(extra_dll_dir)
    else:
        os.environ.setdefault('PATH', '')
        os.environ['PATH'] += os.pathsep + extra_dll_dir

from wfnsympy.WFNSYMLIB import mainlib, overlap_mat
from wfnsympy.QSYMLIB import denslib, center_charge, build_density
from wfnsympy.errors import MultiplicityError, ChangedAxisWarning, LabelNotFound
from wfnsympy.optimize import minimize_axis, minimize_axis2, rotation_xy, rotation_axis
from itertools import combinations
import numpy as np

_bohr_to_angstrom = 0.529177249

# map each shell type code to its label and the number of functions per shell
shell_type_list = {
    '-1': ['sp', 4],
    '0': ['s', 1],
    '1': ['p', 3],
    '2': ['d', 6],
    '3': ['f', 10],
    '-2': ['d_', 5],  # pure
"""SnapshotRequestTemplateExample.py"""
from __future__ import print_function
from __future__ import absolute_import

import datetime
from optparse import OptionParser, OptionValueError

import os
import platform as plat
import sys
if sys.version_info >= (3, 8) and plat.system().lower() == "windows":
    # pylint: disable=no-member
    with os.add_dll_directory(os.getenv('BLPAPI_LIBDIR')):
        import blpapi
else:
    import blpapi


def authOptionCallback(option, opt, value, parser):
    """Parse authorization options from user input"""

    vals = value.split('=', 1)

    if value == "user":
        authUser = blpapi.AuthUser.createWithLogonName()
        authOptions = blpapi.AuthOptions.createWithUser(authUser)
    elif value == "none":
        authOptions = None
    elif vals[0] == "app" and len(vals) == 2:
        appName = vals[1]
        authOptions = blpapi.AuthOptions.createWithApp(appName)
Example #5
'''
Created on 31.08.2020

@author: Georg Maubach
'''

# Starting with Python 3.8, DLL directories have to be marked as trusted.
# Therefore, from 3.8.x on, uncomment the two statements below and
# set the path to the project directory correctly.
import os
os.add_dll_directory("D:\Selfcoding\WillmsBis040920\scara64anaconda")

import ctypes
Scara = ctypes.cdll.scara

from my_scara_funcs import dreheRechts
# to import all functions, use "*"

Scara.starte(b"UndRechtsHerum")

Scara.gehe()

dreheRechts()
Scara.gehe()
Scara.gehe()
dreheRechts()

Scara.gehe()

Scara.beende()
Example #6
# The aim of this project is to learn how to communicate with APIs by creating an app
# that plays my liked YouTube videos randomly in a tkinter GUI.
# Sadly, since this was the first project I built from scratch, I didn't consider how many
# URLs the YouTube API can return per request. It turns out it's only 50 per page.
import pafy
import os

os.add_dll_directory(
    r'C:\Program Files\VideoLAN\VLC')  #path to VLC installation folder
import yt
import vlc
import tkinter as tk


def play():
    url = yt.get_url()
    video = pafy.new(url)
    best = video.getbest()
    playurl = best.url
    Media = Instance.media_new(playurl)
    Media.get_mrl()
    player.set_media(Media)
    player.play()


def stop():
    player.stop()


Instance = vlc.Instance()
player = Instance.media_player_new()
root = tk.Tk()
tk.Button(root, text="play", command=play).pack()
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Starting from Python 3.8 DLL search policy has changed.
# We need to add path to CUDA DLLs explicitly.
import sys
import os

if os.name == 'nt':
    # Add the directory pointed to by the CUDA_PATH environment variable
    cuda_path = os.environ.get("CUDA_PATH")
    if cuda_path:
        os.add_dll_directory(cuda_path)
    else:
        print("CUDA_PATH environment variable is not set.", file=sys.stderr)
        print("Can't set CUDA DLLs search path.", file=sys.stderr)
        exit(1)

    # Add PATH as well for minor CUDA releases
    sys_path = os.environ.get("PATH")
    if sys_path:
        paths = sys_path.split(';')
        for path in paths:
            if os.path.isdir(path):
                os.add_dll_directory(path)
    else:
        print("PATH environment variable is not set.", file=sys.stderr)
        exit(1)
Example #8
import os, openpyxl, time, pafy, vimeo_dl, validators, datetime, moviepy.editor, threading

### If the VLC folder is already in your PATH, just comment this block out
try:
    os.add_dll_directory(
        openpyxl.load_workbook('PlayList.xlsx',
                               data_only=True)['config'].cell(1, 2).value)
except Exception:
    print('no config sheet found or no path to vlc found in excel cell (1,2)')
    exit()
###

import vlc

YOUTUBE_PREFIX = 'https://www.youtube.com/'
VIMEO_PREFIX = 'https://vimeo.com/'
PATH_TO_EXCEL_FILE = 'PlayList.xlsx'
VLC_INSTANCE = vlc.Instance(
    '--avcodec-hw=none --video-on-top --no-directx-hw-yuv')
MEDIA_PLAYER = VLC_INSTANCE.media_player_new()
DEFAULT_VLC_PATH = r'C:\Program Files (x86)\VideoLAN\VLC'
DEFAULT_STATIC_PICTURE = 'https://i.ytimg.com/vi/2vv9IxwFVJY/maxresdefault.jpg'
CONFIG_SHEET_NAME = 'config'
WAITING_TIME_FOR_PLAYER_OPENING = 1.5
DATE_TIME_COLUMN = 1
TITLE_COLUMN = 2
LOCATION_COLUMN = 3
CONFIG_TITLE_COLUMN = 1
CONFIG_VALUE_COLUMN = 2
FIVE_DAYS = 5
Example #9
#!/usr/bin/env python
import ctypes
import os
from ctypes import *

print("testing")
dllname = 'kernel32.dll'
dll = windll.LoadLibrary(dllname)
print("load dll", dll)
currpath = os.path.dirname(os.path.realpath(__file__))
print(currpath)
os.add_dll_directory(currpath)
dllname = "./native_lib.dll"
dll = windll.LoadLibrary(dllname)
print("load dll", dll)

a = dll.testprint(b"abcdefg\n")
print(type(a))
print(a)
ret = c_int(0)
param = c_int(199)

pt = ctypes.c_void_p

dll.create.restype = ctypes.c_void_p
pt = dll.create(param, pointer(ret))
print("create ret {},pt {}:type{}".format(ret.value, pt, type(pt)))
dll.call.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
ret = dll.call(pt, b"this is a test ")
print("call: ", ret)
dll.release.argtypes = [ctypes.c_void_p]
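
# Hypothetical continuation (the snippet is cut off here): the object created
# above would presumably be freed through the declared release() function.
dll.release(pt)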
Example #10
    def _add_dll_dir(self, path):
        if path not in self._cached_dll_dirs:
            self._cached_dll_dirs.append(path)
            self._cached_added_dll_dirs.append(os.add_dll_directory(path))
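
# For context, a minimal sketch of a class that this method could belong to.
# Only the attribute names _cached_dll_dirs and _cached_added_dll_dirs come from
# the fragment above; the class name and the close() helper are assumptions.
import os

class DllDirectoryCache:
    def __init__(self):
        self._cached_dll_dirs = []          # paths already registered
        self._cached_added_dll_dirs = []    # handles returned by os.add_dll_directory

    def _add_dll_dir(self, path):
        if path not in self._cached_dll_dirs:
            self._cached_dll_dirs.append(path)
            self._cached_added_dll_dirs.append(os.add_dll_directory(path))

    def close(self):
        # Remove the registered directories from the DLL search path again.
        for handle in self._cached_added_dll_dirs:
            handle.close()
        self._cached_dll_dirs.clear()
        self._cached_added_dll_dirs.clear()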
Example #11
    of audio data and output the data to the soundcard yourself.
    FluidSynth works on all major platforms, so pyFluidSynth should also.

================================================================================
"""

from ctypes import *
from ctypes.util import find_library
import os
# A short-circuited 'or' expression to find the FluidSynth library
# (mostly needed for Windows distributions of libfluidsynth supplied with QSynth)

# DLL search method changed in Python 3.8
# https://docs.python.org/3/library/os.html#os.add_dll_directory
if hasattr(os, 'add_dll_directory'):
    os.add_dll_directory(os.getcwd())

lib = find_library('fluidsynth') or \
    find_library('libfluidsynth') or \
    find_library('libfluidsynth-2') or \
    find_library('libfluidsynth-1')
    
if lib is None:
    raise ImportError("Couldn't find the FluidSynth library.")

# Dynamically link the FluidSynth library
# Architecture (32-/64-bit) must match your Python version
_fl = CDLL(lib)

# Helper function for declaring function prototypes
def cfunc(name, result, *args):
Example #12
            "CUDNN_HOME"] if "CUDNN_HOME" in os.environ else os.environ[
                cuda_env_variable]
        cudnn_bin_dir = os.path.join(cudnn_path, "bin")

        if not os.path.isfile(
                os.path.join(cudnn_bin_dir,
                             f"cudnn64_{version_info.cudnn_version}.dll")):
            raise ImportError(
                f"cuDNN {version_info.cudnn_version} not installed in {cudnn_bin_dir}. "
                f"Set the CUDNN_HOME environment variable to the path of the 'cuda' directory "
                f"in your CUDNN installation if necessary.")

        if sys.version_info >= (3, 8):
            # Python 3.8 (and later) doesn't search system PATH when loading DLLs, so the CUDA location needs to be
            # specified explicitly using the new API introduced in Python 3.8.
            os.add_dll_directory(cuda_bin_dir)
            cuda_root = os.path.join(cuda_bin_dir, "..", "..")
            for root, _, files in os.walk(cuda_root):
                for f in files:
                    if f == "cupti.lib":
                        os.add_dll_directory(root)
        else:
            # Python 3.7 (and earlier) searches directories listed in PATH variable.
            # Make sure that the target CUDA version is at the beginning (important if multiple CUDA versions are
            # installed on the machine.)
            os.environ["PATH"] = cuda_bin_dir + os.pathsep + os.environ["PATH"]

    if version_info.vs2019 and platform.architecture()[0] == "64bit":
        if not os.path.isfile("C:\\Windows\\System32\\vcruntime140_1.dll"):
            raise ImportError(
                "Microsoft Visual C++ Redistributable for Visual Studio 2019 not installed on the machine."
Example #13
def _setupQtDirectories():
    # On Windows we need to explicitly import the shiboken6 module so
    # that the libshiboken.dll dependency is loaded by the time a
    # Qt module is imported. Otherwise due to PATH not containing
    # the shiboken6 module path, the Qt module import would fail
    # due to the missing libshiboken dll.
    # In addition, as of Python 3.8, the shiboken package directory
    # must be added to the DLL search paths so that shiboken6.dll
    # is found.
    # We need to do the same on Linux and macOS, because we do not
    # embed rpaths into the PySide6 libraries that would point to
    # the libshiboken library location. Importing the module
    # loads the libraries into the process memory beforehand, and
    # thus takes care of it for us.

    pyside_package_dir = os.path.abspath(os.path.dirname(__file__))

    if sys.platform == 'win32' and sys.version_info[
            0] == 3 and sys.version_info[1] >= 8:
        for dir in _additional_dll_directories(pyside_package_dir):
            os.add_dll_directory(dir)

    try:
        import shiboken6
    except Exception:
        paths = ', '.join(sys.path)
        print(f"PySide6/__init__.py: Unable to import shiboken6 from {paths}",
              file=sys.stderr)
        raise

    #   Trigger signature initialization.
    try:
        # PYSIDE-829: Avoid non-existent attributes in compiled code (Nuitka).
        # We now use an explicit function instead of touching a signature.
        _init_pyside_extension()
    except AttributeError:
        stars = 79 * "*"
        print(dedent(f'''\
            {stars}
            PySide6/__init__.py: The `signature` module was not initialized.
            This libshiboken module was loaded from

            "{shiboken6.__file__}".

            Please make sure that this is the real shiboken6 binary and not just a folder.
            {stars}
            '''),
              file=sys.stderr)
        raise

    if sys.platform == 'win32':
        # PATH has to contain the package directory, otherwise plugins
        # won't be able to find their required Qt libraries (e.g. the
        # svg image plugin won't find Qt5Svg.dll).
        os.environ[
            'PATH'] = pyside_package_dir + os.pathsep + os.environ['PATH']

        # On Windows, add the PySide6\openssl folder (created by setup.py's
        # --openssl option) to the PATH so that the SSL DLLs can be found
        # when Qt tries to dynamically load them. Tell Qt to load them and
        # then reset the PATH.
        openssl_dir = os.path.join(pyside_package_dir, 'openssl')
        if os.path.exists(openssl_dir):
            path = os.environ['PATH']
            try:
                os.environ['PATH'] = openssl_dir + os.pathsep + path
                try:
                    from . import QtNetwork
                except ImportError:
                    pass
                else:
                    QtNetwork.QSslSocket.supportsSsl()
            finally:
                os.environ['PATH'] = path
Example #14
import ctypes
import ctypes.util
import os

import numpy as np


class Network:
    if os.name == 'nt':
        os.add_dll_directory("${CUDAToolkit_BIN_DIR}")
        os.add_dll_directory("${GPUPoly_BINARY_DIR}")
        _lib = ctypes.cdll.LoadLibrary(ctypes.util.find_library('gpupoly'))
    else:
        # _lib=ctypes.cdll.LoadLibrary('${GPUPoly_BINARY_DIR}/dpGPUlib.so.0.10')
        _lib = ctypes.cdll.LoadLibrary('libgpupoly.so')

    def _nullable_ndptr(*args, **kwargs):
        base = np.ctypeslib.ndpointer(*args, **kwargs)

        def from_param(cls, obj):
            if obj is None:
                return obj
            return base.from_param(obj)

        return type(base.__name__, (base, ),
                    {'from_param': classmethod(from_param)})

    _lib.create.argtypes = [ctypes.c_int]
    _lib.create.restype = ctypes.c_void_p

    _lib.test_d.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=1),
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=1), ctypes.c_int,
        ctypes.c_bool
    ]
    _lib.test_d.restype = ctypes.c_bool
    _lib.test_s.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=1),
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=1), ctypes.c_int,
        ctypes.c_bool
    ]
    _lib.test_s.restype = ctypes.c_bool

    _lib.setLayerBox_d.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=1),
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=1), ctypes.c_int
    ]
    _lib.setLayerBox_d.restype = None
    _lib.setLayerBox_s.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=1),
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=1), ctypes.c_int
    ]
    _lib.setLayerBox_s.restype = None

    _lib.relax_s.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_bool, ctypes.c_bool
    ]
    _lib.relax_s.restype = None
    _lib.relax_d.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_bool, ctypes.c_bool
    ]
    _lib.relax_d.restype = None

    _lib.evalAffineExpr_d.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=2), ctypes.c_int,
        ctypes.c_int,
        _nullable_ndptr(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
        _nullable_ndptr(dtype=np.float64, ndim=1), ctypes.c_int, ctypes.c_bool
    ]
    _lib.evalAffineExpr_d.restype = None
    _lib.evalAffineExpr_s.argtypes = [
        ctypes.c_void_p,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=2), ctypes.c_int,
        ctypes.c_int,
        _nullable_ndptr(dtype=np.float32, ndim=2, flags='C_CONTIGUOUS'),
        _nullable_ndptr(dtype=np.float32, ndim=1), ctypes.c_int, ctypes.c_bool
    ]
    _lib.evalAffineExpr_s.restype = None

    _lib.getOutputSize.argtypes = [ctypes.c_void_p, ctypes.c_int]
    _lib.getOutputSize.restype = ctypes.c_int

    _lib.clean.argtypes = [ctypes.c_void_p]
    _lib.clean.restype = None

    _lib.addLinear_d.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_int,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS')
    ]
    _lib.addLinear_s.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_int,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=2, flags='C_CONTIGUOUS')
    ]
    _lib.addConv2D_d.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_bool, ctypes.c_int,
        ctypes.c_int * 2, ctypes.c_int * 4, ctypes.c_int * 2, ctypes.c_int * 2,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=4, flags='C_CONTIGUOUS')
    ]
    _lib.addConv2D_s.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_bool, ctypes.c_int,
        ctypes.c_int * 2, ctypes.c_int * 4, ctypes.c_int * 2, ctypes.c_int * 2,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=4, flags='C_CONTIGUOUS')
    ]
    _lib.addBias_d.argtypes = [
        ctypes.c_void_p, ctypes.c_int,
        np.ctypeslib.ndpointer(dtype=np.float64, ndim=1)
    ]
    _lib.addBias_s.argtypes = [
        ctypes.c_void_p, ctypes.c_int,
        np.ctypeslib.ndpointer(dtype=np.float32, ndim=1)
    ]
    _lib.addReLU.argtypes = [ctypes.c_void_p, ctypes.c_int]
    _lib.addMaxPool2D.argtypes = [
        ctypes.c_void_p, ctypes.c_int, ctypes.c_bool, ctypes.c_int * 2,
        ctypes.c_int * 4, ctypes.c_int * 2, ctypes.c_int * 2
    ]
    _lib.addParSum.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int]
    _lib.addConcat.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int]

    NO_BACKSUBSTITUTION = 0
    FULL_BACKSUBSTITUTION = 1
    BACKSUBSTITUTION_WHILE_CONTAINS_ZERO = 2

    ## Constructor
    #
    #  Construct a new neural network to verify.
    #  \param input_size Number of elements of the input layer.
    def __init__(self, input_size):
        self._nn = self._lib.create(input_size)
        self.input_size = input_size
        self._last_layer_id = 0

    ## Removes all layers of the network.
    def clean(self):
        if self._nn is not None:
            self._lib.clean(self._nn)
            self._nn = None

    ## Destructor
    def __del__(self):
        self.clean()

    ## Verifies an image
    #
    #  Checks if an image (or more precisely, an input box) entirely classifies as a given label
    #  Once this function has been called with a network, no additional layer should be added (i.e. it is necessary to clean and recreate a network)
    #  It is of course possible to call this function multiple times on the same network.
    #
    #  \param down A numpy array of inputSize doubles that represent the lower bound of the box
    #  \param up A numpy array of inputSize doubles that represent the upper bound of the box
    #  \param label Label in which the image is supposed to classify
    #  \param soundness Whether to use sound arithmetic.
    #  \returns the concrete bounds of the differences between the logit of the expected label and each other output logit; the property is verified if all bounds are positive.
    def test(self, down, up, label, soundness=True):
        self.setLayerBox(down, up)  # Set input layer concrete bounds

        # Create a matrix that computes the difference between the expected label's logit and the others.
        diffMatrix = np.delete(
            -np.eye(self._lib.getOutputSize(self._nn, self._last_layer_id)),
            label, 0)
        diffMatrix[:, label] = 1
        diffMatrix = diffMatrix.astype(self._last_dtype)

        # relax all layers, using simple interval analysis first.
        for i in range(self._last_layer_id):
            self.relax(i + 1,
                       soundness=soundness,
                       refineActivationsInput=False)

        # Evaluates an expression that computes the difference between expected label and each element of the output layer
        res = self.evalAffineExpr(
            diffMatrix,
            back_substitute=self.BACKSUBSTITUTION_WHILE_CONTAINS_ZERO,
            sound=soundness)
        #print("res1 ", res)
        if (res > 0).all():  # Expected layer is higher than all others
            return res

        # We failed to verify, so we redo the analysis with backsubstitution before activation layer.
        for i in range(self._last_layer_id):
            self.relax(i + 1, soundness=soundness)
        res = self.evalAffineExpr(
            diffMatrix,
            back_substitute=self.BACKSUBSTITUTION_WHILE_CONTAINS_ZERO,
            sound=soundness)
        #print("res2 ", res)
        return res

    def test_old(self, down, up, label, soundness=True):
        down = down.flatten()
        up = up.flatten()
        assert down.shape == (self.input_size, )
        assert up.shape == (self.input_size, )
        assert up.dtype == down.dtype
        if up.dtype == np.float64:
            return self._lib.test_d(self._nn, down, up, label, soundness)
        return self._lib.test_s(self._nn, down, up, label, soundness)

    ## Sets the concrete bounds of a layer.
    #
    #  \param down A numpy array that represents the lower bound of the box. Must have the size of the layer.
    #  \param up A numpy array that represents the upper bound of the box. Must have the size of the layer.
    #  \param layer Index of the layer (by default, sets the input box).
    def setLayerBox(self, down, up, layer=0):
        down = down.flatten()
        up = up.flatten()
        assert up.dtype == down.dtype
        self._last_dtype = up.dtype
        if up.dtype == np.float64:
            return self._lib.setLayerBox_d(self._nn, down, up, layer)
        return self._lib.setLayerBox_s(self._nn, down, up, layer)

    ## Propagates forward the concrete bounds by interval analysis. Activation layers have their approximation models computed.
    #
    # \param layer Index of the layer
    # \param refineActivationsInput If true, and layer is an activation layer, then its input is first refined via the appropriate back-substitution.
    # \param soundness Whether to use sound (but slower) arithmetic.
    # \param dtype Datatype of the concrete bounds to be used. Can be np.float32, np.float64, or None, in which case it uses the same type as the last setLayerBox.
    def relax(self,
              layer,
              refineActivationsInput=True,
              soundness=True,
              dtype=None):
        if dtype is None:
            dtype = self._last_dtype
        if dtype == np.float64:
            self._lib.relax_d(self._nn, layer, refineActivationsInput,
                              soundness)
        else:
            self._lib.relax_s(self._nn, layer, refineActivationsInput,
                              soundness)

    ## Evaluate the concrete bounds of a list of affine expressions.
    #
    # Evaluate the concrete bounds of a list of m affine expressions of the neurons of a given layer via back-substitution.
    # The affine expressions have the form Ax+b, where A is a m*n matrix, b a vector of size m, and x represents the n neurons of the layer layerId.
    # \param a A numpy array of dimension [m,n]. If None, equivalent to the identity.
    # \param b A numpy array of dimension [m]. If None, equivalent to a zero vector.
    # \param layer Index of the layer
    # \param back_substitute If set to FULL_BACKSUBSTITUTION, back-substitution is always performed back to the inputs. If set to BACKSUBSTITUTION_WHILE_CONTAINS_ZERO, the back-substitution is stopped as soon as 0 is no longer within the concrete bounds. If set to NO_BACKSUBSTITUTION, only the current concrete bounds of the layer are used.
    # \param sound If True, use floating-point sound arithmetic (slower).
    # \param dtype Datatype of the concrete bounds to be used. Can be np.float32, np.float64, or None, in which case it uses the same type as the last setLayerBox.
    # \returns A numpy array of size [m,2] containing the concrete bounds.
    def evalAffineExpr(self,
                       a=None,
                       b=None,
                       layer=None,
                       back_substitute=NO_BACKSUBSTITUTION,
                       sound=True,
                       dtype=None):
        if layer is None:
            layer = self._last_layer_id
        if dtype is None:
            dtype = self._last_dtype
        n = self._lib.getOutputSize(self._nn, layer)
        if a is None:
            m = n
        else:
            assert a.ndim == 2
            assert a.dtype == dtype
            m = a.shape[0]
            a = np.ascontiguousarray(a)
        if b is not None:
            assert b.shape == (m, )
            assert b.dtype == dtype
        res = np.ascontiguousarray(np.ndarray((m, 2), dtype=dtype))
        if dtype == np.float64:
            self._lib.evalAffineExpr_d(self._nn, res, layer, m, a, b,
                                       back_substitute, sound)
        else:
            self._lib.evalAffineExpr_s(self._nn, res, layer, m, a, b,
                                       back_substitute, sound)
        return np.reshape(res, (m, 2))

    ## Fully connected linear layer
    #
    #  Adds a dense linear layer (without activation nor bias). If x is a column vector representing the input of the layer, the output is A*x.
    #
    #  \param A Numpy matrix that represents A. A has outputSize rows, and its number of columns equals the parent outputSize.
    #  \param parent Index of the parent layer (or 0 for the input layer). It can be None, in which case the parent is the last added layer.
    #  \returns the index of the newly created layer.
    def add_linear(self, a, parent=None):
        if parent is None:
            parent = self._last_layer_id
        assert a.ndim == 2
        output_size = a.shape[0]
        if a.dtype == np.float64:
            self._last_layer_id = self._lib.addLinear_d(
                self._nn, parent, output_size, np.ascontiguousarray(a))
        else:
            self._last_layer_id = self._lib.addLinear_s(
                self._nn, parent, output_size, np.ascontiguousarray(a))
        return self._last_layer_id

    ## ReLU layer
    #
    #  Adds a ReLU layer to the network.
    #
    #  \param parent Index of the parent layer (or 0 for the input layer). It can be None, in which case the parent is the last added layer.
    #  \returns the index of the newly created layer.
    def add_relu(self, parent=None):
        if parent is None:
            parent = self._last_layer_id
        self._last_layer_id = self._lib.addReLU(self._nn, parent)
        return self._last_layer_id

    ## Bias layer
    #
    #  Adds a Bias layer to the network, i.e. a layer that adds a constant vector to its input.
    #
    #  \param parent Index of the parent layer (or 0 for the input layer). It can be None, in which case the parent is the last added layer.
    #  \returns the index of the newly created layer.
    def add_bias(self, b, parent=None):
        if parent is None:
            parent = self._last_layer_id
        assert b.ndim == 1
        if b.dtype == np.float64:
            self._last_layer_id = self._lib.addBias_d(self._nn, parent, b)
        else:
            self._last_layer_id = self._lib.addBias_s(self._nn, parent, b)
        return self._last_layer_id

    ## Convolution layer
    #
    #  Adds a convolution layer, without activation nor bias.
    #
    #  \param input_rows Dimension of the input.
    #  \param input_cols Dimension of the input.
    #  \param conv Convolution coefficients, given as a 4-dimensional numpy array (in order [row, column, channel, filter]).
    #  \param channel_first If true, the layer expects input with the shape [batch, channel, row, col], and its output has the shape [batch, filter, row, col]. If false, the layer expects input with the shape [batch, row, col, channel], and its output has the shape [batch, row, col, filter].
    #  \param batches Number of batches.
    #  \param strides An integer or a list of two integers, indicating the stride shape (respectively the number of rows and columns if a list).
    #  \param padding An integer or a list of two integers, indicating the padding (respectively the number of pixels to add at the top and bottom, and the number of pixels to add on the left and right).
    #  \param parent Index of the parent layer (or 0 for the input layer). It can be None, in which case the parent is the last added layer.
    #  \returns the index of the newly created layer.
    def add_conv_2d(self,
                    input_rows,
                    input_cols,
                    conv,
                    channel_first=True,
                    batches=1,
                    strides=1,
                    padding=0,
                    parent=None):
        if parent is None:
            parent = self._last_layer_id
        assert conv.ndim == 4
        kernel = conv.shape[0:2]
        input_shape = [batches, input_rows, input_cols, conv.shape[2]]
        filters = conv.shape[3]
        if not isinstance(strides, list):
            strides = [strides, strides]
        if not isinstance(padding, list):
            padding = [padding, padding]
        if conv.dtype == np.float64:
            self._last_layer_id = self._lib.addConv2D_d(
                self._nn, parent, channel_first, filters,
                (ctypes.c_int * 2)(*kernel), (ctypes.c_int * 4)(*input_shape),
                (ctypes.c_int * 2)(*strides), (ctypes.c_int * 2)(*padding),
                np.ascontiguousarray(conv))
        else:
            self._last_layer_id = self._lib.addConv2D_s(
                self._nn, parent, channel_first, filters,
                (ctypes.c_int * 2)(*kernel), (ctypes.c_int * 4)(*input_shape),
                (ctypes.c_int * 2)(*strides), (ctypes.c_int * 2)(*padding),
                np.ascontiguousarray(conv))
        return self._last_layer_id

    ## MaxPool2D layer.
    #
    #  Adds a max pooling layer.
    #
    #  \param pool Pool shape (Python list of 2 integers, respectively the number of rows and columns).
    #  \param input_rows Dimension of the input.
    #  \param input_cols Dimension of the input.
    #  \param channels Number of channels.
    #  \param channel_first If true, the layer expects input with the shape [batch, channel, row, col], and its output has the shape [batch, channel, row, col]. If false, the layer expects input with the shape [batch, row, col, channel], and its output has the shape [batch, row, col, channel].
    #  \param batches Number of batches.
    #  \param strides An integer or a list of two integers, indicating the stride shape (respectively the number of rows and columns if a list).
    #  \param padding An integer or a list of two integers, indicating the padding (respectively the number of pixels to add at the top and bottom, and the number of pixels to add on the left and right).
    #  \param parent Index of the parent layer (or 0 for the input layer). It can be None, in which case the parent is the last added layer.
    #  \returns the index of the newly created layer.
    def add_maxpool_2d(self,
                       pool,
                       input_rows,
                       input_cols,
                       channels,
                       channel_first=True,
                       batches=1,
                       strides=None,
                       padding=0,
                       parent=None):
        if parent is None:
            parent = self._last_layer_id
        if not isinstance(pool, list):
            pool = [pool, pool]
        if strides is None:
            strides = pool
        elif not isinstance(strides, list):
            strides = [strides, strides]
        if not isinstance(padding, list):
            padding = [padding, padding]
        input_shape = [batches, input_rows, input_cols, channels]
        self._last_layer_id = self._lib.addMaxPool2D(
            self._nn, parent, channel_first, (ctypes.c_int * 2)(*pool),
            (ctypes.c_int * 4)(*input_shape), (ctypes.c_int * 2)(*strides),
            (ctypes.c_int * 2)(*padding))
        return self._last_layer_id

    ## ParSum layer.
    #
    #  Adds a "ParSum" layer, i.e. a layer that sums up the result of two previous layers.
    #  \param parent1 Index of the first parent layer.
    #  \param parent2 Index of the second parent layer.
    #  \returns the index of the newly created layer.
    def add_parsum(self, parent1, parent2):
        self._last_layer_id = self._lib.addParSum(self._nn, parent1, parent2)
        return self._last_layer_id

    ## Concatenation layer.
    #
    #  Adds a concatenation layer (like the one we can find in skipnets). It concatenates the result of two previous layers.
    #  \param parent1 Index of the first parent layer.
    #  \param parent2 Index of the second parent layer.
    #  \returns the index of the newly created layer.
    def add_concat(self, parent1, parent2):
        self._last_layer_id = self._lib.addConcat(self._nn, parent1, parent2)
        return self._last_layer_id
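
# A minimal usage sketch for the Network class above, assuming the native
# gpupoly library loads successfully. The 2x2 weight matrix, the input box and
# the expected label are made-up illustration values.
if __name__ == '__main__':
    import numpy as np

    net = Network(2)                                  # 2 input neurons
    net.add_linear(np.array([[1.0, -1.0],
                             [0.5, 2.0]]))            # dense layer, no bias
    net.add_relu()                                    # ReLU activation
    down = np.array([0.4, 0.4])                       # lower bound of the input box
    up = np.array([0.6, 0.6])                         # upper bound of the input box
    bounds = net.test(down, up, label=1)              # bounds of the logit differences
    print("verified:", bool((bounds > 0).all()))      # verified if all bounds are positive
    net.clean()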
Example #15
if os.path.exists(current_path + os.sep + 'core_avx.' + core_suffix):
    has_avx_core = True

if os.path.exists(current_path + os.sep + 'core_noavx.' + core_suffix):
    has_noavx_core = True

try:
    if os.name == 'nt':
        third_lib_path = current_path + os.sep + '..' + os.sep + 'libs'
        os.environ['path'] = third_lib_path + ';' + os.environ['path']
        sys.path.insert(0, third_lib_path)
        # Note: from python3.8, PATH will not take effect
        # https://github.com/python/cpython/pull/12302
        # Use add_dll_directory to specify dll resolution path
        if sys.version_info[:2] >= (3, 8):
            os.add_dll_directory(third_lib_path)

except ImportError as e:
    from .. import compat as cpt
    if os.name == 'nt':
        executable_path = os.path.abspath(os.path.dirname(sys.executable))
        raise ImportError(
            """NOTE: You may need to run \"set PATH=%s;%%PATH%%\"
        if you encounter \"DLL load failed\" errors. If you have python
        installed in another directory, replace \"%s\" with your own
        directory. The original error is: \n %s""" %
            (executable_path, executable_path, cpt.get_exception_message(e)))
    else:
        raise ImportError(
            """NOTE: You may need to run \"export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH\"
        if you encounter \"libmkldnn.so not found\" errors. If you have python
Example #16
#!python
import os
import sys
import subprocess
import numpy
from math import *

pathToTrelis = "/home/christopher/trelis/cubit_build/claro/bin"
sys.path.append(pathToTrelis)

if os.name == 'nt':
    binPath = pathToTrelis  #os.path.dirname(os.path.abspath(__file__))
    acisPath = r"/acis/code/bin"
    try:
        os.add_dll_directory(binPath + acisPath)
    except AttributeError:
        os.environ['path'] += ';' + binPath + acisPath

import cubit
cubit.init(['cubit', '-nobanner', '-nographics'])


def main(paramFile, objFile):
    x, y = readParamFile(paramFile)
    status, bc_xyz, num_elem, nlcon = makeGeometry(x, y)
    if status != False:
        error_handle(objFile, nlcon, "makeGeometry")
        return

    status = buildUSpline(2, 1)
    if status != False:
Example #17
(through custom forces and integrators), openness, and high performance
(especially on recent GPUs) that make it truly unique among simulation codes.
"""
from __future__ import absolute_import
__author__ = "Peter Eastman"

import os, os.path
import sys
from . import version

if sys.platform == 'win32':
    _path = os.environ['PATH']
    os.environ['PATH'] = r'%(lib)s;%(lib)s\plugins;%(path)s' % {
        'lib': version.openmm_library_path, 'path': _path}
    try:
        with os.add_dll_directory(version.openmm_library_path):
            from . import _openmm
    except:
        pass

from openmm.openmm import *
from openmm.vec3 import Vec3
from openmm.mtsintegrator import MTSIntegrator, MTSLangevinIntegrator
from openmm.amd import AMDIntegrator, AMDForceGroupIntegrator, DualAMDIntegrator

if os.getenv('OPENMM_PLUGIN_DIR') is None and os.path.isdir(version.openmm_library_path):
    pluginLoadedLibNames = Platform.loadPluginsFromDirectory(os.path.join(version.openmm_library_path, 'plugins'))
else:
    pluginLoadedLibNames = Platform.loadPluginsFromDirectory(Platform.getDefaultPluginsDirectory())

if sys.platform == 'win32':
Example #18
            + "\n".join(msg[-2:]))
    try:
        f = open("errorLog.txt", "a")
        f.writelines(msg)
        f.close()
    except:
        pass
    os._exit(1)


sys.excepthook = exchandler

# Python 3.8 support:
# make DLLs and modules load from the current directory
if sys.version_info >= (3, 8):
    os.add_dll_directory(os.path.dirname(os.path.abspath(__file__)))
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))

import app as application
import globalVars


def main():
    try:
        if os.path.exists("errorLog.txt"):
            os.remove("errorLog.txt")
    except:
        pass
    app = application.Main()
    globalVars.app = app
    app.initialize()
Example #19
import rospkg
import os
if "add_dll_directory" in dir(os):
    rospack = rospkg.RosPack()
    rosbinpaths = set([
        os.path.abspath(
            os.path.join(rospack.get_path(pkgname), os.pardir, os.pardir,
                         "bin")) for pkgname in ["cv_bridge", "rosbag"]
    ])
    for path in rosbinpaths:
        os.add_dll_directory(path)
    vcpkg_bin_dir = os.getenv("VCPKG_BIN_DIR")
    if vcpkg_bin_dir is not None:
        os.add_dll_directory(vcpkg_bin_dir)
import rosbag
import yaml
from ackermann_msgs.msg import AckermannDrive, AckermannDriveStamped
from nav_msgs.msg import Odometry
from sensor_msgs.msg import LaserScan, LaserEcho, Image, CompressedImage
import argparse
import TimestampedImage_pb2, TimestampedPacketMotionData_pb2, Image_pb2, FrameId_pb2, MultiAgentLabel_pb2, LaserScan_pb2
import Pose3d_pb2, Vector3dStamped_pb2

from tqdm import tqdm as tqdm
import rospy
from rospy import Time
import numpy as np
import scipy, scipy.integrate, scipy.interpolate
from scipy.spatial.transform import Rotation as Rot, RotationSpline as RotSpline
import matplotlib
from matplotlib import pyplot as plt
Example #20
        # however, we also started to receive reports of problems with DLL
        # resolution with Python 3.7 that were sometimes alleviated with
        # inclusion of the _distributor_init.py module; see SciPy main
        # repo gh-11826

        # we noticed in scipy-wheels repo gh-70 that inclusion of
        # _distributor_init.py in 32-bit wheels for Python 3.7 resulted
        # in failures in DLL resolution (64-bit 3.7 did not)
        # as a result, we decided to combine both the old (working directory)
        # and new (absolute path to DLL location) DLL resolution mechanisms
        # to improve the chances of resolving DLLs across a wider range of
        # Python versions

        # we did not experiment with manipulating the PATH environment variable
        # to include libs_path; it is not immediately clear if this would have
        # robustness or security advantages over changing working directories
        # as done below

        # for python 3.8, the WinDLL loading doesn't work. Try add_dll_directory
        if sys.version_info[:2] == (3, 8):
            os.add_dll_directory(libs_path)
        else:
            try:
                owd = os.getcwd()
                os.chdir(libs_path)
                for filename in glob.glob(os.path.join(libs_path, '*dll')):
                    WinDLL(os.path.abspath(filename))
            finally:
                os.chdir(owd)
Example #21
def find_lib_path(prefix='libmxnet'):
    """Find MXNet dynamic library files.

    Returns
    -------
    lib_path : list(string)
        List of all found path to the libraries.
    """
    lib_from_env = os.environ.get('MXNET_LIBRARY_PATH')
    if lib_from_env:
        if os.path.isfile(lib_from_env):
            if not os.path.isabs(lib_from_env):
                logging.warning(
                    "MXNET_LIBRARY_PATH should be an absolute path, instead of: %s",
                    lib_from_env)
            else:
                if os.name == 'nt':
                    os.environ['PATH'] = os.environ[
                        'PATH'] + ';' + os.path.dirname(lib_from_env)
                return [lib_from_env]
        else:
            logging.warning("MXNET_LIBRARY_PATH '%s' doesn't exist",
                            lib_from_env)

    curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    api_path = os.path.join(curr_path, '../../lib/')
    cmake_build_path = os.path.join(curr_path, '../../build/')
    dll_path = [curr_path, api_path, cmake_build_path]
    if os.name == 'nt':
        dll_path.append(os.path.join(curr_path, '../../build'))
        vs_configuration = 'Release'
        if platform.architecture()[0] == '64bit':
            dll_path.append(
                os.path.join(curr_path, '../../build', vs_configuration))
            dll_path.append(
                os.path.join(curr_path, '../../windows/x64', vs_configuration))
        else:
            dll_path.append(
                os.path.join(curr_path, '../../build', vs_configuration))
            dll_path.append(
                os.path.join(curr_path, '../../windows', vs_configuration))
    elif os.name == "posix" and os.environ.get('LD_LIBRARY_PATH', None):
        dll_path[0:0] = [
            p.strip() for p in os.environ['LD_LIBRARY_PATH'].split(":")
        ]
    if os.name == 'nt':
        os.environ['PATH'] = os.path.dirname(
            __file__) + ';' + os.environ['PATH']
        dll_path = [os.path.join(p, prefix + '.dll') for p in dll_path]
    elif platform.system() == 'Darwin':
        dll_path = [os.path.join(p, prefix + '.dylib') for p in dll_path] + \
                   [os.path.join(p, prefix + '.so') for p in dll_path]
    else:
        dll_path.append('../../../')
        dll_path = [os.path.join(p, prefix + '.so') for p in dll_path]
    lib_path = [p for p in dll_path if os.path.exists(p) and os.path.isfile(p)]
    if len(lib_path) == 0:
        raise RuntimeError('Cannot find the MXNet library.\n' +
                           'List of candidates:\n' + str('\n'.join(dll_path)))
    if os.name == 'nt':
        os.environ['PATH'] = os.environ['PATH'] + ';' + os.path.dirname(
            lib_path[0])
        if sys.version_info >= (3, 8):
            if 'CUDA_PATH' not in os.environ:
                raise RuntimeError(
                    'Cannot find the CUDA_PATH environment variable. Please set CUDA_PATH to your CUDA installation path.'
                )
            os.add_dll_directory(os.path.dirname(lib_path[0]))
            os.add_dll_directory(os.path.join(os.environ['CUDA_PATH'], 'bin'))
    return lib_path
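
# Hypothetical follow-up sketch (not part of the original snippet): once
# find_lib_path() succeeds, the library would typically be loaded with ctypes.
# The variable name _LIB is an assumption.
import ctypes

_LIB = ctypes.CDLL(find_lib_path()[0])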
Example #22
import os
import sys
from ctypes.util import find_library

from .get_winfsp_dir import get_winfsp_bin_dir, get_winfsp_library_name

# WinFSP's DLL is not available system-wide, so we have to first retrieve it
# (using either user-provided environ variable or the infamous windows
# registry) and use the dedicated python call `os.add_dll_directory`
# to add the `bin` path to the DLL search path. If the python version is lower
# than 3.8, `os.add_dll_directory` is not available and we have to fallback
# to the old way of adding a dll directory: customize the PATH environ
# variable.

WINFSP_BIN_DIR = get_winfsp_bin_dir()

# Modify %PATH% in any case, as it is used by `ctypes.util.find_library`
os.environ["PATH"] = f"{WINFSP_BIN_DIR};{os.environ.get('PATH')}"
if sys.version_info >= (3, 8):
    os.add_dll_directory(WINFSP_BIN_DIR)

if not find_library(get_winfsp_library_name()):
    raise RuntimeError(
        f"The WinFsp DLL could not be found in {WINFSP_BIN_DIR}")

try:
    from ._bindings import ffi, lib  # noqa
except Exception as exc:
    raise RuntimeError(f"The winfsp binding could not be imported\n{exc}")


def enable_debug_log():
    stderr_handle = lib.GetStdHandle(lib.STD_ERROR_HANDLE)
    lib.FspDebugLogSetHandle(stderr_handle)
Example #23
    if not os.path.isfile(sofa_file_test):
        print(
            "Warning: environment variable SOFA_ROOT is set but seems invalid.",
            "Loading SOFA libraries will likely fail.")
        print("SOFA_ROOT is currently: " + sofa_root)
    if not os.path.isfile(sofapython3_file_test):
        print("Warning: cannot find SofaPython3.dll at path: " +
              sofapython3_bin_path)
        print("This path will NOT be added to the DLL search path.",
              "Loading SofaPython3 python modules will likely fail.")

    if sys.version_info.minor >= 8:
        # Starting from python3.8 we need to explicitly find SOFA libraries
        if os.path.isfile(sofa_file_test):
            os.add_dll_directory(sofa_bin_path)
        if os.path.isfile(sofapython3_file_test):
            os.add_dll_directory(sofapython3_bin_path)
    else:
        # Add temporarily the bin/lib path to the env variable PATH
        if os.path.isfile(sofa_file_test):
            os.environ[
                'PATH'] = sofa_bin_path + os.pathsep + os.environ['PATH']
        if os.path.isfile(sofapython3_file_test):
            os.environ[
                'PATH'] = sofapython3_bin_path + os.pathsep + os.environ['PATH']

print("---------------------------------------")
sys.stdout.flush()

import Sofa.constants
Example #24
#%%
import sys, os, math
os.add_dll_directory(os.path.join(os.getcwd(), '..', 'install'))
sys.path.append(os.path.join(os.getcwd(), "..", "install"))

import visii

SAMPLES_PER_PIXEL = 128
WIDTH = 1024
HEIGHT = 1024

NUM_OBJECTS = 90
NUM_LIGHTS = 20
L_DIST = 10
O_DIST = 5

visii.initialize_interactive()
# visii.initialize_headless()
camera_entity = visii.entity.create(
    name="my_camera_entity",
    transform=visii.transform.create("my_camera_transform"),
    camera=visii.camera.create_perspective_from_fov(name="my_camera",
                                                    field_of_view=0.785398,
                                                    aspect=1.,
                                                    near=.1))

visii.set_camera_entity(camera_entity)
camera_entity.get_transform().set_position(0, 0.0, -5.)
camera_entity.get_camera().use_perspective_from_fov(0.785398, 1.0, .01)
camera_entity.get_camera().set_view(
    visii.lookAt(
Example #25
# __init__ for osgeo package.

# making the osgeo package version the same as the gdal version:
from sys import platform, version_info
if version_info >= (3, 8, 0) and platform == 'win32':
    import os
    if 'USE_PATH_FOR_GDAL_PYTHON' in os.environ and 'PATH' in os.environ:
        for p in os.environ['PATH'].split(';'):
            if p:
                try:
                    os.add_dll_directory(p)
                except (FileNotFoundError, OSError):
                    continue


def swig_import_helper():
    import importlib
    from os.path import dirname, basename
    mname = basename(dirname(__file__)) + '._gdal'
    try:
        return importlib.import_module(mname)
    except ImportError:
        if version_info >= (3, 8, 0) and platform == 'win32':
            import os
            if not 'USE_PATH_FOR_GDAL_PYTHON' in os.environ:
                msg = 'On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.\n'
                msg += 'If gdalXXX.dll is in the PATH, then set the USE_PATH_FOR_GDAL_PYTHON=YES environment variable\n'
                msg += 'to feed the PATH into os.add_dll_directory().'

                import sys
                import traceback
Example #26
# This is a fix for Python 3.8:
# the DLL search path has to be added manually.
import os
try:
    os.add_dll_directory(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'bin'))
except (AttributeError, OSError):
    pass
Example #27
# Linux only
os.listxattr("path")  # $ getAPathArgument="path"
os.listxattr(path="path")  # $ getAPathArgument="path"

# Linux only
os.removexattr("path", "attribute")  # $ getAPathArgument="path"
os.removexattr(path="path", attribute="attribute")  # $ getAPathArgument="path"

# Linux only
os.setxattr("path", "attribute", "value")  # $ getAPathArgument="path"
os.setxattr(path="path", attribute="attribute",
            value="value")  # $ getAPathArgument="path"

# Windows only
os.add_dll_directory("path")  # $ getAPathArgument="path"
os.add_dll_directory(path="path")  # $ getAPathArgument="path"

# for `os.exec*`, `os.spawn*`, and `os.posix_spawn*` functions, see the
# `SystemCommandExecution.py` file.

# Windows only
os.startfile("path")  # $ getAPathArgument="path"
os.startfile(path="path")  # $ getAPathArgument="path"

# ------------------------------------------------------------------------------
# tempfile
# ------------------------------------------------------------------------------

# _mkstemp_inner does `_os.path.join(dir, pre + name + suf)`
Example #28
import json
import logging
import os
import socket
import subprocess
import sys
import tempfile
import time
import threading

LOGGER = logging.getLogger('ycmd')
ROOT_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
DIR_OF_THIRD_PARTY = os.path.join(ROOT_DIR, 'third_party')
LIBCLANG_DIR = os.path.join(DIR_OF_THIRD_PARTY, 'clang', 'lib')
if hasattr(os, 'add_dll_directory'):
    os.add_dll_directory(LIBCLANG_DIR)

from collections.abc import Mapping
from urllib.parse import urljoin, urlparse, unquote, quote  # noqa
from urllib.request import pathname2url, url2pathname  # noqa

# We replace the re module with regex as it has better support for characters
# on multiple code points. However, this module has a compiled component so we
# can't import it in YCM if it is built for a different version of Python. We
# fall back to the re module in that case.
try:
    import regex as re
except ImportError:  # pragma: no cover
    import re  # noqa

# Creation flag to disable creating a console window on Windows. See
Example #29
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import os, sys, platform
if sys.version_info >= (3, 8, 0) and platform.system() == "Windows":
    for path in [x.strip() for x in os.environ['PATH'].split(';') if x]:
        os.add_dll_directory(path)

import opengeode_py_geometry as geom

if __name__ == '__main__':
    box = geom.BoundingBox2D()
    box.add_point(geom.Point2D([-1, -1]))
    box.add_point(geom.Point2D([1, 1]))

    box2 = box
    box2.add_point(geom.Point2D([-2, -2]))
    box2.add_point(geom.Point2D([0, 0]))

    box2.add_box(box)
    if box2.min() != geom.Point2D([-2, -2]):
        raise ValueError("[Test] Error in BoundingBox union computation")
Example #30
if lib_path is None:
    raise ModuleNotFoundError("{} was not found in {}".format(lib_name, search_paths))

if os.path.getsize(lib_path) < 1000:
    raise ModuleNotFoundError("{} is too small. Did you forget to init git lfs? Try this:\n"
        " 1. Install git lfs (https://git-lfs.github.com/)\n"
        " 2. Run `cd {}`\n"
        " 3. Run `git lfs install`\n"
        " 4. Run `git lfs pull`".format(lib_path, os.path.dirname(lib_path)))

if os.name == 'nt':
  dll_dir = os.path.dirname(lib_path)
  try:
    # New way in python 3.8+
    os.add_dll_directory(dll_dir)
  except (AttributeError, OSError):  # Python < 3.8, or the directory is invalid
    os.environ['PATH'] = dll_dir + os.pathsep + os.environ['PATH']
  lib = windll.LoadLibrary(lib_path)
else:
  lib = cdll.LoadLibrary(lib_path)

# libfibre definitions --------------------------------------------------------#

PostSignature = CFUNCTYPE(c_int, CFUNCTYPE(None, c_void_p), POINTER(c_int))
RegisterEventSignature = CFUNCTYPE(c_int, c_int, c_uint32, CFUNCTYPE(None, c_void_p, c_int), POINTER(c_int))
DeregisterEventSignature = CFUNCTYPE(c_int, c_int)
CallLaterSignature = CFUNCTYPE(c_void_p, c_float, CFUNCTYPE(None, c_void_p), POINTER(c_int))
CancelTimerSignature = CFUNCTYPE(c_int, c_void_p)

OnFoundObjectSignature = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p)