def default_cb_fct(self, evt, usrdata):
    """Dl-load callback: record process vmem before and after a library load.

    Args:
        evt: event whose .contents carries .fname (library path) and
             .step (0 = before load, non-zero = after load).
        usrdata: opaque user data (unused).

    Returns:
        int: 0, as required by the callback protocol.
    """
    # Imports were missing in this variant (present in the sibling
    # implementations); added so the function is self-contained.
    import os
    from os.path import realpath as _realpath
    from os.path import basename as _basename
    libname = evt.contents.fname
    step = evt.contents.step
    if libname is None:
        return 0
    n = _basename(_realpath(libname))
    pid = os.getpid()

    def vmem():
        # Virtual memory size of this process, in bytes.
        # Linux only: first field of /proc/self/statm is the total
        # program size in pages.
        from os import sysconf
        page_size = sysconf('SC_PAGE_SIZE')  # in bytes
        with open('/proc/self/statm') as f:
            # str.split replaces the Python-2-only "string.split".
            pages = int(f.readline().split()[0])
        return pages * page_size  # in bytes

    if step == 0:
        # Before load: remember the starting vmem for this (pid, lib).
        self._data[pid][n] = [vmem(), None]
    else:
        # After load: compute the delta and report it.
        data = self._data[pid][n]
        data[1] = vmem()
        vmem_start = data[0] / 1024.
        vmem_stop = data[1] / 1024.
        dvmem = vmem_stop - vmem_start
        self.msg.info(
            "[%d] loading lib: vmem=(%10.1f + %10.1f) kb [%s]",
            pid, vmem_start, dvmem, n)
        self.out.writerow([pid, n, vmem_start, vmem_stop, dvmem])
        #del self._data[pid][n]
    return 0
def default_cb_fct(self, evt, usrdata):
    """Dl-load callback: record process vmem before and after a library load.

    Args:
        evt: event carrying .fname (library path) and .step
             (0 = before load, non-zero = after load).
        usrdata: opaque user data (unused).

    Returns:
        int: 0, as required by the callback protocol.
    """
    import os
    from os.path import realpath as _realpath
    from os.path import basename as _basename
    libname = evt.fname
    step = evt.step
    if libname is None:
        return 0
    n = _basename(_realpath(libname))
    pid = os.getpid()

    def vmem():
        # Virtual memory size of this process, in bytes.
        # Previously this returned PAGE_SIZE itself, which made every
        # reported delta zero; restored the /proc/self/statm read used
        # by the sibling implementations (Linux only).
        from os import sysconf
        page_size = sysconf('SC_PAGE_SIZE')  # in bytes
        with open('/proc/self/statm') as f:
            pages = int(f.readline().split()[0])
        return pages * page_size  # in bytes

    if step == 0:
        # Before load: remember the starting vmem for this (pid, lib).
        self._data[pid][n] = [vmem(), None]
    else:
        # After load: compute the delta and report it.
        data = self._data[pid][n]
        data[1] = vmem()
        vmem_start = data[0] / 1024.
        vmem_stop = data[1] / 1024.
        dvmem = vmem_stop - vmem_start
        self.msg.info(
            "[%d] loading lib: vmem=(%10.1f + %10.1f) kb [%s]",
            pid, vmem_start, dvmem, n)
        self.out.writerow([pid, n, vmem_start, vmem_stop, dvmem])
        #del self._data[pid][n]
    return 0
def default_cb_fct(self, evt, usrdata):
    """Library-load callback: sample vmem() at the start/stop steps and
    log the difference per (pid, library) pair.

    Returns 0, as required by the callback protocol.
    """
    import os
    from os.path import basename as _basename
    from os.path import realpath as _realpath

    lib_path = evt.fname
    phase = evt.step
    if lib_path is None:
        return 0

    lib = _basename(_realpath(lib_path))
    pid = os.getpid()

    def vmem():
        # NOTE(review): this returns the page size itself, not a memory
        # measurement — kept as-is to preserve behaviour; confirm intent.
        from os import sysconf
        return sysconf('SC_PAGE_SIZE')  # in bytes

    if phase == 0:
        # Opening step: remember the starting sample.
        self._data[pid][lib] = [vmem(), None]
        return 0

    # Closing step: take the second sample and report the delta.
    entry = self._data[pid][lib]
    entry[1] = vmem()
    start_kb = entry[0] / 1024.
    stop_kb = entry[1] / 1024.
    delta_kb = stop_kb - start_kb
    self.msg.info(
        "[%d] loading lib: vmem=(%10.1f + %10.1f) kb [%s]",
        pid, start_kb, delta_kb, lib)
    self.out.writerow([pid, lib, start_kb, stop_kb, delta_kb])
    return 0
def install_packages(packages_dir, quiet=False):
    """
    Install packages.

    Args:
        packages_dir (str): Directory containing packages.
        quiet (bool): Hide packages manager output.
    """
    # Collect ".deb"/".rpm" files, skipping "-dev"/"-devel" packages.
    packages_dir = _realpath(packages_dir)
    packages = []
    for package_file in _listdir(packages_dir):
        if "-dev" in package_file:
            continue
        if _splitext(package_file)[-1].lower() in (".deb", ".rpm"):
            packages.append(_join(packages_dir, package_file))

    # Capture output when quiet, otherwise let it stream to the console.
    if quiet:
        run_kwargs = dict(stdout=_PIPE, stderr=_PIPE,
                          universal_newlines=True)
    else:
        run_kwargs = dict()
    _run(detect_package_manager() + " ".join(packages),
         shell=True, **run_kwargs).check_returncode()
def _validate_file(self, file_name):
    """function _validate_file

    Input: file_name - The file to be validated
    Output: validated file_name

    _validate_file verifies the file exists and the extension matches the
    parser extension(s) before proceeding. This hook can be overwritten to
    remove or perform different file checks as long as it returns the
    file_name.
    """
    if not isinstance(file_name, _string_types):
        raise IOError('{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise IOError('{}: File not found.'.format(file_name))
    # Compare extensions case-insensitively, and reject names with no
    # extension at all (a dotless name would otherwise be compared
    # whole against the extension list).
    fnsplit = file_name.split('.')
    if (len(fnsplit) < 2 or
            fnsplit[-1].lower() not in
            [ext.lower() for ext in self.extensions]):
        raise IOError('{name}: Unsupported extension. Supported '
                      'extensions are {exts}'.format(
                          name=file_name,
                          exts='(' + ', '.join(self.extensions) + ')'))
    return file_name
def getDriver(cls, driverType, connType):
    """take driverType and connType to return the corresponding class
    imported from the file system. Used internally and can be used for
    the connections where no authentification is needed (sqlite for
    instance)

    USAGE:
        with a context manager:
            drv = connectionManager.ConnectionManager.getDriver(driverType="sql", connType="sqlite")
            with drv(test="test") as c:
                c.exec("execTest")

        without a context manager:
            drv = connectionManager.ConnectionManager.getDriver(driverType="sql", connType="sqlite")
            d = drv(test="test")
            d.connect()
            d.exec("execTest")
            d.close()
    """
    calcPath = _join(_realpath(_dirname(__file__)),
                     "drivers", driverType, connType + ".py")
    # Fast path: reuse a class already imported and cached on `cls`.
    cached = cls.drivers.get(driverType, {}).get(connType)
    if cached is not None:
        return cached
    # Slow path: import the driver module from disk and cache its class.
    try:
        module = _utils.importFromPath(calcPath)
    except FileNotFoundError:
        raise FileNotFoundError("aucun driver trouvé dans " + calcPath)
    driver = module.Driver
    cls.drivers.setdefault(driverType, {})[connType] = driver
    return driver
def _validate_file(self, file_name):
    """function _validate_file

    Input: file_name - The file to be validated
    Output: validated file_name

    _validate_file verifies the file exists and the extension matches
    the parser extension(s) before proceeding. This hook can be
    overwritten to perform different file checks or remove the checks
    entirely as long as it returns the file_name.
    """
    if not isinstance(file_name, _string_types):
        raise ParseError(
            '{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise ParseError('{}: File not found.'.format(file_name))
    # Case-insensitive extension check; a dotless name has no extension.
    parts = file_name.split('.')
    supported = [ext.upper() for ext in self.extensions]
    if len(parts) < 2 or parts[-1].upper() not in supported:
        raise ParseError('{name}: Unsupported extension. Supported '
                         'extensions are {exts}'.format(
                             name=file_name,
                             exts='(' + ', '.join(self.extensions) + ')'))
    return file_name
def factory(arbalet):
    """Return the hardware link object matching the configured
    controller type.

    Raises:
        NotImplementedError: when the configured controller is unknown.
    """
    controller = arbalet.config["controller"]
    if controller == "arduino":
        return _ArduinoLink(arbalet)
    if controller in ("rpi", "raspberrypi", "pi"):
        return _RPiLink(arbalet)
    raise NotImplementedError(
        "{} knows no implementation of link type \"{}\" specified in config file"
        .format(_realpath(__file__), arbalet.config["controller"]))
def get_python_package_entry_point(package, entry_point):
    """
    Find an CLI entry point from a Python package.

    Args:
        package (str): Package name.
        entry_point (str): Entry point name.

    Returns:
        str or None: Path to entry point, or None if nothing found.
    """
    # site-packages is the parent of the imported package's directory.
    site_packages_path = _dirname(_import_module(package).__path__[0])

    # Find package info
    # Can be a directory ending by ".dist-info" or ".egg-info"
    with _scandir(site_packages_path) as entries:
        for entry in entries:
            if (entry.name.startswith(f'{package}-') and
                    _splitext(entry.name)[1] in (
                        '.dist-info', '.egg-info')):
                package_info_path = entry.path
                break
        else:
            # Package is not installed or do not have package info
            return None

    # Find manifest file
    # Can be a "RECORD" or a "installed-files.txt" file in package info folder
    for name in ('RECORD', 'installed-files.txt'):
        manifest_path = _join(package_info_path, name)
        if _isfile(manifest_path):
            break
    else:
        # Package do not have manifest file
        return None

    # Find entry point relative path in manifest file
    # Possibles manifest file lines formats: "path\n" or "path,checksum\n"
    with open(manifest_path, 'rt') as manifest:
        for line in manifest:
            entry_point_rel_path = line.strip().split(',', 1)[0]
            if _basename(entry_point_rel_path) == entry_point:
                break
        else:
            # Entry point is not present in manifest
            return None

    # Convert to absolute path
    # Paths in manifest are relative to site-packages or package info
    for prefix in (site_packages_path, package_info_path):
        entry_point_path = _realpath(_join(prefix, entry_point_rel_path))
        if _isfile(entry_point_path):
            return entry_point_path
    # Falls through to an implicit None when neither candidate exists.
def get_sources_dirs(*src):
    """
    Return sources directories.

    Args:
        *src: Directories paths.

    Returns:
        list of str: Sources directories
    """
    # Home directory and current directory always come first, then any
    # caller-supplied paths; falsy entries are dropped.
    candidates = [HOME_DIR, '.', *src]
    return [_realpath(_fsdecode(entry)) for entry in candidates if entry]
def _check_path(self, path):
    """
    Check path and ensure it is absolute.

    Args:
        path (str): path

    Returns:
        str: Absolute path
    """
    with self._handle_exception(OSError):
        # Expand "~" first, then resolve symlinks/relative segments.
        absolute = _realpath(_expanduser(path))
        if not _isfile(absolute):
            raise FileNotFoundError('No such file: ' + absolute)
        return absolute
def run_command():
    """Sign packages"""
    from os import environ
    from argparse import ArgumentParser

    parser = ArgumentParser(prog="sign_packages",
                            description="Sign RPM or DEB packages.")
    parser.add_argument("packages_dir", help="Input packages directory")
    parser.add_argument("--quiet", "-q", help="Disable verbosity",
                        action="store_true")
    args = parser.parse_args()

    # Key material comes from the environment, never from the CLI.
    private_key = environ.get("GPG_PRIVATE_KEY", "")
    if not private_key:
        parser.exit(1, message="No private key\n")
        # Unreachable: parser.exit raises SystemExit; kept defensively.
        return

    packages_dir = _realpath(args.packages_dir)

    # Pick the signing backend from the first recognized extension.
    for file in _listdir(packages_dir):
        ext = _splitext(file)[1].lower()
        if ext == ".rpm":
            sign = sign_rpm_packages
            break
        elif ext == ".deb":
            sign = sign_deb_packages
            break
    else:
        parser.exit(1, "No package to sign\n")
        # Unreachable: parser.exit raises SystemExit; kept defensively.
        return

    try:
        sign(
            packages_dir=packages_dir,
            private_key=private_key,
            public_key=environ.get("GPG_PUBLIC_KEY", ""),
            pass_phrase=environ.get("GPG_PASS_PHRASE", ""),
            quiet=args.quiet,
        )
    except _CalledProcessError as exception:
        # Propagate the signing tool's exit code and captured output.
        parser.exit(exception.returncode, exception.stdout)
    except RuntimeError as exception:
        parser.exit(1, str(exception))

    if not args.quiet:
        parser.exit(message="Signature successful\n")
def _build_bitmap_data():
    '''
    Build an SFrame from 10 saved drawings.
    '''
    from os.path import (basename as _basename, dirname as _dirname,
                         join as _join, realpath as _realpath,
                         splitext as _splitext)

    drawings_dir = _join(_dirname(_realpath(__file__)), "drawings")
    sf = _tc.image_analysis.load_images(drawings_dir, with_path=True)
    sf = sf.rename({"image": "drawing", "path": "label"})

    def _to_label(filepath):
        # Extract the class name from the filename, "check1.png" -> "check"
        # [:-1] is to get "check" out of "check1"
        return _splitext(_basename(filepath))[0][:-1]

    sf["label"] = sf["label"].apply(_to_label)
    return sf
def symlink(src, dst):
    """
    Extended "os.symlink" that:

    - Autodetect if target is directory.
    - Ignore error if file already exists.
    - Ensure to link to real absolute path of the source.

    Args:
        src (path-like object): Source path.
        dst (path-like object): Destination path.
    """
    source = _realpath(_fsdecode(src))
    try:
        _symlink(source, _fsdecode(dst),
                 target_is_directory=_isdir(source))
    except FileExistsError:
        # Destination already present: leave it untouched.
        pass
def run_command():
    """Sign packages"""
    from argparse import ArgumentParser

    parser = ArgumentParser(prog='sign_packages',
                            description='Sign RPM or DEB packages.')
    parser.add_argument('packages_dir', help='Input packages directory')
    parser.add_argument('--private_key', '-k', help='Private GPG key')
    parser.add_argument('--public_key', '-b', help='Public GPG key')
    parser.add_argument('--pass_phrase', '-p', help='GPG key pass phrase')
    parser.add_argument('--quiet', '-q', help='Disable verbosity',
                        action='store_true')
    args = parser.parse_args()
    packages_dir = _realpath(args.packages_dir)

    if args.private_key is not None and not args.private_key:
        # Passed and empty value: command called from build script but
        # not private key is defined. In this case, signature is disabled.
        return

    # Pick the signing backend from the first recognized extension.
    for file in _listdir(packages_dir):
        ext = _splitext(file)[1].lower()
        if ext == '.rpm':
            sign = sign_rpm_packages
            break
        elif ext == '.deb':
            sign = sign_deb_packages
            break
    else:
        parser.exit(1, 'No package to sign.')
        # Unreachable: parser.exit raises SystemExit; kept defensively.
        return

    try:
        sign(packages_dir=packages_dir,
             private_key=args.private_key,
             public_key=args.public_key,
             pass_phrase=args.pass_phrase,
             quiet=args.quiet)
    except _CalledProcessError as exception:
        # Propagate the signing tool's exit code and captured output.
        parser.exit(exception.returncode, exception.stdout)
    except RuntimeError as exception:
        parser.exit(1, str(exception))

    if not args.quiet:
        parser.exit(message='Signature successful')
def yaml_read(path):
    """
    Read a YAML file.

    Args:
        path (path-like object): Path to file to load.

    Returns:
        dict or list: Un-serialized content
    """
    path = _realpath(_fsdecode(path))
    # open() failures (missing file, permissions) propagate unchanged;
    # only YAML parse errors are wrapped.
    file = open(path, 'rt')
    try:
        return _yaml_load(file, Loader=_Loader)
    except _YAMLError as exception:
        raise _ConfigurationException(
            f'Unable to read "{path}": {str(exception)}')
    finally:
        file.close()
def json_read(path, **kwargs):
    """
    Read a JSON file.

    Args:
        path (path-like object): Path to file to load.
        kwargs: "json.load" kwargs.

    Returns:
        dict or list: Un-serialized content
    """
    path = _realpath(_fsdecode(path))
    # open() failures (missing file, permissions) propagate unchanged;
    # only JSON decode errors are wrapped.
    file = open(path, 'rt')
    try:
        return _json_load(file, **kwargs)
    except _JSONDecodeError as exception:
        raise _ConfigurationException(
            f'Unable to read "{path}": {str(exception)}')
    finally:
        file.close()
def default_cb_fct(self, evt, usrdata):
    """Dl-load callback: record process vmem before and after a library load.

    Args:
        evt: event whose .contents carries .fname (library path) and
             .step (0 = before load, non-zero = after load).
        usrdata: opaque user data (unused).

    Returns:
        int: 0, as required by the callback protocol.
    """
    import os
    from os.path import realpath as _realpath
    from os.path import basename as _basename
    libname = evt.contents.fname
    step = evt.contents.step
    if libname is None:
        return 0
    n = _basename(_realpath(libname))
    pid = os.getpid()

    def vmem():
        # Virtual memory size of this process, in bytes.
        # Linux only: first field of /proc/self/statm is the total
        # program size in pages. "str.split" replaces the Python-2-only
        # "string.split" import; the unused "sys.platform" import was
        # dropped.
        from os import sysconf
        page_size = sysconf('SC_PAGE_SIZE')  # in bytes
        with open('/proc/self/statm') as f:
            pages = int(f.readline().split()[0])
        return pages * page_size  # in bytes

    if step == 0:
        # Before load: remember the starting vmem for this (pid, lib).
        self._data[pid][n] = [vmem(), None]
    else:
        # After load: compute the delta and report it.
        data = self._data[pid][n]
        data[1] = vmem()
        vmem_start = data[0] / 1024.
        vmem_stop = data[1] / 1024.
        dvmem = vmem_stop - vmem_start
        self.msg.info(
            "[%d] loading lib: vmem=(%10.1f + %10.1f) kb [%s]",
            pid, vmem_start, dvmem, n)
        self.out.writerow([pid, n, vmem_start, vmem_stop, dvmem])
        #del self._data[pid][n]
    return 0
def __init__(self, fpga_slot_id=0, fpga_image=None, drm_ctrl_base_addr=0,
             log_dir='.', no_clear_fpga=False, **kwargs):
    # Basic configuration; any extra keyword becomes an attribute as-is.
    self._fpga_slot_id = fpga_slot_id
    self._fpga_image = fpga_image
    self._drm_ctrl_base_addr = drm_ctrl_base_addr
    self._log_dir = _realpath(_fsdecode(log_dir))
    for k, v in kwargs.items():
        setattr(self, k, v)

    # FPGA read/write low level functions and associated locks.
    # Read and write share the same single lock object.
    self._fpga_read_register = None
    self._fpga_write_register = None
    self._fpga_register_lock = self._get_lock()
    self._fpga_read_register_lock = self._fpga_register_lock
    self._fpga_write_register_lock = self._fpga_register_lock

    # NOTE(review): clear_fpga() runs before the driver library handle
    # is acquired below — presumably intentional; confirm the ordering
    # requirements of the subclass hooks.
    if not no_clear_fpga:
        self.clear_fpga()

    # Device and library handles
    self._fpga_handle = None
    self._fpga_library = self._get_driver()

    # Initialize FPGA: optional programming first, then device init.
    if fpga_image:
        self.program_fpga(fpga_image)
    with self._augment_exception('initialize'):
        self._init_fpga()

    # Call backs
    self._read_register_callback = self._get_read_register_callback()
    self._write_register_callback = self._get_write_register_callback()
def _program_fpga(self, fpga_image):
    """
    Program the FPGA with the specified image.

    Args:
        fpga_image (str): FPGA image.
    """
    def _load(image):
        # Run "xbutil program" for the given image; raise with the
        # captured output on failure. (De-duplicates the two identical
        # run/check sequences of the original implementation.)
        result = _run(
            [self._xbutil, 'program', '-d', str(self._fpga_slot_id),
             '-p', image],
            stderr=_STDOUT, stdout=_PIPE, universal_newlines=True,
            check=False)
        if result.returncode:
            raise RuntimeError(result.stdout)

    # Vitis does not reprogram a FPGA that has already the bitstream.
    # So to force it we write another bitstream first.
    _load(_join(SCRIPT_DIR, 'clear.awsxclbin'))
    print('Cleared AWS XRT slot #%d' % self._fpga_slot_id)

    # Now load the real image
    fpga_image = _realpath(_fsdecode(fpga_image))
    _load(fpga_image)
    print('Programmed AWS XRT slot #%d with FPGA image %s'
          % (self._fpga_slot_id, fpga_image))
def install_accelize_drm_library(packages_dir, quiet=False):
    """
    Install Accelize DRM library packages.

    Args:
        packages_dir (str): Directory containing packages.
        quiet (bool): Hide packages manager output.
    """
    packages_dir = _realpath(packages_dir)
    # Keep ".deb"/".rpm" runtime packages only; "-dev" ones are skipped.
    packages = []
    for name in _listdir(packages_dir):
        if ('-dev' not in name
                and _splitext(name)[-1].lower() in ('.deb', '.rpm')):
            packages.append(_join(packages_dir, name))

    # Capture output when quiet, otherwise stream to the console.
    if quiet:
        run_kwargs = dict(stdout=_PIPE, stderr=_PIPE,
                          universal_newlines=True)
    else:
        run_kwargs = dict()
    _run(detect_package_manager() + ' '.join(packages),
         shell=True, **run_kwargs).check_returncode()
"""
This library provides some DMR library specific CI/CD pipeline utilities.
"""
from os.path import (
    basename as _basename,
    dirname as _dirname,
    realpath as _realpath,
    join as _join,
)

# Directory containing this module, and the project root just above it.
DEPLOYMENT_DIR = _realpath(_dirname(__file__))
PROJECT_DIR = _dirname(DEPLOYMENT_DIR)


def setvariable(name, value, is_output=True):
    """
    Set a Azure pipeline variable.

    Args:
        name (str): Variable name.
        value (str): Variable value.
        is_output (bool): Make variable available to future jobs.
    """
    # Azure Pipelines logging command: the agent parses this stdout line.
    print(f"##vso[task.setvariable variable={name}"
          f'{";isOutput=true" if is_output else ""}]{value}')


def render_template(src, dst, show=True, **kwargs):
    """
    Render a file from a template using Jinja2.
def open_html(f_name, method="r"):
    """Open a file from the "html" directory next to this module.

    Args:
        f_name (str): File name inside the html directory.
        method (str): Mode passed to open().

    Returns:
        file object
    """
    html_dir = _join(_dirname(_realpath(__file__)), "html")
    test_dir(html_dir)
    return open(_join(html_dir, f_name), method)
def open_local(f_name, method="r"):
    """Open f_name from the user's ~/.klb directory.

    Falls back to the packaged "blank_file" (opened read-only) when the
    local file cannot be opened.

    Args:
        f_name (str): File name inside ~/.klb.
        method (str): Mode passed to open() for the local file.

    Returns:
        file object
    """
    try:
        return open(_join(_expanduser("~"), ".klb/" + f_name), method)
    except OSError:
        # Narrowed from a bare "except": only file-system errors should
        # trigger the blank-file fallback (a bare except also swallowed
        # KeyboardInterrupt/SystemExit and programming errors).
        folder = _dirname(_realpath(__file__))
        return open(_join(folder, "blank_file"))
Requires XRT: https://github.com/Xilinx/XRT
"""
from ctypes import (cdll as _cdll, POINTER as _POINTER,
                    c_char_p as _c_char_p, c_uint as _c_uint,
                    c_uint64 as _c_uint64, c_int as _c_int,
                    c_void_p as _c_void_p, c_size_t as _c_size_t)
from os import environ as _environ, fsdecode as _fsdecode
from os.path import (isfile as _isfile, join as _join,
                     realpath as _realpath, basename as _basename,
                     dirname as _dirname)
from re import match as _match
from subprocess import run as _run, PIPE as _PIPE, STDOUT as _STDOUT
from threading import Lock as _Lock

from tests.fpga_drivers import FpgaDriverBase as _FpgaDriverBase

__all__ = ['FpgaDriver']

# Directory of this script, used to locate files shipped next to it.
SCRIPT_DIR = _dirname(_realpath(__file__))


class XrtLock():
    """Context manager wrapping the XRT device lock/unlock C entry points."""

    def __init__(self, driver):
        self.driver = driver
        # Define Lock function
        self.xcl_lock = driver._fpga_library.xclLockDevice
        self.xcl_lock.restype = _c_int
        self.xcl_lock.argtype = _c_void_p
        # Define Unlock function
        # NOTE(review): this also binds "xclLockDevice" — should it be
        # "xclUnlockDevice"? Confirm against the XRT C API.
        self.xcl_unlock = driver._fpga_library.xclLockDevice
        self.xcl_unlock.restype = _c_int
        self.xcl_unlock.argtype = _c_void_p

    def __enter__(self):
def open_local(f_name, method="r"):
    """Open f_name from the user's ~/.klb directory.

    Falls back to the packaged "blank_file" (opened read-only) when the
    local file cannot be opened.

    Args:
        f_name (str): File name inside ~/.klb.
        method (str): Mode passed to open() for the local file.

    Returns:
        file object
    """
    try:
        return open(_join(_expanduser("~"), ".klb/" + f_name), method)
    except OSError:
        # Narrowed from a bare "except": only file-system errors should
        # trigger the blank-file fallback (a bare except also swallowed
        # KeyboardInterrupt/SystemExit and programming errors).
        folder = _dirname(_realpath(__file__))
        return open(_join(folder, "blank_file"))
# "import sys" was missing although _sys is used below (NameError when
# the cwd check fires); added here.
import sys as _sys

from os.path import join as _path_join, dirname as _dirname, realpath as _realpath
from os import system as _system, getcwd as _getcwd, remove as _remove
from shutil import rmtree as _rmtree

NAME = 'tosdb'
VERSION = '0.9'
DESCRIPTION = "Python Front-End / Wrapper for TOSDataBridge"
AUTHOR = "Jonathon Ogden"
AUTHOR_EMAIL = "*****@*****.**"
PACKAGES = ['tosdb', 'tosdb/cli_scripts', 'tosdb/intervalize',
            'tosdb/streaming']
_AUTO_EXT = '_tosdb'

# everything should be relative to the python/setup.py
_OUR_PATH = _dirname(_realpath(__file__))
_HEADER_NAME = 'tos_databridge.h'
_HEADER_PATH = _path_join(_OUR_PATH, '..', 'include', _HEADER_NAME)
_OUTPUT_PATH = _path_join(_OUR_PATH, NAME, _AUTO_EXT + '.py')

if _OUR_PATH != _getcwd():
    # Abort early: the relative paths above assume the python/ cwd.
    _sys.stderr.write("fatal: setup.py must be run from its own directory(python/)\n")
    exit(1)

#string that should bookmark the topics in Topic_Enum_Wrapper::TOPICS<T>
_MAGIC_TOPIC_STR = 'ksxaw9834hr84hf;esij?><'

#regex for finding our header #define consts
#TODO: adjust so we can pull non-ints
# raw string: "\s"/"\d" are regex escapes, not Python string escapes
_REGEX_HEADER_CONST = r"#define[\s]+([\w]+)[\s]+.*?(-?[\d][\w]*)"
# "import sys" was missing although _sys is used below (NameError when
# the cwd check fires); added here.
import sys as _sys

from re import match as _match, search as _search
from time import asctime as _asctime
from os.path import join as _path_join, dirname as _dirname, realpath as _realpath
from os import system as _system, getcwd as _getcwd, remove as _remove
from shutil import rmtree as _rmtree

NAME = 'tosdb'
VERSION = '0.8'
DESCRIPTION = "Python Front-End / Wrapper for TOSDataBridge"
AUTHOR = "Jonathon Ogden"
AUTHOR_EMAIL = "*****@*****.**"
PACKAGES = ['tosdb', 'tosdb/cli_scripts', 'tosdb/intervalize']
_AUTO_EXT = '_tosdb'

# everything should be relative to the python/setup.py
_OUR_PATH = _dirname(_realpath(__file__))
_HEADER_NAME = 'tos_databridge.h'
_HEADER_PATH = _path_join(_OUR_PATH, '..', 'include', _HEADER_NAME)
_OUTPUT_PATH = _path_join(_OUR_PATH, NAME, _AUTO_EXT + '.py')

if _OUR_PATH != _getcwd():
    # Abort early: the relative paths above assume the python/ cwd.
    _sys.stderr.write(
        "fatal: setup.py must be run from its own directory(python/)\n")
    exit(1)

#string that should bookmark the topics in Topic_Enum_Wrapper::TOPICS<T>
_MAGIC_TOPIC_STR = 'ksxaw9834hr84hf;esij?><'

#regex for finding our header #define consts
#TODO: adjust so we can pull non-ints
# raw string: "\s"/"\d" are regex escapes, not Python string escapes
_REGEX_HEADER_CONST = r"#define[\s]+([\w]+)[\s]+.*?(-?[\d][\w]*)"