def __init__(self, keyspace_name, table_name, record_schema, cassandra_session, replication_strategy=None): title = '%s.__init__' % self.__class__.__name__ # construct fields model from jsonmodel.validators import jsonModel self.fields = jsonModel(self._class_fields) # validate inputs input_fields = { 'keyspace_name': keyspace_name, 'table_name': table_name, 'record_schema': record_schema, 'replication_strategy': replication_strategy } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # validate cassandra session from sys import path as sys_path sys_path.append(sys_path.pop(0)) from cassandra.cluster import Session sys_path.insert(0, sys_path.pop()) if not isinstance(cassandra_session, Session): raise ValueError('%s(cassandra_session) must be a cassandra.cluster.Session datatype.' % title) self.session = cassandra_session
def submit(self, depend=None): self.reservations = ["host.processors=1+"] # add license registration for each in self.licenses: self.reservations.append("license.%s" % each) from sys import path path.insert(0, pipe.apps.qube().path("qube-core/local/pfx/qube/api/python/")) import qb # print dir(qb) self.qb = qb.Job() self.qb["name"] = self.name self.qb["prototype"] = "cmdrange" self.qb["package"] = { # 'cmdline' : 'echo "Frame QB_FRAME_NUMBER" ; echo $HOME ; echo $USER ; cd ; echo `pwd` ; sleep 10', "cmdline": self.cmd, "padding": self.pad, "range": str(self.range())[1:-1], } self.qb["agenda"] = qb.genframes(self.qb["package"]["range"]) self.qb["cpus"] = self.cpus self.qb["hosts"] = "" self.qb["groups"] = self.group self.qb["cluster"] = "/pipe" self.qb["priority"] = self.priority self.qb["reservations"] = ",".join(self.reservations) self.qb["retrywork"] = self.retry self.qb["retrywork_delay"] = 15 self.qb["retrysubjob"] = self.retry self.qb["flags"] = "auto_wrangling,migrate_on_frame_retry" return qb.submit(self.qb)[0]["id"]
def test(user, data): path.insert(0, "src") from utils import * from globals import * i = 0 (conn, cursor) = getDbCursor(getUserDb(user)) print data while True: if str(i) not in data: break if i == 0 and data[str(i)].lower() == "yes": cursor.execute('INSERT into SPECIAL_FLAG values("%s", "%s");' % (user, SF_PROGRAMMED)) elif i == 1 and data[str(i)].lower() == "yes": cursor.execute('INSERT into SPECIAL_FLAG values("%s", "%s");' % (user, SF_SCHEME)) elif i == 2 and data[str(i)].lower() == "yes": cursor.execute('INSERT into SPECIAL_FLAG values("%s", "%s");' % (user, SF_RECURSION)) i += 1 conn.commit() cursor.close()
def _install_updates(self): updated_modules = 0 if path.exists(self.UPDATE_DIR): sys_path.insert(0, self.UPDATE_DIR) for m in self.UPDATE_MODULES: modulefile = path.join(self.UPDATE_DIR, "%s%spy" % (m.__name__, extsep)) if path.exists(modulefile): v_dict = {"VERSION": -1} with open(modulefile) as f: for line in f: if line.startswith("VERSION"): exec line in v_dict break if v_dict["VERSION"] > m.VERSION: pass # logging.info("Reloading Module '%s', current version number: %d, new version number: %d" % (m.__name__, v_dict['VERSION'], m.VERSION)) reload(m) updated_modules += 1 else: logging.info( "Not reloading Module '%s', current version number: %d, version number of update file: %d" % (m.__name__, v_dict["VERSION"], m.VERSION) ) else: logging.info( "Skipping nonexistant update from" + path.join(self.UPDATE_DIR, "%s%spy" % (m.__name__, extsep)) ) return updated_modules
def extension(buildout):
    """zc.buildout extension: derive ``package-name`` (and a default
    ``develop`` entry) from the setup.py in the current directory."""
    def setup(name, *args, **kw):
        # capture only the package name; every other setup() arg is ignored
        buildout["buildout"].setdefault("package-name", name)
    # monkey-patch `setuptools.setup` with the above...
    import setuptools
    original = setuptools.setup
    setuptools.setup = setup
    # now try to import `setup.py` from the current directory, extract
    # the package name using the helper above and set `package-name`
    # in the buildout configuration...
    here = abspath(curdir)
    path.insert(0, here)
    import setup
    # mention `setup` again to make pyflakes happy... :p
    setup
    # reset `sys.path` and undo the above monkey-patch
    path.remove(here)
    setuptools.setup = original
    # if there's a `setup.py` in the current directory
    # we also want to develop this egg...
    # print buildout['buildout']
    buildout["buildout"].setdefault("develop", ".")
    return buildout
def _install_updates(self):
    """
    Installs updated modules.

    Checks the updates directory for new versions of one of the modules
    listed in self.UPDATE_MODULES and reloads the modules. Version check
    is performed by comparing the VERSION variable stored in the module.
    """
    updated_modules = 0
    if path.exists(self.UPDATE_DIR):
        sys_path.insert(0, self.UPDATE_DIR)
        for m in self.UPDATE_MODULES:
            modulefile = path.join(self.UPDATE_DIR, "%s%spy" % (m.__name__, extsep))
            if path.exists(modulefile):
                v_dict = {'VERSION': -1}
                with open(modulefile) as f:
                    # scan the update file for its VERSION assignment
                    for line in f:
                        if line.startswith('VERSION'):
                            exec line in v_dict
                            break
                    else:
                        # for-else: no VERSION line at all -> skip module.
                        # NOTE(review): uses `logger` while the rest of the
                        # method uses `logging` — confirm both names exist.
                        logger.error("Could not find VERSION string in file %s!" % modulefile)
                        continue
                if v_dict['VERSION'] > m.VERSION:
                    # NOTE(review): the two %d arguments appear swapped
                    # relative to their labels (v_dict['VERSION'] is the *new*
                    # version) — compare with the else branch below.
                    logging.info("Reloading Module '%s', current version number: %d, new version number: %d" % (m.__name__, v_dict['VERSION'], m.VERSION))
                    reload(m)
                    if m == cachedownloader and self.cachedownloader != None:
                        # re-wire the live cachedownloader instance
                        self.__install_cachedownloader()
                    updated_modules += 1
                else:
                    logging.info("Not reloading Module '%s', current version number: %d, version number of update file: %d" % (m.__name__, m.VERSION, v_dict['VERSION']))
            else:
                logging.info("Skipping nonexistant update from" + path.join(self.UPDATE_DIR, "%s%spy" % (m.__name__, extsep)))
    return updated_modules
def __init__(self, len_argv=len(argv)):
    # NOTE: the default is bound once at definition time — a snapshot of
    # the CLI argument count when the class body was executed.
    path.insert(0, os.path.join('static', 'modules'))
    if len_argv > 1:
        def few_arg_test(func, num1, num2):
            # Dispatch to functions.<func>(icy) when enough CLI args were
            # given; otherwise abort with a usage warning.
            if len_argv > num1:
                start_module_function = getattr(import_module('functions'), func)
                start_module_function(icy)
            else:
                raise SystemExit('\nWARNING "{0}" requires {1} arguments,'\
                '\nyou have given me {2} !\n'.format(lenny, num2, len(icy)))
        # every accepted spelling of a flag: word, -word, --word plus the
        # single-letter variants w, -w, --w
        dash_clash = lambda crash: '{0} -{0} --{0} {0[0]} -{0[0]} --{0[0]}'\
        .format(crash).split()
        lenny = argv[1]
        icy = argv[2:]
        if lenny in dash_clash('replace'):
            few_arg_test('replace_str', 3, 2)
        if lenny in dash_clash('new'):
            few_arg_test('create_new_post', 2, 1)
        if lenny in dash_clash('format'):
            few_arg_test('format_post', 2, 1)
        if lenny in dash_clash('optimize'):
            few_arg_test('optimize_modules', 1, 0)
    else:
        # no CLI arguments: fall through to a full blog generation
        from blogfy import GenerateBlog
        GenerateBlog()
def notify(self, clusterNode):
    ''' TBD '''
    # Look up the API object for every host reported by the cluster; API
    # failures are logged and the host is skipped (best-effort).
    from lib.api.api_service_client import APIClient, makeTimestamp
    for hostNode in clusterNode:
        ''' Available hostNode keys: gmond_started, name, tags, ip, tmax, tn, reported, location, dmax '''
        # Cache object attributes using an ordered dictionary
        path.insert(0, self.path)
        from lib.api.api_service_client import *
        try :
            vm, obj = self.find_object(hostNode.getAttr('ip'), self, hostNode.getAttr('name'))
        except APIException, obj :
            logging.error("Problem with API connectivity: " + str(obj))
            continue
        except Exception, obj2 :
            logging.error("Problem with API object lookup: " + str(obj2))
            continue
def __init__(self, path=None):
    # Borg-style shared state: every instance aliases the same __dict__,
    # so the kit is configured at most once per process.
    self.__dict__ = self.__state
    if not path:
        # no path given: only valid if a previous instance configured one
        if not self.path:
            raise KitError('No path specified')
    else:
        path = abspath(path)
        if self.path and path != self.path:
            raise KitError('Invalid path specified: %r' % path)
        elif not self.path:
            # first-time configuration: load config and import modules
            self.path = path
            with open(path) as handle:
                self.config = load(handle)
            if self.root not in sys_path:
                sys_path.insert(0, self.root)
            for module in self._modules:
                __import__(module)
    # Session removal handlers
    task_postrun.connect(_remove_session)
    request_tearing_down.connect(_remove_session)
def determineT():
    """Return the machine's pystones benchmark value T, cached for 12 hours
    in ~/.seecr-test-pystones; falls back to T=1.0 when test.pystone is
    unavailable."""
    def pystones(*args, **kwargs):
        # fallback used when the stdlib benchmark cannot be imported
        from warnings import warn
        warn("Python module 'test.pystone' not available. Will assume T=1.0")
        return 1.0, "ignored"
    pystonesValueFile = expanduser('~/.seecr-test-pystones')
    if isfile(pystonesValueFile):
        age = time() - stat(pystonesValueFile).st_mtime
        if age < 12 * 60 * 60:
            # cache still fresh: reuse the stored value
            return float(open(pystonesValueFile).read())
    # Strip sys.path entries from the front until `test.pystone` imports
    # (a local package named `test` can shadow the stdlib one); the
    # removed entries are restored in their original order afterwards.
    temppath = []
    while len(path) > 0:
        try:
            if 'test' in sys.modules:
                del sys.modules['test']
            from test.pystone import pystones
            break
        except ImportError:
            temppath.append(path[0])
            del path[0]
    for temp in reversed(temppath):
        path.insert(0, temp)
    del temppath
    T, p = pystones(loops=50000)
    try:
        with open(pystonesValueFile, 'w') as f:
            f.write(str(T))
    except IOError:
        # cache write is best-effort only
        pass
    return T
def abrir_factura(self, tv, path, view_column):
    """Row-activation handler: open the edit window for the object whose
    PUID is stored in the activated row's last column (a sales invoice or
    a client).

    NOTE: the *path* parameter is the TreeView row path; it shadows
    sys.path until the local `from sys import path` rebinds the name in
    the ImportError branches.
    """
    model = tv.get_model()
    puid = model[path][-1]
    if puid:
        objeto = pclases.getObjetoPUID(puid)
        if isinstance(objeto, pclases.FacturaVenta):
            fra = objeto
            try:
                import facturas_venta
            except ImportError:
                # forms package not importable yet: add ../formularios
                from os.path import join as pathjoin
                from sys import path
                path.insert(0, pathjoin("..", "formularios"))
                import facturas_venta
            ventana = facturas_venta.FacturasVenta(fra, self.usuario)
        elif isinstance(objeto, pclases.Cliente):
            cliente = objeto
            try:
                import clientes
            except ImportError:
                from os.path import join as pathjoin
                from sys import path
                path.insert(0, pathjoin("..", "formularios"))
                import clientes
            ventana_clientes = clientes.Clientes(cliente, self.usuario)
def _load(self, project):
    """Load base repository state, then prepend the project's configured
    'python-path' (if any) to sys.path exactly once."""
    Repository._load(self, project)
    extra_dir = project.config.get(self.name, 'python-path')
    if not extra_dir:
        return
    from sys import path as search_path
    if extra_dir not in search_path:
        search_path.insert(0, extra_dir)
def __call__(self, parser, args, values, option=None):
    """argparse action: remember the selected profile name on *args* and
    import the matching module from ./profiles into the module-global
    ``profile``."""
    global profile
    args.profile = values
    # make the profiles subdirectory importable, then import dynamically
    path.insert(0, "./profiles")
    profile = __import__(values)
def loadObject(self, obj_type, obj_name, build_var):
    """Import module "<ObjType><ObjName>" from the garden "<objtype>s"
    directory and return an instance of the class of the same name.

    *build_var*, when truthy, is forwarded to the class constructor.
    """
    path.insert(1, self.garden_dir + obj_type.lower() + "s")
    qualified = obj_type + obj_name
    __import__(qualified)
    cls = getattr(sys.modules[qualified], qualified)
    return cls(build_var) if build_var else cls()
def test_rewrite_pyc_check_code_name(self):
    # This one is adapted from cpython's Lib/test/test_import.py
    # Verifies that importing a .pyc compiled under a different file name
    # (dfile) still reports the right co_filename values on the module,
    # its code and its functions.
    from os import chmod
    from os.path import join
    from sys import modules, path
    from shutil import rmtree
    from tempfile import mkdtemp
    code = """if 1:
        import sys
        code_filename = sys._getframe().f_code.co_filename
        module_filename = __file__
        constant = 1
        def func():
            pass
        func_filename = func.func_code.co_filename
        """
    module_name = "unlikely_module_name"
    dir_name = mkdtemp(prefix="pypy_test")
    file_name = join(dir_name, module_name + ".py")
    with open(file_name, "wb") as f:
        f.write(code)
    compiled_name = file_name + ("c" if __debug__ else "o")
    chmod(file_name, 0777)
    # Setup
    sys_path = path[:]
    orig_module = modules.pop(module_name, None)
    assert modules.get(module_name) == None
    path.insert(0, dir_name)
    # Test
    import py_compile
    # compile claiming a different source file name
    py_compile.compile(file_name, dfile="another_module.py")
    __import__(module_name, globals(), locals())
    mod = modules.get(module_name)
    try:
        # Ensure proper results
        assert mod != orig_module
        assert mod.module_filename == compiled_name
        assert mod.code_filename == file_name
        assert mod.func_filename == file_name
    finally:
        # TearDown: restore sys.path and sys.modules, remove temp dir
        path[:] = sys_path
        if orig_module is not None:
            modules[module_name] = orig_module
        else:
            try:
                del modules[module_name]
            except KeyError:
                pass
        rmtree(dir_name, True)
def update_python_environment_with(module_path):
    """Prepend *module_path*'s directory to sys.path (if not already
    present) and mirror the resulting search path into the PYTHONPATH
    environment variable so child processes inherit it."""
    from os import environ, pathsep, putenv
    from os.path import dirname
    from sys import path

    module_folder = dirname(module_path)
    if module_folder not in path:
        path.insert(0, module_folder)

    joined = pathsep.join(path)
    environ["PYTHONPATH"] = joined
    # putenv as well, mirroring the original's belt-and-braces approach
    putenv("PYTHONPATH", joined)
def __init__(self, out):
    """Initialise translator state: the output stream, the set of modules
    already built, and the module names excluded from bundling."""
    self.out = out
    self.built = set()
    # make the bundled python.js support directory importable
    path.insert(0, os.path.join(os.path.dirname(__file__), 'python.js'))
    # modules we never bundle
    self.blacklist = (
        'os.path',
        'main',
        'os',
        'array',
        'importlib',
        'pkgutil',
        'collections',
        'tokenize',
        'argparse',
        'wsgiref.simple_server',
        'time',
        'types',
        'copy',
    )
def run(p):
    """Start the web.py application rooted at directory *p*.

    Makes the parent folder importable, builds the application from the
    module-level *urls* mapping plus a template renderer bound to
    p/templates/, and publishes both through module globals before
    entering the server loop.
    """
    global app, render
    parent_folder = abspath('..')
    if parent_folder not in path:
        path.insert(0, parent_folder)
    app = web.application(urls, globals(), True)
    render = web.template.render(join(p, 'templates/'))
    app.run()
def getPluginHelp( data ):
    """Print the help of the plugin named in data.module[0], passing the
    remaining elements as its parameters.

    Plugins live in ./modules as plugin_<name> and are expected to expose
    a help(params) function; a missing help() is logged, not raised.
    """
    plugin, params = data.module[0], data.module[1:]
    logger.info('Loading "%s" module help' % plugin)
    from sys import path
    path.insert(0, "./modules")
    plugin_name = "plugin_"+plugin
    # level=-1 selects Python 2's implicit relative import semantics
    plugin = __import__(plugin_name, globals(), locals(), ['object'], -1)
    try:
        plugin.help(params)
    except AttributeError as err:
        logger.error('\nIt is not possible to show help for "%s" module. ERROR:\n\t%s\n' % (plugin_name, err) )
def classFactory(class_name):
    """Instantiate the subtitle-site handler named *class_name*.

    The handler class lives in a module named after the lower-cased class
    name, next to this file.  Returns None when *class_name* is not a
    known subtitle site.
    """
    if class_name not in SUBTITLE_SITE_LIST:
        return None
    # make sure the engines directory is importable (once)
    engines_dir = os.path.dirname(__file__)
    if engines_dir not in sys_path:
        sys_path.insert(0, engines_dir)
    module = import_module(class_name.lower())
    return getattr(module, class_name)()
def evalQuiz(user, data):
    """Persist a raw quiz submission for auditing, then run the page's
    grading module (course_material/<page>/test.py) against it.

    *data* must contain 'page'; the audit file name embeds the user, the
    page suffix and a millisecond timestamp.
    """
    # BUGFIX: use a context manager so the audit file is closed even when
    # the write raises (the old open/write/close leaked the handle).
    log_name = '%s.quiz-%s.%d' % (user, data['page'].split('-')[-1], int(time() * 1000))
    with open(join(quiz_requests, log_name), 'w') as f:
        f.write(str(data))
    # make the page's grading module importable, then run it
    path.insert(0, join(course_material, str(data['page'])))
    #test_mod = __import__(join(course_material, str(data['page']), 'test.py'))
    import test as test_mod
    test_mod.test(user, data)
    del test_mod
def loads_json(self, json_str):
    '将json的字符串,解析为列表'
    # Parse a JSON string into the corresponding Python object.
    # (Original docstring: "parse the JSON string into a list".)
    try:
        from json import loads
    except ImportError:
        # json is built in only from Python 2.6 on; when missing, fall
        # back to the bundled third-party minjson module.
        '''由于py2.6以上才内置了JSON模块, 故如果没有内置则采用第三方模块: Contact mailto:[email protected] '''
        path.insert(0, self.dcc_def.const.PLUS_PATH)
        from minjson import read as loads
    return loads(json_str)
def dumps_json(self, obj):
    '将列表,解析为json的字符串'
    # Serialize a Python object to a JSON string.
    # (Original docstring: "serialize the list to a JSON string".)
    try:
        from json import dumps
    except ImportError:
        # json is built in only from Python 2.6 on; when missing, fall
        # back to the bundled third-party minjson module.
        '''由于py2.6以上才内置了JSON模块, 故如果没有内置则采用第三方模块: Contact mailto:[email protected] '''
        path.insert(0, self.dcc_def.const.PLUS_PATH)
        from minjson import write as dumps
    return dumps(obj)
def __addPluginsToPath(self, directory):
    """Make the package containing the plugins directory importable.

    * directory -- The plugins directory

    The parent of *directory* is prepended to sys.path at most once,
    after which the ``plugins`` package itself is imported.
    """
    parent = split(directory)[0]
    if parent not in path:
        path.insert(0, parent)
    import plugins
def boot():
    """Configure application startup flow.

    Drops this script's own directory from the head of sys.path (when
    present), prepends the project parent directory instead, and imports
    the ``deployer`` package from there.
    """
    # CLEANUP: removed the unused `from os import path` import.
    from os.path import dirname as d, join as j, abspath as a
    from sys import path as p
    if p[0] == d(__file__):
        del p[0]
    p.insert(0, a(j(d(__file__), '..')))
    import deployer
def application(environ, start_response):
    """WSGI entry point that configures the process from the request's
    environ before delegating to Django.

    VIRTUALENV_PATH, when provided, is prepended to sys.path; any WSGI
    environ keys listed in *environment_settings* are copied into
    os.environ so Django's settings machinery can see them.
    """
    if "VIRTUALENV_PATH" in environ:
        path.insert(0, environ["VIRTUALENV_PATH"])
    for key in environment_settings:
        if key in environ:
            os.environ[key] = str(environ[key])
    # imported here so sys.path / os.environ are set up first
    import django.core.handlers.wsgi
    _application = django.core.handlers.wsgi.WSGIHandler()
    return _application(environ, start_response)
def addPaths():
    """Ensure the Chassis workspace directories are on sys.path.

    Each directory is inserted at position 1 at most once (position 0 is
    left alone so the script directory keeps priority).
    """
    myPaths = ['C:\\Workspace\\Chassis.git',
               'C:\\Workspace\\Chassis.git\\tests',
               'C:\\Workspace\\Chassis.git\\examples']
    for myPath in myPaths:
        # CLEANUP: membership is a plain `in` test; the old manual flag
        # loop was O(n) per candidate and printed every comparison
        # (leftover debug output).
        if myPath not in path:
            path.insert(1, myPath)
def __init__(self,File): import datetime import configobj from CCSUtils import SimpleLogger from CCSUtils import send_mail from sys import path from sys import exit import imp self.__email_subject = "" self.__email_body = "" self.StartTime = datetime.datetime.now() self.EndTimie = datetime.datetime.now() # Load application main config file appConfig = configobj.ConfigObj("/etc/CCSBackupEngine/app.conf")['main'] # Install application Logger appLogger = SimpleLogger() self.L =appLogger.append self.D = appLogger.DEBUG try : if len(appConfig['LogLevel']) > 1 and len(appConfig['LogFile']) > 1 : appLogger.setup(appConfig('LogLevel'),appConfig('LogFile')) elif len(appConfig.get('LogFile')) > 1 : appLogger.setup(appLogger.WARNING, appConfig('LogFile')) else : appLogger.setup() except : print "Could n`t parse app.conf LogLevel or LogFile values\n" exit(-1) self.L("Loading instance configuration file {0} : ".format(File), self.D) Iconf = configobj.ConfigObj(File) # Get backup type self.L("Checking backup type driver.\n", self.D) if Iconf['type'] not in appConfig['AllowedBackupTypes'] : self.L("Unkown backup type : {0} ,\n Supported backup types are {1}\n Exit..\n".format(Iconf['type'], appConfig['AllowedBackupTypes']), appLogger.ERROR) exit(-1) L("Load Driver of this module if exist") try : path.insert(0,"/etc/CCSBackupEngine/Drivers/") except : self.L("Could not insert driver path /etc/CCSBackupEngine/Drivers", appLogger.ERROR) exit(-2) L("Loading driver file TD(Sub)") Backup = __import__(Iconf['type']) #T.D Add Sanity check for the imported module. modConfig = configobj.ConfigObj("/etc/CCSBackupEngine/app.conf")[Iconf['type']] self._backupAgent = Backup.Backup(modConfig) if not self._checkConfig(Iconf) : self.L("Failed while executing driver checkConfig()") ; #exit(-3)
def desiutil_module(command, *arguments):
    """Call the Modules command.

    Parameters
    ----------
    command : :class:`str`
        Command passed to the base module command.
    arguments : :class:`list`
        Arguments passed to the module command.

    Returns
    -------
    None

    Notes
    -----
    The base module function does not update :data:`sys.path` to reflect
    any additional directories added to :envvar:`PYTHONPATH`. The wrapper
    function takes care of that (and uses set theory!).

    This module also avoids potential Python 3 conflicts.
    """
    import os
    import subprocess
    from sys import path
    # snapshot PYTHONPATH before the module command runs
    try:
        old_python_path = set(os.environ['PYTHONPATH'].split(':'))
    except KeyError:
        old_python_path = set()
    cmd = modulecmd + [command] + list(arguments)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    status = p.returncode
    # modulecmd writes Python code to stdout that mutates os.environ;
    # execute it in this interpreter (py3-safe exec() form).
    # exec out in globals(), locals()
    exec(out, globals(), locals())
    try:
        new_python_path = set(os.environ['PYTHONPATH'].split(':'))
    except KeyError:
        new_python_path = set()
    # directories the module added to PYTHONPATH but not yet to sys.path
    add_path = new_python_path - old_python_path
    for p in add_path:
        # insert after a leading '' (current-dir) entry, if one is present
        path.insert(int(path[0] == ''), p)
    return
def __load(self, type, name, args=None, callable=False):
    """Import "<Type><Name>" from the application "<type>s" directory
    (optionally from a subfolder given as "folder/Name") and return the
    class of the same name.

    *args*, when truthy, is passed to the constructor; when *callable* is
    true (and no args are given) the class object itself is returned.
    """
    folder, slash, remainder = name.partition('/')
    if slash:
        # "folder/Name": search inside the named subfolder
        path.insert(1, self.app_dir + type.lower() + 's/' + folder)
        name = remainder
    else:
        path.insert(1, self.app_dir + type.lower() + 's')
    item_name = type + name
    __import__(item_name)
    target = getattr(sys.modules[item_name], item_name)
    if args:
        return target(args)
    if callable:
        return target
    return target()
#!/usr/bin/env python from datetime import time from os.path import abspath, dirname, join from random import choice, uniform from sys import maxsize from sys import path from unittest import main, TestCase from pyvtt import WebVTTTime, InvalidTimeString path.insert(0, abspath(join(dirname(__file__), '..'))) class TestSimpleTime(TestCase): def setUp(self): self.time = WebVTTTime() def test_default_value(self): self.assertEqual(self.time.ordinal, 0) def test_micro_seconds(self): self.time.milliseconds = 1 self.assertEqual(self.time.milliseconds, 1) self.time.hours += 42 self.assertEqual(self.time.milliseconds, 1) self.time.milliseconds += 1000 self.assertEqual(self.time.seconds, 1) def test_seconds(self): self.time.seconds = 1 self.assertEqual(self.time.seconds, 1)
Analysis for Huginn (mAEWing2) FLT XX """ #%% # Import Libraries import os import numpy as np import matplotlib.pyplot as plt # Hack to allow loading the Core package if __name__ == "__main__" and __package__ is None: from sys import path, argv from os.path import dirname, abspath, join path.insert(0, abspath(join(dirname(argv[0]), ".."))) path.insert(0, abspath(join(dirname(argv[0]), "..", 'Core'))) del path, argv, dirname, abspath, join from Core import Loader from Core import OpenData # Constants hz2rps = 2 * np.pi rps2hz = 1 / hz2rps pathBase = os.path.join('/home', 'rega0051', 'FlightArchive') ac = 'Huginn' flt = 'FLT03'
r""" An experimental "on-line" policy, using algorithms from black-box Bayesian optimization, using [scikit-optimize](https://scikit-optimize.github.io/). - It uses an iterative black-box Bayesian optimizer, with two methods :meth:`ask` and :meth:`tell` to be used as :meth:`choice` and :meth:`getReward` for our Multi-Armed Bandit optimization environment. - See https://scikit-optimize.github.io/notebooks/ask-and-tell.html for more details. .. warning:: This is still **experimental**! It is NOT efficient in terms of storage, and **highly** NOT efficient either in terms of efficiency against a Bandit problem (i.e., regret, best arm identification etc). """ from __future__ import division, print_function # Python 2 compatibility __author__ = "Lilian Besson" __version__ = "0.9" # WARNING: this is a HUGE hack to fix a mystery bug on importing this policy from sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) import numpy as np # Ignore the UserWarning skopt/optimizer/optimizer.py:208: # UserWarning: The objective has been evaluated at this point before. from warnings import filterwarnings # simplefilter("ignore", UserWarning) filterwarnings("ignore", message="The objective has been evaluated at this point before", category=UserWarning) # Cf. https://scikit-optimize.github.io/ try: import skopt.learning from skopt import Optimizer except ImportError as e:
import ROOT
from os import path as os_path
from sys import path as sys_path
from array import array

# Resolve the plotting package location from $EOPPlottingDir and make it
# importable (after the script's own directory).
Curr_DIR = os_path.expandvars('$EOPPlottingDir')
sys_path.insert(1, Curr_DIR)

from PlottingTools.HistogramManager import HistogramManager
import argparse

parser = argparse.ArgumentParser(description='Create a root file that has a histogram required to reweight events')
parser.add_argument('--file', '-f', dest="file", type=str, required=True, help='Create a histogram required for reweighting')
parser.add_argument('--selectionName', '-selname', dest="selname", type=str, required=True, help='The name of the selection')
parser.add_argument('--histogramName', '-hm', dest="hist_name", type=str, required=True, help='The name of the histogram to use for reweighting')
parser.add_argument('--outputFile', '-on', dest="output_name", type=str, required=True, help='the name of the output file. the tree inside will have the same name')
args = parser.parse_args()

filename = args.file
selname = args.selname
output_name = args.output_name
hist_name = args.hist_name

hm = HistogramManager(filename)
f=ROOT.TFile(filename, "READ")
tree = f.Get(selname + "BinningTree")
# grab the first entry only — it carries the binning branches
for bins in tree:
    break
eta_bins_low = getattr(bins, selname+"EtaBinsLow")
from robot_interface import Robot from encoder_interface import Encoders from sys import path path.insert(0, '../Algo_conditions') from single_nothing import Nothing from triple_max_rotational import TP_Predict class Algorithm(Robot, Encoders): def __init__(self, BigEncoder, SmallEncoders, values, positions, ALProxy, period): Encoders.__init__(self, BigEncoder, SmallEncoders, small_encoders_required=False) Robot.__init__(self, values, positions, ALProxy, masses=False, acc_required=False, gyro_required=False) self.order = [{ 'algo': Nothing, 'duration': 20 }, { 'algo': TP_Predict, 'duration': 600
def __init__(self, logger, input_file=None, args=None):
    """Collect session configuration: snapshots of the project files that
    launching KiCad may touch, UI/recording options, executable names,
    the detected KiCad version, and the paths of every KiCad config file
    this run may back up or rewrite."""
    self.export_format = 'pdf'
    if input_file:
        self.input_file = input_file
        self.input_no_ext = os.path.splitext(input_file)[0]
        #
        # As soon as we init pcbnew the following files are modified:
        #
        if os.path.isfile(self.input_no_ext+'.pro'):
            self.start_pro_stat = os.stat(self.input_no_ext+'.pro')
        else:
            self.start_pro_stat = None
        if os.path.isfile(self.input_no_ext+'.kicad_pro'):
            self.start_kicad_pro_stat = os.stat(self.input_no_ext+'.kicad_pro')
        else:
            self.start_kicad_pro_stat = None
        if os.path.isfile(self.input_no_ext+'.kicad_prl'):
            self.start_kicad_prl_stat = os.stat(self.input_no_ext+'.kicad_prl')
        else:
            self.start_kicad_prl_stat = None
    if args:
        # Session debug
        self.use_wm = args.use_wm  # Use a Window Manager, dialogs behaves in a different way
        self.start_x11vnc = args.start_x11vnc
        self.rec_width = args.rec_width
        self.rec_height = args.rec_height
        self.record = args.record
        self.video_dir = args.output_dir
        self.wait_for_key = args.wait_key
        # Others
        if hasattr(args, 'file_format'):
            self.export_format = args.file_format.lower()
    else:
        # Session debug
        self.use_wm = False
        self.start_x11vnc = False
        self.rec_width = REC_W
        self.rec_height = REC_H
        self.record = False
        self.video_dir = None
        self.wait_for_key = False
    self.colordepth = 24
    self.video_name = None
    # Executable and dirs
    self.eeschema = 'eeschema'
    self.pcbnew = 'pcbnew'
    self.kicad_conf_dir = 'kicad'
    ng_ver = os.environ.get('KIAUS_USE_NIGHTLY')
    if ng_ver:
        # nightly builds use suffixed executables and their own config dir
        self.eeschema += '-'+NIGHTLY
        self.pcbnew += '-'+NIGHTLY
        self.kicad_conf_dir += os.path.join(NIGHTLY, ng_ver)
        # Path to the Python module
        path.insert(0, '/usr/lib/kicad-nightly/lib/python3/dist-packages')
    # Detect KiCad version
    try:
        import pcbnew
    except ImportError:
        logger.error("Failed to import pcbnew Python module."
                     " Is KiCad installed?"
                     " Do you need to add it to PYTHONPATH?")
        exit(NO_PCBNEW_MODULE)
    m = re.match(r'(\d+)\.(\d+)\.(\d+)', pcbnew.GetBuildVersion())
    self.kicad_version_major = int(m.group(1))
    self.kicad_version_minor = int(m.group(2))
    self.kicad_version_patch = int(m.group(3))
    # single comparable integer: major*1e6 + minor*1e3 + patch
    self.kicad_version = self.kicad_version_major*1000000+self.kicad_version_minor*1000+self.kicad_version_patch
    logger.debug('Detected KiCad v{}.{}.{} ({})'.format(self.kicad_version_major, self.kicad_version_minor, self.kicad_version_patch, self.kicad_version))
    # Config file names
    self.kicad_conf_path = os.path.join(os.environ['HOME'], '.config/'+self.kicad_conf_dir)
    # - eeschema config
    self.conf_eeschema = os.path.join(self.kicad_conf_path, 'eeschema')
    self.conf_eeschema_bkp = None
    # - pcbnew config
    self.conf_pcbnew = os.path.join(self.kicad_conf_path, 'pcbnew')
    self.conf_pcbnew_bkp = None
    # - kicad config
    self.conf_kicad = os.path.join(self.kicad_conf_path, 'kicad_common')
    self.conf_kicad_bkp = None
    # Config files that migrated to JSON
    # Note that they remain in the old format until saved
    if self.kicad_version >= KICAD_VERSION_5_99:
        self.conf_eeschema += '.json'
        self.conf_pcbnew += '.json'
        self.conf_kicad += '.json'
        self.conf_kicad_json = True
        self.conf_eeschema_json = True
        self.conf_pcbnew_json = True
        self.pro_ext = 'kicad_pro'
        self.prl_ext = 'kicad_prl'
    else:
        self.conf_kicad_json = False
        self.conf_eeschema_json = False
        self.conf_pcbnew_json = False
        self.pro_ext = 'pro'
        self.prl_ext = None
    # - hotkeys
    self.conf_hotkeys = os.path.join(self.kicad_conf_path, 'user.hotkeys')
    self.conf_hotkeys_bkp = None
    # - sym-lib-table
    self.user_sym_lib_table = os.path.join(self.kicad_conf_path, 'sym-lib-table')
    self.user_fp_lib_table = os.path.join(self.kicad_conf_path, 'fp-lib-table')
    self.sys_sym_lib_table = [KICAD_SHARE+'template/sym-lib-table']
    self.sys_fp_lib_table = [KICAD_SHARE+'template/fp-lib-table']
    if ng_ver:
        # 20200912: sym-lib-table is missing
        self.sys_sym_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/sym-lib-table')
        self.sys_fp_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/fp-lib-table')
    # Some details about the UI
    if self.kicad_version >= KICAD_VERSION_5_99:
        # KiCad 5.99.0
        self.ee_window_title = r'\[.*\] — Eeschema$'  # "PROJECT [HIERARCHY_PATH] - Eeschema"
    else:
        # KiCad 5.1.6
        self.ee_window_title = r'Eeschema.*\.sch'  # "Eeschema - file.sch"
    # Collected errors and unconnecteds (warnings)
    self.errs = []
    self.wrns = []
    # Error filters
    self.err_filters = []
# BUGFIX: `from sys.path import insert` raises ModuleNotFoundError —
# sys.path is a list attribute of the sys module, not an importable
# module.  Import the list itself and call its insert method.
from os.path import abspath, join, dirname
from sys import path

# make the sibling pypevue package importable
path.insert(0, abspath(join(dirname(__file__), '../pypevue/')))
# Make the package under test importable from the parent directory.
from sys import path
from os.path import join, exists, dirname
path.insert(0, join(dirname(__file__), '..'))
import holyGrail

import pytest
import numpy as np

# fixed seed so the random fixture below is reproducible across runs
np.random.seed(42)


@pytest.mark.parametrize("elements,thesum", [([1, 2, 3], 6), (['a', 'b', 'c'], 'abc')])
def test_sum(elements, thesum):
    # holyGrail.sum must handle numeric addition and string concatenation
    assert holyGrail.sum(elements) == thesum


@pytest.mark.parametrize(
    "elements,themean,tol",
    [([1, 2], 1.5, 1e-6),
     # float16 noise around 100 needs a loose tolerance
     ((1e2 + 1.0 * np.random.randn(1000)).astype(np.float16), 1e2, 0.1)])
def test_mean(elements, themean, tol):
    ans = holyGrail.mean(elements)
    assert np.abs(ans - themean) < tol, ans
"""not used but later if we want things more organized"""
from sys import path

# insert each helper package directory just before the last sys.path entry
for _extra in ("./baseline_distances", "./preprocess", "./SVM", "./utils"):
    path.insert(-1, _extra)
from sys import path path.insert(0, '/git/master/Ferramentas/AWS') import create_ec2 import terminate_ec2 import execute_command from airflow import DAG from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import configuracao as conf default_args = { 'owner': 'airflow', 'depends_on_past': False, 'start_date': datetime(2020, 2, 3, 17, 00), # format -> YYYY-M-D HH:MM 'retries': 0 } dag = DAG( 'DAGExample', default_args=default_args, catchup=False, schedule_interval='*/15 14-17 * * *', ) tagname = 'ETL_Example' def verifyS3Func(*args, **kwargs): s3_updated = ons_files.verify_db('TESTE') if s3_updated == False:
from io import StringIO from pathlib import Path from sys import argv, path, stderr def die(limit=None): import traceback o = StringIO() o.write('\n') traceback.print_exc(limit=limit, file=o, chain=False) print(o.getvalue()[:-1], end='', file=stderr) exit(1) LAST_RESULT = Path('~/.cache/p_last.pkl').expanduser() path.insert(0, '') try: ast = parse(argv[1], mode='single').body except SyntaxError: die(limit=0) g = {'Path': Path} if LAST_RESULT.is_file(): with LAST_RESULT.open('rb') as f: with suppress(EOFError): g['_'] = pickle.load(f) o = StringIO() out = '' with redirect_stdout(o): try:
from sys import path
path.insert(0, "../Utility")
from utility_functions import last_maxima, last_zero_crossing, moving_average, sign_zero
import numpy as np


class TripleIncrease():
    # Swing-control algorithm: watches zero crossings of the big-encoder
    # angle ('be') to estimate the quarter period and schedule the next
    # position-switch time.

    def __init__(self, values, all_data, **kwargs):
        self.start_time = values['time']
        self.previous_time = values['time']
        self.switch_time = 100
        self.offset = -0.21
        # sampling period of the encoder stream (seconds), overridable
        self.period = kwargs.get('period', 0.005)
        self.last_maximum = last_maxima(all_data['time'], all_data['be'], time_values='values', dt=self.period)
        self.duration = kwargs.get('duration', float('inf'))
        self.previous_be = values['be']

    def algo(self, values, all_data):
        # a sign change in 'be' marks a zero crossing of the swing
        if sign_zero(values['be']) != sign_zero(self.previous_be):
            self.min_time = last_zero_crossing(values, self.previous_time, self.previous_be)
            self.max_time, self.last_maximum = last_maxima(all_data['time'], all_data['be'], time_values='both', dt=self.period)
            # quarter period difference between time at maxima and minima
            self.quart_period = np.abs(self.min_time - self.max_time)
            # set time for position to switch
            self.switch_time = self.min_time + self.quart_period + self.offset
        self.previous_be = values['be']
from platform import machine from unittest import main, TestCase from subprocess import Popen from struct import pack from os.path import split, join, splitext test_dir = split(__file__)[0] parent_dir = split(test_dir)[0] # adjust PYTHONPATH to import pyrsp from sys import path as PYTHONPATH PYTHONPATH.insert(0, parent_dir) from pyrsp.rsp import archmap from pyrsp.utils import find_free_port, wait_for_tcp_port, QMP def run(*args, **kw): return Popen([a for a in args if a], **kw).wait() class GCCBuilder(object): # SRC - path to source file, must be specified by child classes # EXE - path to executable file with debug info for both # RSP server & client, defined by that helper DEFS = {} LIBS = [] EXTRA_CFLAGS = "" GCC_PREFIX = ""
# -*- coding: utf-8 -*- # generated from catkin/cmake/template/__init__.py.in # keep symbol table as clean as possible by deleting all unnecessary symbols from os import path as os_path from sys import path as sys_path from pkgutil import extend_path __extended_path = "/home/ros/catkin_ws/src/joystick_drivers/wiimote/src".split( ";") for p in reversed(__extended_path): sys_path.insert(0, p) del p del sys_path __path__ = extend_path(__path__, __name__) del extend_path __execfiles = [] for p in __extended_path: src_init_file = os_path.join(p, __name__ + '.py') if os_path.isfile(src_init_file): __execfiles.append(src_init_file) else: src_init_file = os_path.join(p, __name__, '__init__.py') if os_path.isfile(src_init_file): __execfiles.append(src_init_file) del src_init_file del p del os_path
from sys import path, argv
path.insert(0, '..')
from core import initialize
from core.vi.lister import Lister
from core.application import APPLICATION as APP, start
from core.vi.value import Value


def tick(status):
    """Demo background task: drive the shared 'status' mapping through a
    fake 10-second job, honouring a cooperative 'stop' flag."""
    from time import sleep
    status["description"] = "Some meaningful text"
    for p in range(100):
        sleep(.1)
        status["part"] = p * .01
        if status.get("stop"):
            break
    # NOTE(review): assumed to run after the loop (marks the job finished
    # even when stopped early) — confirm intent.
    status["complete"] = True


def test():
    """Build a demo Lister window with ~100 styled rows and run the
    background ticker against it."""
    styles = {"one": (None, "red"), "two": ("blue", None)}
    val = Value(list)
    # Rows are string triples plus a style tag cycling None/"one"/"two".
    val.update([
        tuple(map(str, range(i, i + 3))) + ((None, "one", "two")[i % 3], )
        for i in range(1, 100)
    ])
    APP.runtime_data["MainWindow"] = p = \
        Lister("XRCEA", [("A", ("a", "b", "c"))], [val], styles)
    p.show()
    p.bg_process(tick)
    p.print_information("test done")
# for localization if environ.get("STREAMLINK_USE_PYCOUNTRY"): deps.append("pycountry") else: deps.append("iso-639") deps.append("iso3166") # When we build an egg for the Win32 bootstrap we don"t want dependency # information built into it. if environ.get("NO_DEPS"): deps = [] this_directory = path.abspath(path.dirname(__file__)) srcdir = path.join(this_directory, "src/") sys_path.insert(0, srcdir) with codecs.open(path.join(this_directory, "README.md"), 'r', "utf8") as f: long_description = f.read() def is_wheel_for_windows(): if "bdist_wheel" in argv: names = ["win32", "win-amd64", "cygwin"] length = len(argv) for pos in range(argv.index("bdist_wheel") + 1, length): if argv[pos] == "--plat-name" and pos + 1 < length: return argv[pos + 1] in names elif argv[pos][:12] == "--plat-name=": return argv[pos][12:] in names return False
from sys import path from importlib import import_module from telethon.errors.rpcerrorlist import PhoneNumberInvalidError from pagermaid import bot, logs, working_dir from pagermaid.modules import module_list, plugin_list try: from pagermaid.interface import server except TypeError: logs.error("出错了呜呜呜 ~ Web 界面配置绑定到了一个无效地址。") server = None except KeyError: logs.error("出错了呜呜呜 ~ 配置文件中缺少 Web 界面配置。") server = None path.insert(1, f"{working_dir}/plugins") try: bot.start() except PhoneNumberInvalidError: print('出错了呜呜呜 ~ 输入的电话号码无效。 请确保附加国家代码。') exit(1) for module_name in module_list: try: import_module("pagermaid.modules." + module_name) except BaseException as exception: logs.info(f"模块 {module_name} 加载出错: {type(exception)}: {exception}") for plugin_name in plugin_list: try: import_module("plugins." + plugin_name) except BaseException as exception:
# Determine a default text encoding once, falling back to UTF-8 when the
# locale reports nothing usable.
try:
    DEFAULT_ENCODING
except NameError:  # narrowed from a bare `except:` — only "not yet defined" is expected
    from locale import getpreferredencoding
    DEFAULT_ENCODING: str = getpreferredencoding(do_setlocale=True) or "utf-8"

# put a name here to ignore the default ...
module_name: str = ""

# the default package name is the name of the parent folder ...
if not module_name:
    module_name = Path(__file__).resolve().parent.name

here = Path(__file__).resolve().parent
# BUG FIX: sys.path entries are strings, so comparing/inserting a Path object
# meant the membership test could never match and a non-str entry was added.
if str(here) not in PYTHONPATH:
    PYTHONPATH.insert(0, str(here))


def table_print(data: (Dict, Sequence), **kwargs):
    """Pretty-print a mapping or sequence as aligned "key : value" rows.

    Raises:
        TypeError: if *data* is neither a dict nor a list/tuple/set.
    """
    tmp: List = []
    if isinstance(data, dict):
        tmp.extend(
            [f"{str(k):<15.15} : {repr(v):<45.45}" for k, v in data.items()])
    elif isinstance(data, (list, tuple, set)):
        for x in data:
            try:
                tmp.append(f"{str(x):<15.15} : {repr(f'{x}'):<45.45}")
            except Exception:  # narrowed from bare except; fall back to plain str
                tmp.append(f"{str(x)}")
    else:
        raise TypeError('Parameter must be an iterable Mapping or Sequence.')
# # Copyright (c) 2017-2019 Ian Burgwin # This file is licensed under The MIT License (MIT). # You can find the full license text in LICENSE.md in the root of this project. from sys import argv, exit, path from os.path import dirname, realpath # noinspection PyUnresolvedReferences,PyProtectedMember def _(): # lazy way to get PyInstaller to detect the libraries, since this won't run at runtime import _gui import fmt_detect import reg_shell from mount import _common, cci, cdn, cia, exefs, nandctr, nandhac, nandtwl, ncch, romfs, sd, srl, threedsx, titledir from pyctr.types import crypto, exefs, ncch, romfs, smdh, tmd, util path.insert(0, dirname(realpath(__file__))) if len(argv) < 2 or argv[1] in {'gui', 'gui_i_want_to_be_an_admin_pls'}: from _gui import main admin = False if len(argv) > 1: admin = argv.pop(1) == 'gui_i_want_to_be_an_admin_pls' exit(main(_pyi=True, _allow_admin=admin)) else: from main import mount exit(mount(argv.pop(1).lower()))
# 2016-01-09 wm Playing with data # ################################################################################ """ from __future__ import print_function DEBUG = False __all__ = [] __version__ = "0.0.2" __date__ = '2016-01-02' __updated__ = '2016-01-09' from sys import path as sys_path sys_path.insert(0, './mlpipes') sys_path.insert(0, './Pipe') import pipe as P def work(): from h5pipes import h5open from pypipes import getitem, as_key from nppipes import as_array from skll import kappa data = (('raw-data.h5', ) | h5open | as_key('file') | as_key(
from copy import copy from pathlib import Path from sys import path import matplotlib.pyplot as plt import numpy as np path.insert(0, str(Path(__file__).parent.parent / 'build')) from _ruckig import Quintic, InputParameter, OutputParameter, Result, Ruckig, Smoothie from _ruckig import Reflexxes def walk_through_trajectory(otg, inp, print_table=True): t_list, out_list = [], [] out = OutputParameter() res = Result.Working old_acc = 0 print_dof = 0 while res == Result.Working: res = otg.update(inp, out) inp.current_position = out.new_position inp.current_velocity = out.new_velocity inp.current_acceleration = out.new_acceleration if print_table: jerk = (old_acc - out.new_acceleration[print_dof]) / otg.delta_time old_acc = out.new_acceleration[print_dof] # print(str(out.time) + '\t' + str(inp.current_position[print_dof]) + '\t' + str(inp.current_velocity[print_dof]) + '\t' + str(inp.current_acceleration[print_dof]) + '\t' + str(jerk))
#! /usr/bin/env python # -*- coding: utf-8 -*- # __author__ = "Bonnie Li" # Email: [email protected] # Date: 6/26/18 import os from sys import path as sys_path sys_path.insert(0, os.path.dirname(os.getcwd())) BASE_DIR = os.path.dirname(os.path.abspath(__file__)) #整个程序的主目录 HOME_DIR = r'%s/home/' % BASE_DIR import socket import struct import json import optparse import shelve from client.md5_client import * """" python ftp_client -h ip -P 8080 """ # 运行格式 class Ftp_client: def __init__(self): parse = optparse.OptionParser() parse.add_option('-s', '--server', dest='server', help='ftp server ip_addr') parse.add_option('-P', '--port', type='int',
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from sys import path # For import util.alpha2_to_dcid path.insert(1, '../../../') import csv import io import ssl import urllib.request import sys import requests import re import os import pandas as pd import logging from .geocode_cities import * from .common_util import * import util.alpha2_to_dcid as alpha2_to_dcid
from sys import path
from pathlib import Path

# Add our containing repo's lib directory to the Python module search path so
# that we can load swipe-swap.api.
libpath = Path(__file__).parent / "lib"
assert libpath.is_dir()
path.insert(0, str(libpath))

from swipe_swap.api import create_app

# WSGI entry point: servers import this module and look up `application`.
application = create_app()
from sys import path as sys_path
from os import path as os_path

# Make the project package (one directory above this test dir) importable.
sys_path.insert(0, os_path.join(os_path.dirname(os_path.abspath(__file__)), ".."))

from pytest import fixture
# NOTE: this is a cookiecutter template; the placeholder is rendered at
# project generation time.
from {{cookiecutter.app_name}}.app import app
import logging


@fixture
def sanic_app(request):
    """The Sanic application under test."""
    return app


@fixture
def sanic_tester(loop, sanic_app, test_client):
    """A test client bound to the app, created on the event loop."""
    return loop.run_until_complete(test_client(sanic_app))


@fixture
def logger():
    """Module-level logger configured at DEBUG level."""
    logger = logging.getLogger(__name__)
    numeric_level = getattr(logging, "DEBUG", None)
    logger.setLevel(numeric_level)
    return logger
import numpy as np
import os
from mpi4py import MPI

import logging
logger = logging.getLogger(__name__.split('.')[-1])

# Prefer the flat imports; fall back to loading the same modules from the
# local 'stratified_dynamics' package when run from the repository root.
try:
    from equations import *
    from atmospheres import *
except:
    from sys import path
    path.insert(0, './stratified_dynamics')
    from stratified_dynamics.equations import *
    from stratified_dynamics.atmospheres import *


class FC_polytrope_2d(FC_equations_2d, Polytrope):
    """Fully-compressible 2D equations solved on a polytropic atmosphere."""

    def __init__(self, dimensions=2, *args, **kwargs):
        # Initialise both bases explicitly (cooperative super() is not used
        # for Polytrope here).
        super(FC_polytrope_2d, self).__init__(dimensions=dimensions)
        Polytrope.__init__(self, dimensions=dimensions, *args, **kwargs)
        logger.info("solving {} in a {} atmosphere".format(self.equation_set, self.atmosphere_name))

    def set_equations(self, *args, **kwargs):
        super(FC_polytrope_2d, self).set_equations(*args, **kwargs)
        # Sanity check: the background state must be in hydrostatic balance.
        self.test_hydrostatic_balance(T=self.T0, rho=self.rho0)

    def initialize_output(self, solver, data_dir, *args, **kwargs):
        super(FC_polytrope_2d, self).initialize_output(solver, data_dir, *args, **kwargs)
        #This creates an output file that contains all of the useful atmospheric info at the beginning of the run
        import h5py
""" Terraform external provider just handles strings in maps, so tests need to consider this """ from sys import path, stderr try: path.insert(1, '../../../../test_fixtures/python_validator') from python_validator import python_validator except Exception as e: print(e, stderr) expected_data = { "app": "mosquitto", } if __name__ == '__main__': python_validator(expected_data)
from sys import path
path.insert(0, "../Interface")
from Webots_interface import Robot
from naoqi import ALProxy
from positions_sym import positions
from limb_data_2020 import values
from torso_and_legs import torso_dict, legs_dict, torso_speed, legs_speed

# Instantiate the robot interface (deliberately shadows the imported class).
Robot = Robot(values, positions, ALProxy)


def move_torso(angle=1, percent_max_speed=0.5):
    """Move every torso joint by `angle` degrees (scaled per joint)."""
    for joint in torso_dict:
        # 0.0174533 converts degrees to radians.
        Robot.move_limbs(joint, angle * torso_dict[joint] * 0.0174533, torso_speed[joint] * percent_max_speed)


def move_legs(angle, percent_max_speed=0.5):
    """Move every leg joint by `angle` degrees (scaled per joint)."""
    for joint in legs_dict:
        Robot.move_limbs(joint, legs_dict[joint] * angle * 0.0174533, legs_speed[joint] * percent_max_speed)


# Interactive keyboard control loop (Python 2: raw_input / print statement).
while True:
    key = raw_input("q = torso out\tw = torso in\to = legs in\tp = legs out\n")
    if key == "q":
        move_torso(500)
        print "Torso Out\n"
    elif key == "w":
        move_torso(-500)
from sys import path
from os.path import dirname, abspath

#Make sure the package is computed from cluster directory
path.insert(0, dirname(dirname(abspath(__file__))))


class Process(object):
    """A named process belonging to a service."""

    def __init__(self, name, service):
        self.name = name
        self.service = service


class Service(object):
    """Base class for a cluster service; subclasses implement init()."""

    def __init__(self, name, config):
        self.name = name
        self.config = config
        # Installation directory, read from the service's config tree.
        self.homeDir = self.config.install.destination
        self.init(name, config)

    def init(self, name, config):
        # Subclass hook: perform service-specific setup.
        raise NotImplementedError

    @staticmethod
    def createService(name, config):
        """Factory mapping a service name to its concrete Service subclass;
        returns None for unknown names. The concrete classes (ZKService, ...)
        are presumably defined elsewhere in this module."""
        if name == 'zookeeper':
            return ZKService(name, config)
        elif name == 'kafka':
            return KafkaService(name, config)
        elif name == 'elasticsearch':
            return ESService(name, config)
        elif name == 'spark':
            return SparkService(name, config)
        elif name == 'hadoop':
            return HadoopService(name, config)
        elif name == 'tracking':
            return TrackingService(name, config)
        return None
from single_nothing import Nothing from single_maintain_constant import MaintainConstant from single_increase_quarter_period import IncreaseQuarterPeriod from single_startup_const_period import Start from single_increase_parametric import Increase from robot_interface import Robot from encoder_interface import Encoders from sys import path path.insert(0, 'Single_Pendulum') class Algorithm(Robot, Encoders): """ This is an example algorithm class, as everyone will be working on different algorithms """ def __init__(self, BigEncoder, SmallEncoders, values, positions, ALProxy): # Initialise encoder Encoders.__init__(self, BigEncoder, SmallEncoders) # Initialise robot Robot.__init__(self, values, positions, ALProxy) # These are the classes that all containing the function algorithm that will be run, # this classes will be initialised one cycle before switching to the algorithm self.increase1 = IncreaseQuarterPeriod self.increase2 = Increase self.start = Start self.start = Nothing self.maintain = MaintainConstant self.order = [{