def run(doc_id, sent_id, words, lemmas, poses, ners, dep_paths, dep_parents,
        wordidxs, relation_id, wordidxs_1, wordidxs_2):
    try:
        import ddlib
    except:
        import os
        DD_HOME = os.environ['DEEPDIVE_HOME']
        from sys import path
        path.append('%s/ddlib' % DD_HOME)
        import ddlib

    obj = dict()
    obj['lemma'] = []
    obj['words'] = []
    obj['ner'] = []
    obj['pos'] = []
    obj['dep_graph'] = []
    for i in xrange(len(words)):
        obj['lemma'].append(lemmas[i])
        obj['words'].append(words[i])
        obj['ner'].append(ners[i])
        obj['pos'].append(poses[i])
        obj['dep_graph'].append(
            str(int(dep_parents[i])) + "\t" + dep_paths[i] + "\t" + str(i))
    word_obj_list = ddlib.unpack_words(
        obj, lemma='lemma', pos='pos', ner='ner', words='words',
        dep_graph='dep_graph')
    gene_span = ddlib.get_span(wordidxs_1[0], len(wordidxs_1))
    pheno_span = ddlib.get_span(wordidxs_2[0], len(wordidxs_2))
    features = set()
    for feature in ddlib.get_generic_features_relation(word_obj_list, gene_span, pheno_span):
        features.add(feature)
    for feature in features:
        yield doc_id, relation_id, feature
def startMenu():
    py.init()
    v.screen = py.display.set_mode((920, 630), py.DOUBLEBUF)
    buttons = py.sprite.Group()
    buttons.add(mapMenuItems.button("New Map", (460, 270), 50, (255, 255, 0), (0, 0, 255),
                                    "../Resources/Fonts/RunicSolid.ttf", "NM", True, (220, 50)))
    buttons.add(mapMenuItems.button("Load Map", (460, 360), 50, (255, 255, 0), (0, 0, 255),
                                    "../Resources/Fonts/RunicSolid.ttf", "LM", True, (220, 50)))
    texts = py.sprite.Group()
    texts.add(mapMenuItems.textLabel("Legend Of Aiopa RPG", (460, 150), (255, 0, 255),
                                     "../Resources/Fonts/RunicSolid.ttf", 50, variable=False, centred=True))
    texts.add(mapMenuItems.textLabel("Map Editor", (460, 200), (200, 0, 200),
                                     "../Resources/Fonts/RunicSolid.ttf", 40, variable=False, centred=True))
    v.textNum = 1
    while True:
        py.event.pump()
        v.events = []
        v.events = py.event.get()
        v.screen.fill((0, 255, 255))
        texts.update()
        buttons.update()
        py.display.flip()
        for event in v.events:
            if event.type == py.MOUSEBUTTONDOWN:
                for b in buttons:
                    if b.pressed():
                        if b.ID == "NM":
                            #setup()
                            npcEdit.createNPC()
                        if b.ID == "LM":
                            from sys import path
                            path.append('../Saves')
                            import mapFile  # @UnresolvedImport
                            v.totalMap = mapFile.map
                            load()
def run(self, process, action_value, *args, **kwargs):
    """ Will process the information passed in action_value. """
    python_path = format_value(process, "path", default_value=self.python_path)
    if not exists(python_path):
        raise ValueError("The directory [%s] not exist." % self.python_path)
    path_exist = python_path in path
    try:
        if not path_exist:
            path.append(python_path)
        # Check the module
        module = format_value(process, "module", default_value=None)
        if module is None:
            raise ValueError("The module was not set in %s that use the connection [%s]"
                             % (process["tag"], process["connection_name"]))
        if not (exists(join(python_path, module + ".py")) or
                exists(join(python_path, module + ".pyc"))):
            raise ValueError("The module [%s] not exist in the path [%s]" % (module, python_path))
        class_name = format_value(process, "class", default_value=None)
        method = format_value(process, "method", default_value="run")
        module_ref = __import__(module, fromlist=None if class_name is None else [class_name, ])
        instance = (None, 1, "Was not implemented yet!")
        if class_name:
            class_ref = getattr(module_ref, class_name)()
            instance = getattr(class_ref, method)(process, action_value, *args, **kwargs)
        else:
            instance = getattr(module_ref, method)(process, action_value, *args, **kwargs)
        return instance
    except Exception, e:
        return (None, settings.SYSTEM_ERROR, e)
def init(self, *args, **kwargs):
    super(RPlugins, self).init(*args, **kwargs)

    if "rplugins" not in self.config:
        raise ConfigError("Remote Plugins not configured!")

    for param in ("path",):
        if param not in self.config["rplugins"]:
            raise ConfigError("Remote Plugins not configured! Missing: {0}".format(repr(param)))

    self.data.init(
        {
            "rplugins": {
                "allowed": {},
                "pending": {},
                "enabled": {},
            }
        }
    )

    rplugins_path = self.config["rplugins"]["path"]
    if not path_exists(rplugins_path):
        mkdir(rplugins_path)

    rplugins_init_py = path_join(rplugins_path, "__init__.py")
    if not path_exists(rplugins_init_py):
        with open(rplugins_init_py, "w") as f:
            f.write("")

    if rplugins_path not in module_search_path:
        module_search_path.append(rplugins_path)

    Commands().register(self)
    RPluginsCommands().register(self)
def instantiatePlugins(self):
    """ Looks for plugins classes and instantiates them

    Returns
    -------
    plg : list
        A list of class instances, one for each plugin found.
    grp : list
        A list of the plugins groups.
    """
    # TODO: Implement the plugin classes: Input, Output, Analysis and use
    # them to place widgets on the screen
    PLUGINS_DIR = 'Plugins'
    VALID_CLASSES = ['Input', 'Analysis']
    plugins_folder = os.path.join(os.path.dirname(os.path.realpath(__file__)), PLUGINS_DIR)
    plugins_files = glob(os.path.join(plugins_folder, '*.py'))
    plugins_list = [f[:-3] for _, f in [os.path.split(p) for p in plugins_files]]
    path.append(plugins_folder)  # Add plugins path to PYTHONPATH
    plg = []
    grp = set()
    for v in plugins_list:
        try:
            v = getattr(import_module(v), v)
        except ImportError as e:
            print 'TODO: handle this error: ' + e.message
            pass
        else:
            inst = v()
            if VALID_CLASSES.count(inst.pluginClass) == 0:
                continue
            plg.append(inst)
            grp.add(inst.pluginGroup)
    return plg, list(grp)
def _config_check():
    from message import Messager
    from sys import path
    from copy import deepcopy
    from os.path import dirname
    # Reset the path to force config.py to be in the root (could be hacked
    # using __init__.py, but we can be monkey-patched anyway)
    orig_path = deepcopy(path)
    # Can't you empty in O(1) instead of O(N)?
    while path:
        path.pop()
    path.append(path_join(abspath(dirname(__file__)), '../..'))
    # Check if we have a config, otherwise whine
    try:
        import config
        del config
    except ImportError, e:
        path.extend(orig_path)
        # "Prettiest" way to check specific failure
        if e.message == 'No module named config':
            Messager.error(_miss_config_msg(), duration=-1)
        else:
            Messager.error(_get_stack_trace(), duration=-1)
        raise ConfigurationError
def load_environment(global_conf, app_conf):
    """Configure the Pylons environment via the ``pylons.config`` object
    """
    # Pylons paths
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    paths = dict(root=root,
                 controllers=os.path.join(root, 'controllers'),
                 static_files=os.path.join(root, 'public'),
                 templates=[os.path.join(root, 'templates')])

    # Initialize config with the basic options
    config.init_app(global_conf, app_conf, package='OnDeafTweers_web', paths=paths)

    config['routes.map'] = make_map()
    config['pylons.app_globals'] = app_globals.Globals()
    config['pylons.h'] = OnDeafTweers_web.lib.helpers

    # Create the Mako TemplateLookup, with the default auto-escaping
    config['pylons.app_globals'].mako_lookup = TemplateLookup(
        directories=paths['templates'],
        error_handler=handle_mako_error,
        module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
        input_encoding='utf-8', default_filters=['escape'],
        imports=['from webhelpers.html import escape'])

    # Setup the SQLAlchemy database engine
    engine = engine_from_config(config, 'sqlalchemy.')
    init_model(engine)

    # CONFIGURATION OPTIONS HERE (note: all config options will override
    # any Pylons config options)
    # Make sure we've got the lib directory in sys.path
    importpath.append(os.path.join(root, 'lib'))
def __init__(self, keyspace_name, table_name, record_schema, cassandra_session, replication_strategy=None):
    title = '%s.__init__' % self.__class__.__name__

    # construct fields model
    from jsonmodel.validators import jsonModel
    self.fields = jsonModel(self._class_fields)

    # validate inputs
    input_fields = {
        'keyspace_name': keyspace_name,
        'table_name': table_name,
        'record_schema': record_schema,
        'replication_strategy': replication_strategy
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # validate cassandra session
    from sys import path as sys_path
    sys_path.append(sys_path.pop(0))
    from cassandra.cluster import Session
    sys_path.insert(0, sys_path.pop())
    if not isinstance(cassandra_session, Session):
        raise ValueError('%s(cassandra_session) must be a cassandra.cluster.Session datatype.' % title)
    self.session = cassandra_session
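The rotation trick above (pop the script directory off the front of sys.path, import, then restore the original order) keeps a local module from shadowing an installed package of the same name. A minimal, self-contained sketch of the same idea, assuming only the standard library; the helper name is illustrative, not taken from any snippet here:

# Illustrative sketch: temporarily drop the script's own directory from
# sys.path so a site-packages/stdlib module wins over a same-named local file,
# then restore the original search order.
from sys import path as sys_path

def import_without_local_shadow(module_name):
    first = sys_path.pop(0)            # the script directory normally sits at index 0
    try:
        module = __import__(module_name)
    finally:
        sys_path.insert(0, first)      # restore the original search order
    return module

# usage: json_mod = import_without_local_shadow('json')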
def setUpModule():
    global feedlinks
    path.append(basedir + '/lib')
    path.append(basedir + '/bin')
    feedlinks = load_source('feedlinks', basedir + '/bin/classfeedlinks')
    return
def application(environ, start_response):
    peticiones = environ['REQUEST_URI'].split('/')
    peticiones.pop(0)
    cantidad = len(peticiones)
    if cantidad == 3:
        modulo, modelo, recurso = peticiones
        arg = ''
    elif cantidad == 4:
        modulo, modelo, recurso, arg = peticiones
    else:
        modulo = 'users'
        modelo = 'user'
        recurso = 'index'
        arg = ''
    controller_name = '%sController' % modelo.capitalize()
    from sys import path
    path.append(environ['SCRIPT_FILENAME'].replace('frontcontroller.py', ''))
    exec ('from modules.%s.controllers.%s import %s' % (modulo, modelo, controller_name))
    controller = locals()[controller_name](recurso, arg, environ)
    output = controller.output
    start_response('200 OK', [('Content-Type', 'text/html; charset=UTF-8')])
    return output
def loadConfig(self):
    # if local configuration file exists use it
    self.FCONFIG = ('./etc/timekpr.conf' if isfile('./etc/timekpr.conf') else '/etc/timekpr.conf')
    if not isfile(self.FCONFIG):
        exit(_("Error: timekpr configuration file %s does not exist.") % self.FCONFIG)
    self.VAR = loadVariables(self.FCONFIG)
    if self.VAR['DEVACTIVE']:
        from sys import path
        path.append('.')
    self.logkpr('Loaded Configuration from %s' % self.FCONFIG, 1)
    self.logkpr('Variables: GRACEPERIOD: %s POLLTIME: %s LOCKLASTS: %s' % (
        self.VAR['GRACEPERIOD'], self.VAR['POLLTIME'], self.VAR['LOCKLASTS']))
    self.logkpr('Debuging: DEBUGME: %s CLOCKSPEED: %s CLOCKTIME: %s' % (
        self.VAR['DEBUGME'], self.VAR['CLOCKSPEED'], self.VAR['CLOCKTIME']))
    self.logkpr('Directories: LOGFILE: %s TIMEKPRDIR: %s TIMEKPRWORK: %s TIMEKPRSHARED: %s' % (
        self.VAR['LOGFILE'], self.VAR['TIMEKPRDIR'], self.VAR['TIMEKPRWORK'], self.VAR['TIMEKPRSHARED']))
    # Check if all directories exists, if not, create it
    if not isdir(self.VAR['TIMEKPRDIR']):
        mkdir(self.VAR['TIMEKPRDIR'])
    if not isdir(self.VAR['TIMEKPRWORK']):
        makedirs(self.VAR['TIMEKPRWORK'])
    # set clockspeed
    if self.VAR['CLOCKSPEED'] != 1:
        self.logkpr('setting clockspeed to %s' % self.VAR['CLOCKSPEED'])
        clock.setSpeed(self.VAR['CLOCKSPEED'])
    if self.VAR['CLOCKTIME'] != '':
        self.logkpr('setting clock time to %s' % strDateTime2tuple(self.VAR['CLOCKTIME']))
        clock.setTime(strDateTime2tuple(self.VAR['CLOCKTIME']))
def process(inputFile, outputFile):
    ''' Reads a block log CSV, cleans the comment, reorders the output and
    writes it to disk according to outputFile. Please be aware that, if
    writing permissions are given for outputFile, it will blindly overwrite
    everything you love. '''
    import csv
    from sys import path
    path.append("./WikiCodeCleaner")
    try:
        from WikiCodeCleaner.clean import clean as cleanWikiCode
    except ImportError:
        # Ubuntu 12.04's Python 3.2.3 behaves differently:
        from clean import clean as cleanWikiCode

    ignoredBlocksCount = 0
    with inputFile:
        logReader = csv.reader(inputFile, delimiter='\t', quotechar='"')
        logWriter = csv.writer(outputFile, delimiter='\t', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        for [comment, userId, userName, timestamp, blockedUserName] in logReader:
            comment = comment.lower()
            cleanedComment = cleanWikiCode(comment).strip()
            if isCommentOfInterest(cleanedComment):
                logWriter.writerow([timestamp, blockedUserName, cleanedComment, userId, userName])
            else:
                ignoredBlocksCount += 1
    print('[I] Ignored %i comments' % ignoredBlocksCount)
def main(args):
    argp = _argparser().parse_args(args[1:])

    try:
        # At first, try using global installs
        import simstring
        del simstring
    except ImportError:
        # If that fails, try our local version
        sys_path.append(SIMSTRING_PY_DIR)
        try:
            import simstring
            del simstring
        except ImportError:
            from sys import stderr
            print('ERROR: Failed to import SimString, did you run make to '
                  'build it or install it globally?', file=stderr)
            return -1

    if not argp.simstring_dbs:
        raise NotImplementedError
    else:
        db_paths = argp.resources

    tokens = [unicode(l, encoding=argp.input_encoding).rstrip('\n') for l in argp.input]

    for token, repr in _token_reprs(tokens, db_paths, verbose=argp.verbose):
        repr_tsv = '\t'.join(str(r) for r in repr)
        # Assume that the user wants the output/input to have the same encoding
        print((u'{}\t{}'.format(token, repr_tsv)).encode(argp.input_encoding), file=argp.output)
    return 0
def importPlanner(module_or_file_name=None):
    if module_or_file_name is None:
        if len(argv) != 2:
            print "Usage: %s <planner-module>" % argv[0]
            exit(2)
        module_or_file_name = argv[1].strip()
    if module_or_file_name.endswith(".py"):
        module_or_file_name = module_or_file_name[:-3]
    try:
        dirname, filename = string.rsplit(module_or_file_name, '/', 1)
        path.append(dirname)
    except ValueError:
        filename = module_or_file_name
    module = __import__(filename)
    try:
        if hasattr(module, 'controller'):
            if hasattr(module, 'graceful_exit'):
                return module.update, module.controller, module.graceful_exit
            return module.update, module.controller
        else:
            return module.update
    except AttributeError:
        raise AttributeError("The planner module must have an update() function.")
def addpathImp(path):
    if not isdir(path):
        return
    if path not in SYSPATH:
        SYSPATH.append(path)
    for x in listdir(path):
        addpathImp("%s/%s" % (path, x))
def parse_configuration(root):
    """Parses an XML document representing the configuration of the logwrap
    utility. The document is parsed and two items are returned: the error
    handler used to process errors and a list of encapsulators for the event
    framework.
    """
    encapsulators = []
    config = get_child_elements("logWrap", root, first=1)
    if config == None:
        raise BuilderError, "You must have an outer logWrap element"
    for node in get_child_elements("handlerDirectory", config):
        dir = interpolate_text(node.getAttribute("dir"), environ)
        if not dir:
            raise BuilderError, "handlerDirectory requires a dir attribute"
        path.append(str(dir))
    error_handler = build_error_handler(config)
    if error_handler:
        error_handler = ErrorHandlerAdapter(error_handler)
    for node in get_child_elements("logEncapsulator", config):
        encapsulators.append(parse_logEncapsulator(node))
    if len(encapsulators) == 0:
        raise BuilderError, "You must configure at least one log encapsulator"
    return error_handler, encapsulators
def main():
    fusil_dir = dirname(__file__)
    sys_path.append(fusil_dir)

    # Disable xmlrpclib backport to avoid a bug in the unit tests
    from ufwi_rpcd.python import backportXmlrpclib
    backportXmlrpclib.done = True

    # Test documentation in doc/*.rst files
    testDoc('tests/try_finally.rst')

    # Test documentation of some functions/classes
    testModule("ufwi_rpcd.common.human")
    testModule("ufwi_rpcd.common.network")
    testModule("ufwi_rpcd.common.tools")
    testModule("ufwi_rpcd.common.transport")
    testModule("ufwi_rpcd.common.error")
    testModule("ufwi_rpcd.common.defer")
    testModule("ufwi_rpcd.common.ssl.checker")
    testModule("ufwi_rpcd.common.namedlist")
    testModule("ufwi_rpcd.common.process")
    # testModule("ufwi_rpcd.qt.tools")
    testModule("tools.ufwi_rpcd_client", "tools/ufwi_rpcd_client")

    # __import__('tools.ufwi_rpcd_client') compiles the Python file
    # to tools/ufwi_rpcd_clientc
    unlink("tools/ufwi_rpcd_clientc")
def run(i=0, edir=''):
    from sys import path
    path.append(edir)
    from kmos.run import KMC_Model
    model = KMC_Model(banner=False, print_rates=False)
    model.settings.random_seed = i
    assert not model.do_steps(1000)
    assert not model.deallocate()
def getconfig(param):
    from os.path import dirname
    from sys import path
    p = dirname(__file__)
    path.append(p)
    import config
    val = eval('config.%s' % (param))
    return val
def main():
    nuface_dir = dirname(__file__)
    sys_path.append(nuface_dir)

    # Test documentation in doc/*.rst files
    # testDoc('doc/nuface.rst')

    # Test documentation of some functions/classes
    testModule("ufwi_log.core.datasource")
def __init__(self, config):
    self.config = config
    self.http_sep = re.compile('\r\n\r\n')

    # append any specific jars to the path
    for dirname, dirnames, filenames in walk('~/current/code/jars'):
        for filename in filenames:
            if ".jar" in filename:
                append(join(dirname, filename))
                print "Appended to path: %s" % join(dirname, filename)
def return_module(self, name='test'):
    file_name = path.join(self.out_dir.name, name + '.py')
    sys_path.append(self.out_dir.name)
    loader = SourceFileLoader(self._testMethodName, file_name)
    module = loader.load_module(self._testMethodName)
    sys_path.remove(self.out_dir.name)
    return module
def __init__(self, files=None, blanks=BLANKS, color=True, *args, **kargs):
    self.color = color
    if not files:
        self.files = sorted([f for f in listdir(".") if f.endswith(".py") and path.isfile(f)])
    else:
        self.files = files
    self.blanks = blanks
    self.reload_files()
    syspath.append(path.abspath("."))
    super(DemoConsole, self).__init__(*args, **kargs)
def load_dependencies():
    required_modules = ['Kotoba', 'Imagination', 'Tori']
    base_mod_path = abspath(join(
        app_path,
        (is_production and 'lib' or '../..')
    ))
    for required_module in required_modules:
        mod_path = join(base_mod_path, required_module)
        path.append(mod_path)
def validate_family(value):
    try:
        path.append(config.datafilepath('families'))
        __import__('%s_family' % value)
    except ImportError:
        raise ValidationError(_("Family %(family)s doesn't exist.") % {'family': value})
    finally:
        path.pop()
def main():
    from os.path import dirname as dir
    from sys import path
    print(dir(path[0]))
    path.append(dir(path[0]))
    gw = GateWay()
    try:
        gw.start()
    finally:
        gw.exit()
def configimport(configfile):
    # Import exploit config
    path.append(datadir + '/exploits')
    global args
    if configfile.endswith('.py'):
        configfile = configfile[:-3]
    try:
        print greentext + '[+]' + resettext + ' Loading ' + configfile + '.py config file\n'
        args = __import__(configfile)
    except:
        print redtext + '[X]' + resettext + ' Config file ' + datadir + '/exploits/' + configfile + '.py not found!\n'
        exit()
def main():
    fusil_dir = dirname(__file__)
    sys_path.append(fusil_dir)

    # Test documentation in doc/*.rst files
    # testDoc('tests/try_finally.rst')

    # Test documentation of some functions/classes
    testModule("ufwi_rpcc_qt.error")
    testModule("ufwi_rpcc_qt.html")
    testModule("ufwi_rpcc_qt.tools")
def denoise_ornlm(data_in, v=3, f=1, h=0.05):
    from commands import getstatusoutput
    from sys import path
    # append python path for importing module
    # N.B. PYTHONPATH variable should take care of it, but this is only used for Travis.
    status, path_sct = getstatusoutput('echo $SCT_DIR')
    path.append(path_sct + '/external/denoise/ornlm')
    from ornlm import ornlm
    from numpy import array, max, float64

    dat = data_in.astype(float64)
    denoised = array(ornlm(dat, v, f, max(dat)*h))
    return denoised
def task_directory(self, value):
    """ Change of task directory.

    :param str value: new task directory path to use.
    """
    if value is not None:
        # add value in path if not None
        if value not in path:
            path.append(value)
    self._task_directory = value
from sys import path
path.append(r'./GNN_implement/')
from GNN_implement.main import parse_args, gnn
from GNN_implement.gnn import split_train_test, train
path.append(r"./node2vec/src/")
import numpy as np
import networkx as nx
from sklearn import metrics
import node2vec
from gensim.models import Word2Vec
from operator import itemgetter
from tqdm import tqdm


def load_data(data_name, network_type):
    """
    :param data_name:
    :param network_type: use 0 and 1 stands for undirected or directed graph, respectively
    :return:
    """
    print("load data...")
    file_path = "./raw_data/" + data_name + ".txt"
    positive = np.loadtxt(file_path, dtype=int, usecols=(0, 1))

    # sample negative
    G = nx.Graph() if network_type == 0 else nx.DiGraph()
    G.add_edges_from(positive)
    print(nx.info(G))
    negative_all = list(nx.non_edges(G))
    np.random.shuffle(negative_all)
    negative = np.asarray(negative_all[:len(positive)])
# Our directories
# Note: On codalab, there is an extra sub-directory called "program"
# Leave this stuff "as is"
running_on_codalab = False
run_dir = os.path.abspath(".")
codalab_run_dir = os.path.join(run_dir, "program")
if os.path.isdir(codalab_run_dir):
    run_dir = codalab_run_dir
    running_on_codalab = True
    print "Running on Codalab!"
lib_dir = os.path.join(run_dir, "sample_code")
res_dir = os.path.join(run_dir, "res")

# Our libraries
path.append(run_dir)
path.append(lib_dir)
import data_io                        # general purpose input/output functions
from data_io import vprint            # print only in verbose mode
from data_manager import DataManager  # load/save data and get info about them
from classifier import Classifier     # example models from scikit learn

if debug_mode >= 4 or running_on_codalab:
    # Show library version and directory structure
    data_io.show_version()
    data_io.show_dir(run_dir)

# =========================== BEGIN PROGRAM ================================
if __name__ == "__main__" and debug_mode < 4:
    #### Check whether everything went well (no time exceeded)
    execution_success = True
================================================================================
 File      : testsuite.py
 Author(s) : Luuvish
 Version   : 2.0
 Revision  : 2.0 May 13, 2014    Executor classify
================================================================================
'''

from sys import path, argv, stdout, stderr
from os import remove
from os.path import dirname

path.append(dirname(__file__))

from test.globber import Globber
from test.model import ModelExecutor
from test.suite import h264, hevc, vc1, vp8, vp9

codecs = (h264, hevc, vc1, vp8, vp9)
models = (model for codec in codecs for model in codec.models)
suites = (suite for codec in codecs for suite in codec.suites)


class TestSuite(object):

    program = 'testsuite.py'
    description = 'shell command helper for testsuite of video codecs'
def AddPath():
    from sys import path
    add_path = getMainDir()
    if not add_path in path:
        # print(add_path)
        path.append(add_path)
import os
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]

from sys import path, argv
path.append("%s/work/mylib/" % my_home)
import h5py
import numpy
import c4py
import time


expo_name = argv[1]
expo_path = "/mnt/perc/hklee/CFHT/CFHT_cat_4_20_2021/cat_hdf5/%s" % expo_name

sep_pix_thresh = 12
sep_z_thresh = 0.2

h5f = h5py.File(expo_path, "r")
expo_data = h5f["/data"][()]
h5f.close()

expo_ichip_all = expo_data[:, 16].astype(dtype=numpy.intc)
expo_xc_all = expo_data[:, 18]
expo_yc_all = expo_data[:, 19]
expo_z_all = expo_data[:, 10]
expo_radius_all = numpy.sqrt(expo_data[:, 25]/numpy.pi)

print(expo_ichip_all.min(), expo_ichip_all.max())

nums_all = numpy.zeros((expo_ichip_all.max()+1, ), dtype=numpy.intc)
# -*- coding: utf-8 -*-
import datetime
from decimal import Decimal
from os import getcwd
from sys import path as jppath
jppath.append(getcwd())
import re

from PyQt5.QtCore import (QAbstractTableModel, QDate, QModelIndex, QObject,
                          Qt, QVariant, pyqtSignal)
from PyQt5.QtGui import QColor
from PyQt5.QtWidgets import QAbstractItemView, QMessageBox, QTableView

import lib.JPMvc.JPDelegate as myDe
from lib.JPDatabase.Database import JPDb
from lib.JPDatabase.Field import JPFieldType
from lib.JPDatabase.Query import (JPQueryFieldInfo, JPTabelFieldInfo,
                                  JPTabelRowData)
from lib.JPFunction import (JPBooleanString, JPDateConver, JPGetDisplayText,
                            JPRound, PrintFunctionRunTime)
from lib.JPMvc import JPWidgets
from lib.JPPublc import JPPub


class JPTableViewModelBase(QAbstractTableModel):
    dataChanged = pyqtSignal(QModelIndex, object)
    firstHasDirty = pyqtSignal()
    editNext = pyqtSignal(QModelIndex)
    readingRow = pyqtSignal(int)
from sys import path, argv
path.append('../source/')
from hashcracker import hash_password

if len(argv) == 1:
    print('Usage: <type> <string>')
    exit()

print(hash_password(argv[2], argv[1]))
#!/usr/bin/env python

# Created by Wazuh, Inc. <*****@*****.**>.
# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2

from sys import exit, path, argv, stdout
from os.path import dirname
from signal import signal, SIGINT
from time import sleep
import argparse
import os
import re

# Set framework path
path.append(dirname(argv[0]) + '/../framework')  # It is necessary to import Wazuh package

# Import framework
try:
    from wazuh import Wazuh
    from wazuh.agent import Agent
    from wazuh.exception import WazuhException
    from wazuh import common
except Exception as e:
    print("Error importing 'Wazuh' package.\n\n{0}\n".format(e))
    exit()


# Functions
def signal_handler(n_signal, frame):
    print("")
from typing import List
from glob import glob
import pandas as pd
from os.path import abspath, dirname, splitext, basename, join as pjoin
from sys import path
SCRIPT_PATH = dirname(abspath(__file__))
path.append(SCRIPT_PATH)
from scripts.search_nvd_records import NVD
from scripts.classes import CVE_Item, CVSS_V3, CVSS_V2

nvd = NVD()

# Load the data
csv_list = glob(pjoin(SCRIPT_PATH, 'output', '*.csv'))
dfs = [pd.read_csv(csv, index_col=0) for csv in csv_list]
df_concat = pd.concat(dfs)
df_concat = df_concat.fillna(0)


def sort_function(elem: CVE_Item) -> tuple:
    """Key function for sorting a list of CVE_Item objects."""
    cvss_v3: CVSS_V3 = elem.impact.get('V3', CVSS_V3({}))
    cvss_v2: CVSS_V2 = elem.impact.get('V2', CVSS_V3({}))
    # Sort keys; negated so the sort is in descending order
    tier1 = -(cvss_v3.baseScore + cvss_v2.baseScore)
    tier2 = -cvss_v3.baseScore
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from sys import path

if '/Users/pleuvoir/dev/space/git/python-tutorial' not in path:
    path.append('/Users/pleuvoir/dev/space/git/python-tutorial')

"""What gets printed is a list. An IDE adds the current directory to it automatically,
so a module that imports fine in the IDE may not be found when run from the command line."""
print(path)

"""Use https://pypi.org/ to search for packages"""
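Several of the examples above guard the append with a membership test so repeated imports or re-runs do not keep growing sys.path. A small helper capturing that pattern; this is an illustrative sketch, not taken from any of the snippets here, and the helper name is hypothetical:

# Illustrative sketch: append a directory to sys.path only once, and only if it exists.
from os.path import isdir
from sys import path

def add_to_path_once(directory):
    if isdir(directory) and directory not in path:
        path.append(directory)

# usage:
# add_to_path_once('/Users/pleuvoir/dev/space/git/python-tutorial')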
'''
(*)~----------------------------------------------------------------------------------
 Pupil - eye tracking platform
 Copyright (C) 2012-2014  Pupil Labs

 Distributed under the terms of the CC BY-NC-SA License.
 License details are in the file license.txt, distributed as part of this software.
----------------------------------------------------------------------------------~(*)
'''

if __name__ == '__main__':
    # make shared modules available across pupil_src
    from sys import path as syspath
    from os import path as ospath
    loc = ospath.abspath(__file__).rsplit('pupil_src', 1)
    syspath.append(ospath.join(loc[0], 'pupil_src', 'shared_modules'))
    del syspath, ospath

import os, sys
from time import time
import shelve
import logging
from ctypes import c_int, c_bool, c_float, create_string_buffer
import numpy as np

# display
from glfw import *
import atb

# helpers/utils
from methods import normalize, denormalize, Temp
'''
SimSem classifiers module.

Author:     Pontus Stenetorp    <pontus is s u-tokyo ac jp>
Version:    2011-04-07
'''

from os.path import join as path_join
from os.path import basename, dirname, isfile
from sys import path as sys_path

# XXX: Path hack!
sys_path.append(path_join(dirname(__file__), '..'))

from liblinear import LibLinearClassifier
#from ....features import AbstractFeature

### Code generation and import of generated code
# XXX: The below code is NOT extensively tested
# TODO: Can this be inserted into __init__.py? If so we can expose
#       "classifiers" outside of the package
# XXX: If regeneration doesn't work it is most likely a more serious error, print it!
DEBUG = False

from config import (SIMSTRING_DB_PATHS, FEATURES_MODULE_PATH,
                    CLASSIFIERS_MODULE_PATH)


def _features_module_is_valid():
    try:
        from features import SIMSTRING_FEATURES
    myclassifier = classifier_dict[key]
    acc = D.ClfScatter(myclassifier)  # compute_accuracy(myclassifier, D, key) # Replace by a call to ClfScatter
    return acc  # Return the last accuracy (important to get the correct answer in the TP)


if __name__ == "__main__":
    # We can use this function to test the Classifier
    if len(argv) == 1:
        # Use the default input and output directories if no arguments are provided
        input_dir = "../public_data"
        output_dir = "../results"
        score_dir = "../scoring_program"
    else:
        input_dir = argv[1]
        output_dir = argv[2]
        score_dir = argv[3]

    # The M2 may have prepared challenges using sometimes AutoML challenge metrics
    path.append(score_dir)
    from zDataManager import DataManager  # The class provided by binome 1

    basename = 'Iris'
    D = DataManager(basename, input_dir)  # Load data
    print(D)

    test(D)
#!/usr/bin/env python3
#---------
# one pedestrian created at frame 0
# pedestrian starts in position (0, 1) and moves forward
# in a straight corridor with a speed of 1 m/s
#
# arrival times should be:
# - 7s at line in x=7
# - 9s at line in x=9
#---------
import os
from sys import argv, path
import logging

utestdir = os.path.abspath(
    os.path.dirname(os.path.dirname(os.path.dirname(path[0]))))
path.append(utestdir)
path.append(os.path.dirname(path[0]))
# source helper file
from utils import SUCCESS, FAILURE
from JPSRunTest import JPSRunTestDriver
import numpy as np

should_be_7 = 7.0
should_be_9 = 9.0


def runtest(inifile, trajfile):
    logging.info("===== Method A - Flow-NT ===============")
    data_9_filename = os.path.join('./Output',
                                   'Fundamental_Diagram',
                                   'FlowVelocity',
                                   'Flow_NT_%s_id_9.dat' % trajfile)
# -*- coding: utf-8 -*-
import geatpy as ea  # import the geatpy library
from sys import path as paths
from os import path
paths.append(path.split(path.split(path.realpath(__file__))[0])[0])


class soea_SGA_templet(ea.SoeaAlgorithm):
    """
    soea_SGA_templet : class - Simple GA templet (the simplest, most classic genetic algorithm template)

    Algorithm description:
        This template implements the classic single-objective genetic algorithm. The flow is:
        1) Initialize a population of N individuals according to the encoding rules.
        2) Stop if the termination condition is met, otherwise continue.
        3) Collect statistics on the current population, e.g. record its best individual, mean fitness, etc.
        4) Independently select N parents from the current population.
        5) Independently apply crossover to these N parents.
        6) Independently apply mutation to the N crossed-over individuals to obtain the next generation.
        7) Go back to step 2.
    """

    def __init__(self, problem, population):
        ea.SoeaAlgorithm.__init__(self, problem, population)  # call the parent class constructor first
        if population.ChromNum != 1:
            raise RuntimeError('传入的种群对象必须是单染色体的种群类型。')
        self.name = 'SGA'
        self.selFunc = 'rws'  # roulette-wheel selection operator
        if population.Encoding == 'P':
#coding:UTF-8
'''
Created on December 13, 2016

Count the number of users who logged in, for each day count (1-30)

@author: Ivy
'''
import sys, os
from sys import path
path.append('tools/')
path.append(path[0] + '/tools')
import MySQLdb
import time
from impala.dbapi import connect
reload(sys)
sys.setdefaultencoding('utf8')
import json
import base64
from itertools import groupby


def getKey(item):
    return item[1]


def behavior(mysqlhostIP, mysqlUserName='******', mysqlPassword='', dbname='weibo'):
    now = int(time.time())
    timeArray = time.localtime(now)
    otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
    print otherStyleTime
    monthly_period = list()
    # for inter in range(17,47):
    #     now = int(time.time())-86400*inter
    #     timeArray = time.localtime(now)
    #     # otherStyleTime = time.strftime("%Y-%m-%d", timeArray)
For regression, you can plot Y as a function of X.
    plot the residual as a function of X.
For classification, you can show the histograms of X for each Y value.
    show ROC curves.
For both: provide a table of scores and error bars.
"""

# Add the sample code in the path
mypath = "../ingestion_program"
from sys import argv, path
from os.path import abspath
import os
path.append(abspath(mypath))

# Graphic routines
import seaborn as sns
sns.set()
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]  # Red, lime, blue
cm = LinearSegmentedColormap.from_list('rgb', colors, N=3)

# Data types
import pandas as pd
import numpy as np
def alarm_system_ind_commands():
    # mode = 1 : Home Arm
    # mode = 0 : Full
    return alarm_system_indicators.get_state()


os_type = platform
if os_type == 'darwin':
    main_path = '/Users/guy/Documents/github/Rpi/'
elif os_type == 'win32':
    main_path = 'd:/users/guydvir/Documents/git/Rpi/'
elif os_type == 'linux':
    main_path = '/home/guy/Documents/github/Rpi/'

path.append(main_path + 'GPIO_Projects/lcd')
path.append(main_path + 'SmartHome/LocalSwitch')
path.append(main_path + 'modules')
path.append(main_path + 'SmartHome/LocalSwitch')
path.append(main_path + 'SmartHome/RemoteSwitch')

from gpiobuttonlib import HWRemoteOutput, HWRemoteInput

root = tk.Tk()
chat_id = 596123373
build_gui(root)

bot = telepot.Bot('497268459:AAFrPh-toL6DPPArWknqJzIAby8jMi21S4c')
me = bot.getMe()
root.title('Telegram BOT:' + me['first_name'] + '#' + str(me['id']))

win1 = HWRemoteOutput(ip='192.168.2.114', output_pins=[19, 26],
from sys import path
path.append('/usr/lib64/python2.7/site-packages/')
import yaml
from cvplibrary import CVPGlobalVariables, GlobalVariableNames, Device

ztp = CVPGlobalVariables.getValue(GlobalVariableNames.ZTP_STATE)
ip = CVPGlobalVariables.getValue(GlobalVariableNames.CVP_IP)

if ztp == 'true':
    user = CVPGlobalVariables.getValue(GlobalVariableNames.ZTP_USERNAME)
    passwd = CVPGlobalVariables.getValue(GlobalVariableNames.ZTP_PASSWORD)
else:
    user = CVPGlobalVariables.getValue(GlobalVariableNames.CVP_USERNAME)
    passwd = CVPGlobalVariables.getValue(GlobalVariableNames.CVP_PASSWORD)

ss = Device(ip, user, passwd)


def get_hostname():
    show_hostname = ss.runCmds(["enable", {"cmd": "show hostname"}])[1]
    hostname = show_hostname['response']['hostname']
    return hostname


def get_bgpasn():
    show_ip_bgp_summary = ss.runCmds(["enable", {"cmd": "show ip bgp summary"}])[1]
    asn = show_ip_bgp_summary['response']['vrfs']['default']['asn']
    return asn


def create_routes(hostname):
    number = hostname[-1:]
    if hostname.startswith("leaf"):
        switch_type = "10"
import os
import platform
from logging.handlers import SysLogHandler
from os.path import abspath, dirname, join
from sys import path

from corsheaders.defaults import default_headers as corsheaders_default_headers

here = lambda *x: join(abspath(dirname(__file__)), *x)
PROJECT_ROOT = here('..')
root = lambda *x: abspath(join(abspath(PROJECT_ROOT), *x))

path.append(root('apps'))

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('COURSE_DISCOVERY_SECRET_KEY', 'insecure-secret-key')

OPENEXCHANGERATES_API_KEY = None

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'dal',
    'dal_select2',
    'django.contrib.admin',
from sys import path
path.append('/work/rqiao/HFdata')
from mewp.simulate.wrapper import PairAlgoWrapper
from mewp.simulate.runner import PairRunner
from mewp.math.simple import SimpleMoving
from mewp.util.clock import Clock
from mewp.data.order import OrderType
from mewp.simulate.report import MasterReport
from mewp.simulate.report import Report
from mewp.reader.futuresqlite import SqliteReaderDce
from mewp.util.futures import get_day_db_path
from mewp.util.pair_trade_analysis import TradeAnalysis
from mewp.data.item import Contract
from joblib import Parallel, delayed
import datetime
import numpy as np
import pandas as pd
import itertools
import os

DATA_PATH = '/work/rqiao/HFdata/dockfuture'
market = 'shfe'


def get_contract_list(market, contract):
    return os.listdir(DATA_PATH + '/' + market + '/' + contract)


def get_position(contract, date, DATA_PATH):
    # create a dictionary, where date is the key
    try:
from sys import path
from os.path import dirname, realpath
MY_DIR = dirname(realpath(__file__))
path.append(MY_DIR)
PARENT_DIR = dirname(path[0])
path.append(PARENT_DIR)
import numpy as np
from random import shuffle
from scipy.optimize import minimize
from scipy.optimize import Bounds
from math import exp


class Example:
    def __init__(self, values, weight=1):
        self.attributes = np.array(values[0: len(values)-1])
        self.attributes = np.append(self.attributes, [1])
        # self.attributes = values[0: len(values)-1]
        # self.attributes.append(1)
        self.label = -1 if values[-1] == 0 else 1
        # self.label = values[len(values)-1]
        self.weight = weight


def examples_from_file(filename):
    examples = list()
    with open(filename, 'r') as train_data:
        for line in train_data:
            terms = line.strip().split(',')
            for idx in range(len(terms)):
# -*- coding: utf-8 -*-
import matplotlib
matplotlib.use('Agg')
import os
from sys import path
my_home = os.popen("echo $HOME").readlines()[0][:-1]
path.append('%s/work/fourier_quad/' % my_home)
import tool_box
from Fourier_Quad import Fourier_Quad
from sys import argv
import numpy
from mpi4py import MPI
import time

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

del_bin, bin_num, cho, cho_thre = argv[1], argv[2], argv[3], argv[4]

ts = time.clock()

del_bin = int(del_bin)
cho_thre = float(cho_thre)
bin_num = int(bin_num)

with open("%s/work/envs/envs.dat" % my_home, "r") as f:
    contents = f.readlines()
for path in contents:
    if "cfht_data_path" in path:
        data_path = path.split("=")[1]
# - Configure the framework_path variable.
# Optional:
# - Configure the python path. Example for python27 package in Centos6
#   - export PATH=$PATH:/opt/rh/python27/root/usr/bin
#   - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rh/python27/root/usr/lib64
# - Use the framework sqlite lib
#   - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/var/ossec/api/framework/lib

from sys import path, exit

# cwd = /var/ossec/api/framework/examples
#framework_path = '{0}'.format(path[0][:-9])
# cwd = /var/ossec/api
#framework_path = '{0}/framework'.format(path[0])
# Default path
framework_path = '/var/ossec/api/framework'
path.append(framework_path)

try:
    from wazuh.rule import Rule
except Exception as e:
    print("No module 'wazuh' found.")
    exit()

print("file;id;description;level;status;groups;pci;details")

for rule in Rule.get_rules(status='enabled', limit=0,
                           sort={"fields": ["file"], "order": "asc"})['items']:
    print("{0};{1};{2};{3};{4};{5};{6};{7}".format(rule.file, rule.id,
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <*****@*****.**>
'''

if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))

import hashlib
import hmac
import json
import os
import sys

import requests

from searx import logger
logger = logger.getChild('webapp')

try:
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
this application via the ``WSGI_APPLICATION`` setting.

Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.

"""
import os
from os.path import abspath, dirname
from sys import path

from django.core.wsgi import get_wsgi_application

SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meta_efa.settings.production")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
from sys import path
import os
path.append('./tools')
import sequence_tools
import PDB_tools

## read the parents from the fasta alignment file
par_names, parents, parent_alignment = sequence_tools.read_fasta_alignment('./alignment.fasta')

## do PDB search on each parent, and merge results
print 'searching the PDB for related structures'
pdb_ids = []
for par in parents:
    pdb_ids.extend(PDB_tools.pdb_search(par))
pdb_ids = sorted(set(pdb_ids))

## download each PDB, parse, and save each chain
structure_path = './structures/'
if os.path.isdir(structure_path) == False:
    os.mkdir(structure_path)
for id in pdb_ids:
    PDB_tools.download_parse_save(id, structure_path)
# -*- coding:utf-8 -*-
import cv2
import wenzishibie3 as tf
import numpy as np
from sys import path
path.append('../..')
# from common import extract_mnist


# Initialize the weights of a single convolution kernel
def weight_variable(shape):
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial)


# Initialize the bias of a single convolution kernel
def bias_variable(shape):
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial)


# Convolve the input feature x with kernel W; strides is the stride of the kernel,
# padding controls whether edge pixels are padded so the output keeps the same size
def conv2d(x, W):
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')


# Apply max pooling to x; ksize is the pooling window
def max_pool_2x2(x):
#!/usr/bin/env python3.6
import torch
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from sys import path, argv
path.append('/home/junwang/')
from mps_mgpu_distSGD import MPS, mpsLoad

store_path = '/data/mnist/28_127.5/rand1k/DmaxGrad/trail2/'
resume_from = '/data/mnist/28_127.5/rand1k/DmaxGrad/trail2/L17D1000_'


def grapher(data):
    m, n_space = data.shape
    aaa = int(n_space**0.5)
    assert n_space == aaa**2
    dat = data.reshape(m, aaa, -1)
    n_row = int(m**0.5)
    dat = np.array_split(dat, n_row)
    n_col = dat[0].shape[0]
    fig, axs = plt.subplots(n_row, n_col, figsize=(n_col, n_row))
    if n_row == 1:
        axs = [axs]
    if n_col == 1:
        axs = [axs]
    for i in range(n_row):
        for j in range(dat[i].shape[0]):
            ax = axs[i][j]
            ax.matshow(dat[i][j]**1.4, cmap="hot_r")
# requires jinja2
# Converts the contents of vul_top10.tsv into LaTeX document source for the
# Executive Summary. Only a fragment of the body is emitted, so include it with \input.
from typing import List
from sys import argv
import pandas as pd
from os.path import abspath, dirname, basename, exists, join as pjoin
from os import getcwd
from sys import path
SCRIPT_PATH = dirname(abspath(__file__))
path.append(pjoin(SCRIPT_PATH, '..'))
from search_attack import CAPEC, extract_ids
from jinja2 import FileSystemLoader, Environment

latex_jinja_env = Environment(
    block_start_string='\BLOCK{',
    block_end_string='}',
    variable_start_string='\VAR{',
    variable_end_string='}',
    comment_start_string='\#{',
    comment_end_string='}',
    line_statement_prefix='%%',
    line_comment_prefix='%#',
    trim_blocks=True,
    autoescape=False,
    loader=FileSystemLoader(SCRIPT_PATH)
)