def __init__(self):
    """Locate ``app.json`` in the current or an ancestor directory, build
    the material object for its ``species`` and attach the matching runner.

    Exits via ``exit('app.json lost!')`` when no ``app.json`` is found
    within four directory levels.
    """
    # Search the current directory and up to three parents.  This replaces
    # a copy-pasted if/elif chain (and removes dead Python-2 polling code
    # that lived in the old docstring).
    for prefix in ('', '../', '../../', '../../../'):
        candidate = prefix + 'app.json'
        if exists(candidate):
            opt = loadjson(candidate)
            break
    else:
        exit('app.json lost!')
    species = opt['species']
    s = im('aces.materials.%s' % species)
    m = s.structure(opt)
    self.m = m
    m.home = pwd()
    assert m.home != ''
    # Runner class is chosen dynamically by the material's `runner` name.
    Runner = im('aces.runners.%s' % m.runner)
    self.runner = Runner.runner(m)
def __init_task(
    self,
    conn_type,
    verbose,
    port,
    network_uuid,
):
    """Create and return the communication task for *conn_type*.

    conn_type -- 'ser', 'soc', 'vir', 'can' or 'ble'; when falsy it is
        auto-detected (CAN on a Pi without a network module, else serial).
    verbose -- passed through to the task.
    port -- serial/socket port for 'ser'/'soc'/'vir'.
    network_uuid -- required for 'ble'; registered in self.network_uuids.

    Raises ValueError for a missing network UUID, an unsupported BLE
    platform, or an unknown conn_type.
    """
    if not conn_type:
        is_can = not is_network_module_connected() and is_on_pi()
        conn_type = 'can' if is_can else 'ser'
    if conn_type == 'ser':
        return im('modi.task.ser_task').SerTask(verbose, port)
    elif conn_type == 'soc':
        return im('modi.task.soc_task').SocTask(verbose, port)
    elif conn_type == 'vir':
        return im('modi.task.vir_task').VirTask(verbose, port)
    elif conn_type == 'can':
        return im('modi.task.can_task').CanTask(verbose)
    elif conn_type == 'ble':
        if not network_uuid:
            raise ValueError('Network UUID not specified!')
        self.network_uuids[network_uuid] = self
        mod_path = {
            # BUG FIX: 'win32' previously pointed at the mac backend
            # ('ble_task_mac'); the Windows backend is 'ble_task_win',
            # matching the other variant of this factory in the codebase.
            'win32': 'modi.task.ble_task.ble_task_win',
            'linux': 'modi.task.ble_task.ble_task_rpi',
            'darwin': 'modi.task.ble_task.ble_task_mac',
        }.get(sys.platform)
        # BUG FIX: im(None) would raise an opaque TypeError on an
        # unsupported platform; fail with a clear message instead.
        if mod_path is None:
            raise ValueError(f'BLE not supported on platform: {sys.platform}')
        return im(mod_path).BleTask(verbose, network_uuid)
    else:
        raise ValueError(f'Invalid conn mode: {conn_type}')
def preLead(self):
    """Build the Lead object for this material's lead structure."""
    material = self.m
    lead_mod = im('aces.materials.%s' % material.leads)
    lx, ly, lz = material.leadlat
    lead_struct = lead_mod.structure(
        dict(latx=lx, laty=ly, latz=lz, xp=1, yp=1, zp=1))
    lead_struct.dimension = material.dimension
    lead = im('aces.lead').Lead(lead_struct)
    lead.cores = material.cores
    return lead
def __init_task(conn_mode, verbose, port, uuid):
    """Return the task object matching *conn_mode*.

    When conn_mode is falsy it is auto-detected: CAN on a Raspberry Pi
    without a network module, serial otherwise.
    Raises ValueError for an unknown mode.
    """
    if not conn_mode:
        use_can = not is_network_module_connected() and is_on_pi()
        conn_mode = 'can' if use_can else 'ser'
    if conn_mode == 'ser':
        return im('modi.task.ser_task').SerTask(verbose, port)
    if conn_mode == 'can':
        return im('modi.task.can_task').CanTask(verbose)
    if conn_mode == 'ble':
        return im('modi.task.ble_task').BleTask(verbose, uuid)
    raise ValueError(f'Invalid conn mode {conn_mode}')
def runlead(self):
    """Generate the lead force constants.

    TRICK: if hc1/hc2 are None they are assumed identical to the coupling
    matrix elements between nearest-neighbour principal layers in
    lead1/lead2.

    A supercell of 3 along x is important for the NEGF calculation: with
    only 2, the regular and the periodic force constants are
    indistinguishable, e.g.::

         1 -1
        -1  1

    and the transmission comes out curved.  For a one-layer lead with
    two-layer interaction the lead force constants should be::

         2 -1
        -1  2

    which we obtain from a 3-layer supercell via ``fc[:2n, :2n]``; that
    step is completed in ``rearangefc``.  The 3-layer periodic matrix is::

         2 -1 -1
        -1  2 -1
        -1 -1  2
    """
    m = self.m
    s = im('aces.materials.graphene')
    # 1x1x1 cell, periodic along x only; the real atoms come from POSCAR.
    mm = s.structure(dict(latx=1, laty=1, latz=1, xp=1, yp=0, zp=0))
    mm.atoms = io.read("POSCAR")
    mm.supercell = [3, 1, 1]
    mm.phofc = True
    # Inherit every setting from self.m without clobbering mm's own values.
    mm.__dict__ = dict(m.__dict__, **mm.__dict__)
    PRunner(mm).run()
def __init__(self, model_full_name, force=False, *args, **kwargs):
    """Resolve a dotted model path and cache its SQLAlchemy table metadata.

    model_full_name -- dotted path 'app.module[...].ModelClass'.
    force -- flag stored on the instance for later use.

    Raises Exception when model_full_name is empty or None.
    """
    # Robustness fix: also reject None/other falsy values instead of
    # failing later with AttributeError on .split.
    if not model_full_name:
        raise Exception('Please supply a valid full name of model.')
    names = model_full_name.split('.')
    self.force = force
    self.app_name = names[0]
    self.model_name = names[-1]
    self.module_name = ".".join(names[:-1])
    self.dirname = os.path.dirname(__file__)
    self.app_path = os.path.dirname(os.path.dirname(__file__))
    self.module = im(self.module_name)
    self.model = getattr(self.module, self.model_name)
    self.table = self.model.__table__
    # Audit columns are maintained elsewhere; exclude them.
    self.columns = [
        col for col in self.table.columns
        if col.name not in ('entry_user', 'entry_date')
    ]
    # Idiom fix: `is True` instead of `== True` (PEP 8). SQLAlchemy's
    # autoincrement may also hold the sentinel string "auto"; only an
    # explicit True is selected, exactly as the original comparison did.
    self.autoincrement = [
        col for col in self.table.columns if col.autoincrement is True
    ]
    self.primary_keys = [col for col in self.columns if col.primary_key]
def preCenter(self):
    """Build the Device for this material with identical left/right leads."""
    m = self.m
    device_mod = im('aces.device')
    leadm = self.preLead()
    # BUG FIX: the original called `centerm = self.preCenter()` here —
    # unbounded recursion — and then discarded the result.  The Device is
    # constructed from the material itself with the same lead on both sides.
    mm = device_mod.Device(m, leadm, leadm)
    mm.cores = m.cores
    return mm
def expand_extensions(existing):
    """Instantiate each named lizard extension and insert it into *existing*.

    String entries are resolved to `lizard_ext.lizard<name>` modules;
    non-strings are taken as ready-made extension objects.  An extension
    with an `ordering_index` attribute is inserted at that position,
    otherwise it is appended.  Returns the (mutated) list.
    """
    for name in extension_names:
        if isinstance(name, str):
            ext = im('lizard_ext.lizard' + name.lower()).LizardExtension()
        else:
            ext = name
        position = getattr(ext, "ordering_index", len(existing))
        existing.insert(position, ext)
    return existing
def __init_task(conn_mode, verbose, port, uuid):
    """Return the task object for *conn_mode* ('ser', 'can' or 'ble').

    A falsy conn_mode is auto-detected (CAN on a Pi without a network
    module, serial otherwise).  BLE picks a platform-specific backend.
    Raises ValueError for an unknown mode.
    """
    if not conn_mode:
        use_can = not is_network_module_connected() and is_on_pi()
        conn_mode = 'can' if use_can else 'ser'
    if conn_mode == 'ser':
        return im('modi.task.ser_task').SerTask(verbose, port)
    if conn_mode == 'can':
        return im('modi.task.can_task').CanTask(verbose)
    if conn_mode == 'ble':
        platform_backend = {
            'win32': 'modi.task.ble_task.ble_task_win',
            'linux': 'modi.task.ble_task.ble_task_rpi',
            'darwin': 'modi.task.ble_task.ble_task_mac',
        }
        return im(platform_backend.get(sys.platform)).BleTask(verbose, uuid)
    raise ValueError(f'Invalid conn mode {conn_mode}')
def sgconnect():
    """Connect to the requested signal-generator benchtop driver.

    Reads `sgtype` from the request args, imports the matching driver and
    initiates it, storing both in the module globals.  Returns a JSON
    message of "Success" or "Error".
    """
    global sgbench, SG
    sgtype = request.args.get('sgtype')
    print("user selected %s!" % sgtype)
    try:
        SG = im("pyqum.instrument.benchtop.%s" % sgtype)
        sgbench = SG.Initiate()
        status = "Success"
    # FIX: narrowed from a bare `except:` which also swallowed
    # SystemExit/KeyboardInterrupt.
    except Exception:
        status = "Error"
    return jsonify(message=status)
def get_application():
    """Collect urlpatterns from every installed app and build the web app."""
    patterns = []
    for app_name in conf.INSTALLED_APPS:
        url_module = im(app_name + '.urls')
        patterns += getattr(url_module, 'urlpatterns')
    return web.Application(patterns, debug=conf.DEBUG)
def expand_extensions(existing):
    """Resolve, patch and insert each configured lizard extension.

    String names are imported from `lizard_ext.lizard<name>` and
    instantiated; every extension (named or pre-built) is run through
    `patch_extension` before insertion.  `ordering_index`, when present,
    fixes the insertion position; otherwise the extension is appended.
    Returns the (mutated) list.
    """
    for name in extension_names:
        if isinstance(name, str):
            module = im('lizard_ext.lizard' + name.lower())
            ext = patch_extension(module.LizardExtension())
        else:
            ext = patch_extension(name)
        position = getattr(ext, "ordering_index", len(existing))
        existing.insert(position, ext)
    return existing
def preLead(self):
    """Build the Lead object for this material, inheriting its settings."""
    material = self.m
    lead_mod = im('aces.materials.%s' % material.leads)
    lx, ly, lz = material.leadlat
    lead_struct = lead_mod.structure(
        dict(latx=lx, laty=ly, latz=lz, xp=1, yp=1, zp=1))
    lead_struct.dimension = material.dimension
    import device.lead as lead_pkg
    lead = lead_pkg.Lead(lead_struct)
    # Copy every attribute from the material without clobbering the
    # lead's own attributes (lead values win on key collisions).
    lead.__dict__ = dict(material.__dict__, **lead.__dict__)
    return lead
def naconnect():
    """Connect the requested network analyzer if not already linked.

    Imports and initiates the driver named by the `natype` request arg,
    caching it in the NA/nabench module dicts.  Returns JSON with a status
    message and the list of currently linked analyzers.
    """
    natype = request.args.get('natype')
    if natype not in nabench.keys():
        try:
            NA[natype] = im("pyqum.instrument.benchtop.%s" % natype)
            nabench[natype] = NA[natype].Initiate()
            message = "Successfully connected to %s" % natype
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        except Exception:
            message = "Please check %s's connection configuration or interface" % natype
    else:
        message = "%s is already linked-up" % natype
    linkedna = list(nabench.keys())
    print(linkedna)
    return jsonify(message=message, linkedna=linkedna)
def clear_database(test=0, app_name='all'):
    """Run each selected app's migrations/cls.sql against its database.

    test -- index into the per-database config list (0 = default).
    app_name -- restrict to one app, or 'all' for every installed app.
    """
    for app in conf.INSTALLED_APPS:
        if app_name not in ('all', app):
            continue
        # Execute the app's cls.sql cleanup script.
        module = im(app)
        conn = common.db.DB(**conf.DATABASES[module.database][test])
        migrations_dir = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), app, 'migrations')
        # NOTE(review): credentials are interpolated into a shell command —
        # fine for trusted config, but not for untrusted input.
        shell_cmd = "mysql -h{0} -u{1} -p{2} {3} < {4}".format(
            conn.host, conn.user, conn.password, conn.database,
            os.path.join(migrations_dir, 'cls.sql'))
        shell_cmd += ' 2>&1 | grep -v "Warning: Using a password" '
        os.system(shell_cmd)
def GetConnectedScopes():
    """Probe every scope_comm driver and register each found oscilloscope.

    Fills the module-level SCOPES dict keyed by device MODEL and returns it.
    """
    drivers = [im(scope_comm.__name__ + "." + mod_name)
               for mod_name in scope_comm.__all__]
    for driver in drivers:
        for device in driver.FindSupportedDevices():
            SCOPES[device.MODEL] = driver.Oscilloscope(device)
    return SCOPES
def GetConnectedAWGs():
    """Probe every awg_comm driver and register each found AWG.

    Fills the module-level AWGS dict keyed by device MODEL and returns it.
    """
    drivers = [im(awg_comm.__name__ + "." + mod_name)
               for mod_name in awg_comm.__all__]
    for driver in drivers:
        for device in driver.FindSupportedDevices():
            AWGS[device.MODEL] = driver.AWG(device)
    return AWGS
def sgconnect():
    """Connect the requested signal generator if not already linked.

    Imports and initiates the driver named by the `sgtype` request arg,
    caching it in the SG/sgbench module dicts.  Returns JSON with a status
    message and the list of currently linked generators.
    """
    sgtype = request.args.get('sgtype')
    if sgtype not in sgbench.keys():
        try:
            SG[sgtype] = im("pyqum.instrument.benchtop.%s" % sgtype)
            sgbench[sgtype] = SG[sgtype].Initiate()
            message = "Successfully connected to %s" % sgtype
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        except Exception:
            message = "Please check %s's connection configuration or interface" % sgtype
    else:
        message = "%s is already linked-up" % sgtype
    linkedsg = list(sgbench.keys())
    print(linkedsg)
    return jsonify(message=message, linkedsg=linkedsg)
def __call__(self) -> int:
    """Import and run each configured master module; return the error count."""
    errors = 0
    sys.path.insert(0, Master.configDir)
    for _, targetModule in Master.masterFiles.values():
        # BUG FIX: the original used .joinpath('.py'), which appends '.py'
        # as a path *component* ('.../targetModule/.py') so is_file() was
        # always False and no module ever ran.  The intent is the module
        # file '<targetModule>.py'.
        module_file = Path(
            Master.configDir / self.gvPackage / targetModule).with_suffix('.py')
        if module_file.is_file():
            try:
                pm = im(targetModule, Master.gvPackage)
                pm.configCode()()  # run the code for the target module
            except ImportError:
                Master._L.warning('Unable to import'
                                  f' {self.gvPackage}.{targetModule}')
                errors += 1
            else:
                # BUG FIX: pm was referenced even when the import failed,
                # raising NameError; only shut down modules that loaded.
                errors += pm.shutdown()
    return errors
def run_lizard(folder, concurrency=4, find_duplicates=True):
    """Analyze code complexity (and optionally duplication) under *folder*.

    folder -- root directory to scan.
    concurrency -- number of worker processes for per-file analysis.
    find_duplicates -- also run cross-file duplicate-code detection.

    Returns a dict with 'complexity_by_file' and, when find_duplicates is
    True, 'duplicate_blocks', 'duplicate_blocks_count', 'duplicate_rate'
    and 'unique_rate'.
    """
    duplcode_ext = architect.utils.DuplcodeExtension()
    from importlib import import_module as im
    wordcount_ext = im('lizard_ext.lizardwordcount').LizardExtension()
    extensions = lizard.get_extensions(['mccabe', 'nd']) + [wordcount_ext]
    if find_duplicates:
        extensions.append(duplcode_ext)
    # extensions = [lizard.preprocessing, lizard.line_counter, duplcode_ext]
    files = find_files(folder, for_lizard=True, lang_ext='')
    file_analyzer = lizard.FileAnalyzer(extensions)
    # Fan the per-file analysis out over a process pool; collect results
    # as they complete so the progress bar advances smoothly.
    with cf.ProcessPoolExecutor(max_workers=concurrency) as executor:
        futures = {}
        complexity_by_file = {}
        for file in files:
            futures[executor.submit(file_analyzer, file)] = file
        for future in tqdm(cf.as_completed(futures), total=len(futures),
                           desc='Analyzing complexity in files'):
            file = futures[future]
            logging.debug(f'Analyzed complexity for file: {file}')
            # NOTE(review): peeks at the private Future._exception instead
            # of future.exception(); failed files are skipped with a warning.
            if future._exception:
                logging.warning(f'Failed to analyze complexity for file: {file}')
            else:
                file_info: lizard.FileInformation = future.result()
                complexity_by_file[file] = file_info
    lizard_metrics = {'complexity_by_file': {
        k: file_info_to_dict(v) for k, v in complexity_by_file.items()}}
    if find_duplicates:
        # cross_file_process is a generator; wrap in list() to drive it.
        list(duplcode_ext.cross_file_process(complexity_by_file.values()))
        duplicate_blocks = []
        for duplicate_block in tqdm(duplcode_ext.get_duplicates(),
                                    desc='Analyzing duplicate code:'):
            duplicate_blocks.append(
                [snippet_to_dict(snippet) for snippet in duplicate_block])
        logging.info(f'Total duplicate blocks: {len(duplicate_blocks)}')
        logging.info("Total duplicate rate: %.2f%%" %
                     (duplcode_ext.duplicate_rate() * 100))
        logging.info("Total unique rate: %.2f%%" %
                     (duplcode_ext.unique_rate() * 100))
        lizard_metrics.update({
            'duplicate_blocks': duplicate_blocks,
            'duplicate_blocks_count': len(duplicate_blocks),
            'duplicate_rate': duplcode_ext.duplicate_rate(),
            'unique_rate': duplcode_ext.unique_rate()
        })
    return lizard_metrics
def __init__(self, func_name, value, tests,
             key=lambda a, b: a == b, notall=1 / 2, time=10):
    """Store grading configuration and pre-run the reference solution.

    func_name -- name of the function under test.
    value -- points the exercise is worth.
    tests -- list of test cases; their count is cached in self.amount.
    key -- comparator for expected vs. actual output.
    notall -- partial-credit fraction.
    time -- per-test time limit.
    """
    self.func = func_name
    self.value = value
    self.tests = tests
    self.key = key
    self.notall = notall
    self.time = time
    self.amount = len(tests)
    # Reference implementation lives in the solved/ package; `file` is a
    # module-level name — presumably the assignment file stem (confirm).
    self.solved = self.run(im(f"solved.{file}"))
    self.mutable = False
def __init__(self, names):
    """Take a raw list of dotted middleware names and store the call chains.

    Each name 'pkg.mod.Class' is resolved to its class; the classes are
    composed (in reverse list order) into two nested call chains exposed
    as self.run_chain and self.end_chain.

    Raises MiddlewareLoadError when a module cannot be imported or the
    class is missing from its module.
    """
    names.reverse()
    mws = []
    splitted = [n.rpartition('.') for n in names]
    for mod_name, _, cls_name in splitted:
        try:
            # NOTE(review): the .get() default is evaluated eagerly, so
            # im(mod_name) runs even when the module is already cached;
            # harmless (import_module returns the cached module) but the
            # sys.modules lookup is effectively redundant.
            mw_mod = sys.modules.get(mod_name, im(mod_name))
            mws.append(getattr(mw_mod, cls_name))
        except ImportError:
            raise MiddlewareLoadError('can not get module %s' % mod_name)
        except AttributeError:
            raise MiddlewareLoadError('No such class %s in %s module' % (cls_name, mod_name))
    # Fold the middleware classes into a single callable: each class wraps
    # the chain built so far; the innermost callable is a no-op passthrough.
    chain = lambda xs, mth: reduce(lambda ca, x: mth(x(ca)), xs, lambda cb, _: cb)
    self.run_chain = chain(mws, lambda x: x.run)
    self.end_chain = chain(mws, lambda x: x.end)
def get_search_for(key, value):
    """Translate initiative-type name(s) into a search filter.

    With several values, returns a Mongo-style `$in` filter over the
    matching type ids (unknown names are skipped).  With a single value,
    delegates to the country-specific InitiativeTypeManager.
    """
    if len(value) > 1:
        type_ids = []
        for raw in value:
            clean_name = raw.replace("'", "")
            try:
                type_ids.append(
                    InitiativeType.objects.get(name=clean_name)['id'])
            except Exception:
                pass  # best effort: ignore names with no matching type
        return {'initiative_type': {'$in': type_ids}}
    clean_name = value[0].replace("'", "")
    try:
        code = InitiativeType.objects.get(name=clean_name)['id']
    except Exception:
        code = ''
    manager_mod = im('tipi_backend.api.managers.{}.initiative_type'.format(
        Config.COUNTRY))
    return manager_mod.InitiativeTypeManager().get_search_for(code)
def get_grade(self):
    """Grade every repository module against self.tests.

    For each name in the module-level `names` list (of module-level
    `length`), imports `broker.repositories.<name>.<file>` and runs each
    test, storing per-student answers and grades in self.answers /
    self.grades.  Any failure marks every test as unimportable with a
    grade of 0.
    """
    tests = copy.deepcopy(self.tests)
    for i in range(length):
        name = names[i]
        try:
            module = im("broker.repositories.{}.{}".format(name, file))
            answer = []
            grade = []
            for test in self.tests:
                output = test.run(module)
                answer.append(output[1])
                grade.append(output[0])
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit still propagate; any other failure (import error or
        # a crashing test) zeroes this student's grades.
        except Exception:
            answer = [["unable to import" for i in t.tests] for t in self.tests]
            grade = [0] * len(tests)
        self.answers[i] = answer
        self.grades[i] = grade
def migrate(mode=0):
    """Apply pending SQL migrations for every installed app.

    mode -- index into each app's database config list (0 = default).

    For each app: always runs migrations/0000.sql (base schema), then runs
    every not-yet-applied NNNN.sql in order, recording each in the
    `migration` table first.
    """
    # NOTE(review): should be a raw string r'^[0-9]{4}\.sql$' to avoid the
    # invalid-escape-sequence warning; behavior is unchanged either way.
    regex = re.compile('^[0-9]{4}\.sql$')
    for app in conf.INSTALLED_APPS:
        module = im(app)
        database = conf.DATABASES[module.database][mode]
        conn = common.db.DB(**database)
        # Unconditionally run 0000.sql.
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            app, 'migrations')
        cmd = "mysql -h{0} -u{1} -p{2} {3} < {4}".format(
            conn.host, conn.user, conn.password, conn.database,
            os.path.join(path, '0000.sql'))
        cmd += ' 2>&1 | grep -v "Warning: Using a password" '
        os.system(cmd)
        # Check every file named with four digits; if it has not been run
        # yet, run it and update the version table.
        files = os.listdir(path)
        files.sort()
        for file in files:
            if regex.match(file) and file != '0000.sql':
                # NOTE(review): SQL is built via string formatting; safe
                # only because app/version come from trusted local files.
                sql = "select app, version from migration where app = '{0}' and version = '{1}';".format(
                    app, file[0:4])
                result = conn.session.execute(sql)
                if not result.first():
                    cmd_sql = "insert into migration (app, version) values ('{0}', '{1}');".format(
                        app, file[0:4])
                    conn.session.execute(cmd_sql)
                    conn.session.commit()
                    cmd_migrate = "mysql -h{0} -u{1} -p{2} {3} < {4}".format(
                        conn.host, conn.user, conn.password, conn.database,
                        os.path.join(path, file))
                    cmd_migrate += ' 2>&1 | grep -v "Warning: Using a password" '
                    print(conn.database, os.path.join(path, file))
                    os.system(cmd_migrate)
        conn.close()
def __init__(self):
    """Instantiate the country-specific member and initiative extractors."""
    extractor_pkg = 'extractors.{}'.format(MODULE_EXTRACTOR)
    self.members_extractor = im(extractor_pkg + '.members').MembersExtractor()
    self.initiatives_extractor = im(
        extractor_pkg + '.initiatives').InitiativesExtractor()
    super().__init__()
# Display commands map to a text file that is printed verbatim.
cmd_disp = {
    'about': './about.txt',
    'help': './help.txt',
}
cmd_disp_set = cmd_disp.keys()
# Executable commands map to a module whose run() implements them.
cmd_exec = {
    'fetch proxy': 'modules.proxy_fetch',
    'start bruteforcing': 'modules.bruteforce'
}
cmd_exec_set = cmd_exec.keys()
# Interactive prompt loop; only 'exit'/'quit' leaves it.
while (True):
    cmd = input('drax> ')
    if cmd == 'exit' or cmd == 'quit':
        print("Exiting Drax...")
        sys.exit()
    if cmd in cmd_disp_set:
        try:
            # NOTE(review): file handle is never closed; fine for a short
            # interactive tool, but a `with open(...)` would be cleaner.
            print(open(cmd_disp[cmd], 'r').read())
        except Exception as e:
            print(f"Error occured while trying to execute command {cmd}. Error message: {e}")
        continue
    if cmd in cmd_exec_set:
        try:
            # exec(open(cmd_exec[cmd],'r').read())
            # Import the module and invoke its run() entry point.
            im(cmd_exec[cmd]).run()
        except Exception as e:
            print(f"Error occured while trying to execute command {cmd}. Error message: {e}")
        continue
import random
import shlex
# from queue import Queue as q
from hashlib import blake2b
from itertools import chain
import json
import re
import os
from datetime import datetime, timedelta
from operator import itemgetter
# import additional constants
from importlib import import_module as im

import vaivora_modules
# Eagerly import every vaivora module so its import-time side effects run.
for mod in vaivora_modules.modules:
    im(mod)

# basic declarations and initializations
# NOTE(review): `discord` must be imported elsewhere in this file — confirm.
client = discord.Client()
vdbs = dict()  # per-server database handles (presumably keyed by server id)
vdst = dict()  # per-server settings (presumably keyed by server id)

### BGN CONST
#### File related constants
wings = "wings-"
txt = ".txt"
log = ".log"
tmp = ".tmp"
from importlib import import_module as im

if __name__ == "__main__":
    # Uncomment the tool you are interested in (bare string kept as-is;
    # it is a no-op statement, not a docstring).
    """ Uncomment the tool you are interested in """
    # Collect 400 raw samples into the "neighborhood_stuff" dataset.
    im("lib.unreal_collector").UnrealCollector("neighborhood_stuff").collect(
        count=400, preprocess=False)
    # im("toolbox.build_testing_dataset").build()
    # im("toolbox.build_default_dataset").build()
    # im("toolbox.find_class").find_class(rgba="(R=0,G=78,B=207,A=255)")
def get_initiative_status():
    """Return the status values from the country-specific manager."""
    manager_path = 'tipi_backend.api.managers.{}.initiative_status'.format(
        Config.COUNTRY)
    return im(manager_path).InitiativeStatusManager().get_values()
def install_module(module, install=True, conda_or_pip="pip",
                   print_terminal=True, verbose=False):
    """Check for (and optionally install) a module from a notebook/script.

    Improves reproducibility of scripting/notebook usage, where installed
    modules may differ between users.

    Arguments:
    module -- string. Name of the module to check/install.
    install -- bool. Install the module (True) or only check (False).
    conda_or_pip -- string. 'conda' or 'pip'; used only when installing.
    print_terminal -- bool. Print the terminal output from the install
        step (only honoured when verbose is True).
    verbose -- bool. Print progress/success messages.

    Returns:
    True if the module is importable, False otherwise.
    """
    # Local imports keep this helper self-contained for notebook use.
    from importlib import import_module as im
    from subprocess import getstatusoutput as sub
    import logging

    # State machine: 0 = importable, 1 = missing, 2 = install failed,
    # 3 = installed but still not importable.
    state = 0
    # Default terminal output, in case no install step produces any.
    out = {1: "No terminal output to display"}
    # The "other" installer, used in error messages.
    if conda_or_pip == "conda":
        inv_conda_or_pip = "pip"
    elif conda_or_pip == "pip":
        inv_conda_or_pip = "conda"
    # Probe whether the module is already importable.
    try:
        im(module)
    except ImportError:
        state = 1
    if state == 1 and install:
        assert conda_or_pip in [
            "conda", "pip"
        ], 'the attribute conda_or_pip needs to be one of "conda" or "pip"'
        if verbose:
            print(module, "not installed")
            print("automatically installing", module, "using", conda_or_pip,
                  "...please wait.")
        # NOTE(security): getstatusoutput runs through the shell; `module`
        # must come from trusted code, never user input.
        if conda_or_pip == "conda":
            out = sub("conda install " + module + " -y")
        elif conda_or_pip == "pip":
            out = sub("pip install " + module)
        # Non-zero exit means the installer itself failed.
        state = 2 if out[0] != 0 else 0
        # Even after a clean install, verify the module actually imports.
        try:
            im(module)
        except ImportError:
            state = 3
    # Messaging based on final state (typo fix: "initial" comment above).
    if state == 0:
        if verbose:
            print(module + " is installed and available for import.")
    elif state == 1:
        logging.error(
            module + " not installed. Please install using pip or conda before importing. \n"
            + "(Run this function again with the 'conda_or_pip' attribute set to either 'conda' or 'pip' and 'install' set to 'True')."
        )
    elif state == 2:
        logging.error(module + " could not be installed using " +
                      conda_or_pip + ". Try using " + inv_conda_or_pip +
                      ". If both fail, check module spelling.")
    elif state == 3:
        logging.error(module + " is installed but not importable. Install using "
                      + inv_conda_or_pip + " and try again.")
    # Print terminal output when requested.
    if verbose and print_terminal:
        print("-" * 18 + "\n Terminal output \n" + "-" * 18)
        print(out[1])
    # Exit status: importable or not.
    return state == 0
def __open_conn():
    """Open a CAN task on a Raspberry Pi, a serial task otherwise."""
    if is_on_pi():
        return im('modi.task.can_task').CanTask()
    return im('modi.task.ser_task').SerTask()
#!/usr/bin/env python3 from flask import Flask,redirect,url_for from flask_bootstrap import Bootstrap import sys,os from flask_wtf.csrf import CSRFProtect from flask_sqlalchemy_session import flask_scoped_session from importlib import import_module as im app = Flask(__name__, static_url_path='/static') app.add_template_global(name='app', f=app) app.config['TEMPLATES_AUTO_RELOAD']=True bps = "root" for bp in bps.split(): m = im("views." + bp) app.register_blueprint(m.bp) Bootstrap(app) if __name__ == "__main__": print("Running MAIN!!!") app.run( host='127.0.0.1', port=8800, threaded=True )