def loadPlugins():
    """Discover plugin classes in every known python script module.

    Scans the modules reported by getPythonScriptModules(), loads each one
    and collects the 'Scrape' plugin classes found by findPluginClass().

    :return: dict mapping plugin interface name -> plugin class
    :raises ValueError: when two plugins claim the same interface name
    """
    modules = getPythonScriptModules()
    print("Found python modules to scan for relink lookup %s" % len(modules))
    ret = {}
    for fPath, modName in modules:
        try:
            loader = SourceFileLoader(modName, fPath)
            # NOTE: load_module() is deprecated; kept here for behaviour parity
            # with the rest of this codebase.
            mod = loader.load_module()
            plugClasses = findPluginClass(mod, 'Scrape')
            for key, pClass in plugClasses:
                if key in ret:
                    raise ValueError("Two plugins providing an interface with the same name? Name: '%s'" % key)
                ret[key] = pClass
        except AttributeError:
            # Module loaded but lacks the expected plugin attributes.
            print("Attribute error!", modName)
            traceback.print_exc()
        except ImportError:
            print("Import error!", modName)
            traceback.print_exc()
    return ret
def load_module(module_name, path):
    """Load an arbitrary Python source file as a module.

    :param module_name: name to register the new module under
    :param path: filesystem path of the ``.py`` source file
    :return: the freshly executed module object
    """
    source_loader = SourceFileLoader(module_name, path)
    module_spec = spec_from_loader(source_loader.name, source_loader)
    loaded = module_from_spec(module_spec)
    source_loader.exec_module(loaded)
    return loaded
def load_race(name):
    """Return the race class for *name*.

    Search order: the bundled ``dndraces`` library, then ``$HOME`` race files,
    then the ``/etc`` race files.

    :param name: race name, e.g. ``"half-orc"`` (case-insensitive)
    :raises FileNotFoundError: when no matching race file/class exists
    """
    ensure_directories()
    base_name = name.lower()
    file_name = base_name.replace('-', '_') + ".py"
    # "half-orc" -> "HalfOrc"
    class_name = ''.join(part.capitalize() for part in base_name.split('-'))
    try:
        # from this library
        Race = getattr(__import__("dndraces"), class_name)
    except AttributeError:
        # Don't make __pycache__ file in the races directories
        sys.dont_write_bytecode = True
        try:
            # from $HOME
            loader = SourceFileLoader(class_name, os.path.join(HOME_RACES, file_name))
            races = loader.load_module()
            Race = getattr(races, class_name)
        except FileNotFoundError:
            try:
                # from /etc
                loader = SourceFileLoader(class_name, os.path.join(ETC_RACES, file_name))
                races = loader.load_module()
                Race = getattr(races, class_name)
            except FileNotFoundError:
                msg = "Can not find class for " + name + "\n"
                msg += 'Looking for class "' + class_name + '" in ' + file_name
                # BUG FIX: the assembled message was previously discarded by
                # raising a bare FileNotFoundError() with no arguments.
                raise FileNotFoundError(msg)
    return Race
def test_gantry_resampling(self):
    """Run the dicom2nifti CLI script twice (short and long option spellings)
    on a gantry-tilted dataset and check the resampled NIfTI file exists.
    """
    tmp_output_dir = tempfile.mkdtemp()
    # The script lives at <repo>/scripts/dicom2nifti, two directories above this test file.
    script_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                               'scripts',
                               'dicom2nifti')
    assert os.path.isfile(script_file)
    try:
        # The script has no .py suffix, so it must be loaded by explicit path.
        if sys.version_info > (3, 0):
            from importlib.machinery import SourceFileLoader
            dicom2nifti_module = SourceFileLoader("dicom2nifti_script", script_file).load_module()
        else:
            import imp
            dicom2nifti_module = imp.load_source('dicom2nifti_script', script_file)
        # Short options: -G gantry-tilt correction, -r resample, order 1, padding -1000.
        dicom2nifti_module.main(['-G', '-r', '-o', '1', '-p', '-1000',
                                 test_data.FAILING_ORHTOGONAL,
                                 tmp_output_dir])
        assert os.path.isfile(os.path.join(tmp_output_dir, "4_dicom2nifti.nii.gz"))
        # Same conversion again, exercising the long option spellings.
        dicom2nifti_module.main(['--allow-gantry-tilting', '--resample',
                                 '--resample-order', '1',
                                 '--resample-padding', '-1000',
                                 test_data.FAILING_ORHTOGONAL,
                                 tmp_output_dir])
        assert os.path.isfile(os.path.join(tmp_output_dir, "4_dicom2nifti.nii.gz"))
    finally:
        shutil.rmtree(tmp_output_dir)
def send_alert(alert_id, message, log_errors=True):
    """Dispatch *message* through the alert module configured for *alert_id*.

    Loads the module's options from the database, verifies the module is one
    of the known alert modules, imports it from ``module_dir`` and calls its
    ``handle_alert``.

    :param alert_id: primary key of the Alert row to use
    :param message: payload passed to the module's handle_alert()
    :param log_errors: when True, failures are logged; when False they are
        (re-)raised as AlertExecutionError
    """
    alert = Alert.query.get(alert_id)
    module_config = {}
    for option in alert.module_options:
        module_config[option.key] = option.value
    try:
        module_found = False
        for module in get_alert_modules():
            if module["id"] == alert.module:
                module_found = True
                break  # FIX: no need to keep scanning once the module is found
        if not module_found:
            raise AlertExecutionError("Module not found!")
        module = SourceFileLoader(
            "module.{}".format(alert.module),
            os.path.join(module_dir, "{}.py".format(alert.module))
        ).load_module()
        # The module reads its configuration from a module-level attribute.
        module.config = module_config
        module.handle_alert(message)
    except AlertExecutionError as ex:
        if log_errors:
            log_message("Alerting - {}".format(alert.module), str(ex))
        else:
            raise
    except Exception as ex:
        if log_errors:
            log_message(
                "Alerting - {}".format(alert.module),
                "Could not read "
                "data from alert module: {}".format(alert.module),
            )
        else:
            raise AlertExecutionError(str(ex))
def test_unlock(self):
    """For every sample memory dump, find the unlock target matching the
    sample's OS signature and verify unlock.run() patches the expected page.
    """
    for sample in self.samples:
        cfg.startaddress = 0x00000000
        mod_name = sample[0]
        filepath = sample[1]
        try:
            module = SourceFileLoader(mod_name, filepath).load_module()
        except ImportError:
            # BUG FIX: the old handler was `assert(module)` with `module`
            # unbound, which raised NameError instead of failing the test.
            self.fail('Could not import sample module: {}'.format(filepath))
        self.opts.interface = 'file'
        self.opts.filename = path.join(
            path.dirname(__file__), 'samples/') + mod_name + '.bin'
        foundtarget = False
        for i, target in enumerate(unlock.targets, 1):
            if target.signatures[0].os == module.OS:
                foundtarget = [target]
                self.opts.target_number = i
        self.assertTrue(foundtarget)
        self.assertIsNotNone(self.opts.target_number)
        module.IS_INTRUSIVE = True
        sys.stdout = StringIO()  # Suppress output
        device, memsize = interface.initialize(self.opts, module)
        memspace = memory.MemorySpace(device, memsize)
        address, page = unlock.run(self.opts, memspace)
        sys.stdout = sys.__stdout__  # Restore output
        # Kept disabled, as in the original:
        # self.assertEqual(address & 0x00000fff, module.offset)
        self.assertEqual(page, module.page)
def get_info_from_module(target):
    """Load the module/package, get its docstring and __version__

    :return: dict with 'summary' (first docstring line) and 'version'
    :raises NoDocstringError: module has no docstring
    :raises NoVersionError: module has no __version__
    :raises InvalidVersion: __version__ is not a digit-leading string
    """
    log.debug("Loading module %s", target.file)
    loader = SourceFileLoader(target.name, str(target.file))
    with _module_load_ctx():
        module = loader.load_module()

    docstring = module.__dict__.get('__doc__', None)
    if not docstring:
        raise NoDocstringError('Cannot package module without docstring. '
                'Please add a docstring to your module.')

    version = module.__dict__.get('__version__', None)
    if not version:
        raise NoVersionError('Cannot package module without a version string. '
                'Please define a `__version__="x.y.z"` in your module.')
    if not isinstance(version, str):
        raise InvalidVersion('__version__ must be a string, not {}.'
                .format(type(version)))
    if not version[0].isdigit():
        raise InvalidVersion('__version__ must start with a number. It is {!r}.'
                .format(version))

    summary = docstring.lstrip().splitlines()[0]
    return {'summary': summary, 'version': module.__version__}
def load(path):
    """Build the struct class described by the TSV file at *path*, then apply
    an optional ``make_struct`` hook found in a sibling ``<name>.py`` module.

    The hook must return a subclass of the generated base class; anything
    else is an error.
    """
    path = pathlib.Path(path)  # I hate lines like this so much.
    name = path.stem
    logging.debug("Loading '%s' definition from %s", name, path)
    with path.open() as f:
        specs = list(csv.DictReader(f, delimiter="\t"))
    base = define_struct(name, specs)

    modpath = path.parent.joinpath(name + '.py')
    logging.debug("Looking for make_struct hook in %s", modpath)
    try:
        module = SourceFileLoader(name, str(modpath)).load_module()
    except FileNotFoundError:
        logging.debug("Nothing at %s, skipping", modpath)
        return base

    if not hasattr(module, "make_struct"):
        logging.debug("%s doesn't contain make_struct, skipping", modpath)
        return base

    logging.debug("Processing hook %s.make_struct for '%s' in %s",
                  module.__name__, name, path)
    newbase = module.make_struct(base)
    if not isinstance(newbase, type) or not issubclass(newbase, base):
        msg = ("{}.make_struct() in {} failed to return subclass of {}; "
               "actually returned: {}")
        raise Exception(msg.format(module.__name__, path, base.__name__, newbase))
    return newbase
class JudgeTest(ParametrizedTestCase):
    """Parametrized judge test: loads solutions/<param>.py and checks its
    solution() function against fixed input/output pairs.

    (Original note, translated from Bulgarian): tests should carry a suffix
    so it is clear which test case blew up.
    """

    # Module object loaded from solutions/<param>.py; populated in setUp().
    solution_module = None

    def setUp(self):
        # self.param (supplied by ParametrizedTestCase) names the solution file.
        self.solution_module = SourceFileLoader(
            "module.name",
            os.path.join(os.path.dirname(__file__), 'solutions/', self.param + '.py')
        ).load_module()

    @timeout(0.5)
    def test_something_0(self):
        self.assertEqual(4, self.solution_module.solution(8))

    @timeout(1)
    def test_something_1(self):
        self.assertEqual(2, self.solution_module.solution(4))

    @timeout(1)
    def test_something_2(self):
        self.assertEqual(6, self.solution_module.solution(12))

    @timeout(1)
    def test_something_3(self):
        self.assertEqual(5, self.solution_module.solution(10))
def run_example(self, theInput, theOutput):
    """Run the solution program with *theInput* on stdin and assert that its
    stdout equals *theOutput*, printing wall time and memory usage.
    """
    # Import program (deprecated in 3.4, no other way at the moment)
    from importlib.machinery import SourceFileLoader
    solution = SourceFileLoader("solution", self.programPath).load_module()
    # Feed the input
    with PatchStd(theInput) as std:
        # Start time counter
        startTime = time.time()
        # Run the program
        solution.main()
        # Get end time
        endTime = time.time() - startTime
        # Get memory (include current tests ~14MB but more or less is that)
        mem = memory_usage(max_usage=True)
        # Check output
        actual_output = std.getStdOut().getvalue()
        self.assertEqual(actual_output, theOutput)
        # Print time (not do before because output is not yet retrieved)
        std.restore()
        print("\tTime: %.3f sec" % endTime)
        print("\tMemory: %.3f MB" % mem)
        # Show errors if any
        errors = std.getStdErr().getvalue()
        if errors != '':
            print("\t" + errors)
def load_config(self, config_file): self.test_config_file_read(config_file) from importlib.machinery import SourceFileLoader configuration = SourceFileLoader("configuration", config_file).load_module() configuration._source = config_file return configuration
def load_settings(cfg_path=None):
    """Populate the module-global ``settings`` namespace.

    Starts from the built-in ``_config`` defaults, overlays every public
    (non-underscore) name found in the config module at *cfg_path*, then
    normalises the path entries.

    :param cfg_path: path of the config module; defaults to _config['cfg_path']
    :return: None (result is stored in the global ``settings``)
    """
    global settings
    cfg_path = cfg_path or _config['cfg_path']
    cfg_d = _config.copy()
    if os.path.exists(cfg_path):
        sfl = SourceFileLoader('upkg_cfg', cfg_path)
        cfg_mod = sfl.load_module()
        # Copy every public attribute of the config module over the defaults.
        for m in inspect.getmembers(cfg_mod):
            if m[0][0] != '_':
                cfg_d[m[0]] = m[1]
        # end for m in inspect.getme
    # Make the paths absolute.
    cfg_d["cfg_path"] = _clean_path(cfg_d["cfg_path"])
    cfg_d["upkg_destdir"] = _clean_path(cfg_d["upkg_destdir"])
    settings = Namespace(**cfg_d)
def load_module_from_file(name, path):
    """Load the source file at *path* as a module named *name*.

    :return: ``(module, None)`` on success, ``(None, error)`` on failure
    """
    try:
        # BUG FIX: SourceFileLoader(...) returns a loader object, never a
        # ModuleType, so the old `isinstance(loaded, types.ModuleType)`
        # short-circuit was dead code and has been removed.
        return SourceFileLoader(name, path).load_module(), None
    except Exception as error:
        return None, error
def return_module(self, name='test'):
    """Import the generated module <out_dir>/<name>.py and return it.

    The module is registered under the current test method's name so each
    test gets its own distinct module object.
    """
    file_name = path.join(self.out_dir.name, name + '.py')
    # Make the output directory importable only for the duration of the load.
    sys_path.append(self.out_dir.name)
    loader = SourceFileLoader(self._testMethodName, file_name)
    module = loader.load_module(self._testMethodName)
    sys_path.remove(self.out_dir.name)
    return module
def load_module_by_path(fpath, module_name=None):
    """Import the module or package at *fpath*.

    :param fpath: path of a .py file, or of a package directory (in which
        case its __init__.py is loaded)
    :param module_name: name to register the module under; a random one is
        generated when omitted
    :return: the loaded module object
    """
    if module_name is None:
        module_name = str(uuid())
    real_path = os.path.realpath(fpath)
    if os.path.isdir(real_path):
        real_path = os.path.join(real_path, '__init__.py')
    return SourceFileLoader(module_name, real_path).load_module()
def __init__(self, module_path, module_name):
    """Load the middleware module at *module_path* and register its handlers.

    Any load/registration failure is logged via tyk.log_error; the object is
    still constructed (with an empty handler map) so the gateway keeps running.
    """
    tyk.log( "Loading module: '{0}'".format(module_name), "info")
    self.module_path = module_path
    self.handlers = {}
    try:
        source = SourceFileLoader(module_name, self.module_path)
        self.module = source.load_module()
        self.register_handlers()
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; catching Exception preserves the intended
        # "log and continue" behaviour for real errors.
        tyk.log_error( "Middleware initialization error:" )
def load_doc_manager(path):
    """Import the doc-manager module located at *path*.

    Uses importlib on Python 3; falls back to ``imp.load_source`` when the
    importlib machinery is unavailable (Python 2).
    """
    name, _ = os.path.splitext(os.path.basename(path))
    try:
        from importlib.machinery import SourceFileLoader
        module = SourceFileLoader(name, path).load_module(name)
    except ImportError:
        # Python 2 fallback.
        module = imp.load_source(name, path)
    return module
def list_filaments(cls):
    """Return a mapping of filament name -> module docstring for every
    python file found in FILAMENTS_DIR.
    """
    filaments = {}
    for entry in os.listdir(FILAMENTS_DIR):
        full_path = os.path.join(FILAMENTS_DIR, entry)
        if not full_path.endswith('.py'):
            continue
        name = os.path.basename(entry)[:-3]
        module = SourceFileLoader(name, full_path).load_module()
        filaments[name] = inspect.getdoc(module)
    return filaments
def test_02(self):
    """
    Test Case 02: Try importing the magrathea script (Python>=3.4)

    Test is passed if a RuntimeError exception is raised.
    """
    # The script is expected to refuse being imported as a module by raising
    # RuntimeError (see docstring); loading it here must therefore raise.
    path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'scripts', 'magrathea.py'))
    with self.assertRaises(RuntimeError):
        loader = SourceFileLoader('magrathea', path)
        __ = loader.load_module()
def list_filaments(cls):
    """Collect docstrings for all filament modules under FILAMENTS_DIR.

    :return: dict mapping filament name to its module docstring
    """
    Filament._assert_root_dir()
    docs = {}
    candidates = (os.path.join(FILAMENTS_DIR, p)
                  for p in os.listdir(FILAMENTS_DIR)
                  if p.endswith('.py'))
    for candidate in candidates:
        name = os.path.basename(candidate)[:-3]
        module = SourceFileLoader(name, candidate).load_module()
        docs[name] = inspect.getdoc(module)
    return docs
def _load(name):
    '''Import a module by name.

    Looks for <prefix>/<name>.py; returns the module on success, or None
    (printing the error only at verbosity level 2) on failure.
    '''
    try:
        plugin_file = path.join(prefix, name + '.py')
        module = SourceFileLoader(name, plugin_file).load_module()
        if args.verbose == 2:
            print('INFO: load [PASS]')
        return module
    except Exception as e:
        if args.verbose == 2:
            print('e:', e)
def load(self, default=False):
    """Load configuration from ``self.path``, or the built-in defaults.

    :param default: when True, skip the file and load defaults directly.
    Any failure reading the config file is logged and the defaults are
    loaded instead.
    """
    if default:
        self._load_default()
    else:
        try:
            self._ensure_config_exists(self.path)
            loader = SourceFileLoader("config", self.path)
            self.data = loader.load_module()
        except Exception:
            # FIX: was a bare `except:`, which also trapped SystemExit and
            # KeyboardInterrupt; Exception keeps the fallback behaviour for
            # genuine configuration errors only.
            logging.exception("Configuration error, falling back to "
                              "default")
            self._load_default()
def test_all_imports_py():
    """
    Tests: test_all_imports_py: for syntax correctness and internal imports
    """
    print('::: TEST: test_all_imports_py()')
    # Gather every .py file under the package root, then import each one;
    # a SyntaxError or broken internal import will fail the test.
    module_paths = []
    for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
        module_paths.extend(glob(root + '/*.py'))
    for module_path in module_paths:
        stem = path_splitext(path_basename(module_path))[0]
        SourceFileLoader(stem, module_path).load_module(stem)
def loadPlugin(self, pluginFilepath):
    """Load one plugin file and append its module to self.plugins.

    Paths are expected to look like ``plugins/<name>.py``; names not listed
    in self.pluginsToLoad are silently skipped.
    """
    pluginName = pluginFilepath[len('plugins/'):-len('.py')]
    if pluginName not in self.pluginsToLoad:
        return
    try:
        module = SourceFileLoader('plugin_' + pluginName, pluginFilepath).load_module()
        self.plugins.append(module)
        print('Plugin "{}" loaded.'.format(pluginFilepath))
    except Exception as e:
        print('Error loading plugin "{}".'.format(pluginFilepath))
        traceback.print_exc()
def load_module(name, filename):
    """Load a module into name given its filename.

    Uses importlib on Python >= 3.5 and falls back to the deprecated ``imp``
    module (with the RuntimeWarning silenced, as required for Python 2.7)
    on older interpreters.
    """
    if sys.version_info >= (3, 5):
        from importlib.machinery import SourceFileLoader
        return SourceFileLoader(name, filename).load_module()
    import imp
    import warnings
    with warnings.catch_warnings():
        # Required for Python 2.7
        warnings.simplefilter("ignore", RuntimeWarning)
        return imp.load_source(name, filename)
def load_module(module_name, filename):
    """Load *filename* as a module named *module_name*.

    Prefers the spec-based importlib API (3.5+), falls back to
    SourceFileLoader on 3.3/3.4, and refuses anything older.
    """
    if sys.version_info >= (3, 5):
        import importlib.util
        spec = importlib.util.spec_from_file_location(module_name, filename)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
    if sys.version_info >= (3, 3):
        from importlib.machinery import SourceFileLoader
        return SourceFileLoader(module_name, filename).load_module()
    raise Exception("What version of python are you running?")
def _import_plugin_from_path(self, name, path):
    """Import a plugin as ``hyperspyui.plugins.<name>`` from *path*,
    issuing a warning (instead of raising) when anything goes wrong.
    """
    try:
        mname = "hyperspyui.plugins." + name
        if sys.version_info >= (3, 5):
            import importlib.util
            spec = importlib.util.spec_from_file_location(mname, path)
            plugin_module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(plugin_module)
        else:
            from importlib.machinery import SourceFileLoader
            SourceFileLoader(mname, path).load_module()
    except Exception:
        self.warn("import", path)
def load_module(name, path):
    """
    Load the migration file as a python module.

    :param name: Name of module
    :type name: str
    :param path: Path to module
    :type path: pathlib.Path
    """
    # TODO: change this to not use deprecated loader.load_module
    # after dropping Python 3.4 support
    return SourceFileLoader(name, str(path)).load_module(name)
def load_configuration(conf):
    """Load a configuration module and return its get_config() result.

    :param conf: path of the configuration .py file, or None for an empty
        configuration
    :return: dict produced by the module's get_config(); {} when conf is None
    Exits the process when the file does not exist.
    """
    if conf is None:
        return {}
    conf = os.path.abspath(conf)
    if not os.path.exists(conf):
        print("Configuration file not present:", conf)
        sys.exit()
    print("Configuration path", conf)
    module = SourceFileLoader("module.name", conf).load_module()
    return module.get_config()
def find_and_add_available_backends(self):
    """Find and add all available backends.

    Scans every ``*.py`` file under BACKENDS_PATH, imports it, and registers
    the first Backend instance found in each module under its casefolded name.

    :raise ValueError: Raised if no backend or multiple backends are
        found within a single file.
    """
    # NOTE(review): the for/else below raises only when NO Backend is found;
    # a file containing multiple Backends is not detected (the first wins),
    # contrary to what the docstring claims — confirm intended behaviour.
    for backend in BACKENDS_PATH.glob("*.py"):
        # .basename()/.splitext() suggest the path.py API rather than
        # pathlib — TODO confirm against the module's imports.
        name = "auth.backend." + backend.basename().splitext()[0]
        loader = SourceFileLoader(name, str(backend))
        module = loader.load_module(name)
        for var in vars(module).values():
            if isinstance(var, Backend):
                self[var.name.casefold()] = var
                break
        else:
            raise ValueError('Found no backend or multiple backends in "{}".'.format(backend))
import numpy as np
import torch.nn.functional as f
import matplotlib.pyplot as plt

# import parent module
import os, sys, inspect
currentdir = os.path.dirname(
    os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
from methods import magnitude_rank
#from vgg_computeComb2 import compute_combinations
import argparse
import torch
import torch.nn as nn
from importlib.machinery import SourceFileLoader

# Load the CIFAR dataloader and the VGG model definition straight from their
# source files (relative paths; they live outside this package's import path).
dataset_cifar = SourceFileLoader(
    "module_cifar", "../dataloaders/dataset_cifar.py").load_module()
model_lenet5 = SourceFileLoader("module_vgg", "../models/vgg.py").load_module()
# NOTE(review): the import below presumably works because load_module()
# registers "module_cifar" in sys.modules — confirm before modernising away
# from the deprecated load_module().
from module_cifar import load_cifar
from methods.script_vgg_vggswitch import switch_run as script_vgg
from methods import shapley_rank
from models import vgg

#######
# PATH
cwd = os.getcwd()
#if 'g0' in socket.gethostname() or 'p0' in socket.gethostname():
#    #the cwd is where the sub file is so ranking/
#    sys.path.append(os.path.join(cwd, "results_switch"))
#    path_compression = cwd
#    #path_compression = os.path.join(cwd, "results_compression")
"""Test GCP/GKE configuration parsing.""" from importlib.machinery import SourceFileLoader from unittest.mock import MagicMock, patch import subprocess import os import textwrap import pytest tfwrapper = SourceFileLoader("tfwrapper", "bin/tfwrapper").load_module() @patch("pathlib.Path.is_file") def test_dot_kubeconfig_refresh(mock_is_file, caplog): # noqa: D103 os.environ = {} adc_path = "/home/test/.config/gcloud/application_default_credentials.json" gke_name = "gke-testproject" project = "testproject" region = "testregion" kubeconfig_path = "/home/test/.run/testaccount/testenvironment/{}/{}.kubeconfig".format(region, gke_name) cmd_env = {} cmd_env["CLOUDSDK_CONTAINER_USE_APPLICATION_DEFAULT_CREDENTIALS"] = "true" cmd_env["CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE"] = adc_path cmd_env["KUBECONFIG"] = kubeconfig_path command = [ "gcloud", "container", "clusters", "get-credentials", gke_name,
import numpy as np import pandas as pd import matplotlib.pyplot as plt from importlib.machinery import SourceFileLoader # Initialise wrapper wr = SourceFileLoader("mobius", r"../mobius.py").load_module() cu = SourceFileLoader("mobius_calib_uncert_lmfit", r"..\mobius_calib_uncert_lmfit.py").load_module() wr.initialize('../../Applications/SimplyC_regional/simplyc_regional.dll') def nash_sutcliffe(sim, obs): residuals = sim - obs nonnan = np.count_nonzero(~np.isnan(residuals)) sumsquare = np.nansum(np.square(residuals)) meanob = np.nansum(obs) / nonnan return 1.0 - sumsquare / np.nansum(np.square(obs - meanob)) def main(): catch_setup = pd.read_csv('catchment_organization.csv', sep='\t') skip_timesteps = 50 reduced_only = True setups = [
# Generate RST lists of external dependencies. from collections import defaultdict, namedtuple import pathlib import sys import urllib.parse from importlib.util import spec_from_loader, module_from_spec from importlib.machinery import SourceFileLoader # bazel/repository_locations.bzl must have a .bzl suffix for Starlark import, so # we are forced to do this workaround. _repository_locations_spec = spec_from_loader( 'repository_locations', SourceFileLoader('repository_locations', 'bazel/repository_locations.bzl')) repository_locations = module_from_spec(_repository_locations_spec) _repository_locations_spec.loader.exec_module(repository_locations) # Render a CSV table given a list of table headers, widths and list of rows # (each a list of strings). def CsvTable(headers, widths, rows): csv_rows = '\n '.join(', '.join(row) for row in rows) return f'''.. csv-table:: :header: {', '.join(headers)} :widths: {', '.join(str(w) for w in widths) } {csv_rows} '''
from CyberSource import * import os import json from importlib.machinery import SourceFileLoader config_file = os.path.join(os.getcwd(), "data", "Configuration.py") configuration = SourceFileLoader("module.name", config_file).load_module() capture_payment_path = os.path.join(os.getcwd(), "samples", "Payments", "Capture", "capture-payment.py") capture_payment = SourceFileLoader("module.name", capture_payment_path).load_module() # To delete None values in Input Request Json body def del_none(d): for key, value in list(d.items()): if value is None: del d[key] elif isinstance(value, dict): del_none(value) return d def refund_capture(): api_capture_response = capture_payment.capture_payment() id = api_capture_response.id clientReferenceInformationCode = "TC50171_3" clientReferenceInformation = Ptsv2paymentsClientReferenceInformation( code = clientReferenceInformationCode ) orderInformationAmountDetailsTotalAmount = "102.21" orderInformationAmountDetailsCurrency = "USD"
# Do not modify this file (if you want to modify anyway, contact a mentor before, who will explain why do not modify) import unittest import os import data_manager from importlib.machinery import SourceFileLoader current_file_path = os.path.dirname(os.path.abspath(__file__)) store = SourceFileLoader("store", current_file_path + "/store/store.py").load_module() hr = SourceFileLoader("hr", current_file_path + "/hr/hr.py").load_module() tool_manager = SourceFileLoader( "tool_manager", current_file_path + "/tool_manager/tool_manager.py").load_module() accounting = SourceFileLoader("accounting", current_file_path + "/accounting/accounting.py").load_module() selling = SourceFileLoader("selling", current_file_path + "/selling/selling.py").load_module() crm = SourceFileLoader("crm", current_file_path + "/crm/crm.py").load_module() def compare_lists(tester, expected_list, result_list): for item in result_list: tester.assertTrue(item in expected_list) def get_subscribed_list(): return [ "[email protected];Lieselotte Rainey", "[email protected];Maude Toll", "[email protected];Fawn Lambrecht", "[email protected];Phylis Farberanmt",
from importlib.machinery import SourceFileLoader from setuptools import find_packages, setup version = SourceFileLoader('version', 'stpmex/version.py').load_module() install_requires = [ 'cryptography>=3.0,<3.4', 'cuenca-validations>=0.4,<0.7', 'requests>=2.24,<2.26', ] with open('README.md', 'r') as f: long_description = f.read() setup( name='stpmex', version=version.__version__, author='Cuenca', author_email='*****@*****.**', description='Client library for stpmex.com', long_description=long_description, long_description_content_type='text/markdown', url='https://github.com/cuenca-mx/stpmex-python', packages=find_packages(), include_package_data=True, package_data=dict(stpmex=['py.typed']), python_requires='>=3.6', install_requires=install_requires, classifiers=[ 'Programming Language :: Python :: 3',
#u = "bolt://localhost:7687"
#us ="neo4j"
#pw = "kK0_1"
#s = r"S:\Uni\Master\MasterArbeit\CompleteAnalysis\Out\Java\summary.csv"
#main(s,u,us,pw)

if __name__ == "__main__":
    # argv[1..4] feed main() and argv[5] is the analysis output directory,
    # so six entries (including the program name) are required.
    if len(sys.argv) < 6:
        # BUG FIX: the old check (`< 5`) still allowed the sys.argv[5] access
        # below to raise IndexError, and exit(0) reported success on misuse.
        print("WRONG USAGE!")
        exit(1)
    path = sys.argv[5] + "/Default/Callgraph.py"
    # Load the generated Callgraph module with whatever machinery this
    # interpreter supports.
    if sys.version_info >= (3, 5):
        import importlib.util
        spec = importlib.util.spec_from_file_location("Callgraph.CallGraph", path)
        cgI = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(cgI)
    elif sys.version_info >= (3, 3):
        from importlib.machinery import SourceFileLoader
        cgI = SourceFileLoader("Callgraph.CallGraph", path).load_module()
    elif sys.version_info >= (2, 0):
        import imp
        cgI = imp.load_source("Callgraph.CallGraph", path)
    else:
        print("Not supported python version: " + sys.version)
    main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
from CyberSource import * import json import os from importlib.machinery import SourceFileLoader config_file = os.path.join(os.getcwd(), "data", "Configuration.py") configuration = SourceFileLoader("module.name", config_file).load_module() def create_payment_instrument(): try: # Setting the json message body request = Body2() card_info = Tmsv1paymentinstrumentsCard() card_info.expiration_month = "09" card_info.expiration_year = "2022" card_info.type = "visa" request.card = card_info.__dict__ bill_to_info = Tmsv1paymentinstrumentsBillTo() bill_to_info.first_name = "John" bill_to_info.last_name = "Deo" bill_to_info.company = "CyberSource" bill_to_info.address1 = "12 Main Street" bill_to_info.address2 = "20 My Street" bill_to_info.locality = "Foster City" bill_to_info.administrative_area = "CA" bill_to_info.postal_code = "90200" bill_to_info.country = "US" bill_to_info.email = "*****@*****.**" bill_to_info.phone_number = "555123456"
# data structure: # id: string # Unique and random generated (at least 2 special char()expect: ';'), 2 number, 2 lower and 2 upper case letter) # name: string # birth_date: number (year) # importing everything you need import os from importlib.machinery import SourceFileLoader current_file_path = os.path.dirname(os.path.abspath(__file__)) # User interface module ui = SourceFileLoader("ui", current_file_path + "/../ui.py").load_module() # data manager module data_manager = SourceFileLoader("data_manager", current_file_path + "/../data_manager.py").load_module() # common module common = SourceFileLoader("common", current_file_path + "/../common.py").load_module() # start this module by a module menu like the main menu # user need to go back to the main menu from here # we need to reach the default and the special functions of this module from the module menu # def start_module(): table = data_manager.get_table_from_file("hr/persons.csv") title_list = ["ID", "Name", "Year of birth"] options = [ "Show table", "Add item", "Remove item", "Update item", "Get oldest person", "Get persons closest to average" ]
def load_source(name, path):
    """Return the namespace dict of the module at *path*, or {} if the file
    does not exist.

    Note: *name* is accepted for API compatibility but the module is always
    registered under the fixed name 'mod', as in the original implementation.
    """
    if not os.path.exists(path):
        return {}
    module = SourceFileLoader('mod', path).load_module()
    return vars(module)
import os, sys
import django
import codeinquero.settings
from importlib.machinery import SourceFileLoader

# Configure Django before the loaded script touches any models.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "codeinquero.settings")
django.setup()

if __name__ == '__main__':
    # Usage: python <this file> <script.py>
    # The loaded script must expose an execute() function.
    script = SourceFileLoader('script', sys.argv[1]).load_module()
    script.execute()
import os from importlib.machinery import SourceFileLoader from setuptools import setup, find_packages module_name = 'romanify' module = SourceFileLoader(module_name, os.path.join(module_name, '__init__.py')).load_module() setup(name='romanify', version=module.__version__, author=module.__author__, author_email=module.__email__, license=module.__license__, description=module.__doc__, long_description=open('README.rst').read(), platforms="all", classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: MacOS', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6',
#!/usr/bin/env python3
"""Print the package version declared in lambdarado/_constants.py."""
from importlib.machinery import SourceFileLoader

constants = SourceFileLoader(
    'constants', 'lambdarado/_constants.py').load_module()

print(vars(constants)['__version__'])
import os
import logging
from importlib.machinery import SourceFileLoader
from setuptools import setup, find_packages

# Environment variable that selects the lightweight ("skinny") distribution.
_MLFLOW_SKINNY_ENV_VAR = "MLFLOW_SKINNY"

# Read VERSION from mlflow/version.py without importing the mlflow package
# (importing it here would pull in its runtime dependencies).
version = (
    SourceFileLoader("mlflow.version", os.path.join("mlflow", "version.py"))
    .load_module()
    .VERSION)


# Get a list of all files in the JS directory to include in our module
def package_files(directory):
    paths = []
    for (path, _, filenames) in os.walk(directory):
        for filename in filenames:
            # Paths are stored relative to the mlflow/ package directory.
            paths.append(os.path.join("..", path, filename))
    return paths


# Prints out a set of paths (relative to the mlflow/ directory) of files in mlflow/server/js/build
# to include in the wheel, e.g. "../mlflow/server/js/build/index.html"
js_files = package_files("mlflow/server/js/build")
models_container_server_files = package_files("mlflow/models/container")
alembic_files = [
    "../mlflow/store/db_migrations/alembic.ini",
    "../mlflow/temporary_db_migrations_for_pre_1_users/alembic.ini",
]
extra_files = ["ml-package-versions.yml", "pyspark/ml/log_model_allowlist.txt"]
def test_update(self, pihole):
    """Running the update action should make exactly one subprocess call."""
    loader = SourceFileLoader("actions", "./actions/update")
    loader.load_module()  # executing the script performs the update
    assert pihole.mock_subprocess.check_call.call_count == 1
from setuptools import setup, Extension
from sys import platform
from os import path
from importlib.machinery import SourceFileLoader

# Read the version module directly so setup.py never imports the package itself.
module = SourceFileLoader("version", path.join("aiofile", "version.py")).load_module()

libraries = []

if platform == 'linux':
    # librt provides the POSIX AIO symbols on Linux.
    libraries.append('rt')

if platform in ('linux', 'darwin'):
    try:
        from Cython.Build import cythonize

        extensions = cythonize([
            Extension(
                "aiofile.posix_aio",
                ["aiofile/posix_aio.pyx"],
                libraries=libraries,
            ),
        ], force=True, emit_linenums=False, quiet=True)
    except ImportError as e:
        # Cython is not installed; presumably a pure-python fallback follows
        # below this excerpt — TODO confirm.
        print(e)
def test_restart_dns(self, pihole):
    """Running the restart-dns action should make exactly one subprocess call."""
    loader = SourceFileLoader("actions", "./actions/restart-dns")
    loader.load_module()  # executing the script restarts the DNS service
    assert pihole.mock_subprocess.check_call.call_count == 1
# try: import sys if sys.version_info[0] < 3: from imp import load_source ycm_flags = load_source(module_name, modyle_path) flags = ycm_flags.flags else: if sys.version_info[1] < 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, modyle_path) ycm_flags = importlib.util.module_from_spec(spec) spec.loader.exec_module(ycm_flags) flags = ycm_flags.flags else: from importlib.machinery import SourceFileLoader ycm_flags = SourceFileLoader(modyle_name, modyle_path).load_module() flags = ycm_flags.flags # except: # flags = [ # '-Wall', # #'-Wextra', # '-Werror', # #'-Wc++98-compat', # '-Wno-long-long', # '-Wno-variadic-macros', # '-fexceptions', # # THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which # # language to use when compiling headers. So it will guess. Badly. So C++ # # headers will be compiled as C headers. You don't want that so ALWAYS specify # # a "-std=<something>".
import cv2 import yaml from PIL import Image from importlib.machinery import SourceFileLoader import torch from torch import nn from tqdm import tqdm import numpy as np import matplotlib.pyplot as plt import pandas import numpy __filedir__ = os.path.dirname(os.path.realpath(__file__)) network_module = SourceFileLoader(".", os.path.join(__filedir__, "network.py")).load_module() class DTOIDWrapper(nn.Module): def __init__(self, backend="cuda", no_filter_z=False): super(DTOIDWrapper, self).__init__() # Initialize the network model = network_module.Network() model.eval() model_path = os.path.join(__filedir__, "model.pth.tar") checkpoint = torch.load(model_path, map_location=lambda storage, loc: storage) model.load_state_dict(checkpoint["state_dict"])
""" Functional tests for method deprecation. """ import unittest from importlib.machinery import SourceFileLoader, SourcelessFileLoader SourceFileLoader('unittest', unittest.__file__).load_module() # [deprecated-method] SourcelessFileLoader('unittest', unittest.__file__).load_module() # [deprecated-method]
from importlib.machinery import SourceFileLoader import os from setuptools import find_packages, setup basedir = os.path.dirname(__file__) modelforge = SourceFileLoader( "modelforge", os.path.join(basedir, "modelforge", "version.py")).load_module() with open(os.path.join(basedir, "README.md"), encoding="utf-8") as f: long_description = f.read() setup(name="modelforge", description="APIs and tools to work with abstract \"models\" - files " "with numpy arrays and metadata. It is possible to publish " "models, list them. There is a built-in cache. Storage has backends.", long_description=long_description, long_description_content_type="text/markdown", version=modelforge.__version__, license="Apache 2.0", author="source{d}", author_email="*****@*****.**", url="https://github.com/src-d/modelforge", download_url="https://github.com/src-d/modelforge", packages=find_packages(exclude=("modelforge.tests", )), keywords=[ "model", "git", "asdf", "gcs", "google cloud storage", "machine learning", "registry" ], install_requires=[ "asdf>=2.3.0,<2.4", "lz4>=1.0,<3.0", "numpy>=1.12,<2.0",
import setuptools
import os
from importlib.machinery import SourceFileLoader

# Read VERSION from segmind/version.py by path, without importing the
# package itself (which would pull in its runtime dependencies).
version = SourceFileLoader('segmind.version',
                           os.path.join('segmind', 'version.py')).load_module().VERSION

# README.md becomes the PyPI long description.
with open('README.md', 'r') as f:
    long_description = f.read()

setuptools.setup(
    name='segmind',
    version=version,
    author='T Pratik',
    author_email='*****@*****.**',
    description='A tracking tool for deep-learning',
    long_description=long_description,
    long_description_content_type='text/markdown',
    # url="https://github.com/pk00095/keras_jukebox/archive/0.0.3.tar.gz",
    packages=setuptools.find_packages(),
    # NOTE(review): 'PYyaml' works only because PyPI normalizes project
    # names case-insensitively; the canonical spelling is 'PyYAML' --
    # confirm before changing.
    install_requires=[
        'pycocotools', 'click', 'protobuf==3.13.0', 'lxml',
        'pascal_voc_writer', 'GPUtil', 'PYyaml', 'pandas', 'entrypoints',
        'psutil'
    ],
    entry_points={'console_scripts': ['segmind=segmind.cli:cli']},
    include_package_data=True,
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
#!/usr/bin/python
"""Driver script: load the analysis/model scripts from ``scripts/`` and run them in order."""
from importlib.machinery import SourceFileLoader


def _load_script(name):
    # Load scripts/<name>.py from disk as a standalone module.
    return SourceFileLoader(name, "scripts/%s.py" % name).load_module()


data_analysis = _load_script("data_analysis")
log_reg = _load_script("log_reg")
naive_bayes_pre_trained = _load_script("naive_bayes_pre_trained")
confusion_matrix_lg = _load_script("confusion_matrix_lg")
confusion_matrix_nb = _load_script("confusion_matrix_nb")

# Data analysis
data_analysis.main("Capitals")
data_analysis.main("Numbers")

# Logistic regression
log_reg.main()
confusion_matrix_lg.main()

# Naive Bayes
confusion_matrix_nb.main()
naive_bayes_pre_trained.main()
# SOFTWARE. from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf import os PROJECT_PATH = os.path.dirname(os.path.abspath(__file__)) log_path = os.path.join(PROJECT_PATH, 'output') models_path = os.path.join(PROJECT_PATH, 'models') from importlib.machinery import SourceFileLoader facenet = SourceFileLoader('facenet', os.path.join(PROJECT_PATH, 'MR_facenet.py')).load_module() eval_data_reader = SourceFileLoader( 'eval_data_reader', os.path.join(PROJECT_PATH, 'eval_data_reader.py')).load_module() verification = SourceFileLoader('verification', os.path.join(PROJECT_PATH, 'verification.py')).load_module() lfw = SourceFileLoader('lfw', os.path.join(PROJECT_PATH, 'lfw.py')).load_module() def main(args): with tf.Graph().as_default():
from keras.callbacks import TensorBoard, ModelCheckpoint, LambdaCallback
import matplotlib as mtpl
import matplotlib.pyplot as plt
from tqdm import trange, tqdm
import cv2
import time
from os.path import join
import fnmatch

# Import libraries from GitHub.
# NOTE: loading by file path like this is only needed for Colaboratory
# notebooks; in a normal environment these would be plain imports.
from importlib.machinery import SourceFileLoader

# Load the mask-generation helpers and the partial-convolution U-Net model
# directly from their files on the mounted Google Drive.
mask1 = SourceFileLoader(
    'Mask_generator.py',
    join('/content/drive/My Drive/CMB_Inpainting_Oxford/CMB_Inpainting_masking/',
         'utils/Mask_generator.py')).load_module()
unet_100 = SourceFileLoader(
    'PConv_UNet_model_100.py',
    join('/content/drive/My Drive/CMB_Inpainting_Oxford/CMB_Inpainting_masking/',
         'utils/PConv_UNet_model_100.py')).load_module()
mask2 = SourceFileLoader(
    'Mask_generator2.py',
    join('/content/drive/My Drive/CMB_Inpainting_Oxford/CMB_Inpainting_masking/',
         'utils/Mask_generator2.py')).load_module()

# Wall-clock start, used to time the run.
start_time = time.time()

# Settings
def import_recipe(module, filename):
    """Load *filename* as Python source and return it as a module named *module*.

    Replaces the deprecated ``SourceFileLoader.load_module()`` (removed in
    Python 3.12) with the documented spec-based loading sequence. Like
    ``load_module()``, the loaded module is registered in ``sys.modules``
    under *module* so recipes that import each other keep working.
    """
    # Local imports keep this drop-in for files that only import SourceFileLoader.
    from importlib.util import module_from_spec, spec_from_loader
    import sys

    loader = SourceFileLoader(module, filename)
    spec = spec_from_loader(loader.name, loader)
    mod = module_from_spec(spec)
    # load_module() registered the module before executing it; keep that behavior.
    sys.modules[module] = mod
    loader.exec_module(mod)
    return mod
evaluate_all = args.evaluate_all
if evaluate_test_set and evaluate_all:
    raise ValueError(
        'evaluate_all and evaluate_test_set cannot be chosen together!')
use_iter = args.iter
if use_iter:
    logging.info('Using iteration: %d' % use_iter)

base_path = sys_config.project_root
model_path = os.path.join(base_path, args.EXP_PATH)
# NOTE(review): the next line clobbers the model_path just computed from
# args.EXP_PATH with a hard-coded experiment directory -- looks like a
# debugging leftover; confirm before shipping.
model_path = '/scratch_net/biwirender02/cany/scribble/logdir/heart_dropout_welch_non_exp'
# The experiment config is assumed to be the single *.py file in the model dir.
config_file = glob.glob(model_path + '/*py')[0]
# NOTE(review): rstrip('.py') strips the character *set* {., p, y}, not the
# suffix -- a config module whose name ends in 'p' or 'y' would be mangled.
config_module = config_file.split('/')[-1].rstrip('.py')

# Load the experiment configuration module from its source file.
exp_config = SourceFileLoader(config_module,
                              os.path.join(config_file)).load_module()

recursion = args.RECURSION
# NOTE(review): the three overrides below discard the command-line values
# (recursion, apply_crf, evaluate_test_set) -- presumably debug leftovers;
# verify before relying on the CLI flags.
recursion = 0
apply_crf = False
evaluate_test_set = True
if evaluate_test_set:
    logging.warning('EVALUATING ON TEST SET')
    input_path = sys_config.test_data_root
    output_path = os.path.join(model_path, 'predictions_testset')
elif evaluate_all:
    logging.warning('EVALUATING ON ALL TRAINING DATA')
    input_path = sys_config.data_root
    output_path = os.path.join(model_path, 'predictions_alltrain')
else:
    logging.warning('EVALUATING ON VALIDATION SET')
    input_path = sys_config.data_root
#!/usr/bin/env python3
"""Unit tests for the ``rust`` salt execution module."""
from importlib.machinery import SourceFileLoader
import unittest

from salt.modules import cmdmod

# Test scaffolding and the module under test are loaded by file path, as
# neither lives on the import path.
salt_test_case = SourceFileLoader("salt_test_case", "salt_test_case.py").load_module()
rust = SourceFileLoader("rust", "../_modules/rust.py").load_module()


class Testinstance(unittest.TestCase, salt_test_case.SaltTestCase):
    """Exercises the rust module against a mocked salt environment."""

    def setUp(self):
        # Wire up the mocked salt dunder, backing cmd.shell with the real
        # implementation so shell-based queries actually run.
        self.initialize_mocks()
        self.instance = rust
        self.mock_salt()
        self.salt["cmd.shell"] = cmdmod.shell

    def test_get_rustc_triplet(self):
        # A valid target triplet has at least arch-vendor-os components.
        parts = rust.get_rustc_triplet().split("-")
        self.assertTrue(len(parts) > 2)


if __name__ == "__main__":
    unittest.main()
# MARK: - Run script def run_code_loop(): while True: try: code = str(pyto.Python.shared.codeToRun) exec(code) if code == pyto.Python.shared.codeToRun: pyto.Python.shared.codeToRun = None except: print(traceback.format_exc()) if pyto.ConsoleViewController.isPresentingView: sleep(0.02) else: sleep(0.2) threading.Thread(target=run_code_loop, args=()).start() script = "%@" try: SourceFileLoader("main", script).load_module() except Exception as e: ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb)