def pytest_generate_tests(metafunc):
    # get all the tactic file names
    tactic_paths = pathlib.Path(*TACTIC_PATH).glob("[!_]*.py")
    tactic_modules_list = []

    # import each tactic module
    for tactic_path in tactic_paths:
        import_name = '.'.join(tactic_path.parts).rsplit('.', 1)[0]
        tactic_modules_list.append(importlib.import_module(import_name))

    # some import magic to get the tactic class since we want its type
    tactic_base_class_import = pathlib.Path(*TACTIC_PATH, 'tactic')
    tactic_import_name = '.'.join(tactic_base_class_import.parts)
    tactic_base_class_type = importlib.import_module(tactic_import_name).Tactic

    # check which objects within the modules are classes derived from Tactic
    tactic_classes_list = []
    for a_tactic_module in tactic_modules_list:
        for __, obj in inspect.getmembers(a_tactic_module):
            # we want:
            # a class,
            # that is defined in the module,
            # derived from Tactic or a child of it
            if inspect.isclass(obj) and\
               inspect.getfile(obj) == inspect.getfile(a_tactic_module) and\
               tactic_base_class_type in inspect.getmro(obj)[1:]:
                tactic_classes_list.append(obj)

    metafunc.parametrize('tactic_class', tactic_classes_list)
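For context, a minimal sketch (not part of the original suite) of a test that would consume the parametrized argument; the assertion is illustrative only:

def test_tactic_class_is_a_class(tactic_class):
    # pytest_generate_tests injects each discovered Tactic subclass here.
    assert inspect.isclass(tactic_class)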
Example #2
    def __call__(self, parser, namespace, values, option_string=None):

        import pycbc
        version_str="--- PyCBC Version --------------------------\n" + \
            pycbc.version.git_verbose_msg + \
            "\n\nImported from: " + inspect.getfile(pycbc)

        version_str += "\n\n--- LAL Version ----------------------------\n"
        try:
            import lal.git_version
            lal_module = inspect.getfile(lal)
            lal_library = os.path.join( os.path.dirname(lal_module),
                '_lal.so')
            version_str += lal.git_version.verbose_msg + \
            "\n\nImported from: " + lal_module + \
            "\n\nRuntime libraries:\n" + print_link(lal_library)
        except ImportError:
            version_str += "\nLAL not installed in environment\n"

        version_str += "\n\n--- LALSimulation Version-------------------\n"
        try:
            import lalsimulation.git_version
            lalsimulation_module = inspect.getfile(lalsimulation)
            lalsimulation_library = os.path.join( os.path.dirname(lalsimulation_module),
                '_lalsimulation.so')
            version_str += lalsimulation.git_version.verbose_msg + \
            "\n\nImported from: " + lalsimulation_module + \
            "\n\nRuntime libraries:\n" + print_link(lalsimulation_library)
        except ImportError:
            version_str += "\nLALSimulation not installed in environment\n"

        print(version_str)
        sys.exit(0)
Example #3
def pmodule(c):
    global _pmodule_lookup    
    
    name = c._name = c.__name__.lower()
    
    assert type(name) is str

    ret = _pmodule_lookup.setdefault(name, c)
    
    if ret is not c:
        if (inspect.getsourcefile(ret) == inspect.getsourcefile(c)
            or inspect.getfile(ret) == inspect.getfile(c)):
        
            return c         
        
        raise NameError("Processing Module '%s' doubly defined in files %s and %s." 
                        % (name, inspect.getfile(ret), inspect.getfile(c)))

        
    c.log = logging.getLogger(c._name)

    parameters.processPModule(c)

    parameters.registerPreset("r." + name, PModulePreset(name),
                              description = "Runs processing module '%s'." % c.__name__)
    
    logging.getLogger("Manager").debug("Processing module '%s' registered." % c.__name__)
        
    c._is_pmodule = True
    
    return c
Example #4
def _getName(func, args=()):
	"""Default name of cache file"""
	if hasattr(func, "__self__"):
		# bound method
		cls = func.__self__.__class__
		if cls.__module__ != "__main__":
			name = cls.__module__
		else:
			name = basename(getfile(func))
		name += "." + cls.__name__ + "." + func.__name__

	elif func.__code__.co_varnames and func.__code__.co_varnames[0] == 'self':
		# unbound method: the instance is passed as the first argument
		if args[0].__class__.__module__ != "__main__":
			name = args[0].__class__.__module__
		else:
			name = basename(getfile(func))
		name += "." + args[0].__class__.__name__ + "." + func.__name__

	else:
		# top-level function
		if func.__module__ != "__main__":
			name = func.__module__
		else:
			name = basename(getfile(func))
		name += "." + func.__name__

	return name + ".cache"
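A hypothetical usage sketch of the helper above; the module and function names are illustrative:

def load_data():
	pass

# If this module were imported as "mypkg.utils" (not run as __main__),
# _getName(load_data) would return "mypkg.utils.load_data.cache".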
Example #5
    def testlogger(self):
        import logging
        import inspect
        import os.path
        from comoonics import ComLog
        _mylogger=logging.getLogger("comoonics.ComLog")
        logging.basicConfig()
        _mylogger.setLevel(logging.DEBUG)
        #from comoonics.db.ComDBLogger import DBLogger
        #registerHandler("DBLogger", DBLogger)
        _filenames = ("loggingconfig.ini",)  # trailing comma makes this a tuple
        ComLog.getLogger().info("Testing ComLog:")
        loggers={"test1": logging.DEBUG,
                 "test2": logging.INFO,
                 "test3": logging.WARNING}
        for loggername in loggers.keys():
            print "%s level: %s" %(loggername, logging.getLevelName(loggers[loggername]))
            ComLog.setLevel(loggers[loggername], loggername)
            self.__testLogger(loggername, ComLog.getLogger(loggername))

        print("mylogger without level")
        self.__testLogger("mylogger", ComLog.getLogger("mylogger"))
        cp=None

        print("ComLog._classregistry: %s" %ComLog._classregistry)
        for _filename in _filenames:
            logging.shutdown()
            print("Testing configfile %s/%s cwd: %s" %(os.path.dirname(inspect.getfile(self.__class__)), _filename, os.path.curdir))
            ComLog.fileConfig(os.path.join(os.path.dirname(inspect.getfile(self.__class__)), _filename), None, )
            rootlogger=ComLog.getLogger()
            self.__testLogger("root", rootlogger)
            print("handlernames: %s" %rootlogger.manager.loggerDict.keys())
            for _lname in [ "atix", "atix", "atix.atix1" ]:
                self.__testLogger(_lname, logging.getLogger(_lname))
                self.__testLogger(_lname+".test", logging.getLogger(_lname+".test"))
Example #6
    def importer_handler (name, src, dst):
        with ResultSender (src) as send:
            if name is None:
                return False # dispose importer

            module = sys.modules.get (name, False)
            if module is None:
                send (None) # Module is cached as not found (python 2)
                return True

            loader = pkgutil.get_loader (name)
            if loader is None or not hasattr (loader, 'get_source'):
                send (None)
                return True

            source = loader.get_source (name)
            if source is None:
                send (None)
                return True

            ispkg = loader.is_package (name)
            if module and hasattr (module, '__package__'):
                pkg = module.__package__
            else:
                pkg = name if ispkg else name.rpartition ('.') [0]

            try:
                filename = (inspect.getfile (loader.get_code (name)) if not module else
                            inspect.getfile (module))
            except TypeError:
                filename = '<unknown>'

            send (ImporterLoader (name, pkg, ispkg, filename, source))
        return True
Example #7
    def __new__(cls, name, bases, d):
        new_class = super(MetaProcessor, cls).__new__(cls, name, bases, d)
        if new_class in implementations(IProcessor):
            id = str(new_class.id())
            if id in _processors:
                # Doctest runs can duplicate a processor.
                # This can be identified by the condition "module == '__main__'"
                new_path = os.path.realpath(inspect.getfile(new_class))
                id_path = os.path.realpath(inspect.getfile(_processors[id]))
                if new_class.__module__ == '__main__':
                    new_class = _processors[id]
                elif _processors[id].__module__ == '__main__':
                    pass
                elif new_path == id_path:
                    new_class = _processors[id]
                else:
                    raise ApiError("%s and %s have the same id: '%s'"
                                   % (new_class.__name__,
                                      _processors[id].__name__, id))
            if not MetaProcessor.valid_id.match(id):
                raise ApiError("%s has a malformed id: '%s'"
                               % (new_class.__name__, id))

            _processors[id] = new_class

        return new_class
Example #8
def test_faulty_events(tmpdir, recwarn):
    tmpdir = str(tmpdir)
    file_1 = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe())))), "data", "ExampleProject",
        "EVENTS", "GCMT_event_TURKEY_Mag_5.1_2010-3-24-14-11.xml")
    file_2 = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe())))), "data", "ExampleProject",
        "EVENTS", "GCMT_event_TURKEY_Mag_5.9_2011-5-19-20-15.xml")
    cat = obspy.readEvents(file_1)
    cat += obspy.readEvents(file_2)

    # Modify it to trigger all problems.
    temp = io.BytesIO()
    cat.write(temp, format="quakeml")
    temp.seek(0, 0)
    temp = temp.read()
    pattern = re.compile(r"<depth>.*?</depth>", re.DOTALL)
    temp = re.sub(pattern, "<depth></depth>", temp)
    temp = re.sub(r"<type>.*?</type>", "<type></type>", temp)
    with open(os.path.join(tmpdir, "random.xml"), "wb") as fh:
        fh.write(temp)

    comm = Communicator()
    EventsComponent(tmpdir, comm, "events")

    event = comm.events.get('random')
    assert "more than one event" in str(recwarn.pop(LASIFWarning).message)
    assert "contains no depth" in str(recwarn.pop(LASIFWarning).message)
    assert "Magnitude has no specified type" in str(
        recwarn.pop(LASIFWarning).message)

    # Assert the default values it will then take.
    assert event["depth_in_km"] == 0.0
    assert event["magnitude_type"] == "Mw"
Example #9
 def testIfFileExists(self,filename):
     """ 
     If file exists in all possible folders,
     return complete file path,
     otherwise, return None
     """
     
     #print "*", filename
     if os.path.isfile(filename) is True :
         #print "*1", filename
         
         return filename
     elif os.path.isfile(os.path.join(os.getcwd(),filename)) is True :
         #print "*2", os.path.join(os.getcwd(),filename)
         cfile = os.path.join(os.getcwd(),filename)

         return cfile
     elif os.path.isfile(os.path.join(os.path.dirname(sys.argv[0]),filename)) is True :
         #print "*3", os.path.join(os.path.dirname(sys.argv[0]),filename)
         cfile =  os.path.join(os.path.dirname(sys.argv[0]),filename)
         
         return cfile        
     elif os.path.isfile(os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)) is True :
         #print "*4", os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)
         cfile = os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)
         
         return cfile
     else :
         #print "*5", filename
         return None
Example #10
def import_child_modules(parts, ignore=r"^[\._].*", error_callback=None):
    matcher = None if ignore is None else re.compile(ignore)
    if isinstance(parts, types.ModuleType):
        parent_module = parts
        parts = path_to_module_parts(inspect.getfile(parts))
    else:
        parent_module = import_module_by_name_parts(*parts)
    parent_dir = os.path.dirname(inspect.getfile(parent_module))
    modules = {}
    for child in os.listdir(parent_dir):
        if matcher is not None and matcher.match(child):
            continue
        child_name = fin.string.rtrim(child, *PY_EXTENSIONS)
        child_path = os.path.join(parent_dir, child)
        if child_name == child and not os.path.isdir(child_path):
            continue
        if child_name in modules:
            continue
        try:
            modules[child_name] = import_module_by_name_parts(
                *(tuple(parts) + (child_name, )))
        except Exception as e:
            if error_callback is not None:
                error_callback(e)
            else:
                raise
    return modules
Example #11
 def testIfFileExistsInFolders(self,filename,folderList):
     """ 
     If file exists in all possible folders
     and in the folderList provided
     
     return complete file path,
     otherwise, return None
     """
     
     #print "*", filename
     if os.path.isfile(filename) is True :
         #print "*1", filename
         return filename
     elif os.path.isfile(os.path.join(os.getcwd(),filename)) is True :
         #print "*2", os.path.join(os.getcwd(),filename)
         return os.path.join(os.getcwd(),filename)
     elif os.path.isfile(os.path.join(os.path.dirname(sys.argv[0]),filename)) is True :
         #print "*3", os.path.join(os.path.dirname(sys.argv[0]),filename)
         return os.path.join(os.path.dirname(sys.argv[0]),filename)        
     elif os.path.isfile(os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)) is True :
         #print "*4", os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)
         return os.path.join(os.path.dirname(inspect.getfile( inspect.currentframe() )),filename)
     else :
         for i in folderList:
             if  os.path.isfile(os.path.join(i,filename)) is True:
                 return os.path.join(i,filename)
         return None
Example #12
 def __init__(self, py_func):
     self._is_closure = bool(py_func.__closure__)
     self._lineno = py_func.__code__.co_firstlineno
     # Get qualname
     try:
         qualname = py_func.__qualname__
     except AttributeError:
         qualname = py_func.__name__
     # Find a locator
     source_path = inspect.getfile(py_func)
     for cls in self._locator_classes:
         locator = cls.from_function(py_func, source_path)
         if locator is not None:
             break
     else:
         raise RuntimeError("cannot cache function %r: no locator available "
                            "for file %r" % (qualname, source_path))
     self._locator = locator
     # Use filename base name as module name to avoid conflict between
     # foo/__init__.py and foo/foo.py
     filename = inspect.getfile(py_func)
     modname = os.path.splitext(os.path.basename(filename))[0]
     fullname = "%s.%s" % (modname, qualname)
     abiflags = getattr(sys, 'abiflags', '')
     self._filename_base = self.get_filename_base(fullname, abiflags)
Example #13
 def _add_subcommands(cls, parser):
     """Adds subcommands to the parser parser."""
     # add subcommands
     subcmds = cls.cls_map().get(cls.__name__, [])
     if subcmds:
         subparsers = parser.add_subparsers()
         seen = {}
         for sub_cls in subcmds:
             if sub_cls.cmd in seen:
                 new_loc = inspect.getfile(sub_cls)
                 old_loc = inspect.getfile(seen[sub_cls.cmd][-1])
                 msg = ("\"%s\" already defined in %s (ignoring "
                        "definition in %s") % (sub_cls.cmd, old_loc,
                                               new_loc)
                 logger().warning(msg)
                 continue
             seen.setdefault(sub_cls.cmd, []).append(sub_cls)
             descr = sub_cls.description()
             kw = {'description': sub_cls.description(),
                   # keep indention and newlines in docstr
                   'formatter_class': argparse.RawDescriptionHelpFormatter}
             if sub_cls.help() is not None:
                 kw['help'] = sub_cls.help()
             subparser = subparsers.add_parser(sub_cls.cmd, **kw)
             sub_cls.add_arguments(subparser)
Example #14
 def translate_path(self, path):
     """Translate a /-separated PATH to the local filename syntax with md_slide_dir  md_slide_dir prefix
     """
     # abandon query parameters
     path = path.split('?', 1)[0]
     path = path.split('#', 1)[0]
     # Don't forget explicit trailing slash when normalizing. Issue17324
     trailing_slash = path.rstrip().endswith('/')
     try:
         path = urllib.parse.unquote(path, errors='surrogatepass')
     except UnicodeDecodeError:
         path = urllib.parse.unquote(path)
     path = posixpath.normpath(path)
     words = path.split('/')
     if 'README.md' in words:
         return os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'README.md')
     elif 'Tutorial.md' in words:
         return os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'Tutorial.md')
     words = filter(None, words)
     path = md_slide_dir
     for word in words:
         drive, word = os.path.splitdrive(word)
         head, word = os.path.split(word)
         if word in (os.curdir, os.pardir): continue
         path = os.path.join(path, word)
     if trailing_slash:
         path += '/'
     return path
Example #15
    def test_sql_test_case_with_discovery_queries(self):
        tinc_test_loader = tinctest.TINCTestLoader()
        pwd = os.path.dirname(inspect.getfile(self.__class__))
        test_dir = os.path.join(pwd, 'sql_pattern')
        tinc_test_suite = tinc_test_loader.discover(start_dirs = [test_dir], patterns = ['sql_pattern.py'],
                                                    top_level_dir = None, query_handler = TINCDiscoveryQueryHandler(['method=test_functional_*']))

        test_case = None
        test_result = None
            
        with closing(_WritelnDecorator(StringIO())) as buffer:
            tinc_test_runner = TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
            test_result = tinc_test_runner.run(tinc_test_suite)
            self.assertEqual(test_result.testsRun, 6)
            self.assertEqual(len(test_result.skipped), 6)


        # Queries using metadata from sql files
        tinc_test_loader = tinctest.TINCTestLoader()
        pwd = os.path.dirname(inspect.getfile(self.__class__))
        test_dir = os.path.join(pwd, 'sql_pattern')
        tinc_test_suite = tinc_test_loader.discover(start_dirs = [test_dir], patterns = ['sql_pattern.py'],
                                                    top_level_dir = None,
                                                    query_handler = TINCDiscoveryQueryHandler(['method=test_functional_* and tags != long']))

        test_case = None
        test_result = None
            
        with closing(_WritelnDecorator(StringIO())) as buffer:
            tinc_test_runner = TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
            test_result = tinc_test_runner.run(tinc_test_suite)
            self.assertEqual(test_result.testsRun, 3)
            self.assertEqual(len(test_result.skipped), 3)
Example #16
def autodiscover(path=None, plugin_prefix='intake_'):
    """Scan for Intake plugin packages and return a dict of plugins.

    This function searches path (or sys.path) for packages with names that
    start with plugin_prefix.  Those modules will be imported and scanned for
    subclasses of intake.source.base.Plugin.  Any subclasses found will be
    instantiated and returned in a dictionary, with the plugin's name attribute
    as the key.
    """

    plugins = {}

    for importer, name, ispkg in pkgutil.iter_modules(path=path):
        if name.startswith(plugin_prefix):
            t = time.time()
            new_plugins = load_plugins_from_module(name)

            for plugin_name, plugin in new_plugins.items():
                if plugin_name in plugins:
                    orig_path = inspect.getfile(plugins[plugin_name].__class__)
                    new_path = inspect.getfile(plugin.__class__)
                    warnings.warn('Plugin name collision for "%s" from'
                                  '\n    %s'
                                  '\nand'
                                  '\n    %s'
                                  '\nKeeping plugin from first location.'
                                  % (plugin_name, orig_path, new_path))
                else:
                    plugins[plugin_name] = plugin
            logger.debug("Import %s took: %7.2f s" % (name, time.time() - t))

    return plugins
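A minimal consumption sketch for autodiscover(); it assumes at least one intake_* package is importable, per the docstring above:

plugins = autodiscover()
for plugin_name, plugin in plugins.items():
    # Each value is an instantiated Plugin subclass.
    print(plugin_name, inspect.getfile(plugin.__class__))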
Example #17
 def test_run_sql_file_with_out_file(self):
     sql_file = os.path.join(os.path.dirname(inspect.getfile(self.__class__)),'test.sql')
     out_file = os.path.join(os.path.dirname(inspect.getfile(self.__class__)),'test.out')
     self.assertTrue(PSQL.run_sql_file(sql_file = sql_file, out_file = out_file))
     self.assertTrue(os.path.exists(out_file))
     os.remove(out_file)
     self.assertFalse(os.path.exists(out_file))
Example #18
def FlagsForFile(filename, **kwargs):
    data = kwargs['client_data']
    filetype = data['&filetype']

    flags = [
                '-Wall',
                '-Wextra'
            ]
    
    lang_specific_flags = \
    {
        'cpp': ['-xc++', '-std=c++11'],
        'c'  : ['-xc']
    }

    flags.extend(lang_specific_flags[filetype])

    includes = ['../include']
    includes += glob.glob(os.path.dirname(os.path.realpath(inspect.getfile(inspect.currentframe()))) + '/../deps/*/include')
    includes += glob.glob(os.path.dirname(os.path.realpath(inspect.getfile(inspect.currentframe()))) + '/deps/*/include')
    defines  = []

    for i in includes:
        flags.append('-I' + i)
    for d in defines:
        flags.append('-D' + d)

    return {
        'flags': flags,
        'do_cache': True
    }
Example #19
    def test_run_sql_command_catalog_update(self):
        sql_cmd = 'show gp_session_role;'
        out_file = os.path.join(os.path.dirname(inspect.getfile(self.__class__)),'test_catalog_update.out')
        self.assertFalse(os.path.exists(out_file))
        try:
            PSQL.run_sql_command_catalog_update(sql_cmd = sql_cmd, out_file = out_file)
            self.assertTrue(os.path.exists(out_file))
            with open(out_file, 'r') as f:
                output = f.read()
                self.assertIsNotNone(re.search('utility', output))
        finally:
            os.remove(out_file)
            self.assertFalse(os.path.exists(out_file))

        sql_cmd = 'show allow_system_table_mods;'
        out_file = os.path.join(os.path.dirname(inspect.getfile(self.__class__)),'test_catalog_update.out')
        self.assertFalse(os.path.exists(out_file))
        try:
            PSQL.run_sql_command_catalog_update(sql_cmd = sql_cmd, out_file = out_file)
            self.assertTrue(os.path.exists(out_file))
            with open(out_file, 'r') as f:
                output = f.read()
                self.assertIsNotNone(re.search('DML', output))
        finally:
            os.remove(out_file)
            self.assertFalse(os.path.exists(out_file))
Example #20
    def test_merge(self):
        uut = SectionManager()
        tmp = StringConstants.system_coafile
        StringConstants.system_coafile = os.path.abspath(os.path.join(
            os.path.dirname(inspect.getfile(SectionManagerTestCase)),
            "section_manager_test_files",
            "default_coafile"))

        config = os.path.abspath(os.path.join(
            os.path.dirname(inspect.getfile(SectionManagerTestCase)),
            "section_manager_test_files",
            ".coafile"))
        # Check merging of default_coafile and .coafile
        conf_sections = uut.run(arg_list=["-c", config])[0]
        self.assertEqual(str(conf_sections["test"]),
                         "test {value : 2}")
        self.assertEqual(str(conf_sections["test-2"]),
                         "test-2 {files : ., bears : LineCountBear}")
        # Check merging of default_coafile, .coafile and cli
        conf_sections = uut.run(arg_list=["-c",
                                          config,
                                          "-S",
                                          "test.value=3",
                                          "test-2.bears=",
                                          "test-5.bears=TestBear2"])[0]
        self.assertEqual(str(conf_sections["test"]), "test {value : 3}")
        self.assertEqual(str(conf_sections["test-2"]),
                         "test-2 {files : ., bears : }")
        self.assertEqual(str(conf_sections["test-3"]),
                         "test-3 {files : MakeFile}")
        self.assertEqual(str(conf_sections["test-4"]),
                         "test-4 {bears : TestBear}")
        self.assertEqual(str(conf_sections["test-5"]),
                         "test-5 {bears : TestBear2}")
        StringConstants.system_coafile = tmp
Example #21
def main():
  email = None
  password = None
  filepath = None
  convert = 'false'  # do not convert to Google Docs format by default
  default_chunk_size = gdata.client.ResumableUploader.DEFAULT_CHUNK_SIZE;
  chunk_size = default_chunk_size
  debug = False
  ssl = True

  try:
    opts, args = getopt.getopt(
        sys.argv[1:], '', ['email=', 'password=', 'filepath=',
                           'convert', 'chunk_size=', 'ssl',
                           'debug'])

  except getopt.error as msg:
    print('python ' + inspect.getfile(inspect.currentframe()) + '''
        --email= [your Google Docs email]
        --password= [your Google Docs password]
        --filepath= [file to upload]
        --convert [converts uploaded file]
        --chunk_size= [size of upload chunks. default is ''' + str(default_chunk_size) + ''']
        --nossl [disables HTTPS if set]
        --debug [prints debug info if set]''')
    print('Example usage: ' + inspect.getfile(inspect.currentframe()) + ' '
          '--filepath=/path/to/test.doc --convert --nossl')
    sys.exit(2)
Example #22
 def importer_handler(name, dst, src):
     try:
         if name is None:
             return False  # dispose importer
         module = sys.modules.get(name, False)
         if module is None:
             src.send(None)  # Module is cached as not found (python 2)
             return True
         try:
             loader = pkgutil.get_loader(name)
         except AttributeError:
             # this is a workaround for http://bugs.python.org/issue14710
             src.send(None)
             return True
         if loader is None or not hasattr(loader, 'get_source'):
             src.send(None)
             return True
         source = loader.get_source(name)
         if source is None:
             src.send(None)
             return True
         ispkg = loader.is_package(name)
         if module and hasattr(module, '__package__'):
             pkg = module.__package__
         else:
             pkg = name if ispkg else name.rpartition('.')[0]
         try:
             filename = (inspect.getfile(loader.get_code(name)) if not module else
                         inspect.getfile(module))
         except TypeError:
             filename = '<unknown>'
         src.send(BootLoader(name, source, filename, ispkg, pkg))
     except Exception:
         src.send(Result.from_current_error())
     return True
Example #23
   def test_multiple_models(self):
       """
       Test running running with two different pysd models
       Returns
       -------
 
       """
       relative_path_to_file = '../models/Sales_Agent_Market_Building_Dynamics.mdl'
       directory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
       mdl_file = os.path.join(directory, relative_path_to_file)
        
       market_model = PysdModel(mdl_file=mdl_file)
       market_model.uncertainties = [RealParameter('Startup Subsidy',0, 3),
                                     RealParameter('Startup Subsidy Length', 0, 10)]
       market_model.outcomes = [TimeSeriesOutcome('Still Employed')]
 
       relative_path_to_file = '../models/Sales_Agent_Motivation_Dynamics.mdl'
       directory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
       mdl_file = os.path.join(directory, relative_path_to_file)
 
       motivation_model = PysdModel(mdl_file=mdl_file)
       motivation_model.uncertainties = [RealParameter('Startup Subsidy', 0, 3),
                                     RealParameter('Startup Subsidy Length', 0, 10)]
       motivation_model.outcomes =[TimeSeriesOutcome('Still Employed')]
 
       models = [market_model, motivation_model]  # set the model on the ensemble
       perform_experiments(models, 5)
Example #24
 def checkLib(self, lib):
     try:
         inspect.getfile(__import__(lib))
     except ImportError:
         return False
     else:
         return True
Example #25
def home(request):
	context = {}
	context['query'] = False
	context['search_query'] = ''

	if request.method == 'GET':
		return render(request, 'hue/home.html', context)

	if 'search_q' in request.POST:
		search_query = request.POST['search_q']
		context['search_query'] = search_query
		print(search_query)
		context['query'] = True

		# twitter.twitter_query(search_query)
		r = RedditParser()
		r.reddit_query(search_query, 25, 25)

		path = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "datumbox")))
		# cmd_subfolder is expected to be defined at module level
		ifile = cmd_subfolder + '/data.json'
		ofile = path + '/sentiment.csv'

		print(ifile)
		print(ofile)

		sentiment.analyze_sentiment(ifile, ofile, 0.1)

		path = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "semantic-similarity-master")))
		cofile = path + '/reddit_senti.json'
		os.system(path + "/similar" + ' ' + ifile + ' ' + ofile + ' ' + cofile)
		with open(cofile) as data_file:
			data = json.load(data_file)
		context['data'] = json.dumps(data)

	return render(request, 'hue/home.html', context)
Example #26
def get_schematron_xsl_from_schema(schema_path, force_generate=False):
    """ Gets path to schematron from schema_path, creating if necessary """
    sch_path = schema_path.replace('.xsd', '-schematron.sch')
    xsl_path = schema_path.replace('.xsd', '-schematron.xslt')

    # generate sch if it doesn't exist or force_generate
    if (not os.path.isfile(sch_path) or force_generate):
        print(
            'INFO: extracting schematron from schema\n\t...this may take 10+ minutes...'
        )

        # get path to schematron tools
        scripts_path = os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe())))
        schematron_tools_path = os.path.realpath(scripts_path +
                                                 '/lib/iso-schematron-xslt1')

        # parse schema
        schema_tree = etree.parse(schema_path)

        # extract schematron from schema
        sch_tree = apply_xslt(schema_tree,
                              schematron_tools_path + '/ExtractSchFromXSD.xsl')

        # write to file
        with open(sch_path, mode='wt', encoding='utf-8') as f:
            f.write(str(sch_tree))

        # force regeneration of the xsl
        force_generate = True

    # generate xsl from sch if it doesn't exist or force_generate
    if (not os.path.isfile(xsl_path) or force_generate):
        print('INFO: generating schematron xsl from sch')

        # get path to schematron tools
        scripts_path = os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe())))
        schematron_tools_path = os.path.realpath(scripts_path +
                                                 '/lib/iso-schematron-xslt1')

        # parse sch
        sch_tree = etree.parse(sch_path)

        # these preprocessing steps don't seem to be necessary:
        # preprocess schema with iso_dsdl_include.xsl to assemble the schema from parts
        # schematron_tree = apply_xslt(schematron_tree, schematron_tools_path + '/iso_dsdl_include.xsl')
        # preprocess with iso_abstract_expand.xsl to convert abstract patterns to real patterns
        # schematron_tree = apply_xslt(schematron_tree, schematron_tools_path + '/iso_abstract_expand.xsl')

        # convert sch to XSLT script using iso_svrl_for_xslt1.xsl
        xslt_tree = apply_xslt(
            sch_tree, schematron_tools_path + '/iso_svrl_for_xslt1.xsl')

        # write to file
        with open(xsl_path, mode='wt', encoding='utf-8') as f:
            f.write(str(xslt_tree))

    return xsl_path
Example #27
 def SavedMeasurementsFileLocationButtonPressed(self):
     try:
         with open(os.path.split(inspect.getfile(inspect.currentframe()))[0][:-13]+"/Settings/FileLocations/SavedMeasurements.txt", "r") as MeasurementFileLocation:
             self.SavedMeasurementsFileLocation = QFileDialog.getExistingDirectory(self, 'Choose Directory',
                                                                                   MeasurementFileLocation.readline().rstrip())
     except (IOError, OSError):
         self.SavedMeasurementsFileLocation = QFileDialog.getExistingDirectory(self, 'Choose Directory', '/')
Example #28
def WhereAmI():
    """ Runs a file path lookup. Returns a list of length 2. 
    [0] is the present file's full directory name
    [1] is the present file's name with directory."""

    dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    fName = inspect.getfile(inspect.currentframe())
    
    return dir, fName
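A quick usage sketch of the helper above:

script_dir, script_file = WhereAmI()
print("directory:", script_dir)
print("file:", script_file)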
Example #29
def init():
	# realpath() will make your script work, even if you symlink it :)
	cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
	if cmd_folder not in sys.path:
		sys.path.insert(0, cmd_folder)
	# use this if you want to include modules from a subfolder
	cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "../")))
	if cmd_subfolder not in sys.path:
		sys.path.insert(0, cmd_subfolder)
Example #30
def run_experiment_params(param_path='./params.yaml'):
    params = type("Parameters", (), load_yaml(param_path))


    def goalfn(state, goal, radius=0.1):
        # Can be quickly modified to have a new radius per goal element
        position = state[:2]
        return (
            np.linalg.norm(np.array(position)
                           - np.array(goal)) < radius
        )
    # # Load domain
    def encode_trial():
        rewards = list(params.domain_params['goalArray'])
        encode = Encoding(rewards, goalfn)
        return encode.strict_encoding

    params.domain_params['goalfn'] = goalfn
    params.domain_params['encodingFunction'] = encode_trial()
    # params.domain_params['goalArray'] = params.domain_params['goalArray'][::4]
    domain = eval(params.domain)(**params.domain_params)
    # domain = eval(params.domain)()

    # Load representation
    representation = eval(params.representation)(
                domain, 
                **params.representation_params)
    policy = eval(params.policy)(
                representation, 
                **params.policy_params)
    agent = eval(params.agent)(
                policy, 
                representation,
                discount_factor=domain.discount_factor, 
                **params.agent_params)

    opt = {}
    opt["exp_id"] = params.exp_id
    opt["path"] = params.results_path + getTimeStr() + "/"
    opt["max_steps"] = params.max_steps
    # opt["max_eps"] = params.max_eps

    opt["num_policy_checks"] = params.num_policy_checks
    opt["checks_per_policy"] = params.checks_per_policy

    opt["domain"] = domain
    opt["agent"] = agent

    if not os.path.exists(opt["path"]):
        os.makedirs(opt["path"])

    shutil.copy(param_path, opt["path"] + "params.yml")
    shutil.copy(inspect.getfile(eval(params.domain)), opt["path"] + "domain.py")
    shutil.copy(inspect.getfile(inspect.currentframe()), opt["path"] + "exper.py")


    return eval(params.experiment)(**opt)
Example #31
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""

from setuptools import setup, find_packages
import inspect
from pathlib import Path

HOME_DIR = Path(inspect.getfile(inspect.currentframe())).parent


def filter_req_paths(paths, func):
    """Return list of filtered libs."""
    if not isinstance(paths, list):
        raise ValueError("Paths must be a list of paths.")

    libs = set()
    junk = set(['\n'])
    for p in paths:
        with p.open(mode='r') as reqs:
            lines = set([line for line in reqs if func(line)])
            libs.update(lines)

    return list(libs - junk)


def is_pipable(line):
    """Filter for pipable reqs."""
    if "# not_pipable" in line:
        return False
    elif line.startswith('#'):
        return False
    return True
Example #32
def configure():
    # Create parameters file for analysis.
    #
    print("Creating XML file.")
    params = testingParameters()
    params.toXMLFile("psf_fft.xml")

    # Create localization on a grid file.
    #
    print("Creating gridded localization.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call([
        "python", sim_path + "emitters_on_grid.py", "--bin", "grid_list.hdf5",
        "--nx",
        str(settings.nx), "--ny",
        str(settings.ny), "--spacing", "20", "--zrange",
        str(settings.test_z_range), "--zoffset",
        str(settings.test_z_offset)
    ])

    # Create randomly located localizations file.
    #
    print("Creating random localization.")
    subprocess.call([
        "python", sim_path + "emitters_uniform_random.py", "--bin",
        "random_list.hdf5", "--density", "1.0", "--margin",
        str(settings.margin), "--sx",
        str(settings.x_size), "--sy",
        str(settings.y_size), "--zrange",
        str(settings.test_z_range)
    ])

    # Create sparser grid for PSF measurement.
    #
    print("Creating data for PSF measurement.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call([
        "python", sim_path + "emitters_on_grid.py", "--bin",
        "sparse_list.hdf5", "--nx", "6", "--ny", "3", "--spacing", "40"
    ])

    if False:

        # Create PSF using pupil functions directly.
        #
        psf_fft_path = os.path.dirname(
            inspect.getfile(storm_analysis)) + "/psf_fft/"
        print("Creating (theoritical) psf.")
        subprocess.call([
            "python", psf_fft_path + "make_psf_from_pf.py", "--filename",
            "psf.psf", "--size",
            str(settings.psf_size), "--pixel-size",
            str(settings.pixel_size), "--zrange",
            str(settings.psf_z_range), "--zstep",
            str(settings.z_step)
        ])

    else:

        # Create beads.txt file for PSF measurement.
        #
        with saH5Py.SAH5Py("sparse_list.hdf5") as h5:
            locs = h5.getLocalizations()
            numpy.savetxt(
                "beads.txt",
                numpy.transpose(numpy.vstack((locs['x'], locs['y']))))

        # Create drift file, this is used to displace the localizations in the
        # PSF measurement movie.
        #
        dz = numpy.arange(-settings.psf_z_range, settings.psf_z_range + 0.001,
                          0.010)
        drift_data = numpy.zeros((dz.size, 3))
        drift_data[:, 2] = dz
        numpy.savetxt("drift.txt", drift_data)

        # Also create the z-offset file.
        #
        z_offset = numpy.ones((dz.size, 2))
        z_offset[:, 1] = dz
        numpy.savetxt("z_offset.txt", z_offset)

        # Create simulated data for PSF measurement.
        #
        bg_f = lambda s, x, y, i3: background.UniformBackground(
            s, x, y, i3, photons=10)
        cam_f = lambda s, x, y, i3: camera.Ideal(s, x, y, i3, 100.)
        drift_f = lambda s, x, y, i3: drift.DriftFromFile(
            s, x, y, i3, "drift.txt")
        pp_f = lambda s, x, y, i3: photophysics.AlwaysOn(s, x, y, i3, 20000.0)
        psf_f = lambda s, x, y, i3: psf.PupilFunction(s, x, y, i3, 100.0,
                                                      settings.zmn)

        sim = simulate.Simulate(background_factory=bg_f,
                                camera_factory=cam_f,
                                drift_factory=drift_f,
                                photophysics_factory=pp_f,
                                psf_factory=psf_f,
                                x_size=settings.x_size,
                                y_size=settings.y_size)

        sim.simulate("psf.dax", "sparse_list.hdf5", dz.size)

        # Measure the PSF using spliner/measure_psf_beads.py
        #
        print("Measuring PSF.")
        spliner_path = os.path.dirname(
            inspect.getfile(storm_analysis)) + "/spliner/"
        subprocess.call([
            "python", spliner_path + "measure_psf_beads.py", "--movie",
            "psf.dax", "--zoffset", "z_offset.txt", "--aoi_size",
            str(int(settings.psf_size / 2) + 1), "--beads", "beads.txt",
            "--psf", "psf.psf", "--zrange",
            str(settings.psf_z_range), "--zstep",
            str(settings.z_step)
        ])
Example #33
 def source_file(self):
     if self.class_name.startswith('climata.'):
         filename = inspect.getfile(self.io_class)
         return 'climata' + filename.split('climata')[1]
     return None
Example #34
import argparse
import inspect
import json
import logging
import os
import pprint
import random
import re
import shutil
import subprocess
import time
from collections import defaultdict
from typing import List, Dict, Any

from tac.platform.game.stats import GameStats

OUR_DIRECTORY = os.path.dirname(inspect.getfile(
    inspect.currentframe()))  # type: ignore
ROOT_DIR = os.path.join(OUR_DIRECTORY, "..")

logging.basicConfig(level=logging.INFO)


def parse_args() -> argparse.Namespace:
    """Argument parsing."""
    parser = argparse.ArgumentParser(
        "run_iterated_games",
        description=
        "Run the sandbox multiple times and collect scores for every run.",
    )
    parser.add_argument(
        "--nb_games",
        type=int,
Example #35
import os
import copy
import inspect

configurations = os.path.realpath(inspect.getfile(inspect.currentframe())) # this file
configurations = os.path.dirname(configurations) # ggH2018
configurations = os.path.dirname(configurations) # Differential
configurations = os.path.dirname(configurations) # Configurations
configurations = os.path.dirname(configurations)

#aliases = {}

# imported from samples.py:
# samples, signals

mc = [skey for skey in samples if skey not in ('Fake', 'DATA')]

eleWP='mvaFall17V1Iso_WP90'
muWP='cut_Tight_HWWW_tthmva_80'

aliases['LepWPCut'] = {
    'expr': 'LepCut2l__ele_'+eleWP+'__mu_'+muWP,
    'samples': mc + ['DATA']
}

aliases['gstarLow'] = {
    'expr': 'Gen_ZGstar_mass >0 && Gen_ZGstar_mass < 4',
    'samples': 'VgS'
}

aliases['gstarHigh'] = {
Example #36
def getSubuserDir():
  """ Get the toplevel directory for subuser. """
  return os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))))) # BLEGH!
Example #37
from __future__ import division, print_function

# TODO: remove really ugly boilerplate
import logging
import sys
import os
import inspect
cmd_folder = os.path.realpath(
    os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
cmd_folder = os.path.realpath(os.path.join(cmd_folder, ".."))
if cmd_folder not in sys.path:
    sys.path.insert(0, cmd_folder)

import numpy as np
from sklearn.cross_validation import KFold
from pyrfr import regression32 as regression

from smac.configspace import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter, \
    UniformFloatHyperparameter, UniformIntegerHyperparameter

from smac.tae.execute_func import ExecuteTAFunc
from smac.scenario.scenario import Scenario
from smac.smbo.smbo import SMBO
from smac.stats.stats import Stats


def rfr(cfg, seed):
    """
    We optimize our own random forest with SMAC 
    """
Example #38
 def inspect_getfile(obj):
     if obj == expected_arg:
         return return_value
     else:
         return inspect.getfile(obj)
Example #39
class PySvcDocs(win32serviceutil.ServiceFramework):
    #region Default
    _svc_name_ = 'PySvcDocs'
    _svc_display_name_ = 'Python Service - Docs'
    _svc_description_ = 'This service, written in Python, copies files to DocRec'

    _config = configparser.ConfigParser()
    _path = os.path.dirname(
        os.path.abspath(inspect.getfile(inspect.currentframe())))
    _config.read(_path + '/app.ini')
    _srcpi = _config["default"]["srcpi"]
    _srcwc = _config["default"]["srcwc"]
    _tgtpi = _config["default"]["tgtpi"]
    _tgtwc = _config["default"]["tgtwc"]
    _start = _config["default"]["start"]
    _end = _config["default"]["end"]
    _smtp = _config["default"]["smtp"]
    _sender = _config["default"]["sender"]
    _recipient = _config["default"]["recipient"]
    _userid = _config["default"]["userid"]
    _userkey = _config["default"]["userkey"]
    _port = _config["default"]["port"]
    _cell = _config["default"]["cell"]
    _sqlcon = _config["connection"]["sqlcon"]
    _today = datetime.datetime.now()
    _logpath = _config["default"]["logpath"] + 'pyLog' + _today.strftime(
        '%Y%m%d') + '.txt'

    handler = logging.handlers.WatchedFileHandler(
        os.environ.get("LOGFILE", _logpath))
    formatter = logging.Formatter(logging.BASIC_FORMAT)
    formatter = logging.Formatter(logging.BASIC_FORMAT)
    handler.setFormatter(formatter)
    root = logging.getLogger()
    root.setLevel(os.environ.get("LOGLEVEL", "INFO"))
    root.addHandler(handler)

    #endregion

    def __init__(self, args):
        win32serviceutil.ServiceFramework.__init__(self, args)

        # create an event to listen for stop requests on
        self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
        socket.setdefaulttimeout(60)

    # core logic of the service
    def SvcDoRun(self):
        import servicemanager

        rc = None

        # if the stop event hasn't been fired keep looping
        while rc != win32event.WAIT_OBJECT_0:
            #self.CopyPIDocsTree(self._srcpi, self._tgtpi)
            # self.ParallelCopy(self._srcpi, self._tgtpi)
            logging.info(self.isCopied(self._srcpi, self._tgtpi))

            # block for 5 seconds and listen for a stop event
            rc = win32event.WaitForSingleObject(self.hWaitStop, 60 * 60 * 1000)

        #logging.debug(self._svc_name_)

    # called when we're being shut down
    def SvcStop(self):
        # tell the SCM we're shutting down
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        # fire the stop event
        win32event.SetEvent(self.hWaitStop)

    # copy_tree more useful for exe not windows service
    def CopyPIDocsTree(self, src, dst):
        _now = time.strftime("%H:%M")

        try:
            if _now > self._start and _now < self._end:
                copy_tree(src, dst)

                logging.info(
                    self._today.strftime('%Y-%m-%d %H:%M:%S ') +
                    self._svc_name_ + " file(s) copied from " + src + " to " +
                    dst)

            # parallel ??

            # check if a file is copied?
        except Exception:
            logging.exception(
                self._today.strftime('%Y-%m-%d %H:%M:%S ') + self._svc_name_)

    def ParallelCopy(self, src, dst):
        '''
            allfiles = os.listdir(self._srcpi)
            # only list the directories and files, but not subdir, and files within
            allfiles = next(os.walk(self._srcpi))[2] # [] only?
        '''
        try:
            allfiles = self.isCopied(self._srcpi, self._tgtpi)

            logging.info(allfiles)
        except Exception:
            logging.exception(
                self._today.strftime('%Y-%m-%d %H:%M:%S ') + self._svc_name_)

    def getFilePaths(self, directory):
        """
        This function will generate the file names in a directory
        tree by walking the tree either top-down or bottom-up. For each
        directory in the tree rooted at directory top (including top itself),
        it yields a 3-tuple (dirpath, dirnames, filenames).
        """
        file_paths = []  # List which will store all of the full filepaths.

        # Walk the tree.
        try:
            for root, directories, files in os.walk(directory):
                for filename in files:
                    # Join the two strings in order to form the full filepath.
                    filepath = os.path.join(root, filename)
                    file_paths.append(filepath)  # Add it to the list.

                    logging.info(
                        self._today.strftime('%Y-%m-%d %H:%M:%S ') + filepath)

            return file_paths
        except Exception:
            logging.exception(
                self._today.strftime('%Y-%m-%d %H:%M:%S ') + 'getFilePaths')

    def isCopied(self, src, dst):
        try:
            for root, directories, files in os.walk(src):
                for filename in files:
                    # srcpath = os.path.join(root, filename)
                    tgtpath = os.path.join(root.replace(src, dst), filename)
                    if os.path.exists(tgtpath):
                        return True
            logging.info(self._today.strftime('%Y-%m-%d %H:%M:%S ') + tgtpath)
        except Exception:
            logging.exception(
                self._today.strftime('%Y-%m-%d %H:%M:%S ') + 'isCopied')
Example #40
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys
import inspect
import shutil
import future_fstrings

__location__ = os.path.join(os.getcwd(), os.path.dirname(
    inspect.getfile(inspect.currentframe())))

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(__location__, '../src'))

# -- Run sphinx-apidoc ------------------------------------------------------
# This hack is necessary since RTD does not issue `sphinx-apidoc` before running
# `sphinx-build -b html . _build/html`. See Issue:
# https://github.com/rtfd/readthedocs.org/issues/1139
# DON'T FORGET: Check the box "Install your project inside a virtualenv using
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually

try:  # for Sphinx >= 1.7
Example #41
import inspect
import itertools as it
import os
import re
import uuid
import upsg
import cgi
import importlib
from datetime import datetime
import numpy as np
from numpy.lib.recfunctions import merge_arrays
from sqlalchemy.schema import Table, Column
from sqlalchemy import MetaData
from sqlalchemy.sql import func
from sqlalchemy.orm import sessionmaker
import sqlalchemy.types as sqlt

UPSG_PATH = os.path.dirname(inspect.getfile(upsg))
REPO_PATH = os.path.join(UPSG_PATH, '..')
RESOURCES_PATH = os.path.join(UPSG_PATH, 'resources')


def get_resource_path(file_name):
    """given the name of a resource, returns the full path"""
    return os.path.join(RESOURCES_PATH, file_name)


__type_permissiveness_ranks = {'M': 0, 'i': 100, 'f': 200, 'S': 300}


def __type_permissiveness(dtype):
    # TODO handle other types
    return __type_permissiveness_ranks[dtype.kind] + dtype.itemsize
Example #42
    myMidca.append_module("Interpret",
                          InstructionReceiver.InstructionReceiver())
    myMidca.append_module("Eval",
                          EvalPointingFromFeedback.EvalPointingFromFeedback())
    myMidca.append_module("Intend", SimpleIntend.SimpleIntend())
    myMidca.append_module(
        "Plan",
        AsynchPyhopPlanner.AsynchPyhopPlanner(methods.declare_methods,
                                              operators.declare_ops,
                                              monitors.declare_monitors))
    myMidca.append_module("Act", AsynchronousAct.AsynchronousAct())
    return myMidca


thisDir = os.path.dirname(
    os.path.abspath(inspect.getfile(inspect.currentframe())))

MIDCA_ROOT = thisDir + "/../"

myMidca = ros_style_midca()

myMidca.logger.logOutput()
myMidca.mem.enableLogging(myMidca.logger)

# calibration

rosMidca = rosrun.RosMidca(
    myMidca,
    incomingMsgHandlers=[
        #rosrun.CalibrationHandler("calibrate_done", myMidca),
        rosrun.ObjectsLocationHandler("obj_pos", myMidca),
Example #43
#!/usr/bin/env python
# coding: utf-8

import os, inspect, sys
if sys.platform == 'linux':
    import matplotlib as mpl
    mpl.use('Agg')

user_dir = os.path.expanduser('~')
curr_dir = os.path.dirname(
    os.path.abspath(inspect.getfile(
        inspect.currentframe())))  # script directory
main_dir = '/'.join(curr_dir.split('/')[:-2])
RGCPD_func = os.path.join(main_dir, 'RGCPD')
cluster_func = os.path.join(main_dir, 'clustering/')
if cluster_func not in sys.path:
    sys.path.append(main_dir)
    sys.path.append(RGCPD_func)
    sys.path.append(cluster_func)

import numpy as np

import plot_maps
import cartopy.crs as ccrs
from RGCPD import RGCPD
from RGCPD import BivariateMI
# In[5]:

# CPPA_s30_21march= [('sst_CPPAs30', user_dir + '/surfdrive/output_RGCPD/easternUS/ERA5_mx2t_sst_Northern/ff393_ran_strat10_s30/data/ERA5_21-03-20_12hr_lag_0_ff393.h5')]
# RV = user_dir + '/surfdrive/output_RGCPD/easternUS/tf1_n_clusters4_q90_dendo_ff393.nc'
Example #44
def static_file_directory():
    """The static directory to serve"""
    current_file = inspect.getfile(inspect.currentframe())
    current_directory = os.path.dirname(os.path.abspath(current_file))
    static_directory = os.path.join(current_directory, 'static')
    return static_directory
Example #45
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys
import inspect
import shutil

__location__ = os.path.join(
    os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe())))

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(__location__, '../src'))

# -- Run sphinx-apidoc ------------------------------------------------------
# This hack is necessary since RTD does not issue `sphinx-apidoc` before running
# `sphinx-build -b html . _build/html`. See Issue:
# https://github.com/rtfd/readthedocs.org/issues/1139
# DON'T FORGET: Check the box "Install your project inside a virtualenv using
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually

try:  # for Sphinx >= 1.7
Example #46
# price_morning(6)
# # price_solder(7)  # unusable because it was not imported

# from modul import price_solder as prs  # <- shorthand import
# prs(5)


# import traval.thailand
# trip_to = traval.thailand.ThailandPackage()
# trip_to.detail()

#from traval.vietnam import VietnamPackage
#trip_to = VietnamPackage()
#trip_to.detail()

#from traval import vietnam
#trip_to = VietnamPackage()
# trip_to.detail()

from traval import *
trip_to = vietnam.VietnamPackage()
trip_to.detail()

# Check the location of a package or file
import inspect
import random
import math
print(inspect.getfile(random))
print(inspect.getfile(math))
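
A related caveat for lookups like these: inspect.getfile raises TypeError for built-in (C-level) modules, so guard calls that may hit one. A minimal sketch:

import inspect
import sys

try:
    print(inspect.getfile(sys))  # sys is built-in, so this raises TypeError
except TypeError as err:
    print("no source file:", err)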

Example #47
def test_fingerprint_changes_if_module_changes(tmp_path: Path,
                                               domain_path: Text,
                                               stories_path: Text,
                                               monkeypatch: MonkeyPatch):
    rule_policy_path = inspect.getfile(RulePolicy)
    module_name = "custom_rule_policy"
    new_class_name = "CustomRulePolicy"

    custom_module_path = Path(tmp_path, f"{module_name}.py")
    shutil.copy2(rule_policy_path, custom_module_path)

    # Rename class as the class name has to be unique
    source_code = custom_module_path.read_text()
    source_code = source_code.replace("RulePolicy", new_class_name)
    custom_module_path.write_text(source_code)

    config = textwrap.dedent(f"""
    version: "3.0"
    recipe: "default.v1"

    policies:
    - name: RulePolicy
    - name: {module_name}.{new_class_name}
    """)
    monkeypatch.syspath_prepend(tmp_path)

    new_config_path = tmp_path / "config.yml"
    rasa.shared.utils.io.write_yaml(rasa.shared.utils.io.read_yaml(config),
                                    new_config_path)

    # Train to initialize cache
    rasa.train(domain_path,
               str(new_config_path), [stories_path],
               output=str(tmp_path))

    # Make sure that caching works as expected when the code didn't change
    result = rasa.train(
        domain_path,
        str(new_config_path),
        [stories_path],
        output=str(tmp_path),
        dry_run=True,
    )

    assert result.code == 0

    # Make a change to the code so a new training is necessary
    source_code = custom_module_path.read_text()
    source_code = source_code.replace("Dict[Text, Any]", "Dict")
    custom_module_path.write_text(source_code)

    result = rasa.train(
        domain_path,
        str(new_config_path),
        [stories_path],
        output=str(tmp_path),
        dry_run=True,
    )

    assert result.code == rasa.model_training.CODE_NEEDS_TO_BE_RETRAINED
    assert not result.dry_run_results[
        f"train_{module_name}.{new_class_name}1"].is_hit
#     When you see 2B and 3B, that's the idea the paper is playing with.
#
# To decrypt "c", you'll need Step 2a from the paper (the search for the first
# "s" that, when encrypted and multiplied with the ciphertext, produces a
# conformant plaintext), Step 2c, the fast O(log n) search, and Step 3.
#
# Your Step 3 code is probably not going to need to handle multiple ranges.
#
# We recommend you just use the raw math from the paper (check, check, double
# check your translation to code) and not spend too much time trying to grok
# how the math works.
import inspect
import os
import sys

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(inspect.getfile(lambda: 0)))))

from util.misc import invmod, modexp
from util.rsa import make_rsa_keys, rsa
from util.text import byte_length, pad_pkcs15, unpad_pkcs15


def make_oracle(privkey):
    bsize = byte_length(privkey[1])

    def oracle(ctext):
        ptext = rsa(ctext, privkey)
        # Leading zero bytes are lost in the decryption's int-to-bytes step,
        # so left-pad to the modulus size before testing the 00 02 prefix.
        return ptext.rjust(bsize, b"\x00")[:2] == b"\x00\x02"

    return oracle
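
# A sketch (not part of the original file) of Step 2a from the paper: search
# for the smallest s whose product with the ciphertext decrypts to a
# conformant plaintext. Assumes ciphertexts are plain integers, pubkey is an
# (e, n) pair, and B = 2 ** (8 * (byte_length(n) - 2)) -- conventions
# inferred from the snippet, not guaranteed by it.
def find_first_s(oracle, pubkey, c0, B):
    e, n = pubkey
    s = n // (3 * B) + 1  # start just above n / 3B, per the paper
    while not oracle((c0 * modexp(s, e, n)) % n):
        s += 1
    return s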
Example #49
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of fond4ltlfpltlf.
#
# fond4ltlfpltlf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# fond4ltlfpltlf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with fond4ltlfpltlf.  If not, see <https://www.gnu.org/licenses/>.
#
"""This module contains the configurations for the tests."""
import inspect
import os
from pathlib import Path

TEST_ROOT_DIR = os.path.dirname(inspect.getfile(
    inspect.currentframe()))  # type: ignore
ROOT_DIR = str(Path(TEST_ROOT_DIR, "..").resolve())  # type: ignore
Example #50
        clan1 = Set(Set([Couplet(1, 'one')]))
        clan2 = Set(Set([Couplet(2, 'two')]))
        clan3 = Set(Set([Couplet(3, 'three')]))
        answer = Set(
            Set([Couplet(1, 'one'),
                 Couplet(2, 'two'),
                 Couplet(3, 'three')]))
        joined = join(clan1, clan2, clan3)
        # data is unordered so don't use equality, use symmetric difference
        self.assertEqual(0, len(answer.data ^ joined.data))

    def test_join_quaternary(self):
        clan1 = Set(Set([Couplet(1, 'one')]))
        clan2 = Set(Set([Couplet(2, 'two')]))
        clan3 = Set(Set([Couplet(3, 'three')]))
        clan4 = Set(Set([Couplet(4, 'four')]))
        answer = Set(
            Set(Couplet(1, 'one'), Couplet(2, 'two'), Couplet(3, 'three'),
                Couplet(4, 'four')))
        joined = join(clan1, clan2, clan3, clan4)
        # data is unordered so don't use equality, use symmetric difference
        self.assertEqual(0, len(answer.data ^ joined.data))


# --------------------------------------------------------------------------------------------------
if __name__ == '__main__':
    # The print is not really necessary. It helps making sure we always know what we ran in the IDE.
    print('main: {file}'.format(
        file=os.path.basename(inspect.getfile(inspect.currentframe()))))
    unittest.main()
Example #51
    def getTitleListFromTheatreClassique(self):
        """Fetch titles from the Theatre-classique website"""

        self.infoBox.customMessage(
            "Fetching data from Theatre-classique website, please wait")

        # Attempt to connect to Theatre-classique...
        try:
            response = urllib.request.urlopen(self.base_url)
            base_html = response.read().decode('iso-8859-1')
            self.infoBox.customMessage(
                "Done fetching data from Theatre-classique website.")

        # If unable to connect for any reason...
        except Exception:

            # Set Info box and widget to "warning" state.
            self.infoBox.noDataSent(
                warning="Couldn't access theatre-classique website.")

            # Empty title list box.
            self.titleLabels = list()

            # Reset output channel.
            self.send("XML-TEI data", None, self)
            return None

        # Otherwise store HTML content in LTTL Input object.
        base_html_seg = Input(base_html)

        # Remove accents from the data...
        recoded_seg, _ = Segmenter.recode(base_html_seg, remove_accents=True)

        # Extract table containing titles from HTML.
        table_seg = Segmenter.import_xml(
            segmentation=recoded_seg,
            element="table",
            conditions={"id": re.compile(r"^table_AA$")},
        )

        # Extract table lines.
        line_seg = Segmenter.import_xml(
            segmentation=table_seg,
            element="tr",
        )

        # Compile the regex that will be used to parse each line.
        field_regex = re.compile(r"^\s*<td>\s*<a.+?>(.+?)</a>\s*</td>\s*"
                                 r"<td>(.+?)</td>\s*"
                                 r"<td.+?>\s*<a.+?>\s*(\d+?)\s*</a>\s*</td>\s*"
                                 r"<td.+?>\s*(.+?)\s*</td>\s*"
                                 r"<td.+?>\s*<a\s+.+?t=\.{2}/(.+?)'>\s*HTML")

        # Parse each line and store the resulting segmentation in an attribute.
        titleSeg = Segmenter.tokenize(
            segmentation=line_seg,
            regexes=[
                (field_regex, "tokenize", {
                    "author": "&1"
                }),
                (field_regex, "tokenize", {
                    "title": "&2"
                }),
                (field_regex, "tokenize", {
                    "year": "&3"
                }),
                (field_regex, "tokenize", {
                    "genre": "&4"
                }),
                (field_regex, "tokenize", {
                    "url": "&5"
                }),
            ],
            import_annotations=False,
            merge_duplicates=True,
        )

        # Try to save list in this module's directory for future reference...
        path = os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe())))
        try:
            with open(os.path.join(path, "cached_title_list"), "wb") as file:
                pickle.dump(titleSeg, file, -1)
        except IOError:
            pass

        # Remove warning (if any)...
        self.error(0)
        self.warning(0)

        return titleSeg
Example #52
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import time
import sys
import threading
import os

if __name__ == '__main__':
    import inspect
    os.chdir(
        os.path.dirname(
            os.path.realpath(inspect.getfile(inspect.currentframe()))))
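    # Presumably so relative paths (e.g. config files) resolve next to this
    # script rather than the caller's working directory.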

import server_pool
import db_transfer
from shadowsocks import shell
from configloader import load_config, get_config


class MainThread(threading.Thread):
    def __init__(self, obj):
        super(MainThread, self).__init__()
        self.daemon = True
        self.obj = obj

    def run(self):
        self.obj.thread_db(self.obj)
Example #53
def script_abspath(frame=inspect.currentframe()):
    # NB: the default argument is evaluated once, at definition time, so a
    # bare call returns the directory of the module defining this function.
    p = os.path.split(inspect.getfile(frame))[0]
    absdir = os.path.realpath(os.path.abspath(p))
    return absdir
Example #54
import os
import argparse
import inspect
import platform

import testLogs


####-------------------
#### Values based on PC
####-------------------

if platform.system() == 'Windows':
    pathDivider = '\\'
else:
    pathDivider = '/'  # ensure pathDivider is defined on non-Windows systems

baseDirectory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
outputDirectory = baseDirectory + pathDivider + "output" + pathDivider
baseGraphDirectory = outputDirectory + "graphs" + pathDivider
baseXlDirectory = outputDirectory + "xl" + pathDivider
iperfDirectory = None
iperfLogDirectory = None


####---------------------
#### NAMING CONFIGURATION
####---------------------


generateReport = 0
modifyReport = 0
Example #55
__date__ = 'July 2013'
__copyright__ = '(C) 2013, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive

__revision__ = '6bb3fa61c5c8938180f5de5430cd8d2a30b24282'

import os
import sys
import inspect

from qgis.core import QgsApplication
from processing.core.Processing import Processing
from exampleprovider.ExampleAlgorithmProvider import ExampleAlgorithmProvider

cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0]

if cmd_folder not in sys.path:
    sys.path.insert(0, cmd_folder)


class ProcessingExampleProviderPlugin(object):

    def __init__(self):
        self.provider = ExampleAlgorithmProvider()

    def initGui(self):
        QgsApplication.processingRegistry().addProvider(self.provider)

    def unload(self):
        QgsApplication.processingRegistry().removeProvider(self.provider)
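
# For QGIS to load this plugin, the package's __init__.py must expose the
# standard classFactory entry point (assumed, standard QGIS plugin layout;
# not shown in this snippet):
#
#     def classFactory(iface):
#         return ProcessingExampleProviderPlugin()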
Example #56
#
#  You should have received a copy of the GNU General Public License
#  along with fuddly. If not, see <http://www.gnu.org/licenses/>
#
################################################################################

import os
import framework
import sys
import inspect
from enum import Enum
from libs.utils import ensure_dir, ensure_file

fuddly_version = '0.24.1'

framework_folder = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# framework_folder = os.path.dirname(framework.__file__)
framework_folder  = '.' if framework_folder == '' else framework_folder

app_folder = os.path.dirname(framework_folder)
app_folder = '.' if app_folder == '' else app_folder
projects_folder = app_folder + os.sep + 'projects' + os.sep
data_models_folder = app_folder + os.sep + 'data_models' + os.sep

fuddly_data_folder = os.path.expanduser('~' + os.sep + 'fuddly_data' + os.sep)
new_fuddly_data_folder = False  # ensure the flag is defined in both cases
if not os.path.exists(fuddly_data_folder):
    new_fuddly_data_folder = True
ensure_dir(fuddly_data_folder)

exported_data_folder = fuddly_data_folder + 'exported_data' + os.sep
ensure_dir(exported_data_folder)
Example #57
    def test_find_common_area(self):

        cmd_folder = os.path.realpath(
            os.path.abspath(os.path.split(getfile(currentframe()))[0]))

        testdata_folder = os.path.join(cmd_folder, 'test_data')

        input_folder = os.path.join(testdata_folder, 'input')
        output_folder = os.path.join(testdata_folder, 'output')
        reference_folder = os.path.join(testdata_folder, 'reference')

        veg_r = os.path.join(output_folder, "veg_r")
        slope_r = os.path.join(output_folder, "slope_r")
        aspect_r = os.path.join(output_folder, "aspect_r")

        if not arcpy.Exists(slope_r):
            arcpy.CopyRaster_management(os.path.join(input_folder, "slope_r"),
                                        slope_r)

        if not arcpy.Exists(aspect_r):
            arcpy.CopyRaster_management(os.path.join(input_folder, "aspect_r"),
                                        aspect_r)

        if not arcpy.Exists(veg_r):
            arcpy.CopyRaster_management(os.path.join(input_folder, "veg_r"),
                                        veg_r)

        parent = os.path.abspath(os.path.join(cmd_folder, os.pardir))

        if parent not in sys.path:
            sys.path.insert(0, parent)

        veg_expect = os.path.join(reference_folder, "expect_v_c")
        slope_expect = os.path.join(reference_folder, "expect_s_c")
        aspect_expect = os.path.join(reference_folder, "expect_a_c")

        from bal import find_common_area

        veg_c, slope_c, aspect_c = find_common_area(veg_r, slope_r, aspect_r)

        compare_result_veg = os.path.join(output_folder, "compare_veg_c.txt")
        compare_result_slope = os.path.join(output_folder,
                                            "compare_slope_c.txt")
        compare_result_aspect = os.path.join(output_folder,
                                             "compare_aspect_c.txt")

        arcpy.RasterCompare_management(veg_c, veg_expect, '', 'Pyramids Exist',
                                       '', compare_result_veg)
        if '"true"' not in open(compare_result_veg).read():
            self.assertEqual(1, 1, 'No errors')
        else:
            self.assertEqual(1, 0, 'Has errors')

        arcpy.RasterCompare_management(slope_c, slope_expect, '', '', '',
                                       compare_result_slope)
        if '"true"' not in open(compare_result_slope).read():
            self.assertEqual(1, 1, 'No errors')
        else:
            self.assertEqual(1, 0, 'Has errors')

        arcpy.RasterCompare_management(aspect_c, aspect_expect, '', '', '',
                                       compare_result_aspect)
        if '"true"' not in open(compare_result_aspect).read():
            self.assertEqual(1, 1, 'No errors')
        else:
            self.assertEqual(1, 0, 'Has errors')

        if arcpy.Exists(veg_c):
            arcpy.Delete_management(veg_c)
        if arcpy.Exists(slope_c):
            arcpy.Delete_management(slope_c)
        if arcpy.Exists(aspect_c):
            arcpy.Delete_management(aspect_c)

        os.remove(compare_result_veg)
        os.remove(compare_result_slope)
        os.remove(compare_result_aspect)
        os.remove(os.path.join(output_folder, "compare_veg_c.xml"))
        os.remove(os.path.join(output_folder, "compare_slope_c.xml"))
        os.remove(os.path.join(output_folder, "compare_aspect_c.xml"))
Example #58
    @classmethod  # assumed: the `cls` parameter implies a classmethod
    def relpath(cls, path=None):
        # Directory of the file that defines cls, optionally joined with path.
        result = os.path.dirname(inspect.getfile(cls))
        if path is not None:
            result = os.path.join(result, path)
        return result
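
    # Hypothetical usage (SomeClass stands for the defining class, which the
    # snippet does not show): locate a data file shipped next to the module
    # defining the class, independent of the current working directory:
    #   cfg_path = SomeClass.relpath('data/config.yml')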
Example #59
'''
  @file adaboost.py
  @author Marcus Edel

  AdaBoost classifier with scikit.
'''

import os, sys, inspect

# Import the util path, this method even works if the path contains symlinks to
# modules.
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(
  os.path.split(inspect.getfile(inspect.currentframe()))[0], "../../util")))
if cmd_subfolder not in sys.path:
  sys.path.insert(0, cmd_subfolder)

from util import *
from sklearn.ensemble import AdaBoostClassifier

'''
This class implements the AdaBoost classifier benchmark.
'''
class SCIKIT_ADABOOST(object):
  def __init__(self, method_param, run_param):
    self.info = "SCIKIT_ADABOOST ("  + str(method_param) +  ")"

    # Assemble run model parameter.
    self.data = load_dataset(method_param["datasets"], ["csv"])
    self.data_split = split_dataset(self.data[0])

    self.build_opts = {}
Example #60
(c) 2017 The Cronin Group, University of Glasgow

This provides a python class for the Julabo CF41 recirculation chiller. Command implementation is based on the manual
version 1.951.4871-V3 downloaded from the Julabo homepage. Nota bene: the chiller needs a null modem cable!

For style guide used see http://xkcd.com/1513/
"""

# system imports
import serial
import os
import sys
import inspect
from time import sleep

HERE = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.append(os.path.join(HERE, '..'))

# additional module imports
from SerialDevice.serial_labware import SerialDevice, command


class JULABOCF41(SerialDevice):
    """
    This provides a python class for the JULABO CF41 chiller
    """
    def __init__(self, port=None, device_name=None, connect_on_instantiation=False, soft_fail_for_testing=False):
        """
        Initializer of the JULABOCF41 class

        Args: