Example #1
def get_database_settings():
    """
    Example contents of database.ini

    For MySQL.

    [Database]
    drivername: mysql+pymysql
    host: localhost
    port: 3306
    username: root
    password: root
    database: csmdb

    For PostgreSQL

    [Database]
    drivername: postgresql+psycopg2
    host: localhost
    port: 5432
    username: root
    password: root
    database: csmdb

    The username and password in database.ini will be encrypted
    if they are not already in encrypted format.
    """
    global ENABLE_DEBUG
    
    # Python 2.7.6 uses ConfigParser, Python 3.3 uses configparser
    module = import_module('ConfigParser')
    if module is None:
        module = import_module('configparser')
        
    config = module.RawConfigParser()

    # The database.ini should be in the csm_data directory which should be at the same level as the csm directory.
    config.read(os.path.join(os.getcwd(), 'database.ini'))
    # config.read(os.path.join(get_csm_data_directory(), 'database.ini'))

    db_dict = dict(config.items('Database'))
    username = decode(ENCRYPT, db_dict['username'])
    password = decode(ENCRYPT, db_dict['password'])

    # If the username/password have not been encrypted, encrypt them
    if username.find(PREFIX) == -1 and password.find(PREFIX) == -1:
        config.set('Database', 'username', encode(ENCRYPT, PREFIX + db_dict['username']))
        config.set('Database', 'password', encode(ENCRYPT, PREFIX + db_dict['password']))
        
        with open('database.ini', 'w') as config_file:
            config.write(config_file)
      
    else:
        db_dict['username'] = username.replace(PREFIX, '')
        db_dict['password'] = password.replace(PREFIX, '')

    db_dict['query'] = {'charset': 'latin1'}

    ENABLE_DEBUG = config.getboolean('Debug', 'debug')
    return db_dict
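
The `if module is None` check above implies that `import_module` here is a project helper rather than the standard `importlib.import_module`, which raises ImportError instead of returning None. A minimal sketch of such a wrapper, assuming only that None-on-failure behaviour, might look like this:

import importlib


def import_module(name):
    # Hypothetical helper, inferred from the None checks in the examples above:
    # return the imported module, or None if it is not available.
    try:
        return importlib.import_module(name)
    except ImportError:
        return None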
Example #2
def get_database_settings():
    global ENABLE_DEBUG

    # Python 2.7.6 uses ConfigParser, Python 3.3 uses configparser
    module = import_module('ConfigParser')
    if module is None:
        module = import_module('configparser')

    config = module.RawConfigParser()
    config.read('database.ini')

    db_dict = dict(config.items('Database'))
    username = decode(ENCRYPT, db_dict['username'])
    password = decode(ENCRYPT, db_dict['password'])

    # If the username/password have not been encrypted, encrypt them
    if username.find(PREFIX) == -1 and password.find(PREFIX) == -1:
        config.set('Database', 'username',
                   encode(ENCRYPT, PREFIX + db_dict['username']))
        config.set('Database', 'password',
                   encode(ENCRYPT, PREFIX + db_dict['password']))

        with open('database.ini', 'w') as config_file:
            config.write(config_file)

    else:
        db_dict['username'] = username.replace(PREFIX, '')
        db_dict['password'] = password.replace(PREFIX, '')

    ENABLE_DEBUG = config.getboolean('Debug', 'debug')
    return db_dict
Example #3
 def do_use(self, module_path, *arg):
     module_path = utils.pythonize_path(module_path)
     module_path = '.'.join(('modules', module_path, '__interpreter__'))
     try:
         utils.import_module(module_path, 'Interpreter')()
     except ModuleImportException as err:
         utils.print_failed(err)
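
This example passes a dotted module path together with a class name ('Interpreter') and instantiates whatever comes back, with failures reported as ModuleImportException. A hedged sketch of a utils.import_module with that contract (the exception class and error handling are assumptions, not the project's actual code):

import importlib


class ModuleImportException(Exception):
    # Assumed exception type, matching the except clause above.
    pass


def import_module(module_path, class_name):
    # Hypothetical sketch: import the dotted path and return the named class,
    # wrapping any failure in ModuleImportException.
    try:
        module = importlib.import_module(module_path)
        return getattr(module, class_name)
    except (ImportError, AttributeError) as err:
        raise ModuleImportException(err)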
Example #4
def get_database_settings():
    global ENABLE_DEBUG
    
    # Python 2.7.6 uses ConfigParser, Python 3.3 uses configparser
    module = import_module('ConfigParser')
    if module is None:
        module = import_module('configparser')
        
    config = module.RawConfigParser()  
    config.read(os.getcwd() + os.path.sep + 'database.ini')

    db_dict = dict(config.items('Database'))
    username = decode(ENCRYPT, db_dict['username'])
    password = decode(ENCRYPT, db_dict['password'])

    # If the username/password have not been encrypted, encrypt them
    if username.find(PREFIX) == -1 and password.find(PREFIX) == -1:
        config.set('Database', 'username', encode(ENCRYPT, PREFIX + db_dict['username']))
        config.set('Database', 'password', encode(ENCRYPT, PREFIX + db_dict['password']))
        
        with open('database.ini', 'w') as config_file:
            config.write(config_file)
      
    else:
        db_dict['username'] = username.replace(PREFIX, '')
        db_dict['password'] = password.replace(PREFIX, '')
        
    ENABLE_DEBUG = config.getboolean('Debug', 'debug')
    return db_dict
Example #5
def _load_conf(*args, **kwargs):
        
        """
        Ensures the configuration module gets
        imported when importing model_i18n.
        """
        # This is an idea from haystack app. We need to run the code that
        # follows only once, no matter how many times the main module is imported.
        # We'll look through the stack to see if we appear anywhere and simply
        # return if we do, allowing the original call to finish.
        stack = inspect.stack()
        for stack_info in stack[1:]:
            if '_load_conf' in stack_info[3]:
                return

        if not hasattr(settings, 'MODEL_I18N_CONF'):
            raise ImproperlyConfigured('You must define the MODEL_I18N_CONF \
            setting, it should be a python module path string, \
            for example "myproject.i18n_conf"')
        if not hasattr(settings, 'MODEL_I18N_MASTER_LANGUAGE'):
            raise ImproperlyConfigured('You must define the \
            MODEL_I18N_MASTER_LANGUAGE setting.')

        if settings.MODEL_I18N_CONF and settings.USE_I18N:
            # Import config module
            import_module(settings.MODEL_I18N_CONF)
Example #6
def validate_installed_app(app_name, python_path_dir, known_apps,
                           app_dir_path, django_settings_module, results):
    try:
        import_module(app_name)
        logger.debug("Application '%s' Ok." % app_name)
    except:
        (exc_typ, exc_val, exc_tb) = sys.exc_info()
        if exc_typ==ImportError and str(exc_val)==("No module named %s" % app_name):
            logger.debug("Unable to find module named %s" % app_name)
            if app_name in PACKAGES_FOR_KNOWN_APPS:
                pkgs = PACKAGES_FOR_KNOWN_APPS[app_name]
                if len(pkgs)>1:
                    results.error("Django Application '%s' not found. Perhaps you wanted to include one of the following packages in your requirements.txt file: %s" %
                                    (app_name, pkgs))
                else:
                    results.error("Django Application '%s' not found. Perhaps you wanted to include the '%s' package in your requirements.txt file" %
                                  (app_name, pkgs[0]))
            else:
                pp_msg = "Your project was installed at %s. PYTHONPATH was set to %s." % (app_dir_path, get_python_path(python_path_dir, django_settings_module))
                results.error("Django Application '%s' not found. If it is from an external package, add the associated package to the requirements.txt file. If the application is supposed to be a python module included with your project tree, check the value you provided for the settings module. %s" %
                                  (app_name, pp_msg))
        else: # another exception occurred in import
            logger.exception("Exception when trying to import app %s" % app_name)
            results.warning("Django Application '%s' was found, but an error occurred when trying to import the application: %s(%s)" %
                          (app_name, exc_typ.__name__, str(exc_val)))
Example #7
def test_import_module(capfd):
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        import_module("plesk_backups")
    out, _ = capfd.readouterr()
    # PY2 vs PY3: No module named 'MySQLdb' vs No module named MySQLdb
    out = out.replace("'", "")
    assert out == "<<<plesk_backups>>>\nNo module named MySQLdb. Please install missing module via pip install <module>."
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == 0
Example #8
def get_database_settings():
    """
    Example contents of database.ini

    For MySQL.

    [Database]
    drivername: mysql+pymysql
    host: localhost
    port: 3306
    username: root
    password: root
    database: csmdb

    For PostgreSQL

    [Database]
    drivername: postgresql+psycopg2
    host: localhost
    port: 5432
    username: root
    password: root
    database: csmdb

    The username and password in database.ini will be encrypted
    if they are not already in encrypted format.
    """
    global ENABLE_DEBUG

    # Python 2.7.6 uses ConfigParser, Python 3.3 uses configparser
    module = import_module('ConfigParser')
    if module is None:
        module = import_module('configparser')

    config = module.RawConfigParser()
    config.read(os.getcwd() + os.path.sep + 'database.ini')

    db_dict = dict(config.items('Database'))
    username = decode(ENCRYPT, db_dict['username'])
    password = decode(ENCRYPT, db_dict['password'])

    # If the username/password have not been encrypted, encrypt them
    if username.find(PREFIX) == -1 and password.find(PREFIX) == -1:
        config.set('Database', 'username',
                   encode(ENCRYPT, PREFIX + db_dict['username']))
        config.set('Database', 'password',
                   encode(ENCRYPT, PREFIX + db_dict['password']))

        with open('database.ini', 'w') as config_file:
            config.write(config_file)

    else:
        db_dict['username'] = username.replace(PREFIX, '')
        db_dict['password'] = password.replace(PREFIX, '')

    ENABLE_DEBUG = config.getboolean('Debug', 'debug')
    return db_dict
Example #9
    def mp_progress(func, iterable, processes=10, scale=10):
        gensim = utils.import_module("gensim")
        mp = utils.import_module("multiprocessing.pool")
        chunks = list(gensim.utils.chunkize(iterable, processes * scale))
        pool = mp.Pool(processes)
        ret = []

        for chunk in utils.tqdm(chunks):
            ret.extend(pool.map(func, chunk))

        return ret
Example #10
    def execute(self, ctx):
        global AUT_PATH
        
        csm_au_module = import_module('au.csm_au', AUT_PATH)
        if csm_au_module is not None:
            status = csm_au_module.execute(ctx)
            if status == 0 :
                self.get_software(ctx.host,
                    install_inactive_cli=ctx.inactive_cli, 
                    install_active_cli=ctx.active_cli, 
                    install_committed_cli=ctx.committed_cli)
                ctx.success = True
        else:
            try:
                conn = condor.make_connection_from_context(ctx)
                conn.connect()
                ctx.inactive_cli = conn.send('sh install inactive summary')
                ctx.active_cli = conn.send('sh install active summary')
                ctx.committed_cli = conn.send('sh install committed summary')       
                conn.disconnect()
 
                self.get_software(ctx.host,
                    install_inactive_cli=ctx.inactive_cli, 
                    install_active_cli=ctx.active_cli, 
                    install_committed_cli=ctx.committed_cli)
                ctx.success = True
            except:
                pass
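
In this example `import_module('au.csm_au', AUT_PATH)` takes a second path argument, and the caller falls back to a manual connection when the result is None. One plausible way such a helper could be written, offered only as a sketch under those assumptions:

import importlib
import sys


def import_module(name, path=None):
    # Hypothetical sketch: make `path` importable if given, then attempt the
    # import and return None on failure, matching the None checks above.
    if path is not None and path not in sys.path:
        sys.path.insert(0, path)
    try:
        return importlib.import_module(name)
    except ImportError:
        return None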
Example #11
 def regenerate(self, **kwargs):
     # Instantiating the specific search method is the key step here
     '''
     Arguments:
         search_method_name
         For GeneralSearch:
             reverseindex: T/F
             synonym: T/F
             whether to use the inverted index; whether to use the synonym table
     '''
     super().regenerate(**kwargs)
     search_method_name = kwargs.get('search_method_name', None)
     if search_method_name == 'GeneralSearch':
         # Check whether the arguments match the GeneralSearch that MatchBased has already created;
         # if they match, do not replace self.searchmethod.
         # This avoids re-instantiating the search method on every chatbot.get_response() call
         # and avoids reloading the synonym table.
         reverseindex = kwargs.get('reverseindex', False)
         synonym = kwargs.get('synonym', False)
         if reverseindex == self.searchmethod.reverseindex and synonym == self.searchmethod.synonym:
             pass
         else:
             from chinesechatterbot.search import GeneralSearch
             self.searchmethod = GeneralSearch(self,
                                               reverseindex=reverseindex,
                                               synonym=synonym)
     elif search_method_name:
         dotted_path = "chinesechatterbot.search." + search_method_name
         from utils import import_module
         Class = import_module(dotted_path)
         self.searchmethod = Class(self, **kwargs)
Example #12
 def init_optimizer(self):
     """
     Init optimizer for neural network model defined in `self.exp_dict["network"]`.
     If `self.exp_dict["optimizer"]` is not defined or `None`, no optimizer
     will be initialized for this model, i.e. the model can only work in
     evaluation mode. 
     """
     self.optim_defs = self.exp_dict.get("optimizer", None)
     if self.optim_defs is None:
         self.optimizer = None
         self.logger.warn("The model with no optimizer can not be updated.")
         return
     #
     self.logger.debug(
         'Initialize optimizer: %s with params: %s' %
         (self.optim_defs["module"], self.optim_defs["param"]))
     optimizer = import_x(self.optim_defs["module"],
                          import_module(self.optim_defs["package"]))
     if optimizer is None:
         raise ValueError("Not supported or recognized optimizer: " +
                          self.optim_defs["package"] + "." +
                          self.optim_defs["module"])
     parameters = self.get_trainable_parameters()
     self.optimizer = optimizer(
         parameters) if self.optim_defs["param"] is None else optimizer(
             parameters, **(self.optim_defs["param"]))
     self.init_optimizer_callback()
     # init scheduler
     self.init_lr_scheduler()
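
Here `import_x` receives an attribute name plus an already imported package, and its result is checked against None before being called, which suggests a plain attribute lookup. A minimal sketch under that assumption (the real helper may do more):

def import_x(name, module):
    # Hypothetical sketch: fetch `name` from `module`, returning None when the
    # attribute is missing so callers can report "not supported or recognized".
    return getattr(module, name, None)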
Example #13
    def get_file_and_directory_dict(self, sub_directory=None):
        result_list = []
        is_reachable = True
        
        try:
            sftp_module = import_module('pysftp')
            if sftp_module is not None:
                with sftp_module.Connection(self.server.server_url, username=self.server.username, password=self.server.password) as sftp:
                    remote_directory = concatenate_dirs(self.server.server_directory, sub_directory)
                    if len(remote_directory) > 0:
                        sftp.chdir(remote_directory)
    
                    file_info_list = sftp.listdir()
                    for file_info in file_info_list:
                        file = {}
                        lstatout = str(sftp.lstat(file_info)).split()[0]
                        if 'd' in lstatout:
                            if sub_directory is None or len(sub_directory) == 0:
                                file['filename'] = file_info
                            else:
                                file['filename'] = sub_directory + '/' + file_info
                            file['is_directory'] = True 
                        else:
                            file['filename'] = file_info
                            file['is_directory'] = False

                        result_list.append(file)
        except:
            logger.exception('SFTPServer hit exception')
            is_reachable = False
            
        return result_list, is_reachable
Example #14
    def execute(self, ctx):
        global AUT_PATH

        csm_au_module = import_module("au.csm_au", AUT_PATH)
        if csm_au_module is not None:
            status = csm_au_module.execute(ctx)
            if status == 0:
                self.get_software(
                    ctx.host,
                    install_inactive_cli=ctx.inactive_cli,
                    install_active_cli=ctx.active_cli,
                    install_committed_cli=ctx.committed_cli,
                )
                ctx.success = True
        else:
            try:
                conn = condor.make_connection_from_context(ctx)
                conn.connect()
                ctx.inactive_cli = conn.send("sh install inactive summary")
                ctx.active_cli = conn.send("sh install active summary")
                ctx.committed_cli = conn.send("sh install committed summary")
                conn.disconnect()

                self.get_software(
                    ctx.host,
                    install_inactive_cli=ctx.inactive_cli,
                    install_active_cli=ctx.active_cli,
                    install_committed_cli=ctx.committed_cli,
                )
                ctx.success = True
            except:
                pass
Example #15
 def execute(self, item):
     logging.debug(item)
     image_reader = utils.import_module(\
             item["parameters"]["image_reader"]).read_image
     item["image"] = image_reader(item["source_image_filename"],\
             item["parameters"])
     return item
Example #16
    def upload_file(self, source_file_path, dest_filename, sub_directory=None):
        sftp_module = import_module('pysftp')

        with sftp_module.Connection(self.server.server_url, username=self.server.username, password=self.server.password) as sftp:
            remote_directory = concatenate_dirs(self.server.server_directory, sub_directory)
            if len(remote_directory) > 0:
                sftp.chdir(remote_directory)
            sftp.put(source_file_path) 
Example #17
 def do_load(self, scanner_name):
     module_path = utils.pythonize_path(scanner_name)
     module_path = '.'.join(('modules.scanner', module_path))
     try:
         self.module = utils.import_module(module_path, 'Scanner')
     except ModuleImportException as err:
         utils.print_failed(err)
     else:
         self.change_prompt(self.module)
Example #18
 def load_app(self, name):
     mod = utils.import_module(name)
     clses = inspect.getmembers(mod,
                                lambda cls: (inspect.isclass(cls) and
                                             issubclass(cls, RyuApp) and
                                             mod.__name__ ==
                                             cls.__module__))
     if clses:
         return clses[0][1]
     return None
Example #19
 def command_use(self, module_path, *args, **kwargs):
     if not module_path:
         utils.printf("Use something??", 'warn')
     else:
         self.module_path = module_path
         module_path = utils.python_path(module_path)
         module_path = '.'.join(('modules', module_path))
         try:
             self.current_module = utils.import_module(module_path)()
         except:
             utils.printf("Error while loading module", 'bad')
Example #20
 def __call__(self, argdict):
     if isinstance(argdict, str):
         name, kwargs = argdict, {}
     else:
         name, kwargs = argdict.get("type"), argdict.get("vargs", {})
     manager = utils.import_module("model.manager")
     self.cls = manager.get(name, self.pkg)
     assert self.cls is not None, \
         f"module type not found in {self.pkg.__name__}: {name}"
     self.kwargs = self.cls.process_argdict(kwargs)
     return self.initialize
Example #21
    def upload_file(self, source_file_path, dest_filename, sub_directory=None):
        sftp_module = import_module('pysftp')

        with sftp_module.Connection(self.server.server_url,
                                    username=self.server.username,
                                    password=self.server.password) as sftp:
            remote_directory = concatenate_dirs(self.server.server_directory,
                                                sub_directory)
            if len(remote_directory) > 0:
                sftp.chdir(remote_directory)
            sftp.put(source_file_path)
Example #22
 def check_reachability(self):
     try:
         sftp_module = import_module('pysftp')
         if sftp_module is not None:
             server = self.server
             with sftp_module.Connection(server.server_url, username=server.username, password=server.password) as sftp:
                 if server.server_directory is not None and len(server.server_directory) > 0:
                     sftp.chdir(server.server_directory)      
             return True
         else:
             return False
     except:
         return False
Example #23
def validate_fixture_file(fixture_name, installed_apps, fixture_dirs,
                          python_path_dir, settings_file_directory,
                          known_apps, results):
    """Use the same logic as django-admin.py loaddata to search for a
    fixture file.
    """
    if fixture_name.endswith((".json", ".xml")):
        filenames = [fixture_name]
    else:
        filenames = [fixture_name+".json", fixture_name+".xml"]
    # first try an absolute path
    if os.path.isabs(fixture_name):
        for filename in filenames:
            if os.path.exists(filename):
                logger.debug("Found fixture file %s" % filename)
                return
        results.error("Unable to find fixture %s" % fixture_name)
        return
    # next, look in the fixtures directory under all the installed apps
    for app_module in installed_apps:
        try:
            m = import_module(app_module)
            fixture_dir = os.path.join(os.path.dirname(m.__file__),
                                       "fixtures")
        except: # as a fallback, look under our python path
            fixture_dir = os.path.join(app_module_name_to_dir(python_path_dir,
                                                              app_module, False),
                                       "fixtures")
        if os.path.isdir(fixture_dir):
            for filename in filenames:
                fixture_path = os.path.join(fixture_dir, filename)
                if os.path.exists(fixture_path):
                    logger.debug("Found fixture %s at %s" % (fixture_name, fixture_path))
                    return
    # next, look under all the specified fixture directories
    for dirname in fixture_dirs:
        for filename in filenames:
            fixture_path = os.path.join(dirname, filename)
            if os.path.exists(fixture_path):
                logger.debug("Found fixture %s at %s" % (fixture_name, fixture_path))
                return
    # finally, look relative to settings_file_directory
    for filename in filenames:
        fixture_path = os.path.join(settings_file_directory, filename)
        if os.path.exists(fixture_path):
            logger.debug("Found fixture %s at %s" % (fixture_name, fixture_path))
            return
    # if we got here, we didn't find the fixture file anywhere
    results.error("Unable to find fixture %s" % fixture_name)
Example #24
   def __init__(self, nworkers, name="Pool"):
       """
       \param nworkers (integer) number of worker threads to start
       \param name (string) prefix for the worker threads' name
       """ 
       # Python 2.7.6 uses Queue, Python 3.3 uses queue
       queue_module = import_module('Queue')
       if queue_module is None:
           queue_module = import_module('queue')
           
       self._workq   = queue_module.Queue()  
 
       self._closed  = False
       self._workers = []
       for idx in range(nworkers):
           thr = PoolWorker(self._workq, name="Worker-%s-%d" % (name, idx))
           try:
               thr.start()
           except:
               # If one thread has a problem, undo everything
               self.terminate()
               raise
           else:
               self._workers.append(thr)
Example #25
    def __init__(self, nworkers, name="Pool"):
        """
        \param nworkers (integer) number of worker threads to start
        \param name (string) prefix for the worker threads' name
        """
        # Python 2.7.6 uses Queue, Python 3.3 uses queue
        queue_module = import_module('Queue')
        if queue_module is None:
            queue_module = import_module('queue')

        self._workq = queue_module.Queue()

        self._closed = False
        self._workers = []
        for idx in range(nworkers):
            thr = PoolWorker(self._workq, name="Worker-%s-%d" % (name, idx))
            try:
                thr.start()
            except:
                # If one thread has a problem, undo everything
                self.terminate()
                raise
            else:
                self._workers.append(thr)
Example #26
def register_dashapp(app):
    meta_viewport = {
        "name": "viewport",
        "content": "width=device-width, initial-scale=1, shrink-to-fit=no"
    }

    basedir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    apps_dir = basedir + '/dash_apps'

    for folder in os.scandir(apps_dir):
        dash_config_path = folder.path + "/config.json"
        if folder.is_dir() and os.path.isfile(dash_config_path):
            try:
                with open(dash_config_path) as f:
                    dash_config = json.loads(f.read())
                    assets_folder = folder.path + '/resources/'
                    # register each dash app and endpoint.
                    dashapp = dash.Dash(
                        __name__,
                        server=app,
                        url_base_pathname=f"/{dash_config['route']}/",
                        assets_folder=assets_folder,
                        meta_tags=[meta_viewport])

                    layout = import_module('layout',
                                           folder.path + "/layout.py")
                    callbacks = import_module('callbacks',
                                              folder.path + "/callbacks.py")

                    with app.app_context():
                        dashapp.title = dash_config['title']
                        dashapp.layout = layout.layout
                        callbacks.register_callbacks(dashapp)
                    # _protect_dashviews(dashapp)
            except Exception as e:
                pass
Example #27
    def update_graph(selected_dropdown_value):
        device_types = ['Client A', 'Client B', 'Client C', 'Client D']
        yy = [27.5, 26.4, 32.3, 13.8]

        data = copy.deepcopy(graph_data)
        data['labels'] = device_types
        data['values'] = yy

        figure = dict(data=[data], layout=layout)

        models = import_module('models', app_base_dir + "/models.py")
        session = Session()
        plotters = session.query(models.Plotter).all()
        print(plotters)
        return figure
Example #28
    def execute(self, ctx):
        global AUT_PATH

        csm_au_module = import_module("au.csm_au", AUT_PATH)
        if csm_au_module is not None:
            status = csm_au_module.execute(ctx)
            if status == 0:
                ctx.success = True
        else:
            try:
                time.sleep(10)
                ctx.post_status("Copying files from TFTP server to host...")
                time.sleep(10)
                ctx.success = True
            except:
                pass
Example #29
 def execute(self, ctx):
     global AUT_PATH
     
     csm_au_module = import_module('au.csm_au', AUT_PATH)
     if csm_au_module is not None:
         status = csm_au_module.execute(ctx)
         if status == 0 :
             ctx.success = True   
     else:
         try:
             time.sleep(10)
             ctx.post_status('Copying files from TFTP server to host...')
             time.sleep(10)
             ctx.success = True
         except:
             pass
Example #30
 def check_reachability(self):
     try:
         sftp_module = import_module('pysftp')
         if sftp_module is not None:
             server = self.server
             with sftp_module.Connection(server.server_url,
                                         username=server.username,
                                         password=server.password) as sftp:
                 if server.server_directory is not None and len(
                         server.server_directory) > 0:
                     sftp.chdir(server.server_directory)
             return True
         else:
             return False
     except:
         return False
Example #31
    def execute(self, ctx):
        global AUT_PATH

        csm_au_module = import_module("au.csm_au", AUT_PATH)
        if csm_au_module is not None:
            status = csm_au_module.execute(ctx)
            if status == 0:
                ctx.success = True
        else:
            try:
                conn = condor.make_connection_from_urls("host", ctx.urls)
                conn.connect()
                conn.disconnect()
                ctx.success = True
            except:
                pass
Example #32
 def execute(self, ctx):
     global AUT_PATH
     
     csm_au_module = import_module('au.csm_au', AUT_PATH)
     if csm_au_module is not None:
         status = csm_au_module.execute(ctx)
         if status == 0 :
             ctx.success = True
     else:    
         try:
             conn = condor.make_connection_from_urls('host', ctx.urls)
             conn.connect()
             conn.disconnect()
             ctx.success = True        
         except:
             pass
Example #33
def save_template(args):
    pkg = utils.import_module(f"model.{args.package}")
    if args.module_name is not None:
        clsmap = manager.get_module_namemap(pkg)
        cls = clsmap.get(args.module_name)
    else:
        cls = manager.get_module_classes(pkg)[0]
    template = {
        "type": cls.name,
        "vargs": model.get_optarg_template(cls)
    }
    dump = utils.map_val(args.format, {
        "yaml": utils.dump_yaml,
        "json": json.dump
    }, "template format")
    with open(args.save_path, "w") as f:
        dump(template, f)
Example #34
 def get_file_list(self):
     result_list = []
     is_reachable = True
     
     try:
         sftp_module = import_module('pysftp')
         if sftp_module is not None:
             server = self.server
             with sftp_module.Connection(server.server_url, username=server.username, password=server.password) as sftp:
                 if server.server_directory is not None and len(server.server_directory) > 0:
                     sftp.chdir(server.server_directory)
     
                 result_list = sftp.listdir()
     except:
         logger.exception('SFTPServer hit exception')
         is_reachable = False
                
     return result_list, is_reachable
Example #35
    def process(self,
                statement,
                additional_response_selection_parameters=None):
        compare_method_name = additional_response_selection_parameters.pop(
            'compare_method', 'hash_similarity')
        candidates = self.searchmethod.search(
            statement, **additional_response_selection_parameters)
        # candidates come from the raw database records and are of type Statement
        if statement.simhash_in_response_to is None or not candidates:
            return self.get_default_response(statement)

        from chinesechatterbot.utils import import_module
        dotted_path = "chinesechatterbot.compare." + compare_method_name
        compare_method = import_module(dotted_path)
        confidence = []
        for i, sentence in enumerate(candidates):
            score, factor = compare_method(sentence.simhash_in_response_to,
                                           statement.simhash_in_response_to)
            # factor records whether a larger or a smaller compare_method score means more similar
            if i == 0:
                if factor == 'small':
                    self.chatbot.logger.info(
                        "{} score: the smaller, the more similar".format(
                            compare_method_name))
                    state = 0
                if factor == 'big':
                    self.chatbot.logger.info(
                        "{} score: the greater, the more similar".format(
                            compare_method_name))
                    state = 1
            if state == 1:
                if score >= self.minimum_similarity_threshold:
                    sentence.confidence = score
                    confidence.append(sentence)
            elif score <= (1 - self.minimum_similarity_threshold) * 100:
                sentence.confidence = score
                confidence.append(sentence)
        if confidence:
            return self.select_response(statement,
                                        confidence,
                                        storage=self.chatbot.storage,
                                        logger=self.chatbot.logger)
        else:
            return self.get_default_response(statement)
Example #36
    def init_lr_scheduler(self):
        """
        Init learning rate scheduler for optimizer for neural network 
        model defined in `self.exp_dict["network"]`. If `self.exp_dict["scheduler"]`
        is not defined or `None`, no scheduler will be initialized, i.e.
        the model can only be updated with fixed learning rate when running
        in train mode.
        """
        self.sched_defs = self.exp_dict.get("scheduler", None)
        if self.sched_defs is None:
            self.scheduler = None
            self.logger.warn(
                "The model with no scheduler can only be updated with fixed learning rate when running in train mode."
            )
            return

        # a scheduler can only be created for an existing optimizer
        if self.optimizer is None:
            raise ValueError(
                "Cannot create learning rate scheduler for network when no optimizer"
            )

        self.logger.debug(
            'Initialize lr scheduler: %s with params: %s.' %
            (self.sched_defs["module"], self.sched_defs["param"]))
        schedulers = import_x(self.sched_defs["module"],
                              import_module(self.sched_defs["package"]))
        if schedulers is None:
            raise ValueError("Not supported or recognized scheduler: " +
                             self.sched_defs["package"] + "." +
                             self.sched_defs["module"])
        # the ReduceLROnPlateau scheduler is currently not supported
        if 'onplateau' in schedulers.__name__.lower():
            raise TypeError("Currently ReduceLROnPlateau is not supported.")
        self.scheduler = schedulers(
            self.optimizer
        ) if self.sched_defs["param"] is None else schedulers(
            self.optimizer, **(self.sched_defs["param"]))
        for _ in range(self.curr_epoch):
            # in case the model is initialized from a checkpoint
            self.scheduler.step()
        self.init_lr_scheduler_callback()
Example #37
    def write(self, item, **filename_args):
        features = item[self.parameter_name]

        feature_extractor = utils.import_module(\
                item["parameters"]["feature_extractor"])
        feature_types = feature_extractor.get_feature_types(features)
        feature_groups = feature_extractor.get_feature_groups(features)

        for feature_type in feature_types:
            for feature_group in feature_groups:
                filename = self.format_filename(
                    item=item,
                    feature_type=feature_type,
                    feature_group=feature_group,
                    **filename_args
                    )
                figure = feature_extractor.plot_feature_group(features,\
                        feature_type, feature_group)
                self.ensure_directory(filename)
                figure.savefig(filename, format="png")
Example #38
    def get_file_list(self):
        result_list = []
        is_reachable = True

        try:
            sftp_module = import_module('pysftp')
            if sftp_module is not None:
                server = self.server
                with sftp_module.Connection(server.server_url,
                                            username=server.username,
                                            password=server.password) as sftp:
                    if server.server_directory is not None and len(
                            server.server_directory) > 0:
                        sftp.chdir(server.server_directory)

                    result_list = sftp.listdir()
        except:
            logger.exception('SFTPServer hit exception')
            is_reachable = False

        return result_list, is_reachable
Example #39
def create_luvae(args, vocabs):
    luvae = utils.import_module(f"model.luvae")
    if args.model_path is None:
        model_cls = manager.get_module_classes(luvae)[0]
        modargs = get_optarg_template(model_cls)
    else:
        namemap = manager.get_module_namemap(luvae)
        opts = utils.load_yaml(args.model_path)
        name, modargs = opts.get("type"), opts.get("vargs")
        model_cls = namemap[name]
    def setup_embed(vocab, freeze):
        unfrozen_idx = {vocab[w] for w in [args.bos, args.eos, args.unk]}
        return {
            "type": "finetunable-embedding",
            "vargs": {
                "unfrozen_idx": unfrozen_idx,
                "freeze": freeze,
                "allow_padding": True
            }
        }
    for vocab, mode in zip(vocabs, MODES):
        modargs[f"{mode}_embed"] = \
            setup_embed(vocab, getattr(args, f"{mode}_freeze"))
    dim_keys = [f"{mode}_dim" for mode in MODES]
    dims = [getattr(args, k, 300) for k in dim_keys]
    z_dim = getattr(args, "z_dim", 300)
    caster = common.get_caster(model_cls)
    creator = caster({
        "type": model_cls.name,
        "vargs": modargs
    })
    return creator(
        z_dim=z_dim,
        word_dim=dims[0],
        label_dim=dims[1],
        intent_dim=dims[2],
        num_words=len(vocabs[0]),
        num_labels=len(vocabs[1]),
        num_intents=len(vocabs[2])
    )
Example #40
    def get_file_and_directory_dict(self, sub_directory=None):
        result_list = []
        is_reachable = True

        try:
            sftp_module = import_module('pysftp')
            if sftp_module is not None:
                with sftp_module.Connection(
                        self.server.server_url,
                        username=self.server.username,
                        password=self.server.password) as sftp:
                    remote_directory = concatenate_dirs(
                        self.server.server_directory, sub_directory)
                    if len(remote_directory) > 0:
                        sftp.chdir(remote_directory)

                    file_info_list = sftp.listdir()
                    for file_info in file_info_list:
                        file = {}
                        lstatout = str(sftp.lstat(file_info)).split()[0]
                        if 'd' in lstatout:
                            if sub_directory is None or len(
                                    sub_directory) == 0:
                                file['filename'] = file_info
                            else:
                                file[
                                    'filename'] = sub_directory + '/' + file_info
                            file['is_directory'] = True
                        else:
                            file['filename'] = file_info
                            file['is_directory'] = False

                        result_list.append(file)
        except:
            logger.exception('SFTPServer hit exception')
            is_reachable = False

        return result_list, is_reachable
Example #41
 def init_network(self):
     """
     Create a network model instance from the definition in the configuration
     `self.exp_dict["network"]`. If pretrained model parameters exist, use
     them to initialize the newly created model.
     """
     self.model_defs = self.exp_dict['network']
     network = import_x(self.model_defs["module"],
                        import_module(
                            self.model_defs["package"]))  # load module meta
     if network is None:
         raise ValueError("Not supported or recognized network: " +
                          self.model_defs["package"] + "." +
                          self.model_defs["module"])
     self.logger.debug('==> Initialize network with params: %s' %
                       (self.model_defs["param"]))
     self.model = network(
     ) if self.model_defs["param"] is None else network(
         **(self.model_defs["param"]))
     self.init_model_callback()
     self.model.apply(init_weights)  # random initialization
     # load pretrained model
     self.pretrained_model_params = self.model_defs.get("pretrained", None)
     if self.pretrained_model_params is not None:
         self.load_pretrained(self.model, self.pretrained_model_params)
     #
     self.init_network_callback()
     #
     self.init_record_of_best_model()
     # load checkpoint
     self.checkpoint_params = self.load_checkpoint(
         self.exp_dict['checkpoint'],
         suffix='.best') or self.load_checkpoint(
             self.exp_dict['checkpoint'], suffix='')
     # load model into GPU devices
     if self.is_cuda: self.load_to_gpu(parallel=self.is_cuda_parallel)
     # init optimizer
     self.init_optimizer()
Example #42
    def execute(self, item):
        """Compares two features.

        :argument item: a dict containing the following keys:
            metric
                The name of the module containing the metric
            query_features
                The features of the query image
            comparison_features
                The features of the image to compare the query to
        :return: `item` augmented with the following keys:
            distance
                The distance between the query and comparison images computed
                using the given metric
        """
        inputs = Bunch.fromDict(item)

        metric = utils.import_module(inputs.metric).apply_metric
        distance = metric(inputs.query_features.features,\
                inputs.comparison_features.features)

        item["distance"] = distance
        return item
Example #43
def create_mmvae(args, vocabs):
    mmvae = utils.import_module(f"model.mmvae")
    model_cls = mmvae.MultimodalVariationalAutoencoder
    if args.model_path is None:
        modargs = get_optarg_template(model_cls)
        modargs["encoders"] *= 3
        modargs["decoders"] *= 3
    else:
        modargs = utils.load_yaml(args.model_path)
    for enc, dec, vocab, mode in \
            zip(modargs["encoders"], modargs["decoders"], vocabs, MODES):
        freeze = getattr(args, f"{mode}_freeze", False)
        unfrozen_idx = {vocab[w] for w in [args.bos, args.eos, args.unk]}
        for coder in (enc, dec):
            coder["vargs"]["embed"] = {
                "type": "finetunable-embedding",
                "vargs": {
                    "unfrozen_idx": unfrozen_idx,
                    "freeze": freeze,
                    "allow_padding": True
                }
            }
    dim_keys = [f"{mode}_dim" for mode in MODES]
    dims = [getattr(args, k, 300) for k in dim_keys]
    z_dim = getattr(args, "z_dim", 300)
    caster = common.get_caster(model_cls)
    creator = caster({
        "type": "multimodal-variational-autoencoder",
        "vargs": modargs
    })
    return creator(
        num_modes=len(MODES),
        z_dim=z_dim,
        vocab_sizes=[len(vocab) for vocab in vocabs],
        word_dims=dims
    )
Example #44
def mk_jolokia():
    return import_module("mk_jolokia.py")
Example #45
def mk_mongodb():
    return import_module("mk_mongodb.py")
Example #46
def mk_postgres():
    return import_module("mk_postgres.py")
Example #47
def mk_filestats():
    return import_module("mk_filestats.py")
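
The mk_* fixtures above pass bare file names ending in .py, which points to loading the agent plugin straight from a source file rather than importing a package. A hedged sketch of such a test helper, assuming the file path resolves as given:

import importlib.util
import os


def import_module(filename):
    # Hypothetical sketch: build a module object directly from a .py file.
    # The real test helper may resolve the plugin path differently.
    name = os.path.splitext(os.path.basename(filename))[0]
    spec = importlib.util.spec_from_file_location(name, filename)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module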
Example #48
 task = judge.fetch_task()
 task_type = task['task_type']
 if task_type == 'grade':
   task['run_metadata'] = json.loads(task['run_metadata'])
   print 'Grading run_id %s (team %s, problem %s) of type %s... ' % (task['run_id'], task['team_username'], task['alias'], task['problem_type']),
   utils.reset_progress(False)
   
   problem_metadata, division_metadata = judge.get_cached_metadata(task['problem_id'], task['division_id'], task['problem_metadata_hash'], task['division_metadata_hash'])
   if problem_metadata is None or division_metadata is None:
     utils.progress('Refreshing metadata')
     problem_metadata, division_metadata = judge.update_cached_metadata(task['problem_id'], task['division_id'], task['problem_metadata_hash'], task['division_metadata_hash'])
   
   task['problem_metadata'] = problem_metadata
   task['division_metadata'] = division_metadata
   
   module = utils.import_module(judge.contest_type, task['problem_type'])
   q = multiprocessing.Queue()
   grader = multiprocessing.Process(target=module.grade, args=(q, task, False))
   grader.start()
   result = q.get()
   grader.join()
   print
   judge.submit_judgment(judgment_id=int(task['judgment_id']), **result)
 elif task_type == 'reset':
   judge = AutoJudge()
   print 'Reset judge to %s' % judge
 elif task_type == 'halt':
   print 'Shutting down'
   break
 elif task_type == 'poll':
   print 'Waiting for task...',
Example #49
def validate_settings(app_dir_path, django_settings_module, django_config=None,
                      prev_version_component_list=None):
    """This is the main settings validation function. It takes the following arguments:
        app_dir_path           - path to the top level directory of the extracted application
        django_settings_module - fully qualified name of the django settings module
        django_config          - if provided, this is the django_config data generated
                                 during the original packaging of the app. We validate
                                 that it is still consistent with the current app.

    This function returns an instance of SettingValidationResults.

    Note that validate_settings() must be run after generate_settings(). We import
    the deployed_settings module rather than the user's django_settings_module so
    we can see whether they've overridden any of the settings.
    """
    # normalize the target install path
    app_dir_path = os.path.abspath(os.path.expanduser(app_dir_path))
    
    results = SettingValidationResults(VERSION, logger)
    python_path_dir = find_python_module(django_settings_module, app_dir_path)
    if not python_path_dir:
        raise ValidationError("Unable to find django settings module %s under %s" % (django_settings_module, app_dir_path))
    # we store only the subdirectory part of the python path, since the rest depends
    # on where we install the app.
    if os.path.dirname(app_dir_path)==python_path_dir:
        results.python_path_subdirectory = ""
    else:
        results.python_path_subdirectory = _get_subdir_component(os.path.dirname(app_dir_path), python_path_dir)
    # get the settings file directory
    settings_file_directory = get_settings_file_directory(python_path_dir, django_settings_module)

    # do the import of app's settings
    sys.path = [python_path_dir] + sys.path

    deployed_settings_module = get_deployed_settings_module(django_settings_module)
    logger.debug("Importing settings module %s" % deployed_settings_module)
    try:
        settings_module = import_module(deployed_settings_module)
    except:
        (exc_type, exc_value, exc_traceback) = sys.exc_info()
        logger.exception("Exception in settings file import: %s(%s)" %
                         (exc_type.__name__, str(exc_value)))
        raise SettingsImportError("Error in settings import: %s(%s)" %
                                  (exc_type.__name__, str(exc_value)))
        
    # Check that the settings controlled by engage weren't overridden by app.
    # If any are overridden, we treat them as warnings.
    check_if_setting_overridden('TIME_ZONE', settings_module, results)
    check_if_setting_overridden('SECRET_KEY', settings_module, results)
    check_if_setting_overridden('ADMINS', settings_module, results)
    check_if_setting_overridden('DATABASES', settings_module, results)
    check_if_setting_overridden('LOGGING_CONFIG', settings_module,
                                results)

    # Check that settings which point to a directory are either not set or
    # point to a valid directory
    if hasattr(settings_module, "MEDIA_ROOT"):
        check_directory_setting("MEDIA_ROOT",
                                settings_module.MEDIA_ROOT,
                                '', app_dir_path, results)
    if hasattr(settings_module, "TEMPLATE_DIRS"):
        check_directory_tuple_setting("TEMPLATE_DIRS",
                                      settings_module.TEMPLATE_DIRS,
                                      app_dir_path, results)
    
    # Get the packages in requirements.txt. We use this in validating
    # the django apps. We defer the validation of the actual packages
    # until we have parsed and validated the engage_components.json file.
    user_required_packages = get_user_required_packages(app_dir_path)
    
    # check that all INSTALLED_APPS are pointing to apps accessible in the target system
    if hasattr(settings_module, "INSTALLED_APPS"):
        installed_apps = []
        packages = PREINSTALLED_PACKAGES + user_required_packages
        known_apps = set(get_apps_for_packages(packages))
        for app_name in settings_module.INSTALLED_APPS:
            validate_installed_app(app_name, python_path_dir, known_apps,
                                   app_dir_path, django_settings_module, results)
            installed_apps.append(app_name)
    else:
        installed_apps = []
    results.installed_apps = installed_apps

    if hasattr(settings_module, "FIXTURE_DIRS"):
        fixture_dirs = _tuple_setting_to_list(settings_module.FIXTURE_DIRS)
        check_directory_tuple_setting("FIXTURE_DIRS", fixture_dirs,
                                      app_dir_path, results)
    else:
        fixture_dirs = []
    # check that ENGAGE_APP_DB_FIXTURES points to valid fixture files
    if hasattr(settings_module, "ENGAGE_APP_DB_FIXTURES"):
        results.fixtures = _tuple_setting_to_list(settings_module.ENGAGE_APP_DB_FIXTURES)
        for fixture in results.fixtures:
            validate_fixture_file(fixture, results.installed_apps, fixture_dirs,
                                  python_path_dir, settings_file_directory, known_apps, results)
    else:
        results.fixtures = []

    # check ENGAGE_MIGRATION_APPS, if present
    if hasattr(settings_module, "ENGAGE_MIGRATION_APPS"):
        results.migration_apps = _tuple_setting_to_list(settings_module.ENGAGE_MIGRATION_APPS)
        if len(results.migration_apps)>0 and not ("south" in results.installed_apps):
            results.error("Django apps to be upgraded are specified in ENGAGE_MIGRATION_APPS, but south is not included in INSTALLED_APPS")
        validate_migration_apps(results.migration_apps, results.installed_apps, results)
    else:
        results.migration_apps = []

    # check the static files directories, if present. Each entry could be a source
    # directory, or a tuple of (target_subdir, source_path)
    if hasattr(settings_module, "STATICFILES_DIRS"):
        staticfiles_dirs = _tuple_setting_to_list(settings_module.STATICFILES_DIRS)
        for dirpath in staticfiles_dirs:
            if isinstance(dirpath, tuple):
                dirpath = dirpath[1]
            if not os.path.isdir(dirpath):
                results.error("Setting STATICFILES_DIRS references '%s', which does not exist" % dirpath)
            elif string.find(os.path.realpath(dirpath),
                             os.path.realpath(app_dir_path)) != 0:
                results.error("Setting STATICFILES_DIRS references '%s', which is not a subdirectory of '%s'" % (dirpath, app_dir_path))
                 
        check_directory_tuple_setting("STATICFILES_DIRS", staticfiles_dirs,
                                      app_dir_path, results)
    # gather the values of static files related settings for use during
    # installation.
    extract_static_files_settings(settings_module, app_dir_path, results)
        
    # check each command in ENGAGE_DJANGO_POSTINSTALL_COMMANDS is actually present in manager
    if hasattr(settings_module, "ENGAGE_DJANGO_POSTINSTALL_COMMANDS"):
        results.post_install_commands = list(settings_module.ENGAGE_DJANGO_POSTINSTALL_COMMANDS)
        validate_post_install_commands(app_name, settings_module, results)
    else:
        results.post_install_commands = []

    # read the additional components file and put the data into the results
    additional_comp_file = os.path.join(app_dir_path, COMPONENTS_FILENAME)
    if os.path.exists(additional_comp_file):
        with open(additional_comp_file, "rb") as cf:
            results.components = read_components_file(cf, additional_comp_file, None)
    else:
        results.components = []

    # validate the user required packages, taking into account the components requested
    # by the user.
    validate_package_list(user_required_packages, results.components, results)
    
    # extract the product name and version, if present
    if hasattr(settings_module, "ENGAGE_PRODUCT_NAME"):
        results.product = settings_module.ENGAGE_PRODUCT_NAME
    if hasattr(settings_module, "ENGAGE_PRODUCT_VERSION"):
        results.product_version = settings_module.ENGAGE_PRODUCT_VERSION

    # if provided, check that the django_config matches the settings values
    if django_config:
        django_config_ok = True
        if installed_apps != django_config.installed_apps:
            results.error("INSTALLED_APPS in configuration file (%s) does not match INSTALLED_APPS in settings file (%s). Your configuration file is likely out of date. Try re-running prepare." %
                          (django_config.installed_apps.__repr__(),
                           installed_apps.__repr__()))
            django_config_ok = False
        if results.fixtures != django_config.fixtures:
            # TODO: this was originally an error, which caused some issues.
            # See ticket #166.
            results.warning("ENGAGE_APP_DB_FIXTURES in configuration file (%s) does not match value in settings file (%s). If this is not what you expect, your configuration file is likely out of date: try re-running prepare." %
                          (django_config.fixtures.__repr__(),
                           results.fixtures.__repr__()))
            django_config_ok = False
        if results.migration_apps != django_config.migration_apps:
            results.error("ENGAGE_MIGRATION_APPS in configuration file (%s) does not match value in settings file (%s). Your configuration file is likely out of date. Try re-running prepare." %
                          (django_config.migration_apps.__repr__(),
                           results.migration_apps.__repr__()))
            django_config_ok = False
        if results.product and results.product != django_config.product:
            results.error("ENGAGE_PRODUCT_NAME in configuration file (%s) does not match value in settings file (%s). Your configuration file is likely out of date. Try re-running prepare." % (django_config.product, results.product))
            django_config_ok = False
        if results.product_version and results.product_version != django_config.product_version:
            results.error("ENGAGE_PRODUCT_VERSION in configuration file (%s) does not match value in settings file (%s). Your configuration file is likely out of date. Try re-running prepare." % (django_config.product_version, results.product_version))
            django_config_ok = False
        if django_config_ok:
            logger.debug("Verified config file is consistent with settings file")

    return results # all done
Example #50
 def execute(self, item):
     feature_extractor = utils.import_module(\
             item["parameters"]["feature_extractor"]).apply_descriptor
     item["features"] = feature_extractor(item["coefficients"],\
             item["parameters"])
     return item
Example #51
if __name__ == '__main__':
  utils.init()
  if len(sys.argv) != 3:
    print 'Usage: %s <team_username> <problem_alias>' % sys.argv[0]
    sys.exit(1)
  
  team_username = sys.argv[1]
  problem_alias = sys.argv[2]
  
  utils.reset_progress(True)
    
  task = utils.call(action='fetch_run', team_username=team_username, problem_alias=problem_alias)
  if not task['success']:
    raise Exception('Failed to fetch run.')
  
  print 'Grading run_id %s (team %s, problem %s) of type %s... ' % (task['run_id'], task['team_username'], task['alias'], task['problem_type']),
  
  module = utils.import_module(task['contest_type'], task['problem_type'])
  
  for key in ['run_metadata', 'problem_metadata', 'division_metadata']:
    task[key] = json.loads(task[key])
  
  q = multiprocessing.Queue()
  grader = multiprocessing.Process(target=module.grade, args=(q, task, True))
  grader.start()
  result = q.get()
  grader.join()
  
  print 'Final judgment: %s' % ('CORRECT' if result['correct'] else 'INCORRECT')