Пример #1
0
def loadDefaultConfig():
    """Return the default database connection settings wrapped in a success response."""
    keys = ('host', 'user', 'password')
    settings = {key: utils.loadConfig("dataBase", key) for key in keys}
    return utils.success(settings)
Пример #2
0
def getTranscribedPatterns(ptr=('TA', 'TA', 'KI', 'TA')):
    """
    Get the transcribed sequences for the pattern (ptr) provided, based on the
    onset information in the transcribed data.

    Returns a list of (compositionName, patternsInTranscription) tuples, one
    per composition in the master data.
    """
    # Bug fix: the default used to be a mutable list shared between calls;
    # a tuple default avoids that pitfall. Convert back to a list so the
    # downstream helpers receive the same type as before.
    ptr = list(ptr)
    result = []
    l = len(ptr)
    # Getting the masterdata
    config = ut.loadConfig('/home/swapnil/SMC/MasterThesis/gitPercPatterns/code/rlcs/config')
    transFolder = config['transFolder']
    lblDir = config['lblDir']
    onsDir = config['onsDir']
    masterData = ut.getAllSylbData(tPath = transFolder, lblDir = lblDir, onsDir = onsDir)

    # Find the start and the end point of patterns in each composition
    for comp in masterData:
        compName = comp[2]
        # print() with a single argument is valid in both Python 2 and 3
        # (the original used the Python-2-only print statement).
        print('Working for composition:' + compName)
        transScore = comp[0][0]
        transOnset = comp[0][1]
        origScore = comp[1][0]
        origOnset = comp[1][1]

        # Get the starting indices for the pattern in the composition comp
        ptrStartIndices = ut.getSubstringPos(origScore, ptr)

        # Get the dictionaries of set for the indices of patterns in the ground truth
        dictPtrIndices = populateDictPtrIndices(ptrStartIndices, l)

        # Get the closest set onsets for the pattern in the transcription
        ptrIndicesInTrans = getIndicesInTrans(origOnset, transOnset, dictPtrIndices)
        ptrnsInTrans = getPatternsInTrans(transScore, ptrIndicesInTrans)
        result.append((compName, ptrnsInTrans))

    return result
Пример #3
0
def loaded(ev, server, plugin):
  """Plugin 'loaded' event hook: read the matterbridge config and, when
  enabled, start the bridge tracker via the plugin's async runner."""
  global cfg
  if ev["name"] == name:
    cfg = utils.loadConfig("matterbridge", {"enabled": False, "remote": "", "token": ""})
    if cfg["enabled"]:
      # The no-op `pass` that used to precede this call was removed.
      plugin.asyncRun(name, tracker, {'Authorization': 'Bearer ' + cfg["token"]}, cfg["remote"])
Пример #4
0
def test_directory_check():
    """Check that the directory settings are non-blank and carry no trailing slash."""
    config = utils.loadConfig(CURRENT_DIRECTORY + 'settings.conf')

    # Check for PUBLIC_HTML, ERROR_DIR, OTHER_TEMPLATES
    for key in ('PUBLIC_HTML', 'ERROR_DIR', 'OTHER_TEMPLATES'):
        value = config[key].strip(" ")
        config[key] = value
        assert value != "", key + ": Field cannot be left blank"
        assert value[-1] != "/", key + ": Directory name must not end with a '/'"
Пример #5
0
def test(verbose):
    """Test database connectivity.

    Configures logging, loads DB credentials from config, opens a
    connection, logs the server version and closes the connection.
    """
    utils.configureLogging(verbose)
    config = utils.loadConfig()
    # Fix: the message has no placeholders, so the stray f-prefix (F541)
    # was dropped.
    logging.debug("Testing DB connection...")
    db_conn = db.connect(config["DB_HOST"], config["DB_NAME"],
                         config["DB_USER"], config["DB_PASS"])
    logging.info(db.show_version(db_conn))
    db.close(db_conn)
Пример #6
0
    def run(self, optimizer, no_steps=20, loss="MIN", kernel="rbf", no_runs=5):
        """Benchmark an optimizer over `no_runs` seeded runs and save samples.

        optimizer: one of 'lstm', 'random', 'gp', 'basinhopping'.
        Results are saved as .npy files under MNIST_RESULT_BASE_DIR.
        """
        np.random.seed(20)
        seeds = np.random.randint(0, 10000, (no_runs))

        output_dir = utils.loadConfig()['MNIST_RESULT_BASE_DIR']

        total_seeds = no_runs
        print('Running for %s runs with %s' % (total_seeds, optimizer))

        results_x = np.zeros((total_seeds, no_steps + 1, 2))
        results_y = np.zeros((total_seeds, no_steps + 1))

        method = optimizer
        for i in range(total_seeds):
            s = seeds[i]
            print('Seed %d' % s)
            # Bug fix: the original compared strings with `is` (identity),
            # which only works by accident of CPython interning; `==` is
            # the correct equality test.
            if optimizer == 'lstm':
                x0 = np.array([-1, -1]).reshape(-1, 2)
                obj_func = lambda x: self.objective_function(
                    x, s, scaling=True).reshape(1, -1)
                model = utils.get_trained_model(dim=2,
                                                kernel=kernel,
                                                loss=loss)
                samples_x, samples_y = self.optimize_lstm(
                    x0, model, obj_func, no_steps)

                samples_x = np.array(samples_x).flatten().reshape(-1, 2)
                samples_x = self.scaler.inverse_transform(samples_x)

                # Undo the log2/affine scaling applied for the LSTM model.
                samples_x = np.power(2, samples_x)
                samples_y = (samples_y - 1) / 2
                method = '%s-%s-%s' % (optimizer, loss, kernel)
            else:
                # Bug fix: np.float was deprecated and removed in NumPy 1.24;
                # the builtin float is the documented replacement.
                x0 = np.array([DIMENSIONS[0][0], DIMENSIONS[1][0]],
                              dtype=float)
                obj_func = lambda x: self.objective_function(x, s)
                if optimizer == 'random':
                    print('Optimize Randomly')
                    samples_x, samples_y = self.optimize_random(
                        x0, no_steps + 1, obj_func)
                elif optimizer == 'gp':
                    samples_x, samples_y = self.optimize_gp(
                        x0, no_steps + 1, obj_func)
                elif optimizer == 'basinhopping':
                    samples_x, samples_y = self.optimize_basinhopping(
                        x0, no_steps + 1, obj_func)

                samples_x = np.array(samples_x).flatten().reshape(-1, 2)

            results_x[i, :, :] = np.array(samples_x).reshape(1, -1, 2)
            results_y[i, :] = samples_y

        print('Saving result to %s' % (method))
        np.save('%s/%s-samples_x' % (output_dir, method), results_x)
        np.save('%s/%s-samples_y' % (output_dir, method), results_y)
Пример #7
0
def testConnection():
    """Open and ping a MySQL connection using the request's credentials;
    return a success or error response."""
    data = utils.getRequestData()
    try:
        conn = pymysql.connect(data.get("host"),
                               data.get("user"),
                               data.get("password"),
                               data.get("db"),
                               charset=utils.loadConfig("dataBase", "charset"))
        conn.ping()
        return utils.success(msg='连接成功')
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; any connection failure still yields the same
        # error response.
        return utils.error('请检查你的连接')
Пример #8
0
def test_valid_hostname():
    """Validate HOST_NAME: at most 15 chars, and either 'localhost' or an IP
    matching IP_REGEX."""
    config = utils.loadConfig(CURRENT_DIRECTORY + 'settings.conf')
    hostname = config['HOST_NAME']
    if len(hostname) > 15:
        raise AssertionError(
            'HOST_NAME: hostname cannot be more than 15 character long.')

    try:
        assert hostname == "localhost", "HOST_NAME: Invalid Host Name"
        return
    except AssertionError:
        # Narrowed from a bare `except:`; only a failed "localhost" check
        # should fall through to the IP-pattern check.
        allowed = re.compile(IP_REGEX, re.IGNORECASE)
        assert allowed.match(hostname), "HOST_NAME: Invalid Host Name"
Пример #9
0
def main(argv):
    """Parse CLI args, build the job schedule from the YAML config and run
    the scheduler loop forever."""
    print("starting ledticker backend with flashlex. ")

    # Read in command-line parameters
    parser = argparse.ArgumentParser()

    parser.add_argument("-c",
                        "--config",
                        action="store",
                        required=True,
                        dest="config",
                        help="the YAML configuration file")

    args = parser.parse_args()

    # (removed the unused local `configFile`)
    config = loadConfig(args.config)['ledtickerbe']

    local_tz = pytz.timezone(config['timezone'])

    for job_item in config['jobs']:
        rate_type = job_item['rate_type']
        if rate_type == 'day':
            # Daily jobs are configured in local time; the scheduler wants GMT.
            schedule_gmt = convert_localtime_to_gmt(job_item['rate_value'],
                                                    local_tz).strftime("%H:%M")
            schedule.every().day.at(schedule_gmt).do(
                make_job,
                builder_config=job_item,
                sender_config=config['sender'],
                cache=cache,
                tz=local_tz)
        elif rate_type == 'hours':
            schedule.every(job_item['rate_value']).hours.do(
                make_job,
                builder_config=job_item,
                sender_config=config['sender'],
                cache=cache,
                tz=local_tz)
        elif rate_type == 'minutes':
            schedule.every(job_item['rate_value']).minutes.do(
                make_job,
                builder_config=job_item,
                sender_config=config['sender'],
                cache=cache,
                tz=local_tz)
        # Unknown rate types are silently ignored (was an explicit `else: pass`).

    while True:
        schedule.run_pending()
        time.sleep(1)
Пример #10
0
    def run(self, kernel, dimension, log_dir, epochs):
        """Run a full hyperparameter grid search for the given kernel and
        dimension, logging each combination to its own file under log_dir
        and saving trained models to MODEL_DIR.

        Raises AssertionError if log_dir already exists.
        """
        config = utils.loadConfig()
        log_location = config['BASE_LOG_DIR'] + '/' + log_dir
        model_location = config['MODEL_DIR']

        assert not os.path.isdir(
            log_location
        ), "%s already exists, please specify new one" % log_location

        os.makedirs(log_location)

        experiment_conf = config['experiments']['%dD' % dimension]

        hyper_params = experiment_conf['hyperparameters']
        param_grid = list(ParameterGrid(hyper_params))

        total_combinations = len(param_grid)
        print(
            'Running experiments of %dD-%s with %d hyperparameter combinations'
            % (dimension, kernel, total_combinations))
        print('log output to %s' % log_location)
        print('save model to %s' % model_location)

        start = time.time()
        for i in range(total_combinations):
            params = param_grid[i]
            # Log file is named by the epoch second (one file per combination).
            filename = str(int(time.time()))
            filepath = log_location + '/' + filename

            print('%3d/%d - [log-id: %s] %s ' %
                  (i + 1, total_combinations, filename, params))

            start_inner = time.time()
            # Bug fix: the log file handle was opened and never closed (one
            # leaked handle per combination); `with` closes it even if
            # training raises.
            with open(filepath, 'w') as f:
                lstm_model.train(dimension,
                                 kernel=kernel,
                                 epochs=epochs,
                                 logger=f,
                                 save_model_path=model_location,
                                 **params)
            tf.reset_default_graph()
            end_inner = time.time()

            print('>> took %.4f mins' % ((end_inner - start_inner) / 60))

        end = time.time()

        print("Finished %d combinations using %.4f mins" %
              (total_combinations, (end - start) / 60.0))
        print("==========================================")
Пример #11
0
def main(argv):
    """Load the ledticker config named on the command line and run the
    covid_us_summary builder job once."""
    parser = argparse.ArgumentParser()

    parser.add_argument("-c", "--config", action="store", required=True, dest="config", help="the YAML configuration file")

    args = parser.parse_args()

    # (removed the unused local `configFile`)
    config = loadConfig(args.config)['ledtickerbe']

    local_tz = pytz.timezone(config['timezone'])

    # Pick the job whose builder is the covid US summary.
    covid_us_summary_config = list(filter(lambda x: 'wikicovid.covid_us_summary' in x['builder'], config['jobs']))

    wikicovid.covid_us_summary(covid_us_summary_config[0], config['sender'], cache, local_tz)
    def run(self,
            dim,
            kernel,
            n_steps=21,
            no_testing_func=10,
            optimizer='gp',
            dataset='normal'):
        """Optimize the first `no_testing_func` testing functions for the
        given dimension/kernel with the chosen optimizer and save the
        collected samples."""
        print(
            "Optimizing for first %d functions of %d-%s testing data using %s optimizer with %d steps"
            % (no_testing_func, dim, kernel, optimizer, n_steps))
        print('dataset %s' % dataset)

        config = utils.loadConfig()
        experiment = config['experiments']["%dD" % dim]
        x0 = experiment['hyperparameters']['starting_point'][0]

        X, A, minv, maxv = utils.loadData(dim, 'testing', kernel)

        # Length scale derived from the bump count and dimensionality.
        n_bumps = 6
        length_scale = 2 / n_bumps * np.sqrt(dim)

        dataset_func = gp.dataset_function(dataset)
        kernel_func = gp.kernel_function(kernel)

        samples_x, samples_y = self.get_samples_sk(X,
                                                   A,
                                                   minv,
                                                   maxv,
                                                   length_scale,
                                                   dim,
                                                   n_steps,
                                                   dataset_func,
                                                   kernel_func,
                                                   no_testing_func,
                                                   x0,
                                                   optimizer=optimizer)

        # Non-default datasets get their name folded into the output prefix.
        base = kernel if dataset == 'normal' else '%s-%s' % (kernel, dataset)
        directory = '%s/%sd-%s' % (optimizer, dim, base)

        print('Saving data with prefix %s' % directory)
        self.save_samples(samples_x, samples_y, directory)
Пример #13
0
def inGameNotice(c):
    """Periodically broadcast the configured notice messages in-game until
    ingame_notice.enabled is turned off in the config."""
    utils.show("Starting ingame auto notice...")
    while True:
        settings = utils.loadConfig(utils.dbFile)
        enabled = settings["ingame_notice.enabled"]
        if enabled.lower() == "false":
            utils.show(
                "Ingame notice enabled = %s!!! You might want to change ingame_notice.enabled=true to start ingame notice."
                % enabled)
            break

        messages = settings["ingame_notice.messages"]
        for message in messages.split(messageDelimeter):
            utils.show("Sending \"%s\" to ps_game..." % message)
            utils.executeCommand(c % ("/nt " + message))
            time.sleep(15)

        # Wait the configured interval before repeating the whole batch.
        interval = int(settings["ingame_notice.interval"])
        utils.show("Waiting for %d second(s) to notice \"%s\"" %
                   (interval, messages))
        time.sleep(interval)
Пример #14
0
def main(argv):
    """Load the ledticker config named on the command line and run the
    next_president builder job once."""
    parser = argparse.ArgumentParser()

    parser.add_argument("-c",
                        "--config",
                        action="store",
                        required=True,
                        dest="config",
                        help="the YAML configuration file")

    args = parser.parse_args()

    # (removed the unused local `configFile`)
    config = loadConfig(args.config)['ledtickerbe']

    local_tz = pytz.timezone(config['timezone'])

    # Pick the job whose builder is the next-president odds feed.
    next_president_config = list(
        filter(lambda x: 'electionbettingodds.next_president' in x['builder'],
               config['jobs']))

    electionbettingodds.next_president(next_president_config[0],
                                       config['sender'], cache, local_tz)
Пример #15
0
        msgBox = QMessageBox()
        msgBox.setWindowTitle('Fatal error')
        msgBox.setText(
            'Unhandled exception occured. Check the log file for details.')
        msgBox.setIcon(QMessageBox.Critical)
        msgBox.setDetailedText(details)
        iconPath = resourcePath('ico\\favicon.ico')
        msgBox.setWindowIcon(QIcon(iconPath))
        msgBox.exec_()

        sys.exit(0)


if __name__ == '__main__':

    # Install the global exception handler before anything else can raise.
    exceptionHandler = ExceptionHandler()
    #sys._excepthook = sys.excepthook
    sys.excepthook = exceptionHandler.handler

    utils.loadConfig()

    log('Starting %s build %s' % (version, build_date))
    log('qt version: %s' % (QtCore.QT_VERSION_STR))

    app = QApplication(sys.argv)

    ex = hslWindow.hslWindow()

    # Bug fix: sys.exit() never returns, so the duplicate app.exec_() call
    # that followed it was unreachable dead code and has been removed.
    sys.exit(app.exec_())
Пример #16
0
MCDaemonReloaded 服务器 增量备份插件 (Increasing Backup)
提供多个槽位的增量备份与回档功能。
要求 rdiff-backup.
"""
import utils, uuid, json, os, time, shlex, datetime, schedule
from pathlib import Path
from event import TRIGGER
from subprocess import Popen
from utils import CC
from logging import getLogger
l = getLogger(__name__)

# Plugin configuration, loaded (with these defaults when absent) through the
# project's utils.loadConfig helper.
cfg = utils.loadConfig(
    "backup", {
        "world": "",
        "slot": "16",
        "backupdir": "/backup",
        "backupwhenrollback": True,
        "logfile": "rdiff.log"
    })
# Persistent plugin state; "backups" accumulates recorded backup entries.
data = utils.loadData("backup", {"backups": []})
# Module-level workflow flags. NOTE(review): semantics inferred from names,
# the code that sets them is outside this view -- confirm against the rest
# of the plugin.
aborted = False      # presumably set when a pending operation is cancelled
confirmed = False    # presumably set once a rollback is confirmed
inProgress = False   # presumably True while a backup/restore is running
savedGame = False    # presumably True once the world save completed
dest = None          # presumably the target of the operation in progress


def syncBackupList():
    global cfg, data
    fl = []
    tf = {}
Пример #17
0
def create(service_name, database_name, readonly_role_name,
           readwrite_role_name, user_name, verbose):
    """Create database, roles and user.

    Creates the service database (named after the service unless
    database_name is given), revokes public permissions, creates read-only
    and read/write roles, creates an application user with the read/write
    role and writes its generated password to the local `.pass` file.
    """

    # -------------------------------------------------------------------------
    # 0. Init
    # -------------------------------------------------------------------------
    utils.configureLogging(verbose)

    # Look for DB credentials in .env file
    # (F541 fix: no placeholders, so the stray f-prefix was dropped)
    logging.debug("Looking for DB credentials in .env file...")
    config = utils.loadConfig()

    # -------------------------------------------------------------------------
    # 1. Connect to a default database to create the service database
    # -------------------------------------------------------------------------
    # Connect to db server
    initial_db_conn = db.connect(config["DB_HOST"], config["DB_NAME"],
                                 config["DB_USER"], config["DB_PASS"])

    # Create a db for the service
    service_db_name = database_name if database_name else f"{service_name}"
    logging.info(
        f"Create a database with name={service_db_name} for service={service_name}"
    )
    db.create_database(initial_db_conn, service_db_name)

    # Close the db connection
    db.close(initial_db_conn)

    # -------------------------------------------------------------------------
    # 2. Connect to new service db to create roles and a user for the service
    # -------------------------------------------------------------------------
    # Connect to the new service database
    db_conn = db.connect(config["DB_HOST"], service_db_name, config["DB_USER"],
                         config["DB_PASS"])

    # Revoke default, public permissions from public schema
    # (F541 fix: no placeholders, so the stray f-prefix was dropped)
    logging.info("Revoke default, public permissions from public schema")
    db.revoke_public_permissions(db_conn, service_db_name)

    # Create a read-only role
    service_readonly_role = readonly_role_name if readonly_role_name else f"role_{service_name}_readonly"
    logging.info(f"Create a read-only role with name={service_readonly_role}")
    db.create_readonly_role(db_conn, service_db_name, service_readonly_role)

    # Create a read/write role
    service_readwrite_role = readwrite_role_name if readwrite_role_name else f"role_{service_name}_readwrite"
    logging.info(
        f"Create a read/write role with name={service_readwrite_role}")
    # NOTE(review): "readowrite" looks like a typo in the db module's API;
    # the call is kept as-is since the function is defined elsewhere.
    db.create_readowrite_role(db_conn, service_db_name, service_readwrite_role)

    # Create a user for the service with read/write role grants
    service_user = user_name if user_name else f"user_{service_name}_app"
    service_pass = utils.getRandomPassword(30)
    logging.info(f"Create a user with name={service_user}")
    db.create_user(db_conn, service_user, service_pass, service_readwrite_role)

    # Output user pass to a file
    utils.writeFile(".pass", service_pass)

    # Close the db connection
    db.close(db_conn)
Пример #18
0
def main():
    """End-to-end training driver: read a TOML config, load train/test data,
    build and compile the model, then fit it with checkpoint/early-stop/LR
    callbacks. Config path may be given as argv[1] (default ./config.toml)."""

    # loading config file ...
    cfgPath = sys.argv[1] if len(sys.argv) > 1 else './config.toml'
    cfg = loadConfig(cfgPath)

    try:
        # ... and unpacking variables
        # helper: pull several keys out of a dict in order
        dictget = lambda d, *k: [d[i] for i in k]

        dataStats = cfg['data_stats']
        modelParams = cfg['model_params']
        trainCSV, testCSV = dictget(cfg['database'], 'train', 'test')
        seqLength, stepSize = dictget(cfg['model_params'], 'seqLength',
                                      'stepSize')
        modelArch, modelDir, modelName = dictget(cfg['model_arch'],
                                                 'modelArch', 'modelDir',
                                                 'modelName')
        optimizer, lossFunc, metricFuncs = dictget(cfg['training_params'],
                                                   'optimizer', 'lossFunc',
                                                   'metricFuncs')
        lr, epochs, batchSize, patience, = dictget(cfg['training_params'],
                                                   'learningRate', 'epochs',
                                                   'batchSize', 'patience')
    except KeyError as err:
        # Any missing config key aborts the run with a readable message.
        print("\n\nERROR: not all parameters defined in config.toml : ", err)
        print("Exiting ... \n\n")
        sys.exit(1)

    print("Loading training data ...")
    xTrain, yTrain, stats = getData(trainCSV,
                                    seqLength=seqLength,
                                    stepSize=stepSize,
                                    stats=dataStats)
    print("Training Data Shape : ", xTrain.shape, "\n")

    print("Loading testing data ...")
    xTest, yTest, stats = getData(testCSV,
                                  seqLength=seqLength,
                                  stepSize=stepSize,
                                  stats=dataStats)
    print("Testing Data Shape : ", xTest.shape, "\n")

    yTrain = np.expand_dims(
        yTrain, -1)  # adding extra axis as model expects 2 axis in the output
    yTest = np.expand_dims(yTest, -1)

    print("Compiling Model")
    opt = getOptimizer(optimizer, lr)
    model = makeModel(modelArch, modelParams, verbose=True)
    model.compile(loss=lossFunc, optimizer=opt, metrics=metricFuncs)

    # setting up directories
    modelFolder = os.path.join(modelDir, modelName)
    weightsFolder = os.path.join(modelFolder, "weights")
    bestModelPath = os.path.join(weightsFolder, "best.hdf5")
    ensureDir(bestModelPath)

    # keep a copy of the config next to the model artifacts
    saveConfig(cfgPath, modelFolder)

    # callbacks
    monitorMetric = 'val_loss'
    # per-epoch snapshot (only appended to the callback list when
    # saveAllWeights is enabled below)
    check1 = ModelCheckpoint(os.path.join(weightsFolder,
                                          modelName + "_{epoch:03d}.hdf5"),
                             monitor=monitorMetric,
                             mode='auto')
    # best-so-far snapshot by validation loss
    check2 = ModelCheckpoint(bestModelPath,
                             monitor=monitorMetric,
                             save_best_only=True,
                             mode='auto')
    check3 = EarlyStopping(monitor=monitorMetric,
                           min_delta=0.01,
                           patience=patience,
                           verbose=0,
                           mode='auto')
    check4 = CSVLogger(os.path.join(modelFolder,
                                    modelName + '_trainingLog.csv'),
                       separator=',',
                       append=True)
    check5 = ReduceLROnPlateau(monitor=monitorMetric,
                               factor=0.1,
                               patience=patience // 3,
                               verbose=1,
                               mode='auto',
                               min_delta=0.001,
                               cooldown=0,
                               min_lr=1e-10)

    cb = [check2, check3, check4, check5]
    if cfg['training_params']['saveAllWeights']:
        cb.append(check1)

    print("Starting Training ...")
    model.fit(x=xTrain,
              y=yTrain,
              batch_size=batchSize,
              epochs=epochs,
              verbose=1,
              callbacks=cb,
              validation_data=(xTest, yTest),
              shuffle=True)
Пример #19
0
from utils import agentFactory, loadConfig, predToInput

# CLI: argv[1] = folder with GA iteration backups, argv[2] = config path.
backupFolder = sys.argv[1]
config = sys.argv[2]

# Newest iteration file first (reverse lexicographic sort of iter_* names).
fnList = sorted(glob.glob(backupFolder + 'iter_*'), reverse=True)

fn = fnList[0]

print(fn)

chromosomesList = pickle.load(open(fn, 'rb'))

# Sort chromosomes by their recorded score (first tuple element).
chromosomesList = sorted(chromosomesList, key=lambda x: x[0])

nnConstruct, gaConfig, maxIter = loadConfig(config)

print(nnConstruct)

agents = [(score, agentFactory(nnConstruct, chromosome=chro))
          for score, chro in chromosomesList]

# NOTE(review): the cv2Renderer is immediately overwritten by NoGUIRenderer,
# so the first assignment is dead -- looks like a manual GUI/headless toggle;
# confirm before removing.
renderer = cv2Renderer(13, 13)
renderer = NoGUIRenderer(13, 13)
renderer.setRefreshRate(100)

envi = Enviroment(13, 13, renderer)

ga = GA([], envi, predToInput)

random.seed(gaConfig['seed'])
Пример #20
0
import utils
import pytz
import sys
import random
from datetime import datetime

# Module-level scheduling configuration, validated at import time; any
# invalid setting aborts the process.
logger = utils.createLogger("scheduling")
config = utils.loadConfig()
scheduling_config = config["scheduling"]
timezone_name = scheduling_config["local_timezone"]
try:
    timezone = pytz.timezone(timezone_name)
except pytz.UnknownTimeZoneError:
    # Bug fix: the "{}" placeholder was never filled in; pass the offending
    # timezone name via the logger's lazy %-formatting.
    logger.error(
        "timezone %s not found in tz data. make sure it is in the list - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones",
        timezone_name)
    sys.exit(1)
working_days = scheduling_config["working_days"]
min_days_per_week = scheduling_config.get("min_days_per_week", 1)
if min_days_per_week == -1:
    # -1 means "every configured working day".
    min_days_per_week = len(working_days)
if min_days_per_week < 0 or min_days_per_week > len(working_days):
    # Bug fix: the message referred to a nonexistent key "min_days_per_work".
    logger.error("min_days_per_week should be in [0, len(working_days)] or -1")
    sys.exit(1)
working_hours = scheduling_config["working_hours"]
if len(working_hours) == 0:
    logger.error("working hours must contain atleast one start-end period")
    sys.exit(1)

def getRandomHour(seed):
    total_duration = sum([(period["end"] - period["start"]) for period in working_hours])
    rng = random.Random(seed)
    random_offset = rng.random() * total_duration
Пример #21
0
                    server.reloadPlugins = False
        except Exception:
            if not server.stopped():
                __import__("traceback").print_exc(file=sys.stdout)
                l.error("在 TICK Daemon 时发生了错误!")
                time.sleep(3)


logging.basicConfig(filename='mcd.log', filemode='a')
l = logging.getLogger("daemon")

if __name__ == "__main__":

    l.info("程序启动。")
    ## read cfg
    cfg = loadConfig("daemon", False)
    if not isinstance(cfg, dict):
        l.critical("无法读取配置文件。")
        os._exit(1)

    l.info("配置文件加载完毕。")

    stopserver = False
    mainthread = None
    server = Server(cfg)
    event = Event()
    plugin = Plugin(server, event)
    event.setParm(server, plugin)
    handler = Handler(server, event)
    l.info("基础类初始化完毕。")
    try:
Пример #22
0
def createLayOutFile(data):
    """Generate the layout XML, JSP/HTML pages and i18n property files for a
    maintenance page described by the request payload `data`.

    Expects `data` to carry (at least) `columns` (JSON list of column
    descriptors), `strprefix`, `pageName`, `packageName`, `gridName` and the
    `check*` flags. NOTE(review): schema inferred from the keys read below --
    confirm against the caller.
    """
    columns = json.loads(data.get("columns"))
    strprefix = data.get("strprefix")
    # Column names excluded from the dialog / query sections (comma-separated
    # lists in config).
    dialogFilter = utils.loadConfig("columnFilter", "layoutDialog").split(",")
    queryFilter = utils.loadConfig("columnFilter", "layoutQuery").split(",")
    layColumns = []
    factoryIdExist, delFlagExist = False, False
    for column in columns:
        # factoryid is tracked but never emitted as a layout column.
        if column.get("column_name") == "factoryid":
            factoryIdExist = True
            continue
        if column.get("column_name") == "delflag":
            delFlagExist = True
        # Map the column's data type to the matching EasyUI widget class.
        easyuiClass = ""
        if column.get("data_type") == "int":
            easyuiClass = "easyui-numberbox"
        elif column.get("data_type") == "date":
            easyuiClass = "easyui-datebox"
        elif column.get("data_type") == "datetime":
            easyuiClass = "easyui-datetimebox"
        elif column.get("data_type") == "string":
            easyuiClass = "easyui-textbox"
        elif column.get("data_type") == "enum":
            easyuiClass = "easyui-combobox"
        # Template model for one column; checkbox-style inputs arrive as
        # "on"/None and are normalized to "true"/"false" strings.
        tempColumn = {
            "columnName": column.get("column_name"),
            "columnType":
            "int" if column.get("data_type") == "int" else "string",
            "columnWidth": column.get("column_width"),
            "isHidden": "true" if column.get("is_hidden") == "on" else "false",
            "isQuery": "true" if column.get("is_query") == "on" else "false",
            "isAddHidden":
            "true" if column.get("is_add_hidden") == "on" else "false",
            "isEditHidden":
            "true" if column.get("is_edit_hidden") == "on" else "false",
            "isEdit": "true" if column.get("is_edit") == "on" else "false",
            "isRequired":
            "true" if column.get("is_required") == "on" else "false",
            "creatI18N":
            "true" if column.get("creat_i18n") == "on" else "false",
            "cnName": column.get("cn_name"),
            "enName": column.get("en_name"),
            "dataType": column.get("data_type"),
            "enumCode": column.get("enum_code"),
            "easyuiClass": easyuiClass,
            "layout": column.get("layout")
        }
        if column.get("creat_i18n") == 'on':
            # Date-like columns get an extra *_search i18n key for the
            # search form.
            if column.get("data_type") in ("date", "datetime"):
                tempColumn["columnSearchI18N"] = strprefix + column.get(
                    "column_name") + "_search"
            tempColumn["columnI18N"] = strprefix + column.get("column_name")
        layColumns.append(tempColumn)
    # Page title key plus the fixed add/edit i18n entries, in both locales.
    title = strprefix + data.get("pageName").lower() + "_maintain"
    cns, ens = [], []
    cns.append({
        "key": title,
        "value": utils.to_unicode(data.get("pageName") + "维护")
    })
    ens.append({"key": title, "value": data.get("pageName") + "Maintain"})
    cns.append({
        "key": "com.zhiyin.mes.app.add_information",
        "value": utils.to_unicode("添加信息")
    })
    ens.append({
        "key": "com.zhiyin.mes.app.add_information",
        "value": "Add Information"
    })
    cns.append({
        "key": "com.zhiyin.mes.app.edit_information",
        "value": utils.to_unicode("修改信息")
    })
    ens.append({
        "key": "com.zhiyin.mes.app.edit_information",
        "value": "Edit Information"
    })
    # Top-level template model; checkFactory/checkDelFlag only become "true"
    # when the flag is set AND the corresponding column actually exists.
    creatData = {
        "gridName":
        data.get("gridName"),
        "layColumns":
        layColumns,
        "checkFactory":
        "true"
        if data.get("checkFactory") == "on" and factoryIdExist else "false",
        "checkDelFlag":
        "true"
        if data.get("checkDelFlag") == "on" and delFlagExist else "false",
        "dialogFilter":
        dialogFilter,
        "queryFilter":
        queryFilter,
        "pageName":
        data.get("pageName"),
        "packageName":
        data.get("packageName"),
        "title":
        title,
        "checkColumnCg":
        "true"
        if data.get("checkDelFlag") == "on" and delFlagExist else "false"
    }
    # Render and write the layout XML, JSP page and both i18n property files.
    utils.create_file(
        data.get("packageName") + "/layout",
        render_template('/generate/layout.xml', data=creatData),
        data.get("pageName") + ".xml")
    utils.create_file(
        data.get("packageName") + "/jsp",
        render_template('/generate/entity.jsp', data=creatData),
        data.get("pageName") + ".jsp")
    utils.create_file(
        data.get("packageName") + "/i18n",
        render_template('/generate/i18n.txt', data=cns),
        data.get("packageName") + "_zh_CN.properties")
    utils.create_file(
        data.get("packageName") + "/i18n",
        render_template('/generate/i18n.txt', data=ens),
        data.get("packageName") + "_en_US.properties")
    # Spring Boot projects additionally get an HTML page.
    if data.get("checkSpringBoot") == "on":
        utils.create_file(
            data.get("packageName") + "/html",
            render_template('/generate/entity.html', data=creatData),
            data.get("pageName") + ".html")
Пример #23
0
        return
    utils.executeCommand(sqlExecuteFile % sqlFile)


def executeSqlQuery(query):
    """Echo the SQL command being run, then hand it to the command executor."""
    message = "Executing %s" % query
    utils.show(message)
    utils.executeCommand(query)


if __name__ == "__main__":
    operation = sys.argv[1].lower()
    if not operation:
        utils.show("No operation has been given!!!")
        sys.exit()

    sqlConf = utils.loadConfig(utils.dbFile)

    if sqlConf:
        sqlCommon = sqlCommon % (sqlConf["db.user"], sqlConf["db.pass"],
                                 sqlConf["db.host"])
        sqlExecuteFile = sqlCommon + " -i \"%s\""
        sqlExecuteQuery = sqlCommon + " -Q \"%s\""

    if operation == "executefile":
        sqlFile = sys.argv[2]
        executeSqlFile(sqlFile)
    elif operation == "executequery":
        sqlQuery = sys.argv[2]
        executeSqlQuery(sqlExecuteQuery % sqlQuery)
    elif operation == "backup":
        currentTime = utils.getCurrentTime()
Пример #24
0
import os
import sys
from src import impl as rlcs
import utils as ut
import analysis as anls
import matplotlib.pyplot as plt
import logging
import pickle as pkl
import time

# Load folder locations and the syllable-list file path from the local config.
config = ut.loadConfig('config')

sylbSimFolder=config['sylbSimFolder']
transFolder=config['transFolder']
lblDir=config['lblDir']
onsDir=config['onsDir']
resultDir=config['resultDir']
sylbListFile=config['sylbListFile']

# Python 2 print statement (this script predates Python 3 syntax).
print sylbListFile

# Candidate bol-syllable query patterns to search for, and the set of
# pattern lengths the search supports.
queryList = [['DHE','RE','DHE','RE','KI','TA','TA','KI','NA','TA','TA','KI','TA','TA','KI','NA'],['TA','TA','KI','TA','TA','KI','TA','TA','KI','TA','TA','KI','TA','TA','KI','TA'], ['TA','KI','TA','TA','KI','TA','TA','KI'], ['TA','TA','KI','TA','TA','KI'], ['TA', 'TA','KI', 'TA'],['KI', 'TA', 'TA', 'KI'], ['TA','TA','KI','NA'], ['DHA','GE','TA','TA']]
queryLenCheck = [4,6,8,16]

# Abort early if any query has an unsupported length.
for query in queryList:
    if len(query) not in queryLenCheck:
        print 'The query is not of correct length!!'
        sys.exit()

# Checking if we want to run for baseline or not.
baseline = False
 def save_samples(self, samples_x, samples_y, directory):
     """Persist sampled inputs and objective values as .npy files.

     Both arrays are written under the configured SKOPT result base
     directory, prefixed with *directory*.
     """
     config = utils.loadConfig()
     base_dir = config['SKOPT_RESULT_BASE_DIR']
     x_path = '%s/%s-samples_x' % (base_dir, directory)
     y_path = '%s/%s-samples_y' % (base_dir, directory)
     np.save(x_path, samples_x)
     np.save(y_path, samples_y)
Пример #26
0
# Application entry point: install a global exception hook, start Qt, and
# keep prompting until config.yaml loads (or the user gives up).
if __name__ == '__main__':

    global ryba

    exceptionHandler = ExceptionHandler()
    #sys._excepthook = sys.excepthook
    sys.excepthook = exceptionHandler.handler

    app = QApplication(sys.argv)

    loadConfig = True

    # Retry loop: on a parse failure, ask the user whether to try again.
    while loadConfig:

        ok = utils.loadConfig()

        if not ok:
            loadConfig = utils.yesNoDialog(
                'Config error', 'Cannot load/parse config.yaml\nTry again?')
        else:
            loadConfig = False

    log('Starting %s build %s' % (version, build_date))
    log('qt version: %s' % (QtCore.QT_VERSION_STR))

    #ex = hslWindow.hslWindow()
    ryba = hslWindow.hslWindow()
    #ex = hslWindow.hslWindow()

    # NOTE(review): flag reused by code below; the remainder of this block
    # lies outside the visible excerpt.
    loadConfig = True
Пример #27
0
import fileinput
import utils
import tableFunctions


def generateFileNames(file_prefix, currentTimestamp):
    """Build the CPU and memory metric file names for one timestamp.

    Colons are stripped from the timestamp so the resulting names are
    safe on filesystems that disallow ':'. Returns (cpu_name, mem_name).
    """
    stamp = currentTimestamp.replace(':', '')
    return file_prefix + '_cpu_' + stamp, file_prefix + '_mem_' + stamp


# Running state: the timestamp currently being accumulated and the
# per-metric dictionaries collected for it.
currentTimestamp = ''
cpuMetDict = {}
memMetDict = {}
config = utils.loadConfig('config.ini')
# Stream metric lines from stdin / files named on the command line.
for line in fileinput.input():
    lineArray = line.strip().split()

    # When continuous-stream saving is enabled and a line with a different
    # timestamp arrives, flush the accumulated CPU/memory tables to
    # SVG + CSV files.  NOTE(review): the file names are built from the
    # NEW timestamp (lineArray[0]), not the one just finished — confirm
    # this is intended.
    if config.getboolean('cfg', 'save_continous_stream') and (
            currentTimestamp != '') and (lineArray[0] != currentTimestamp):
        cpuFile, memFile = generateFileNames(config.get('cfg', 'file_prefix'),
                                             lineArray[0])
        cpuTable = tableFunctions.metDictToTable(
            cpuMetDict, config.get('cfg', 'folder_name'), cpuFile + '.svg')
        memTable = tableFunctions.metDictToTable(
            memMetDict, config.get('cfg', 'folder_name'), memFile + '.svg')
        utils.saveToFile(config.get('cfg', 'folder_name'), cpuFile + '.csv',
                         cpuTable)
        utils.saveToFile(config.get('cfg', 'folder_name'), memFile + '.csv',
                         memTable)
        # Python 2 print statement; the loop body continues beyond this view.
        print 'Saved for timestamp ' + lineArray[0]
Пример #28
0
"""
MCDaemonReloaded 服务器 MatterBridge 插件
提供简单的 MatterBridge 消息同步。
需要 requests 库!
"""
import utils, requests, json, sys, time
from event import TRIGGER
from utils import CC
from logging import getLogger, Logger, WARNING
l = getLogger(__name__)
cfg = utils.loadConfig("matterbridge", {"enabled": False, "remote": "", "token": ""})
def stopped(ev, server, plugin):
  """Event hook: relay a 'server stopped' notice to the bridge admins."""
  sendadmin("服务器停止了。")
def stopping(ev, server, plugin):
  """Event hook: relay a 'server stopping' notice to the bridge admins."""
  sendadmin("服务器正在停止...")
def starting(ev, server, plugin):
  """Event hook: relay a 'server starting' notice to the bridge admins."""
  sendadmin("服务器正在开启...")
def started(ev, server, plugin):
  """Event hook: announce server start, then re-enable world autosave."""
  sendadmin("服务器开启了。")
  server.execute("save-on")
# Long-poll worker: repeatedly pulls messages from the MatterBridge HTTP API
# and relays them.  (The loop body continues beyond this excerpt.)
def tracker(server,plugin,header,remote):
  global cfg
  # Silence the chatty HTTP client loggers down to WARNING.
  getLogger("requests").setLevel(__import__("logging").WARNING)
  getLogger("urllib3").setLevel(__import__("logging").WARNING)
  getLogger("urllib3.connection").setLevel(__import__("logging").WARNING)
  getLogger("urllib3.connectionpool").setLevel(__import__("logging").WARNING)
  while True:
    try:
      # Fetch pending messages; raise_for_status turns HTTP errors into
      # exceptions (the except handler lies outside this excerpt).
      with requests.get(remote + '/api/messages', headers=header, stream=False) as r:
        r.raise_for_status()
        for i in r.json():
Пример #29
0
def test_loadConfig():
    """loadConfig must parse settings.conf into exactly the expected mapping."""
    expected = '{"STATUS_STRING": {"200": "200 OK", "404": "404 Not Found", "403": "403 Forbidden", "500": "500 Server Error"}, "MAX_CLIENT_QUEUE": 5, "SERVER_NAME": "Nex - Simple HTTP server/v.0.2", "HOST_ALLOWED": ["*"], "BIND_PORT": 12345, "ERROR_DIR": "error_pages", "MAX_REQUEST_LEN": 1024, "SERVER_SHORT_NAME": "Nex/v.0.2", "HOST_NAME": "localhost", "PUBLIC_HTML": "public_html", "OTHER_TEMPLATES": "other_templates"}'
    loaded = utils.loadConfig(TEST_DIRECTORY + 'settings.conf')
    serialized = json.dumps(loaded)
    assert serialized == expected, "loadConfig: Failed to load the configFile."
Пример #30
0
 def menuReloadConfig(self):
     """Menu action: re-read the config file and confirm in the status bar."""
     loadConfig()
     status_text = 'Configuration file reloaded.'
     self.statusMessage(status_text, False)
Пример #31
0
    def run(self,
            optimizer='lstm',
            dim=2,
            no_steps=20,
            loss="MIN",
            kernel="rbf",
            normalization=100):
        """Run airfoil optimization over a grid of (alpha, y-pair) settings.

        Args:
            optimizer: one of 'lstm', 'random', 'skopt', 'basinhopping'.
            dim: dimensionality of the input space.
            no_steps: number of optimization steps per foil.
            loss, kernel: model selection for the LSTM optimizer.
            normalization: objective normalization factor; also selects
                the output subfolder.

        Side effects: saves the visited points and objective values as
        .npy arrays under the configured output directory.
        """
        print('Optimize Airfoil with %s optimizer' % optimizer)

        config = utils.loadConfig()['airfoil_optimization']

        output_dir = config['output_dir']

        # Angles of attack, spread evenly over the configured range.
        alphas = np.linspace(config['alpha_range'][0],
                             config['alpha_range'][1], config['no_alpha'])

        y_pairs = [(0, 1, 2, 3, 4, 5)]

        print('Generate foils with %d alphas' % len(alphas))
        print('and %d pairs of y' % len(y_pairs))

        params = {'alpha': alphas, 'y_pair': y_pairs}
        param_grid = ParameterGrid(params)

        total_combination = len(param_grid)
        print('We have %d foils to run.' % total_combination)

        # One row per foil: the visited points and their objective values.
        results_x = np.zeros((total_combination, no_steps + 1, dim))
        results_y = np.zeros((total_combination, no_steps + 1))

        method = optimizer
        for i in range(total_combination):
            param = param_grid[i]

            # Bounds for the selected y coordinates.  Use a distinct index
            # name so the comprehension does not shadow the foil counter.
            input_space = [config['y_input_space'][j] for j in param['y_pair']]

            print(param)
            del param['y_pair']

            print(input_space)

            print('Evaluating - %d ' % (i + 1))
            print(param)

            # BUG FIX: compare strings with '==', not identity ('is').
            # 'is' on str literals relies on CPython interning and is not
            # guaranteed to be true for equal strings.
            if optimizer == 'lstm':
                x_0 = np.array([config['x_start']] * dim).reshape(-1, dim)
                print('Using LTSM')
                model = utils.get_trained_model(dim=dim,
                                                kernel=kernel,
                                                loss=loss)
                samples_x, samples_y = self.optimize_lstm(
                    x_0, model, param, no_steps, normalization, input_space)

                method = '%s-%s-%s' % (optimizer, loss, kernel)
            else:
                # Objective closure over the current grid parameters.
                obj_func = lambda x: self.obj_airfoil_lift_drag(
                    x, param, normalization)

                # Start each baseline optimizer at the lower bound of the space.
                x_0 = np.array([x[0] for x in input_space])
                print('Starting %s' % x_0)

                if optimizer == 'random':
                    print('Optimize Randomly')
                    samples_x, samples_y = self.optimize_random(
                        x_0.flatten(), no_steps + 1, obj_func, input_space)
                    samples_x = np.array(samples_x).flatten()
                elif optimizer == 'skopt':
                    samples_x, samples_y = self.optimize_skopt(
                        x_0.flatten(), no_steps + 1, obj_func, input_space)
                elif optimizer == 'basinhopping':
                    print('Using basinhopping')
                    samples_x, samples_y = self.optimize_basinhopping(
                        x_0.flatten(), no_steps + 1, obj_func, input_space)

            results_x[i, :, :] = np.array(samples_x).reshape(1, -1, dim)
            results_y[i, :] = samples_y

        print('Saving result to %s' % (method))
        np.save(
            '%s/normalize-%d/%dd-%s-samples_x' %
            (output_dir, normalization, dim, method), results_x)
        np.save(
            '%s/normalize-%d/%dd-%s-samples_y' %
            (output_dir, normalization, dim, method), results_y)
Пример #32
0
        # print(request)
        try:
            # create a socket to connect to the web server
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(self.config['CONNECTION_TIMEOUT'])
            s.connect((webserver, port))
            s.sendall(request)                   # send request to webserver

            # Relay loop: stream the upstream response back to the client
            # connection until the web server closes its side.
            while 1:
                data = s.recv(self.config['MAX_REQUEST_LEN'])          # receive dataprintout from web server
                if (len(data) > 0):
                    conn.send(data)                   # send to browser
                else:
                    break

            s.close()
            conn.close()
        except socket.error as error_msg:
            self.log("ERROR", client_addr, error_msg)
            # NOTE(review): if socket.socket() itself raised, 's' is unbound
            # here — these guards assume it was assigned.  Verify.
            if s:
                s.close()
            if conn:
                conn.close()
            self.log("WARNING", client_addr, "Peer Reset: " + first_line)

# Script entry point: load proxy settings and serve clients until interrupted.
if __name__ == "__main__":
    config = utils.loadConfig('settings.conf')
    server = Server(config)
    server.listenForClient()
Пример #33
0
        try:
            # create a socket to connect to the web server
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(self.config['CONNECTION_TIMEOUT'])
            s.connect((webserver, port))
            s.sendall(request)  # send request to webserver

            # Relay loop: stream the upstream response back to the client
            # connection until the web server closes its side.
            while 1:
                data = s.recv(self.config['MAX_REQUEST_LEN']
                              )  # receive dataprintout from web server
                if (len(data) > 0):
                    conn.send(data)  # send to browser
                else:
                    break

            s.close()
            conn.close()
        except socket.error as error_msg:
            self.log("ERROR", client_addr, error_msg)
            # NOTE(review): if socket.socket() itself raised, 's' is unbound
            # here — these guards assume it was assigned.  Verify.
            if s:
                s.close()
            if conn:
                conn.close()
            self.log("WARNING", client_addr, "Peer Reset: " + first_line)


# Script entry point: load proxy settings and serve clients until interrupted.
if __name__ == "__main__":
    config = utils.loadConfig('settings.conf')
    server = Server(config)
    server.listenForClient()