Beispiel #1
0
def repl(prompt="spill> "):
    """Run the spill read-eval-print loop until EOF (Ctrl-D)."""
    try:
        while True:
            line = input(prompt)
            if not line:
                continue
            evaluate(parse(tokenize(line)))
    except EOFError:
        # Leave the cursor on a fresh line when the user ends the session.
        print()
Beispiel #2
0
def main(args):
    """Entry point: pick a device, load/validate the config, and evaluate.

    Raises:
        RuntimeError: if no config file is given or it lacks a val dataset.
        ValueError: on an NHWC request for a non-DeepLabV3P model, or an
            empty validation dataset.
    """
    # Run on GPU only when Paddle was built with CUDA and GPUs are visible.
    env = get_sys_env()
    has_gpu = env['Paddle compiled with cuda'] and env['GPUs used']
    paddle.set_device('gpu' if has_gpu else 'cpu')

    if not args.cfg:
        raise RuntimeError('No configuration file specified.')
    cfg = Config(args.cfg)

    # NHWC layout is only supported by the DeepLabv3+ model.
    if args.data_format == 'NHWC':
        if cfg.dic['model']['type'] != 'DeepLabV3P':
            raise ValueError(
                'The "NHWC" data format only support the DeepLabV3P model!')
        cfg.dic['model']['data_format'] = args.data_format
        cfg.dic['model']['backbone']['data_format'] = args.data_format
        # Propagate the layout to every configured loss as well.
        for loss_cfg in cfg.dic['loss']['types']:
            loss_cfg['data_format'] = args.data_format

    val_dataset = cfg.val_dataset
    if val_dataset is None:
        raise RuntimeError(
            'The verification dataset is not specified in the configuration file.'
        )
    elif len(val_dataset) == 0:
        raise ValueError(
            'The length of val_dataset is 0. Please check if your dataset is valid'
        )

    # Log the fully-resolved configuration for reproducibility.
    logger.info('\n---------------Config Information---------------\n'
                + str(cfg)
                + '------------------------------------------------')

    model = cfg.model
    if args.model_path:
        utils.load_entire_model(model, args.model_path)
        logger.info('Loaded trained params of model successfully')

    test_config = get_test_config(cfg, args)
    config_check(cfg, val_dataset=val_dataset)

    evaluate(
        model,
        val_dataset,
        num_workers=args.num_workers,
        is_view=args.is_view,
        save_dir=args.save_dir,
        **test_config)
Beispiel #3
0
def main(args):
    """Entry point: pick a device, validate the config, and run evaluation.

    Raises:
        RuntimeError: if no config file is given or it lacks a val dataset.
        ValueError: if the validation dataset is empty.
    """
    # Run on GPU only when Paddle was built with CUDA and GPUs are visible.
    env = get_sys_env()
    has_gpu = env['Paddle compiled with cuda'] and env['GPUs used']
    paddle.set_device('gpu' if has_gpu else 'cpu')

    if not args.cfg:
        raise RuntimeError('No configuration file specified.')

    cfg = Config(args.cfg)
    val_dataset = cfg.val_dataset
    if val_dataset is None:
        raise RuntimeError(
            'The verification dataset is not specified in the configuration file.'
        )
    elif len(val_dataset) == 0:
        raise ValueError(
            'The length of val_dataset is 0. Please check if your dataset is valid'
        )

    # Log the fully-resolved configuration for reproducibility.
    logger.info('\n---------------Config Information---------------\n'
                + str(cfg)
                + '------------------------------------------------')

    model = cfg.model
    if args.model_path:
        paddleseg.utils.utils.load_entire_model(model, args.model_path)
        logger.info('Loaded trained params of model successfully')

    config_check(cfg, val_dataset=val_dataset)

    evaluate(model,
             val_dataset,
             threshold=args.threshold,
             nms_kernel=args.nms_kernel,
             top_k=args.top_k,
             num_workers=args.num_workers)
Beispiel #4
0
def eval(s):
    """Tokenize, parse, quote, and evaluate source string *s*.

    Returns a list with the result of evaluating each top-level form,
    each in a fresh empty environment.
    """
    def _trace(label, value):
        # Print an intermediate pipeline stage when DEBUG tracing is on.
        if DEBUG:
            print(label, value)

    tokens = tokenization.tokenize(s)
    _trace("tokens:", tokens)

    tokens, tree = parse.parse(tokens)
    _trace("tree:", tree)

    tree = parse.quote(tree)
    _trace("post quote:", tree)
    return [core.evaluate({}, form) for form in tree]
Beispiel #5
0
def interpret(filename):
    """Read *filename* and evaluate its contents, skipping empty files."""
    with open(filename, "rt") as src:
        source = src.read()
    if source:
        evaluate(parse(tokenize(source)))
Beispiel #6
0
    # Persist interactive input history in the user's home directory so it
    # survives across sessions.
    histfile = os.path.join(os.path.expanduser("~"), ".pyclojurehist")
    try:
        readline.read_history_file(histfile)
    except IOError:
        # Pass here as there isn't any history file, so one will be
        # written by atexit
        pass
    import atexit
    # Write the (possibly updated) history back out when the process exits.
    atexit.register(readline.write_history_file, histfile)

parse = lispparser()
lexer = lisplexer()

if __name__ == "__main__":
    # Interactive pylisp REPL: evaluate each non-blank line in a single
    # persistent scope chain rooted at the global scope.
    global_scope = Scope()
    scopechain = [global_scope]
    while True:
        try:
            # Py3 port: raw_input() was renamed to input().
            txt = input("pylisp> ")
            if re.search(r'^\s*$', txt):  # raw string avoids invalid-escape warning
                continue
            else:
                print(tostring(evaluate(parse(txt), scopechain)))
        except EOFError:
            break
        except KeyboardInterrupt:
            # Bare `print` was a no-op expression in Python 3; call it so the
            # user actually gets a newline after Ctrl-C for readability.
            print()
            break
        except Exception as e:  # Py3 port: `except Exception, e` is a syntax error
            print(e)
Beispiel #7
0
 def evalparse(x):
     """Parse source string *x* and evaluate it in the current scope chain."""
     return evaluate(parse(x), scopechain)
Beispiel #8
0
    # NOTE(review): fragment of a larger training script — F, F_1, F_2, F_t,
    # the data loaders, cfg, and the helpers are defined outside this view.
    # Dump the model summaries for inspection before training.
    print(F)
    print(">>> F_1 model <<<")
    print(F_1)
    print(">>> F_2 model <<<")
    print(F_2)
    print(">>> F_t model <<<")
    print(F_t)

    # pre-train on source dataset
    print("=== Pre-train networks ===")
    if cfg.model_trained["pretrain"]:
        # Config flags pre-training as already done, so skip this stage.
        print("pass")
    else:
        pre_train(F, F_1, F_2, F_t, source_data_loader)
        # Evaluate each head (F_1, F_2, F_t) paired with F on the source
        # test split — presumably F is a shared feature extractor; confirm.
        print(">>> evaluate F+F_1")
        evaluate(F, F_1, source_data_loader_test)
        print(">>> evaluate F+F_2")
        evaluate(F, F_2, source_data_loader_test)
        print(">>> evaluate F+F_t")
        evaluate(F, F_t, source_data_loader_test)

    print("=== Adapt F_t ===")
    if cfg.model_trained["domain_adapt"]:
        # Config flags domain adaptation as already done, so skip it.
        print("pass")
    else:
        # generate pseudo labels on target dataset
        print("--- Generate Pseudo Label ---")
        excerpt, pseudo_labels = \
            genarate_labels(F, F_1, F_2, target_dataset, cfg.num_target_init)
        print(">>> Genrate pseudo labels {}".format(len(pseudo_labels)))
Beispiel #9
0
# In[5]:


# Train the encoder/decoder pair on the series, holding out the last 20
# points for evaluation. Teacher forcing is always on for the first 20
# steps, then drops to 50%.
encoder, decoder = train(series[:-20], series[-20:], 
      n_steps=200, attn_model="dot",
      hidden_size=16, n_layers=1, dropout=0, batch_size=128,
      elr=0.001, dlr=0.005, clip=50.0, print_every=20,
      teacher_forcing_ratio=lambda x: 1 if x < 20 else 0.5)


# In[6]:


# Forecast 20 steps ahead for a single target column and compare against
# the held-out tail. preds/attentions look like tensors (.numpy() below);
# the plotting assumes series has 60 rows — TODO confirm.
TARGET_IDX = series.shape[1] - 9
preds, attentions = evaluate(series[:-20, TARGET_IDX:(TARGET_IDX+1)], 20, encoder, decoder)
# Mean squared error of the forecast vs. the held-out ground truth.
print(np.mean(np.square((preds.numpy() - series[-20:, TARGET_IDX:(TARGET_IDX+1)]))))
plt.plot(np.arange(40), series[:-20, TARGET_IDX])
plt.plot(np.arange(40, 60), preds[:, 0].numpy(), "g-")
plt.plot(np.arange(40, 60), series[-20:, TARGET_IDX], "ro")
plt.title("Pure Sine Wave Prediction")


# In[7]:


# Visualize the decoder attention weights for the first batch element.
show_attention(attentions.numpy()[:, 0, :])


# ## Noisy Data
 def evaluate_wrapper(*args, **kwargs):
     """Delegate to core.evaluate and round the result to 17 decimal places."""
     result = core.evaluate(*args, **kwargs)
     return round(result, 17)