コード例 #1
0
def build_encoder_layer(params):
  """Build an EncoderLayer whose hyper-parameters are derived from *params*.

  Copies the relevant fields of the flat *params* namespace into a fresh
  HyperParams object, splitting the hidden size evenly across attention heads.
  """
  hp = HyperParams()
  hp.d_f = params.filter
  hp.n_I = params.n_heads
  hp.d_x = params.hidden
  # Per-head dimensions: the hidden size divided across the attention heads.
  per_head = hp.d_x // hp.n_I
  hp.d_v = per_head
  hp.d_r = per_head
  hp.d_k = per_head
  hp.d_q = per_head
  hp.dropout = params.dropout
  update_vis_params(params, hp)
  return EncoderLayer(hp)
コード例 #2
0
ファイル: datasets.py プロジェクト: chiayewken/sutd-materials
def run_test():
    """Smoke-test the BERT-embedding intent meta-loader on the train split."""
    IntentEmbedBertMetaLoader(
        HyperParams(),
        data_split=Splits.train,
        do_test=True,
    )
コード例 #3
0
ファイル: reptile.py プロジェクト: chiayewken/sutd-materials
def get_hparams_intent(algo: str) -> HyperParams:
    """Return the hyper-parameter preset used for intent meta-learning.

    :param algo: name of the meta-learning algorithm to configure.
    """
    config = dict(
        algo=algo,
        bs_inner=1,
        num_shots=5,
        early_stop=True,
        steps_inner=1000,
        steps_outer=50,
    )
    return HyperParams(**config)
コード例 #4
0
def run_intent(root: str):
    """Train a Reptile linear classifier on BERT-embedded intent data.

    :param root: root directory passed through to HyperParams (data location).
    """
    hparams = HyperParams(root=root, steps_outer=500, steps_inner=50, bs_inner=10)
    loaders = {}
    for split in ["train", "val"]:
        loaders[split] = IntentEmbedBertMetaLoader(hparams, split)
    # Input width of the classifier is dictated by the embedding size.
    classifier = LinearClassifier(num_in=loaders[Splits.train].embed_size, hp=hparams)
    ReptileSystem(hparams, loaders, classifier).run_train()
コード例 #5
0
def main(
    path_results_search="results_search.pt",
    path_results_train="results_train.pt",
    dev_run=False,
):
    """Search hyper-parameters, then fully train the best config once.

    The winning config from the search is re-trained with the default
    verbosity and epoch count, and the result is persisted — unless an
    identical configuration has already been trained and saved.
    """
    hparams = search_hparams(path_results_search, dev_run)
    # The search ran with epochs=1 / verbose=False; restore the defaults.
    defaults = HyperParams()
    hparams.verbose = defaults.verbose
    hparams.epochs = defaults.epochs

    manager = ResultsManager(path_results_train)
    if manager.check_hparams_exist(hparams):
        return
    manager.add(CharGenerationSystem(hparams).run_train())
    manager.save()
コード例 #6
0
def run_intent(root: str):
    """Run Reptile training on intent data with mean word embeddings.

    :param root: root directory passed through to HyperParams (data location).
    """
    hp = HyperParams(
        root=root,
        bs_inner=1,
        num_shots=5,
        early_stop=True,
        steps_inner=1000,
        steps_outer=50,
    )
    loader_class = IntentEmbedWordMeanMetaLoader
    loaders = {split: loader_class(hp, split) for split in Splits.get_all()}
    train_loader = loaders[Splits.train]
    # Classifier input width follows the embedding size of the loader.
    classifier = LinearClassifier(num_in=train_loader.embed_size, hp=hp)
    ReptileSystem(hp, loaders, classifier).run_train()
コード例 #7
0
def search_hparams(path, dev_run) -> HyperParams:
    """Grid-search model configurations and return the best hyper-parameters.

    Each grid point is trained for a single epoch; results are cached in the
    ResultsManager at *path* so re-runs skip configurations already tried.

    :param path: path of the results file managed by ResultsManager.
    :param dev_run: forwarded to HyperParams for quick development runs.
    :returns: hyper-parameters of the best recorded result.
    """
    manager = ResultsManager(path)
    grid = dict(
        model=["lstm", "gru", "tcn"],
        n_layers=[1, 2, 3],
        n_hidden=[128, 256],
        bs=[32, 128, 512],
    )
    for kwargs in tqdm(enumerate_grid(grid)):
        hparams = HyperParams(epochs=1, verbose=False, dev_run=dev_run, **kwargs)
        if manager.check_hparams_exist(hparams):
            continue
        result = CharGenerationSystem(hparams).run_train()
        # Drop the model weights so the saved results file stays small.
        result.weights = {}
        manager.add(result)
    manager.save()
    print(manager.get_summary())
    return manager.get_best().hparams
コード例 #8
0
 def from_dict(s: dict):
     """Deserialize a TrainResult from a plain dict.

     The "hparams" entry is rebuilt into a HyperParams instance before the
     TrainResult is constructed.

     :param s: dict representation, e.g. produced by serializing a TrainResult.
     :returns: the reconstructed TrainResult.
     """
     # Work on a shallow copy: the original code replaced s["hparams"] in
     # place, mutating the caller's dict as a side effect.
     data = dict(s)
     data["hparams"] = HyperParams(**data["hparams"])
     return TrainResult(**data)
コード例 #9
0
ファイル: datasets.py プロジェクト: chiseng/DeepLearning
def main():
    """Manual smoke-test for the GloVe-embedding intent meta-loader."""
    IntentEmbedGloveMetaLoader(HyperParams(), data_split=Splits.train)
コード例 #10
0
def run_omniglot(root: str):
    """Train a Reptile convolutional classifier on the Omniglot dataset.

    :param root: root directory passed through to HyperParams (data location).
    """
    hparams = HyperParams(root=root)
    loaders = {}
    for split in ["train", "val"]:
        loaders[split] = OmniglotMetaLoader(hparams, split)
    # num_in=1: the conv classifier takes single-channel input here.
    net = ConvClassifier(num_in=1, hp=hparams)
    ReptileSystem(hparams, loaders, net).run_train()
コード例 #11
0
def build_transformer(params, pad_idx):
  """Assemble a complete Seq2Seq transformer from the flat *params* namespace.

  Derives a HyperParams object (splitting the hidden size evenly across the
  attention heads), builds the embedding, encoder and decoder, and wires them
  into a Seq2Seq model.

  :param params: flat config with input_dim, filter, n_layers, n_heads,
      hidden and dropout attributes.
  :param pad_idx: padding token index forwarded to the Seq2Seq model.
  :returns: the assembled Seq2Seq model.
  """
  hp = HyperParams()
  hp.d_vocab = params.input_dim
  hp.d_pos = 200  # maximum input sequence length
  hp.d_f = params.filter
  hp.n_L = params.n_layers
  hp.n_I = params.n_heads
  hp.d_x = params.hidden  # token embedding dimension
  hp.d_p = params.hidden  # position embedding dimension

  # Per-head dimensions: hidden size divided across the attention heads.
  per_head = hp.d_x // hp.n_I
  hp.d_v = per_head  # value dimension
  hp.d_r = per_head  # role dimension
  hp.d_k = per_head  # key dimension
  hp.d_q = per_head  # query dimension

  hp.dropout = params.dropout
  update_vis_params(params, hp)

  embedding = EmbeddingMultilinearSinusoidal(d_vocab=params.input_dim,
                                             d_x=hp.d_x,
                                             d_r=hp.d_r,
                                             dropout=params.dropout,
                                             max_length=200)
  return Seq2Seq(p=hp,
                 embedding=embedding,
                 encoder=Encoder(p=hp),
                 decoder=Decoder(p=hp),
                 pad_idx=pad_idx)
コード例 #12
0
ファイル: datasets.py プロジェクト: yunyikang/DL_project
def main():
    """Instantiate each meta-loader once as a quick sanity check."""
    for loader_cls in (IntentEmbedBertMetaLoader, OmniglotMetaLoader):
        loader_cls(HyperParams(), data_split=Splits.train)
コード例 #13
0
def main():
    """Train the baseline system with default hyper-parameters."""
    BaselineSystem(HyperParams()).run_train()
コード例 #14
0
ファイル: test_train.py プロジェクト: chiseng/DeepLearning
def run_intent(root: str, model):
    """Run baseline training of *model* on the validation intent split.

    :param root: root directory passed through to HyperParams (data location).
    :param model: model instance handed to the BaselineSystem.
    """
    hparams = HyperParams(root=root)
    val_loader = IntentEmbedBertMetaLoader(hparams, Splits.val)
    BaselineSystem(hparams, val_loader, model).run_train()