Code example #1
import csv
import sys

from torch import optim

import datasets  # project-local data loading / lookup helpers
import tools     # project-local model and parameter helpers


def init(args):
    """
        Load data, build model, create optimizer, create vars to hold metrics, etc.
    """
    # need to handle really large text fields
    csv.field_size_limit(sys.maxsize)

    # load vocab and other lookups
    desc_embed = args.lmbda > 0
    print("loading lookups...")
    dicts = datasets.load_lookups(args, desc_embed=desc_embed)

    model = tools.pick_model(args, dicts)
    print(model)

    if not args.test_model:
        optimizer = optim.Adam(model.parameters(),
                               weight_decay=args.weight_decay,
                               lr=args.lr)
    else:
        optimizer = None

    params = tools.make_param_dict(args)

    return args, model, optimizer, params, dicts
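
A typical call site for this init() is an argparse-driven training script. A minimal sketch, assuming hypothetical flag names that mirror only the attributes the snippet reads directly (the real project defines many more):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--lmbda", type=float, default=0.0)
parser.add_argument("--lr", type=float, default=1e-3)
parser.add_argument("--weight-decay", type=float, default=0.0)
parser.add_argument("--test-model", default=None)
cli_args = parser.parse_args()

args, model, optimizer, params, dicts = init(cli_args)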
Code example #2
import csv
import sys

from torch import optim

import datasets  # project-local data loading / lookup helpers
import tools     # project-local model and parameter helpers


def init(args):
    """
        Load data, build model, create optimizer, create vars to hold metrics, etc.
    """
    # need to handle really large text fields
    csv.field_size_limit(sys.maxsize)  # sets field size to max available for strings

    #    freq_params = None
    #    if args.samples or args.lmbda > 0:
    #        print("loading code frequencies...")
    #        code_freqs, n = datasets.load_code_freqs(args.data_path, version=args.version)
    #        print("code_freqs:", sorted(code_freqs.items(), key=operator.itemgetter(1), reverse=True)[:10], "n:", n)
    #        freq_params = (code_freqs, n)

    # load vocab and other lookups
    #    desc_embed = args.lmbda > 0
    #    dicts = datasets.load_lookups(args.data_path, args.vocab, Y=args.Y, desc_embed=desc_embed, version=args.version)

    # LOAD VOCAB DICTS
    dicts = datasets.load_vocab_dict(
        args.vocab_path)  # CHANGED args.vocab --> args.vocab_path

    model = tools.pick_model(args, dicts)
    print(model)

    optimizer = optim.Adam(model.params_to_optimize(),
                           weight_decay=args.weight_decay,
                           lr=args.lr)

    params = tools.make_param_dict(args)

    return args, model, optimizer, params, dicts
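
datasets.load_vocab_dict is project-local, so its return shape is not visible here. As a rough sketch of what such a loader usually does, assuming a plain one-token-per-line vocab file (the function name and return shape below are hypothetical, not the project's actual API):

def load_vocab_dict_sketch(vocab_path):
    """Hypothetical stand-in for datasets.load_vocab_dict."""
    ind2w, w2ind = {}, {}
    with open(vocab_path) as f:
        for i, line in enumerate(f):
            token = line.strip()
            ind2w[i] = token   # index -> token, e.g. for decoding
            w2ind[token] = i   # token -> index, e.g. for encoding
    return {"ind2w": ind2w, "w2ind": w2ind}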
Code example #3
import csv
import sys

import datasets  # project-local data loading / lookup helpers
import tools     # project-local model and parameter helpers


def init(args):
    """
        Load data, build model, create optimizer, create vars to hold metrics, etc.
    """
    # need to handle really large text fields
    csv.field_size_limit(sys.maxsize)

    # load vocab and other lookups
    desc_embed = args.lmbda > 0
    print("loading lookups...")
    dicts = datasets.load_lookups(args, desc_embed=desc_embed)

    META_TEST = args.test_model is not None
    model, start_epoch, optimizer = tools.pick_model(args, dicts, META_TEST)
    print(model)

    params = tools.make_param_dict(args)

    return args, model, optimizer, params, dicts, start_epoch
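
Unlike the other variants, this pick_model also hands back start_epoch and the optimizer, which suggests it restores training state from a saved checkpoint. A minimal sketch of that resume pattern in PyTorch (the helper name and checkpoint key names are assumptions, not the project's actual format):

import torch
from torch import optim

def restore_from_checkpoint(model, ckpt_path, lr, weight_decay):
    # Hypothetical helper: rebuild the optimizer, then load both the
    # model and optimizer state from a saved checkpoint dict
    optimizer = optim.Adam(model.parameters(), lr=lr, weight_decay=weight_decay)
    checkpoint = torch.load(ckpt_path, map_location="cpu")
    model.load_state_dict(checkpoint["model"])
    optimizer.load_state_dict(checkpoint["optimizer"])
    start_epoch = checkpoint["epoch"] + 1  # resume after the saved epoch
    return model, start_epoch, optimizer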
Code example #4
import csv
import sys

from torch import optim

import datasets  # project-local data loading / lookup helpers
import tools     # project-local model and parameter helpers


def init(args):
    """
        Load data, build model, create optimizer, create vars to hold metrics, etc.
    """
    # need to handle really large text fields
    csv.field_size_limit(sys.maxsize)  # sets field size to max available for strings

    # LOAD VOCAB DICTS
    dicts = datasets.load_vocab_dict(
        args.vocab_path)  # CHANGED args.vocab --> args.vocab_path

    model = tools.pick_model(args, dicts)
    print(model)

    optimizer = optim.Adam(model.params_to_optimize(),
                           weight_decay=args.weight_decay,
                           lr=args.lr)

    params = tools.make_param_dict(args)

    return args, model, optimizer, params, dicts
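
model.params_to_optimize() is a project-local method, not part of nn.Module. A common reason to define one is to hand the optimizer only trainable parameters, for example when pretrained embeddings are frozen; a sketch under that assumption (the class below is hypothetical):

import torch.nn as nn

class FrozenEmbedModel(nn.Module):
    """Hypothetical model illustrating one plausible params_to_optimize()."""

    def __init__(self, vocab_size, embed_size):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, embed_size)
        self.embed.weight.requires_grad = False  # e.g. frozen pretrained vectors
        self.out = nn.Linear(embed_size, 1)

    def params_to_optimize(self):
        # expose only trainable parameters; older PyTorch versions
        # reject optimizer params with requires_grad=False
        return [p for p in self.parameters() if p.requires_grad]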
Code example #5
import csv
import sys

import torch

import datasets     # project-local data loading / lookup helpers
import tools        # project-local model and parameter helpers
import transformer  # project-local module defining TransformerAttn
# NoamOpt: warmup learning-rate wrapper; a sketch follows this example


def init(args):
    """
        Load data, build model, create optimizer, create vars to hold metrics, etc.
    """
    # need to handle really large text fields
    csv.field_size_limit(sys.maxsize)

    # load vocab and other lookups
    desc_embed = args.lmbda > 0
    print("loading lookups...")
    dicts = datasets.load_lookups(args, desc_embed=desc_embed)

    model = transformer.TransformerAttn(args.Y, args.embed_file, dicts,
                                        args.lmbda, args.gpu, args.embed_size,
                                        args.num_layers, args.heads, args.d_ff,
                                        args.dropout,
                                        args.max_relative_positions)
    if args.gpu:
        model.cuda()
    print(model)

    if not args.test_model:
        # NoamOpt sets the learning rate itself on every step, so the
        # wrapped Adam starts at lr=0 and args.lr / args.weight_decay
        # are not used on this path
        optimizer = NoamOpt(
            100, 2, 4000,
            torch.optim.Adam(model.parameters(),
                             lr=0,
                             betas=(0.9, 0.98),
                             eps=1e-9))
    else:
        optimizer = None

    params = tools.make_param_dict(args)

    return args, model, optimizer, params, dicts
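
NoamOpt is referenced but not defined in this snippet. The call NoamOpt(100, 2, 4000, Adam(..., lr=0, betas=(0.9, 0.98), eps=1e-9)) matches the learning-rate wrapper from The Annotated Transformer, where lr = factor * model_size^-0.5 * min(step^-0.5, step * warmup^-1.5). A sketch under the assumption that the example borrows that wrapper:

class NoamOpt:
    """Warmup learning-rate wrapper in the style of The Annotated Transformer."""

    def __init__(self, model_size, factor, warmup, optimizer):
        self.optimizer = optimizer
        self.model_size = model_size
        self.factor = factor
        self.warmup = warmup
        self._step = 0
        self._rate = 0

    def rate(self, step=None):
        # factor * model_size^-0.5 * min(step^-0.5, step * warmup^-1.5)
        if step is None:
            step = self._step
        return self.factor * (self.model_size ** (-0.5) *
                              min(step ** (-0.5), step * self.warmup ** (-1.5)))

    def step(self):
        # set the scheduled rate on every param group, then step the optimizer
        self._step += 1
        self._rate = self.rate()
        for group in self.optimizer.param_groups:
            group["lr"] = self._rate
        self.optimizer.step()

    def zero_grad(self):
        # pass-through so training loops can treat this like a regular optimizer
        self.optimizer.zero_grad()

With model_size=100, factor=2, and warmup=4000, the rate ramps up linearly for the first 4000 steps and then decays proportionally to step^-0.5.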