import torch
import torchfile  # reads Torch7-serialized .t7 archives

# Model, Linear, ReLU, and Dropout are assumed to be defined in this project.


def getModel(config_file):
    with open(config_file, 'r') as f:
        config = f.readlines()
    # The last two config lines name the archives holding the linear
    # layers' weights and biases.
    weights = torchfile.load(config[-2].strip())
    biases = torchfile.load(config[-1].strip())

    model = Model()
    il = 0
    for desc in config[1:-2]:  # skip the leading line and the two trailing paths
        desc = desc.split()
        if not desc:  # tolerate blank lines
            continue
        if desc[0] == 'linear':
            in_features, out_features = int(desc[1]), int(desc[2])
            layer = Linear(in_features, out_features)
            layer.W = torch.Tensor(weights[il])
            layer.B = torch.Tensor(biases[il]).view(out_features, 1)
            il += 1
        elif desc[0] == 'relu':
            layer = ReLU()
        elif desc[0] == 'dropout':
            layer = Dropout(float(desc[1]), isTrain=False)
        else:
            print(desc[0] + ' layer not implemented!')
            continue  # skip unknown layer types instead of re-adding the previous layer
        model.addLayer(layer)

    return model
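A minimal usage sketch. The config layout below is an assumption inferred from the slicing above: a leading line (skipped by config[1:-2]), one descriptor per layer, then the torchfile paths for the linear layers' weights and biases. All file names are hypothetical.

# model.cfg (hypothetical):
#   3
#   linear 784 256
#   relu
#   linear 256 10
#   weights.t7
#   biases.t7
model = getModel('model.cfg')
out = model.forward(torch.rand(784, 1))  # assumes Model exposes forward(); B is stored as a column vector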
Example #2
def getModel(config_file):
    # No need to specify last_layer, since we never run a backward pass.
    with open(config_file, 'r') as f:
        config = f.readlines()
    Whh = torch.load(config[-5].strip())
    Wxh = torch.load(config[-4].strip())
    Why = torch.load(config[-3].strip())
    Bhh = torch.load(config[-2].strip())
    Bhy = torch.load(config[-1].strip())

    model = Model()
    il = 0
    for desc in config[1:-5]:  # skip the leading line and the five trailing paths
        desc = desc.split()
        if not desc:  # tolerate blank lines
            continue
        if desc[0] == 'rnn':
            in_features, hidden_features, out_features = (
                int(desc[1]), int(desc[2]), int(desc[3]))
            layer = RNN(in_features, hidden_features, out_features)
            layer.Whh = torch.Tensor(Whh[il])
            layer.Wxh = torch.Tensor(Wxh[il])
            layer.Why = torch.Tensor(Why[il])
            layer.Bhh = torch.Tensor(Bhh[il]).view(hidden_features, 1)
            layer.Bhy = torch.Tensor(Bhy[il]).view(out_features, 1)
            il += 1
        else:
            print(desc[0] + ' layer not implemented!')
            continue  # skip unknown layer types instead of re-adding the previous layer
        model.addLayer(layer)

    return model
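This variant reads five trailing lines naming the serialized RNN tensors, loaded with torch.load in the order Whh, Wxh, Why, Bhh, Bhy. A hedged sketch of the assumed config layout; the file names are hypothetical.

# rnn.cfg (hypothetical):
#   1
#   rnn 128 256 64
#   Whh.pt
#   Wxh.pt
#   Why.pt
#   Bhh.pt
#   Bhy.pt
model = getModel('rnn.cfg')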
Example #3
def create_model(config_file):

    model = Model()

    with open(config_file, 'r') as f:
        num_layers = int(f.readline().strip())
        for i in range(num_layers):
            layer_info = f.readline().strip().split()  # split() tolerates repeated whitespace
            layer_type = layer_info[0]

            if layer_type == LINEAR:  # LINEAR/RELU: string constants assumed defined elsewhere in the module
                num_inputs = int(layer_info[1])
                num_outputs = int(layer_info[2])
                model.addLayer(Linear(num_inputs, num_outputs))
            elif layer_type == RELU:
                model.addLayer(ReLU())

        weight_file = f.readline().strip()
        bias_file = f.readline().strip()

        weights = load_file(weight_file)
        biases = load_file(bias_file)

    linear_index = 0
    for layer in model.Layers:
        if isinstance(layer, Linear):
            layer.W = weights[linear_index]
            layer.B = biases[linear_index]
            linear_index += 1

    return model
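Here the whole layout is read inside the with-block: a layer count, that many descriptors, then the weight and bias file names. A minimal usage sketch, assuming LINEAR and RELU are the strings 'linear' and 'relu' and that load_file wraps the project's tensor loader; the file names are hypothetical.

# model.cfg (hypothetical):
#   3
#   linear 784 256
#   relu
#   linear 256 10
#   weights.bin
#   biases.bin
model = create_model('model.cfg')

Example #4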
def createModel(spec_file):
    with open(spec_file, 'r') as f:
        spec = f.readlines()
    model = Model()
    num_layers = 0
    for desc in spec:
        desc = desc.split()
        if not desc:  # tolerate blank lines
            continue
        if desc[0] == 'rnn':
            in_features, hidden_features, out_features = (
                int(desc[1]), int(desc[2]), int(desc[3]))
            layer = RNN(in_features, hidden_features, out_features)
            num_layers += 1
        else:
            print(desc[0] + ' layer not implemented!')
            continue  # skip unknown layer types instead of re-adding the previous one
        model.addLayer(layer)
    return model, (spec, num_layers)
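createModel returns the model together with the raw spec and the RNN layer count, presumably so a separate routine can attach weights afterwards. A usage sketch with a hypothetical spec file (no header line, since the loop walks every line):

# rnn.spec (hypothetical):
#   rnn 128 256 64
model, (spec, num_layers) = createModel('rnn.spec')

Example #5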
def createModel(spec_file):
    with open(spec_file, 'r') as f:
        spec = f.readlines()
    model = Model()
    num_linear_layers = 0
    for desc in spec:
        desc = desc.split()
        if not desc:  # tolerate blank lines
            continue
        if desc[0] == 'linear':
            in_features, out_features = int(desc[1]), int(desc[2])
            layer = Linear(in_features, out_features)
            num_linear_layers += 1
        elif desc[0] == 'relu':
            layer = ReLU()
        elif desc[0] == 'dropout':
            layer = Dropout(float(desc[1]), isTrain=True)
        else:
            print(desc[0] + ' layer not implemented!')
            continue  # skip unknown layer types instead of re-adding the previous one
        model.addLayer(layer)
    return model, (spec, num_linear_layers)
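This linear variant constructs Dropout with isTrain=True, making it the training-time counterpart of the inference loader in the first example. A usage sketch with a hypothetical spec file:

# mlp.spec (hypothetical): one descriptor per line, no header or weight paths.
#   linear 784 256
#   relu
#   dropout 0.5
#   linear 256 10
model, (spec, num_linear_layers) = createModel('mlp.spec')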