Example #1
"""vgg_train_export."""

import sys
import numpy as np
from train_utils import save_inout, train_wrap
from official.cv.vgg16.src.vgg import vgg16
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

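# Run in PyNative mode on a GPU target; intermediate graph dumping is disabled.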
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

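# Build VGG16 and wrap it together with a softmax cross-entropy loss and a
# Momentum optimizer into a single trainable cell.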
n = vgg16(num_classes=10)
loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
optimizer = nn.Momentum(n.trainable_params(), 0.01, 0.9, use_nesterov=False)
net = train_wrap(n, loss_fn, optimizer)

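# Placeholder input and one-hot-style label tensors; for export only the
# shapes and dtypes matter, not the values.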
x = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
label = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(net, x, label, file_name="mindir/vgg_train", file_format='MINDIR')

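# With an output-path argument, presumably dump the input/label tensors and
# reference outputs for later on-device comparison (save_inout is a
# train_utils helper).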
if len(sys.argv) > 1:
    save_inout(sys.argv[1] + "vgg", x, label, n, net)
Example #2
# The imports and the backbone/head setup below are reconstructed following
# the pattern of Example #4; the effnet and train_utils module paths are
# assumptions.
import sys
import numpy as np
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn, ParameterTuple
from mindspore.train.serialization import export, load_checkpoint
from train_utils import save_t, train_wrap
from effnet import effnet

context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

# TransferNet is the backbone-plus-head cell defined as in Example #4.
BACKBONE = effnet(num_classes=1000)
load_checkpoint("efficient_net_b0.ckpt", BACKBONE)
HEAD = nn.Dense(1000, 10)

# Re-initialize the new head: small random weights, zero bias.
HEAD.weight.set_data(
    Tensor(np.random.normal(0, 0.1, HEAD.weight.data.shape).astype("float32")))
HEAD.bias.set_data(Tensor(np.zeros(HEAD.bias.data.shape, dtype="float32")))

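# Combine the pretrained backbone with the new head; only the head's
# parameters are collected for training, so the backbone stays frozen.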
n = TransferNet(BACKBONE, HEAD)
trainable_weights_list = []
trainable_weights_list.extend(n.head.trainable_params())
trainable_weights = ParameterTuple(trainable_weights_list)
sgd = nn.SGD(trainable_weights,
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.01,
             weight_decay=0.0,
             nesterov=False,
             loss_scale=1.0)
net = train_wrap(n, optimizer=sgd, weights=trainable_weights)

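# Export the fine-tuning graph with placeholder NCHW inputs and labels.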
BATCH_SIZE = 8
X = Tensor(np.random.randn(BATCH_SIZE, 3, 224, 224), mstype.float32)
label = Tensor(np.zeros([BATCH_SIZE, 10]).astype(np.float32))
export(net,
       X,
       label,
       file_name="mindir/effnet_tune_train",
       file_format='MINDIR')

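# Save the input tensor transposed from NCHW to NHWC layout.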
if len(sys.argv) > 1:
    name_prefix = sys.argv[1] + "effnet_tune"
    x_name = name_prefix + "_input1.bin"
    save_t(Tensor(X.asnumpy().transpose(0, 2, 3, 1)), x_name)
Example #3
"""lenet_export."""

import numpy as np
from mindspore import context, Tensor
import mindspore.common.dtype as mstype
from mindspore.train.serialization import export
from lenet import LeNet5
from train_utils import train_wrap

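# Build LeNet-5 in training mode and export on CPU in PyNative mode.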
n = LeNet5()
n.set_train()
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="CPU",
                    save_graphs=False)

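# Placeholder 1x32x32 input batch with sparse int32 labels (MNIST shapes).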
BATCH_SIZE = 32
x = Tensor(np.ones((BATCH_SIZE, 1, 32, 32)), mstype.float32)
label = Tensor(np.zeros([BATCH_SIZE]).astype(np.int32))
net = train_wrap(n)
export(net, x, label, file_name="lenet_tod", file_format='MINDIR')

print("finished exporting")
Example #4
# The imports and the TransferNet class header below are reconstructed; the
# effnet and train_utils module paths are assumptions.
import numpy as np
import mindspore as M
from mindspore.train.serialization import export, load_checkpoint
from effnet import effnet
from train_utils import train_wrap


class TransferNet(M.nn.Cell):
    """Run the backbone, then feed its features to the classification head."""

    def __init__(self, backbone, head):
        super().__init__()
        self.backbone = backbone
        self.head = head

    def construct(self, x):
        x = self.backbone(x)
        x = self.head(x)
        return x


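# Load pretrained EfficientNet-B0 weights into the backbone, then export it
# separately from the head.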
BACKBONE = effnet(num_classes=1000)
load_checkpoint("efficient_net_b0.ckpt", BACKBONE)

M.context.set_context(mode=M.context.PYNATIVE_MODE,
                      device_target="GPU", save_graphs=False)
BATCH_SIZE = 16
X = M.Tensor(np.ones((BATCH_SIZE, 3, 224, 224)), M.float32)
export(BACKBONE, X, file_name="transfer_learning_tod_backbone", file_format='MINDIR')

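# Build a fresh 10-class head on top of the backbone's 1000-dim output and
# initialize it with small random weights and a zero bias.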
label = M.Tensor(np.zeros([BATCH_SIZE, 10]).astype(np.float32))
HEAD = M.nn.Dense(1000, 10)
HEAD.weight.set_data(M.Tensor(np.random.normal(
    0, 0.1, HEAD.weight.data.shape).astype("float32")))
HEAD.bias.set_data(M.Tensor(np.zeros(HEAD.bias.data.shape, dtype="float32")))

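# Only the head's parameters go to the optimizer; training uses the
# backbone's precomputed 1000-dim features as input.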
sgd = M.nn.SGD(HEAD.trainable_params(), learning_rate=0.015, momentum=0.9,
               dampening=0.01, weight_decay=0.0, nesterov=False, loss_scale=1.0)
net = train_wrap(HEAD, optimizer=sgd)
backbone_out = M.Tensor(np.zeros([BATCH_SIZE, 1000]).astype(np.float32))
export(net, backbone_out, label, file_name="transfer_learning_tod_head", file_format='MINDIR')

print("Exported")