# Standard library
import sys

# Third-party
import numpy as np

import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Project-local
from train_utils import SaveInOut, TrainWrap
from official.cv.mobilenetv2.src.mobilenetV2 import MobileNetV2Backbone, MobileNetV2Head, mobilenet_v2
# NOTE(review): MobileNetV1 was referenced below without any import in this
# file; this model-zoo path is the conventional one — verify it.
from official.cv.mobilenetv1.src.mobilenet_v1 import MobileNetV1

# Export a MobileNetV2 training graph (backbone + 10-class head) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 16

# Assemble the full model from its backbone and classification head.
backbone = MobileNetV2Backbone()
head = MobileNetV2Head(input_channel=backbone.out_channels,
                       num_classes=10)
network = mobilenet_v2(backbone, head)

loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
momentum_opt = nn.Momentum(network.trainable_params(),
                           learning_rate=0.01,
                           momentum=0.9,
                           use_nesterov=False)
train_net = TrainWrap(network, loss, momentum_opt)

# Random input and all-zero one-hot labels are enough: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/mobilenetv2_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "mobilenetv2", inputs, labels, network, train_net, sparse=False)
import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from effnet import effnet
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export an EfficientNet training graph (batch of 2, 10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

network = effnet(num_classes=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.0,
             weight_decay=0.0,
             nesterov=True,
             loss_scale=1.0)
train_net = TrainWrap(network, loss, sgd)

# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(2, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([2, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/effnet_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "effnet", inputs, labels, network, train_net)
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""vgg_train_export."""

import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.vgg16.src.vgg import vgg16
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a VGG16 training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

network = vgg16(num_classes=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
momentum_opt = nn.Momentum(network.trainable_params(),
                           learning_rate=0.01,
                           momentum=0.9,
                           use_nesterov=False)
train_net = TrainWrap(network, loss, momentum_opt)

# Random input and all-zero one-hot labels — export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/vgg_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "vgg", inputs, labels, network, train_net)
import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from NetworkInNetwork import NiN
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a Network-in-Network training graph to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

network = NiN(num_classes=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.0,
             weight_decay=5e-4,
             nesterov=True,
             loss_scale=0.9)
train_net = TrainWrap(network, loss, sgd)

# Dummy (batch, 3, 32, 32) input — export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 32, 32), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/nin_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "nin", inputs, labels, network, train_net)
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a MobileNetV1 training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

network = MobileNetV1(10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=1e-2,
             momentum=0.9,
             dampening=0.1,
             weight_decay=0.0,
             nesterov=False,
             loss_scale=1.0)
train_net = TrainWrap(network, loss, sgd)

# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))

export(train_net, inputs, labels,
       file_name="mindir/mobilenetv1_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "mobilenetv1", inputs, labels, network, train_net)
# limitations under the License.
# ============================================================================
"""inceptionv4_train_export"""

import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.xception.src.Xception import Xception
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export an Xception (1000-class) training graph to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

network = Xception(num_classes=1000)
# keep_prob=1.0 keeps every activation, i.e. dropout becomes a no-op.
network.dropout = nn.Dropout(keep_prob=1.0)

loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.0,
             weight_decay=0.0,
             nesterov=True,
             loss_scale=1.0)
train_net = TrainWrap(network, loss, sgd)

batch = 2
# Xception takes 299x299 RGB inputs here; labels are all-zero one-hot vectors.
inputs = Tensor(np.random.randn(batch, 3, 299, 299), mstype.float32)
labels = Tensor(np.zeros([batch, 1000]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/xception_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "xception", inputs, labels, network, train_net)
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.lenet.src.lenet import LeNet5
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a LeNet5 training graph (MSE loss + Adam) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

network = LeNet5()
loss = nn.MSELoss()
adam = nn.Adam(network.trainable_params(),
               learning_rate=1e-2,
               beta1=0.5,
               beta2=0.7,
               eps=1e-2,
               use_locking=True,
               use_nesterov=False,
               weight_decay=0.0,
               loss_scale=0.3)
train_net = TrainWrap(network, loss, adam)

# Dummy (32, 1, 32, 32) input and all-zero labels — export only traces the graph.
inputs = Tensor(np.random.randn(32, 1, 32, 32), mstype.float32)
labels = Tensor(np.zeros([32, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/lenet_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "lenet", inputs, labels, network, train_net, sparse=False)
                    device_target="GPU",
                    save_graphs=False)

# Mini alexnet is designed for MNIST data
batch = 2
number_of_classes = 10
n = AlexNet(phase='test')

loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
optimizer = nn.Adam(n.trainable_params(),
                    learning_rate=1e-3,
                    beta1=0.9,
                    beta2=0.999,
                    eps=1e-8,
                    use_locking=False,
                    use_nesterov=False,
                    weight_decay=0.0,
                    loss_scale=1.0)
net = TrainWrap(n, loss_fn, optimizer)

x = Tensor(np.ones([batch, 1, 32, 32]).astype(np.float32) * 0.01)
label = Tensor(np.zeros([batch, number_of_classes]).astype(np.float32))
export(net,
       x,
       label,
       file_name="mindir/mini_alexnet_train",
       file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "mini_alexnet", x, label, n, net, sparse=False)
# Beispiel #9  (scrape artifact, commented out so the file stays runnable)
# 0
import sys
import os
import numpy as np
from train_utils import SaveInOut, TrainWrap
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

sys.path.append(os.environ['CLOUD_MODEL_ZOO'] + 'official/cv/densenet121/')
#pylint: disable=wrong-import-position
from official.cv.densenet121.src.network.densenet import DenseNet121




# Export a DenseNet121 training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

network = DenseNet121(num_classes=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.001,
             momentum=0.9,
             dampening=0.0,
             weight_decay=0.0,
             nesterov=True,
             loss_scale=0.9)
train_net = TrainWrap(network, loss, sgd)

batch = 2
# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/densenet_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "densenet", inputs, labels, network, train_net)
# Beispiel #10  (scrape artifact, commented out so the file stays runnable)
# 0
import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.shufflenetv2.src.shufflenetv2 import ShuffleNetV2
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a ShuffleNetV2 training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

network = ShuffleNetV2(n_class=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
momentum_opt = nn.Momentum(network.trainable_params(),
                           learning_rate=0.01,
                           momentum=0.9,
                           use_nesterov=False)
train_net = TrainWrap(network, loss, momentum_opt)

# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/shufflenetv2_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "shufflenetv2", inputs, labels, network, train_net)
# Beispiel #11  (scrape artifact, commented out so the file stays runnable)
# 0
import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.googlenet.src.googlenet import GoogleNet
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a GoogleNet training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 2

network = GoogleNet(num_classes=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.0,
             weight_decay=5e-4,
             nesterov=True,
             loss_scale=0.9)
train_net = TrainWrap(network, loss, sgd)

# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/googlenet_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "googlenet", inputs, labels, network, train_net)
# Beispiel #12  (scrape artifact, commented out so the file stays runnable)
# 0
import sys
import numpy as np
from train_utils import SaveInOut, TrainWrap
from official.cv.resnet.src.resnet import resnet50
import mindspore.common.dtype as mstype
from mindspore import context, Tensor, nn
from mindspore.train.serialization import export

# Export a ResNet50 training graph (10 classes) to MINDIR.
context.set_context(mode=context.PYNATIVE_MODE,
                    device_target="GPU",
                    save_graphs=False)

batch = 4

network = resnet50(class_num=10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
sgd = nn.SGD(network.trainable_params(),
             learning_rate=0.01,
             momentum=0.9,
             dampening=0.0,
             weight_decay=0.0,
             nesterov=True,
             loss_scale=1.0)
train_net = TrainWrap(network, loss, sgd)

# Dummy data is sufficient: export only traces the graph.
inputs = Tensor(np.random.randn(batch, 3, 224, 224), mstype.float32)
labels = Tensor(np.zeros([batch, 10]).astype(np.float32))
export(train_net, inputs, labels,
       file_name="mindir/resnet_train", file_format='MINDIR')

if len(sys.argv) > 1:
    SaveInOut(sys.argv[1] + "resnet", inputs, labels, network, train_net)