Example #1

###############################################################
# AutoTorch HPO
# -------------
#
# In this section, we cover how to define a searchable network architecture,
# convert the training function to be searchable, create the scheduler, and
# then launch the experiment.
#
# Define a Searchable Network Architecture
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Let's define a 'dynamic' network with searchable configurations by simply
# adding the decorator :func:`autotorch.obj`. In this example, we search only
# two arguments, `hidden_conv` and `hidden_fc`, which represent the hidden
# channels in the convolutional layer and the fully connected layer. More
# information about search spaces is available at :meth:`autotorch.space`.

import torch.nn as nn
import torch.nn.functional as F

import autotorch as at


@at.obj(
    hidden_conv=at.Int(6, 12),
    hidden_fc=at.Choice(80, 120, 160),
)
class Net(nn.Module):
    def __init__(self, hidden_conv, hidden_fc):
        super().__init__()
        self.conv1 = nn.Conv2d(1, hidden_conv, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(hidden_conv, 16, 5)
        self.fc1 = nn.Linear(16 * 4 * 4, hidden_fc)
        self.fc2 = nn.Linear(hidden_fc, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = x.view(-1, 16 * 4 * 4)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)
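
###############################################################
# Toy Objective Surface
# ~~~~~~~~~~~~~~~~~~~~~
#
# The toy experiment below searches over a precomputed 2D "accuracy"
# landscape `Z`, which we first visualize as a surface. The grid below is
# an assumed stand-in with a single smooth peak; any 100 x 100 array of
# scores would work for the search.

import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0, 99, 100)
y = np.linspace(0, 99, 100)
X, Y = np.meshgrid(x, y)
# assumed toy landscape: one smooth Gaussian peak on the 100 x 100 grid
Z = 10 * np.exp(-((X - 70) ** 2 + (Y - 30) ** 2) / (2 * 15.0 ** 2))

fig = plt.figure()
ax = fig.add_subplot(projection='3d')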
ax.plot_surface(X, Y, Z, cmap='plasma')
ax.set_zlim(0, np.max(Z) + 2)
plt.show()

###############################################################
# Create Training Function
# ~~~~~~~~~~~~~~~~~~~~~~~~
#
# We can simply define an AutoTorch searchable function with the decorator `at.args`.
# The `reporter` is used to communicate with AutoTorch search and scheduling algorithms.

import autotorch as at


@at.args(
    x=at.Int(0, 99),
    y=at.Int(0, 99),
)
def toy_simulation(args, reporter):
    x, y = args.x, args.y
    reporter(accuracy=Z[y][x])


###############################################################
# Random Search
# ~~~~~~~~~~~~~
#

# the arguments after 'num_cpus' are an assumed completion of this call;
# 'accuracy' matches the metric reported by `toy_simulation` above
random_scheduler = at.scheduler.FIFOScheduler(toy_simulation,
                                              resource={
                                                  'num_cpus': 1,
                                                  'num_gpus': 0},
                                              num_trials=30,
                                              reward_attr='accuracy')
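
###############################################################
# Launch the experiment and collect the best configuration. This is a
# minimal sketch, assuming the scheduler exposes the AutoGluon-style
# `run`/`join_jobs`/`get_best_config`/`get_best_reward` methods:

random_scheduler.run()
random_scheduler.join_jobs()
print('Best config: {}'.format(random_scheduler.get_best_config()))
print('Best reward: {}'.format(random_scheduler.get_best_reward()))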
Example #2

import torch
import autotorch as at
from mmcv import Config

from mmcls.apis import multi_gpu_test, single_gpu_test
from mmcls.datasets import build_dataloader, build_dataset
from mmcls.models import build_classifier

try:
    from mmcv.cnn import get_model_complexity_info
except ImportError:
    raise ImportError('Please upgrade mmcv to >0.6.2')


@at.obj(
    layer0_channels=at.Int(8, 64),
    layer1_channels=at.Int(8, 64),
    last_stride=at.Int(1, 2),
    stage_blocks=at.List(
        at.Int(1, 6),
        at.Int(1, 6),
        at.Int(1, 6),
        at.Int(1, 6),
    ),
    stage_expands=at.List(
        at.Int(2, 6),
        at.Int(2, 6),
        at.Int(2, 6),
        at.Int(2, 6),
    ),
    stage_planes_ratio=at.List(
Example #4

            self.add_module("relu", nn.ReLU())


class GlobalAvgPool2d(nn.Module):
    def __init__(self):
        """Global average pooling over the input's spatial dimensions"""
        super(GlobalAvgPool2d, self).__init__()

    def forward(self, inputs):
        return nn.functional.adaptive_avg_pool2d(inputs,
                                                 1).view(inputs.size(0), -1)


@at.obj(
    bottleneck_ratio=1,  #at.Int(1, 2),
    initial_width=at.Int(16, 320),
    slope=at.Real(24, 128, log=True),
    quantized_param=at.Real(2.0, 3.2),
    network_depth=at.Int(12, 28),
    group_width=at.Int(8, 240),
)
class GenConfg(BaseGen):
    def dump_config(self, config_file=None):
        config = configparser.ConfigParser()
        config['DEFAULT'] = {'bottleneck_ratio': '1'}
        config['net'] = {}
        self.group_width = self.group_width if self.group_width <= self.initial_width \
            else self.initial_width
        self.group_width = int(self.group_width // 8 * 8)
        #self.initial_width = int(self.initial_width // self.group_width * self.group_width)
        for k, v in self.items():
Example #5

import autotorch as at


@at.obj(
    name=at.Choice('auto', 'torch'), )
class myobj:
    def __init__(self, name):
        self.name = name


@at.func(
    framework=at.Choice('mxnet', 'pytorch'), )
def myfunc(framework):
    return framework


@at.args(
    a=at.Real(1e-3, 1e-2, log=True),
    b=at.Real(1e-3, 1e-2),
    c=at.Int(1, 10),
    d=at.Choice('a', 'b', 'c', 'd'),
    e=at.Bool(),
    f=at.List(
        at.Int(1, 2),
        at.Choice(4, 5),
    ),
    g=at.Dict(
        a=at.Real(0, 10),
        obj=myobj(),
    ),
    h=at.Choice('test', myobj()),
    i=myfunc(),
)
def train_fn(args, reporter):
    a, b, c, d, e, f, g, h, i = args.a, args.b, args.c, args.d, args.e, \
        args.f, args.g, args.h, args.i

Example #6

import torch
import autotorch as at
from mmcv import Config

from mmdet.models import build_detector

try:
    from mmcv.cnn import get_model_complexity_info
except ImportError:
    raise ImportError('Please upgrade mmcv to >0.6.2')


@at.obj(
    block=at.Choice('BasicBlock', 'Bottleneck'),
    base_channels=at.Int(8, 64),
    stage_blocks=at.List(
        at.Int(1, 10),
        at.Int(1, 10),
        at.Int(1, 10),
        at.Int(1, 10),
    ),
    stage_planes_ratio=at.List(
        at.Real(1.0, 4.0),
        at.Real(1.0, 4.0),
        at.Real(1.0, 4.0),
    ),
)
class GenConfigBackbone:
    def __init__(self, **kwargs):
        d = {}
Example #7

"""AutoTorch provides a range of search spaces, from simple spaces (Real,
Int, Bool) to nested spaces (Choice, List, Dict). Each search space describes
the set of possible values for a hyperparameter, from which the searcher will
try particular values during hyperparameter optimization. AutoTorch also
enables search spaces in user-defined objects using the decorator `at.obj`
and user-defined functions using the decorator `at.func`.
"""

import autotorch as at
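
###############################################################
# As a quick illustration of the object decorator described above, a small
# user-defined class becomes searchable once decorated; `MyBlock` and its
# `width` argument are illustrative names, not part of AutoTorch:

@at.obj(width=at.Int(8, 64))
class MyBlock:
    def __init__(self, width):
        self.width = width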

###############################################################
# Search Space
# ------------
# Simple Search Space
# ~~~~~~~~~~~~~~~~~~~
# - Integer Space :class:`autotorch.space.Int`
#
# An integer between the lower and upper bounds is chosen when the searcher
# samples a configuration.

a = at.Int(lower=0, upper=10)
print(a)

###############################################################
# Get the default value:
print(a.default)

###############################################################
# Change the default value, which is the first configuration that a random searcher
# :class:`autotorch.searcher.RandomSearcher` will try:

a = at.Int(lower=0, upper=10, default=2)
print(a.default)

###############################################################
# Pick a random value.
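# The `rand` property is assumed here to draw one sample from the space:

print(a.rand)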