# Example #1
    def _encode_block_string(self, block):
        """Serialize *block* into its compact string form.

        Produces an underscore-joined list of tagged fields, e.g.
        ``r2_k3_s11_e6_i32_o16_c0``, optionally followed by ``se<ratio>``,
        ``noskip`` and an activation-function name.
        """
        def _ints_to_csv(values):
            # Promote a scalar to a one-element list so a single kernel
            # size / expand ratio encodes the same way as a list of them.
            if not is_iterable(values):
                values = [values]
            for v in values:
                assert int(
                    v
                ) == v, "I haven't used parsed expand ratio with float values"
            return ','.join(str(int(v)) for v in values)

        from graph.stage import _get_conv_cls
        conv_type = block.conv_type
        if not isinstance(conv_type, int):
            # Translate a conv class reference into its numeric id.
            conv_type = _get_conv_cls(conv_type, get_cls_num=True)

        parts = [
            'r%d' % block.num_repeat,
            'k%s' % _ints_to_csv(block.kernel_size),
            's%d%d' % (block.strides[0], block.strides[1]),
            'e%s' % _ints_to_csv(block.expand_ratio),
            'i%d' % block.input_filters,
            'o%d' % block.output_filters,
            'c%d' % conv_type,
        ]
        se_ratio = block.se_ratio
        if se_ratio is not None and se_ratio > 0:
            parts.append('se%s' % se_ratio)
        if block.id_skip is False:
            parts.append('noskip')
        # act_fn may be absent on older block definitions; only encode a
        # value that is actually set.
        if getattr(block, 'act_fn', None) is not None:
            parts.append(block.act_fn)
        return '_'.join(parts)
# Example #2
    def _get_block_name_and_str(self, block_args):
        """Return the conv-class name and encoded string for *block_args*.

        Args:
            block_args: an already-decoded block-args object, or its string
                encoding (decoded first in that case).

        Returns:
            Tuple of ``(block_name, block_args_str)``.
        """
        # BUG FIX: BlockArgsDecoder must be instantiated — its decode/encode
        # methods are instance methods (see `_encode_block_string(self, block)`),
        # so calling them on the bare class passed `block_args` as `self` and
        # dropped the real argument. Other call sites in this file already use
        # `decoder = BlockArgsDecoder()`.
        decoder = BlockArgsDecoder()
        if isinstance(block_args, str):
            block_args = decoder._decode_blocks_string(block_args)
        block_name = _get_conv_cls(block_args.conv_type).__name__
        block_args_str = decoder._encode_block_string(block_args)

        return block_name, block_args_str
# Example #3
def __build_block(block_args):
    """Construct a block from ``block_args`` alone; GlobalParams is ignored.

    Not intended for general use — this helper exists only for parsing.
    """
    block_cls = _get_conv_cls(block_args.conv_type)
    # A dummy float tensor stands in for the real supergraph-training flag.
    dummy_flag = tf.cast(0, tf.float32)
    return block_cls(
        block_args,
        GlobalParams(is_supergraph_training_tensor=dummy_flag))
# Example #4
def parse_stages_args(tb_path, base_model_args):
    """Resolve searched block arguments from a TensorBoard event file.

    Expands the blocks of every stage in *base_model_args* (in place), then
    replaces each searchable block with the arguments recovered from the
    events at ``tb_path``. Blocks resolved to ``None`` (skip-ops) are
    dropped, as are stages left with no blocks.

    Returns:
        List of the surviving stage-args dicts.
    """
    size_guidance = {
        'compressedHistograms': 10,
        'images': 0,
        'scalars': 100,
        'histograms': 1
    }
    event_acc = EventAccumulator(tb_path, size_guidance)
    event_acc.Reload()

    # Expand compact block specs in place before parsing.
    decoder = BlockArgsDecoder()
    for stage_args in base_model_args.stages_args:
        stage_args['blocks_args'] = decoder.span_blocks_args(
            stage_args['blocks_args'])

    parsed_stages = []
    for stage_idx, stage_args in enumerate(base_model_args.stages_args):
        kept_blocks = []
        for block_idx, block_args in enumerate(stage_args['blocks_args']):
            conv_class = _get_conv_cls(block_args.conv_type)
            if 'Searchable' not in conv_class.__name__:
                # Plain (non-searchable) block: keep as-is.
                parsed = block_args
            elif conv_class in (SearchableMixConvBlock,):
                # Parsing may yield None, which marks a skip-op.
                parsed = __get_searchable_mixconvblock_args(
                    block_args, stage_idx, block_idx, event_acc)
            elif conv_class in (SearchableMBConvBlock,
                                SearchableConvBlock_kxk1x1):
                parsed = __get_searchable_block_args(block_args, stage_idx,
                                                     block_idx, event_acc)
            else:
                raise NotImplementedError

            if parsed is not None:
                kept_blocks.append(parsed)

        # Drop stages whose every block was a skip-op.
        if kept_blocks:
            stage_args['blocks_args'] = kept_blocks
            parsed_stages.append(stage_args)

    return parsed_stages
# Example #5
    # NOTE(review): fragment of a larger function (its `def` and the rest of
    # the 'select_by_rank' branch are outside this view).

    # Filtering by conv type is only meaningful when targeting explicit stages.
    if args.set_conv_type:
        assert args.mode == 'stage_i'

    if args.mode == 'stage_i':
        assert args.set_stage_i is not None

        # --set_stage_i / --set_block_i are comma-separated index lists.
        stage_is = [int(i) for i in args.set_stage_i.split(',')]
        if args.set_block_i is not None:
            block_is = [int(i) for i in args.set_block_i.split(',')]

        for stage_i, stage_args in enumerate(model_args.stages_args):
            if stage_i in stage_is:
                for block_i, block_args in enumerate(stage_args.blocks_args):
                    # Skip blocks whose conv class name doesn't match the
                    # requested type, when a type filter was given.
                    if args.set_conv_type is not None:
                        if args.set_conv_type != _get_conv_cls(block_args.conv_type).__name__:
                            continue
                    if args.set_block_i is not None:
                        if block_i not in block_is:
                            continue

                    # Apply whichever overrides were provided on the CLI.
                    if args.se_ratio is not None:
                        set_and_print(model_args, stage_i, block_i, args, 'se_ratio')
                    if args.act_fn is not None:
                        set_and_print(model_args, stage_i, block_i, args, 'act_fn')

    elif args.mode == 'select_by_rank':
        # Take only the top `set_blocks_num` entries of the ranking.
        ranks = load_json_as_attrdict(args.rank_json)
        for i, rank_stagei_blocki in enumerate(ranks):
            if i >= args.set_blocks_num:
                break
# Example #6
def __build_block(block_args, global_params, input_shape):
    """Create a block and run one forward pass so its variables get built."""
    block_cls = _get_conv_cls(block_args.conv_type)
    block = block_cls(block_args, global_params)
    dummy_input = tf.random.normal(shape=(1, ) + input_shape)
    block(dummy_input)  # forward pass triggers variable creation
    return block