Example #1
def ReLU_OPT_Create_Prototxt(original_prototxt_path, original_model_path,
                             optimized_prototxt_path):
    net_param = caffe_pb2.NetParameter()
    new_net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    conv_types = ['Convolution', 'ConvolutionDepthwise', 'DepthwiseConvolution']
    fusable_types = conv_types + ['InnerProduct', 'Eltwise']
    for layer_idx in range(len(net_param.layer)):
        layer = net_param.layer[layer_idx]
        # Note: for layer_idx == 0 this wraps to the last layer; the first
        # layer is assumed not to be a ReLU.
        pre_layer = net_param.layer[layer_idx - 1]
        if layer.type == 'ReLU' and pre_layer.type in fusable_types:
            # Fuse the ReLU into the preceding layer by rewriting its type.
            if pre_layer.type in conv_types:
                new_net_param.layer[-1].type = 'ConvolutionReLU'
            elif pre_layer.type == 'Eltwise':
                new_net_param.layer[-1].type = 'EltwiseReLU'
            else:
                new_net_param.layer[-1].type = 'InnerProductReLU'

            if layer.top[0] == layer.bottom[0]:
                continue
            else:
                new_net_param.layer[-1].top[0] = layer.top[0]
        else:
            new_net_param.layer.extend([layer])
    new_net_param.name = net_param.name
    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(new_net_param))
    print("ReLU OPT : Create Optimized Prototxt Done.")
    print(bcolors.OKGREEN + "ReLU OPT : Model at " + original_model_path + "." + bcolors.ENDC)
    print(bcolors.OKGREEN + "ReLU OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #2
    def NetInit(self, prototxt, phase='TRAIN'):
        self._net = pb.NetParameter()
        with open(prototxt, 'r') as f:
            Parse(f.read(), self._net)
        self._phase = phase
        self._layers = []
        if not hasattr(self, '_blobs'): self._blobs = {}
        self._params = {}; self._swap_blobs = {}
        self._inputs_to_tensors = {}
        self._costs = []; self._wrts = []
        self._lr_mults = []; self._decay_mults = []

        if len(self._net.input) > 0:
            for input in self._net.input:
                if input not in self._blobs:
                    # create new tensors
                    self._blobs[input] = {'data': Tensor(input).Variable(),
                                          'diff': Tensor(input + '_grad')}
                self._inputs_to_tensors[input] = self._blobs[input]['data']

        for layer in self._net.layer:
            if not self.FilterNet(layer): continue
            self._layers.append(getattr(layers, layer.type + 'Layer')(layer))

        self.Setup()

        for layer in self._net.layer:
            if not self.FilterNet(layer): continue
            self.CheckBackward(layer)
Example #3
def _replace_graph_node_names(graph, mapping):
    # regex matching all mapped names
    all_nodes_regex = re.compile(
        _node_name_regex_tpl.format('|'.join(mapping.keys())))
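    # NOTE: this assumes the mapped names contain no regex metacharacters;
    # otherwise each key would need re.escape() before joining.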

    # old graph text
    graph_text = MessageToString(graph)

    # replace all node names
    obfuscated_graph_text = io.StringIO()
    last_match_end = 0
    while True:
        match = all_nodes_regex.search(graph_text, last_match_end)
        if match is None:
            break

        # prefix
        match_beg, match_end = match.span('name')
        obfuscated_graph_text.write(graph_text[last_match_end:match_beg])
        last_match_end = match_end

        # node name
        node_name = graph_text[match_beg:match_end]
        obfuscated_graph_text.write(mapping.get(node_name, node_name))

    obfuscated_graph_text.write(graph_text[last_match_end:])

    obfuscated_graph = GraphDef()
    Parse(obfuscated_graph_text.getvalue(), obfuscated_graph)
    obfuscated_graph_text.close()
    return obfuscated_graph
Example #4
    def __init__(self, prototxt):
        """Construct a Solver.

        Parameters
        ----------
        prototxt : str
            The path of ``.prototxt`` file.

        Returns
        -------
        Solver
            The solver.

        Examples
        --------
        >>> solver = Solver('solver.prototxt')

        """
        self._param = pb.SolverParameter()
        with open(prototxt, 'r') as f:
            Parse(f.read(), self._param)
        self.ParseUpdateParam()
        self._net = None
        self._test_nets = []
        self._layer_blobs = []
        self._iter = self._current_step = 0
        self._optimizer = None
        self.scalar_writer = sw.ScalarSummary() if root_solver() else None

        self.InitTrainNet()
        self.InitTestNets()
        self.BuildNets()
Example #5
def AFFine_OPT_Create_Prototxt(original_prototxt_path, optimized_prototxt_path):
    net_param = caffe_pb2.NetParameter()
    new_net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    layer_num = len(net_param.layer)

    parameter_layer_type = ['Convolution', 'InnerProduct']
    merge_layer_type = ['Scale', 'BatchNorm']

    for layer_idx in range(0, layer_num):
        layer = net_param.layer[layer_idx]
        if layer.type not in merge_layer_type:
            new_net_param.layer.extend([layer])
        else:
            if layer.type == 'Scale' and len(layer.bottom) != 1:
                # A Scale layer with two bottom blobs cannot be merged into the preceding CONV/IP.
                new_net_param.layer.extend([layer])
            else:
                continue
        if layer.type in parameter_layer_type:
            if layer_idx+1 < layer_num:
                if net_param.layer[layer_idx+1].type in merge_layer_type and len(net_param.layer[layer_idx+1].bottom) == 1:
                    # A Scale layer with two bottom blobs cannot be merged, so require a single bottom blob.
                    if layer.type == 'Convolution':
                        new_net_param.layer[-1].convolution_param.bias_term = True
                    else:
                        new_net_param.layer[-1].inner_product_param.bias_term = True
    new_net_param.name = net_param.name
    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(new_net_param))
    print("BN SCALE OPT : Create Optimized Prototxt Done.")
    print(bcolors.OKGREEN + "BN SCALE OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #6
def Inpt_OPT_New_Weight(original_prototxt_path, original_model_path,
                        optimized_prototxt_path, new_model_path, scale):
    net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    layer_num = len(net_param.layer)
    input_layer_type = ['Data', 'Input', 'AnnotatedData']
    for layer_idx in range(0, layer_num):
        layer = net_param.layer[layer_idx]
        if layer.type not in input_layer_type:
            assert (layer.type == 'Convolution' or layer.type == 'InnerProduct'
                    ), "## ERROR : First Layer MUST BE CONV or IP. ##"
            target_layer_name = layer.name
            break
        else:
            try:
                net_param.layer[layer_idx].transform_param.scale = 1.0
            except Exception:
                print(bcolors.WARNING + "INPUT PREPROCESS (SCALE) OPT : ** WARNING ** NO SCALE found in DATA layer." + bcolors.ENDC)

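    # Fold the preprocessing scale into the first weight layer: scaling the
    # input by s and then applying a linear layer is equivalent to applying
    # the layer with its weights scaled by s.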
    new_net = caffe.Net(original_prototxt_path, str(original_model_path),
                        caffe.TEST)
    new_net.params[target_layer_name][0].data[...] *= scale
    new_net.save(new_model_path)

    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(net_param))

    print("INPUT PREPROCESS (SCALE) OPT : Merge Input Scale Done.")
    print(bcolors.OKGREEN + "INPUT PREPROCESS (SCALE) OPT : Model at " + new_model_path + "." + bcolors.ENDC)
    print(bcolors.OKGREEN + "INPUT PREPROCESS (SCALE) OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #7
def main(unused_args):
  # params
  in_path = FLAGS.input  # type: str
  in_is_text = FLAGS.text_proto  # type: bool
  out_path = FLAGS.output  # type: str
  skip = FLAGS.skip  # type: list
  output_nodes = FLAGS.output_node  # type: list

  # validate param
  if in_path is None or len(in_path) == 0:
    raise RuntimeError("in_path must be provided")

  if out_path is None or len(out_path) == 0:
    raise RuntimeError("output must be provided")

  # read graph
  in_graph = GraphDef()
  if in_is_text:
    with open(in_path, "r") as fp:
      Parse(fp.read(), in_graph)
  else:
    with open(in_path, "rb") as fp:
      in_graph.ParseFromString(fp.read())

  # quantize
  quantized = quantize_graph_def(in_graph, set(skip), output_nodes)

  # write
  with open(out_path, "wb") as fp:
    fp.write(quantized.SerializeToString())
Example #8
def Memo_OPT_Inplace_Memory(original_prototxt_path, original_model_path,
                            optimized_prototxt_path):
    inplace_operation_type = [
        'Scale', 'BatchNorm', 'ReLU', 'PReLU', 'Softmax', 'TanH', 'ELU',
        'Dropout'
    ]
    net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    layer_num = len(net_param.layer)
    parameter_blob_name = []
    blob_pair = {}
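    # blob_pair maps a layer's original top blob name to the earlier blob it now
    # aliases in place, so downstream bottoms can be rewired to the shared name.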
    for layer_idx in range(0, layer_num):
        layer = net_param.layer[layer_idx]
        if layer.type in inplace_operation_type and len(layer.bottom) == 1:
            if layer.bottom[0] in parameter_blob_name:
                if layer.bottom[0] != layer.top[0]:
                    # inplace opt
                    blob_pair[layer.top[0]] = layer.bottom[0]
                    print("MEMORY In-PLACE OPT : " + layer.name + " : Top Blob [" + layer.top[0] + "] => [" + layer.bottom[0] + "]")
                    net_param.layer[layer_idx].top[0] = layer.bottom[0]
                else:
                    # optimized
                    continue
            else:
                if layer.bottom[0] in blob_pair:
                    # change bottom blob name
                    blob_pair[layer.top[0]] = blob_pair[layer.bottom[0]]
                    print("MEMORY In-PLACE OPT : " + layer.name + " : Top Blob [" + layer.top[0] + "] => [" + blob_pair[layer.bottom[0]] + "]")
                    print("MEMORY In-PLACE OPT : " + layer.name + " : Bottom Blob [" + layer.bottom[0] + "] => [" + blob_pair[layer.bottom[0]] + "]")
                    net_param.layer[layer_idx].top[0] = blob_pair[layer.bottom[0]]
                    net_param.layer[layer_idx].bottom[0] = blob_pair[layer.bottom[0]]
                else:
                    raise AssertionError(
                        "MEMORY In-PLACE OPT : **ERROR** Should Not Reach Here. ##")
        else:
            for i in range(0, len(layer.top)):
                parameter_blob_name.append(layer.top[i])
            for i in range(0, len(layer.bottom)):
                if layer.bottom[i] in blob_pair:
                    print("MEMORY In-PLACE OPT : " + layer.name + " : Bottom Blob [" + layer.bottom[i] + "] => [" + blob_pair[layer.bottom[i]] + "]")
                    net_param.layer[layer_idx].bottom[i] = blob_pair[layer.bottom[i]]
                else:
                    continue
    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(net_param))
    # shutil.copyfile(original_model_path, optimized_model_path)
    print("MEMORY In-PLACE OPT : In-place Memory Optimization Done.")
    print(bcolors.OKGREEN + "MEMORY In-PLACE OPT : Model at " + original_model_path + "." + bcolors.ENDC)
    print(bcolors.OKGREEN + "MEMORY In-PLACE OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #9
    def __init__(self, prototxt):
        self._param = pb.SolverParameter()
        with open(prototxt, 'r') as f:
            Parse(f.read(), self._param)
        self._net = None; self._test_nets = []
        self._iter = self._current_step = 0
        self.train = self.tests = self.update = self._updater = None
        self.scalar_writer = sw.ScalarSummary() if root_solver() else None
        self._lr_blobs = []
        self.InitTrainNet()
        self.InitTestNets()
        self.CheckUpdateParam()
Example #10
    def NetInit(self, proto_txt, phase='TRAIN'):
        """Construct a Net by the ``proto_txt`` file.

        Parameters
        ----------
        proto_txt : str
            The path of ``proto_txt`` file.
        phase : str
            The phase, ``TRAIN`` or ``TEST``.

        Returns
        -------
        Net
            The net.

        References
        ----------
        The implementation of `Net_Init(_caffe.cpp, L109)`_.

        """
        self._net = pb.NetParameter()
        with open(proto_txt, 'r') as f:
            Parse(f.read(), self._net)
        self._phase = phase
        self._layers = []
        if not hasattr(self, '_blobs'): self._blobs = {}
        self._params = {}
        self._swap_tensors = {}
        self._inputs_to_tensors = {}
        self._costs = []
        self._wrts = []
        self._lr_mults = []
        self._decay_mults = []

        if len(self._net.input) > 0:
            for input in self._net.input:
                if input not in self._blobs:
                    self._blobs[input] = {
                        'data': Tensor(input).Variable(),
                        'diff': Tensor(input + '_grad')
                    }
                self._inputs_to_tensors[input] = self._blobs[input]['data']

        for layer in self._net.layer:
            if not self.FilterLayer(layer): continue
            self._layers.append(getattr(layers, layer.type + 'Layer')(layer))

        self.Setup()

        for layer in self._net.layer:
            if not self.FilterLayer(layer): continue
            self.CheckBackward(layer)
Example #11
def main(unused_args):
    # params
    in_path = FLAGS.input  # type: str
    in_is_text = FLAGS.text_proto  # type: bool
    quantized = FLAGS.quantized  # type: bool
    out_path = FLAGS.output  # type: str
    out_mapping_path = FLAGS.output_mapping  # type: str
    keeps = [s if ':' not in s else tuple(s.split(':')) for s in FLAGS.keep]

    # validate param
    if in_path is None or len(in_path) == 0:
        raise RuntimeError("in_path must be provided")

    if out_path is None or len(out_path) == 0:
        raise RuntimeError("output must be provided")

    if out_mapping_path is None or len(out_mapping_path) == 0:
        raise RuntimeError("output_mapping must be provided")

    # read graph
    if quantized:
        in_graph = QuantizedGraph()
    else:
        in_graph = GraphDef()
    if in_is_text:
        with open(in_path, "r") as fp:
            Parse(fp.read(), in_graph)
    else:
        with open(in_path, "rb") as fp:
            in_graph.ParseFromString(fp.read())

    # obfuscate
    if quantized:
        obfuscated, mapping = obfuscate_quantized_graph(in_graph, keeps)
    else:
        obfuscated, mapping = obfuscate_graph_def(in_graph, keeps)

    # write graph
    with open(out_path, "wb") as fp:
        fp.write(obfuscated.SerializeToString())

    # write mapping
    with open(out_mapping_path, "w") as fp:
        for k, v in mapping.items():
            fp.write("{}:{}\n".format(k, v))
Example #12
def DrpOut_OPT_Create_Prototxt(original_prototxt_path, original_model_path, optimized_prototxt_path):
    net_param = caffe_pb2.NetParameter()
    new_net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    for layer_idx in range(0, len(net_param.layer)):
        layer = net_param.layer[layer_idx]
        if layer.type == 'Dropout':
            if layer.top[0] == layer.bottom[0]:
                continue
            else:
                new_net_param.layer[-1].top[0] = layer.top[0]
        else:
            new_net_param.layer.extend([layer])
    new_net_param.name = net_param.name
    with open(optimized_prototxt_path, 'wt') as f:
        f.write(MessageToString(new_net_param))
    print("DROPOUT OPT : Create Optimized Prototxt Done.")
    print(bcolors.OKGREEN + "DROPOUT OPT : Model at " + original_model_path + "." + bcolors.ENDC)
    print(bcolors.OKGREEN + "DROPOUT OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
Example #13
def Inpt_OPT_New_Bias(original_prototxt_path, original_model_path,
                      optimized_prototxt_path, new_model_path, mean_vector,
                      scale, H, W, input_channel):
    net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)
    layer_num = len(net_param.layer)

    new_net_param = caffe_pb2.NetParameter()
    new_net_param.name = 'calc_new_bias'
    new_net_param.layer.add()
    new_net_param.layer[-1].name = "data"
    new_net_param.layer[-1].type = 'Input'
    new_net_param.layer[-1].top.append('data')
    new_net_param.layer[-1].input_param.shape.add()
    new_net_param.layer[-1].input_param.shape[-1].dim.append(1)
    new_net_param.layer[-1].input_param.shape[-1].dim.append(
        int(input_channel))
    new_net_param.layer[-1].input_param.shape[-1].dim.append(int(H))
    new_net_param.layer[-1].input_param.shape[-1].dim.append(int(W))

    target_blob_name = ''
    target_layer_name = ''
    input_layer_type = ['Data', 'Input', 'AnnotatedData']
    for layer_idx in range(0, layer_num):
        layer = net_param.layer[layer_idx]
        if layer.type not in input_layer_type:
            assert (layer.type == 'Convolution' or layer.type == 'InnerProduct'
                    ), "## ERROR : First Layer MUST BE CONV or IP. ##"
            new_net_param.layer.extend([layer])
            if layer.type == 'Convolution':
                try:
                    assert (
                        new_net_param.layer[-1].convolution_param.pad[0] == 0
                    ), '## ERROR : MEAN cannot be merged into CONV with padding > 0. ##'
                except IndexError:
                    # padding not set, so it defaults to 0
                    pass
                target_blob_name = layer.top[0]
                target_layer_name = layer.name
            break

    new_proto_name = './tmpfile.prototxt'
    with open(new_proto_name, 'wt') as f:
        f.write(MessageToString(new_net_param))
    caffe.set_mode_cpu()
    net = caffe.Net(new_proto_name, str(original_model_path), caffe.TEST)

    mean_array = mean_vector * (-1.0) * scale
    mean_array = mean_array.reshape(input_channel, 1)
    mean_array = np.tile(mean_array,
                         (1, H * W)).reshape(1, input_channel, H, W)

    os.remove(new_proto_name)

    net.blobs['data'].data[...] = mean_array
    net.forward()
    mean_data = net.blobs[target_blob_name].data[...]
    mean_data = mean_data.reshape(mean_data.shape[1],
                                  mean_data.shape[2] * mean_data.shape[3])
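    # Forwarding (-mean * scale) through the first layer gives a spatially
    # constant per-channel response; its spatial average is exactly the bias
    # correction that replaces subtracting the mean at the input.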
    new_bias = np.mean(mean_data, 1)
    print("INPUT PREPROCESS (SUB MEAN) OPT : Calc New Bias Done.")

    caffe.set_mode_cpu()
    net = caffe.Net(original_prototxt_path, str(original_model_path),
                    caffe.TEST)
    if len(net.params[target_layer_name]) == 2:
        # with bias
        net.params[target_layer_name][1].data[...] += new_bias[...]
        net.save(new_model_path)
        try:
            shutil.copyfile(original_prototxt_path, optimized_prototxt_path)
        except shutil.SameFileError:
            # same file; no need to copy
            pass
        print("INPUT PREPROCESS (SUB MEAN) OPT : Merge Mean Done.")
        print(bcolors.OKGREEN + "INPUT PREPROCESS (SUB MEAN) OPT : Model at " + new_model_path + "." + bcolors.ENDC)
        print(bcolors.OKGREEN + "INPUT PREPROCESS (SUB MEAN) OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
        print(bcolors.WARNING + "INPUT PREPROCESS (SUB MEAN) OPT : ** WARNING ** Remember to set mean values to zero before test !!!" + bcolors.ENDC)

    else:
        net_param = caffe_pb2.NetParameter()
        with open(original_prototxt_path, 'rt') as f:
            Parse(f.read(), net_param)
        layer_num = len(net_param.layer)
        for layer_idx in range(0, layer_num):
            layer = net_param.layer[layer_idx]
            if layer.name == target_layer_name:
                if layer.type == 'Convolution':
                    net_param.layer[
                        layer_idx].convolution_param.bias_term = True
                else:
                    net_param.layer[
                        layer_idx].inner_product_param.bias_term = True
                break
        with open(optimized_prototxt_path, 'wt') as f:
            f.write(MessageToString(net_param))

        new_net = caffe.Net(optimized_prototxt_path, caffe.TEST)
        for param_name in net.params.keys():
            for i in range(0, len(net.params[param_name])):
                new_net.params[param_name][i].data[
                    ...] = net.params[param_name][i].data[...]
        new_net.params[target_layer_name][1].data[...] = new_bias[...]
        new_net.save(new_model_path)
        print("INPUT PREPROCESS (SUB MEAN) OPT : Merge Mean Done.")
        print(bcolors.OKGREEN + "INPUT PREPROCESS (SUB MEAN) OPT : Model at " + new_model_path + "." + bcolors.ENDC)
        print(bcolors.OKGREEN + "INPUT PREPROCESS (SUB MEAN) OPT : Prototxt at " + optimized_prototxt_path + "." + bcolors.ENDC)
        print(bcolors.WARNING + "INPUT PREPROCESS (SUB MEAN) OPT : ** WARNING ** Remember to set mean values to zero before test !!!" + bcolors.ENDC)
Example #14
import numpy as np
import sys
from sys import argv
from utils import *
from config import *

sys.path.append(pycaffe_path)

import caffe
from caffe.proto import caffe_pb2
from google.protobuf.text_format import Parse, MessageToString

## Create BN update prototxt
net_param = caffe_pb2.NetParameter()
with open(full_precision_prototxt, 'rt') as f:
    Parse(f.read(), net_param)
layer_num = len(net_param.layer)

parameter_layer_idx = []

for layer_idx in range(layer_num):
    layer = net_param.layer[layer_idx]
    if layer.type == 'Convolution' or layer.type == 'InnerProduct' or layer.type == 'DepthwiseConvolution':
        parameter_layer_idx.append(layer_idx)

int8_layers = [parameter_layer_idx[0], parameter_layer_idx[-1]]

new_net_param = caffe_pb2.NetParameter()
new_net_param.name = net_param.name

for layer_idx in range(layer_num):
Example #15
from .cfg_pb2 import *
from google.protobuf.text_format import Parse
from google.protobuf.json_format import MessageToJson, MessageToDict
import json

with open('scripts/cfg.json', 'r') as f:
    
    cfg: ContextFreeGrammar = ContextFreeGrammar()
    Parse(f.read(), cfg)
    # print(cfg)

    cfg_dict = MessageToDict(cfg)
    # print(cfg)


    nodes = []

    x_axis = 10

    y_axis_terminal = 100
    y_axis_normal = 0


    for node in cfg_dict['nodes']:
        # print(node)
        n_node = {}
        n_node['id'] = str(node['canonicalId'])
        x_loc = x_axis
        y_loc = y_axis_normal
        if 'possibleValues' in node:
            n_node['title'] = ' '.join(node['possibleValues'])
Example #16
def Main():
    parser = ArgumentParser()
    parser.add_argument('--build_path', '-C', type=Path, required=True)
    parser.add_argument('--build',
                        '-B',
                        action='store_true',
                        help='Whether the API list should be rebuilt')
    parser.add_argument('--target_path', '-t', type=Path, default=Path.cwd())
    parser.add_argument('--verbose', '-v', action='store_true')
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Git commit after a successful extraction of the API list')

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    if not args.build_path.exists():
        logging.critical('Build directory does not exist. Checked {}'.format(
            args.build_path))
        sys.exit(1)

    if not args.target_path.exists():
        logging.critical('Target path does not exist')
        sys.exit(2)

    if args.build:
        logging.info(
            'Invoking autoninja to build {}'.format(API_LIST_BUILD_TARGET))
        check_call(['autoninja', API_LIST_BUILD_TARGET],
                   cwd=args.build_path.as_posix())

    api_list_file = args.build_path.joinpath(API_LIST_FILE)
    if not api_list_file.exists():
        logging.critical(
            'The API list file was not found at {}'.format(api_list_file))
        sys.exit(3)

    if not api_list_file.is_file():
        logging.critical('Unexpected file type for {}'.format(api_list_file))
        sys.exit(4)

    commit_hash = str(check_output(['git', 'rev-parse', 'HEAD'],
                                   cwd=args.build_path),
                      encoding='utf-8').strip()
    commit_position = str(check_output(
        ['git', 'footers', '--position', 'HEAD'],
        cwd=args.build_path.parent.parent),
                          encoding='utf-8').strip()

    target_file = args.target_path.joinpath(API_LIST_TARGET_CSV_FILE)
    with api_list_file.open('r') as f:
        snapshot = Snapshot()
        Parse(f.read(), snapshot)

    if not snapshot.chromium_revision:
        snapshot.chromium_revision = commit_hash
    CanonicalizeSnapshot(snapshot)
    WriteSnapshotAsCsv(snapshot, target_file)

    with args.target_path.joinpath(API_LIST_FILE).open('w') as f:
        f.write(MessageToString(snapshot, as_utf8=True, indent=2))

    if args.commit:
        git_status = str(check_output(['git', 'status', '--porcelain=v1'],
                                      cwd=args.target_path),
                         encoding='utf-8').splitlines()
        should_commit = True

        if len(git_status) == 0:
            logging.info('No change to API list')
            should_commit = False

        for s in git_status:
            if s.split()[1] not in [API_LIST_FILE, API_LIST_TARGET_CSV_FILE]:
                logging.error(
                    f'Unexpected changes found in the repository:"{s}"'
                    '. Those changes should be committed separately from the '
                    'ones introduced by this tool')
                should_commit = False
                break
        if should_commit:
            commit_message = f'''Blink API list update from {commit_position!s}

Source Chromium revision is https://crrev.com/{commit_hash!s}

See https://github.com/asankah/chromium-api-list for details on how the
list was generated.
'''
            check_call([
                'git', 'commit', '-m', commit_message, '--',
                API_LIST_TARGET_CSV_FILE, API_LIST_FILE
            ],
                       cwd=args.target_path)
Example #17
from utils import *
from config import *

sys.path.append(pycaffe_path)

import caffe
from caffe.proto import caffe_pb2
from google.protobuf.text_format import Parse, MessageToString

print(">> start activation quantization ...")

caffe.set_mode_cpu()

net_param = caffe_pb2.NetParameter()
with open(act_int8_prototxt, 'rt') as f:
    Parse(f.read(), net_param)
layer_num = len(net_param.layer)

relu_top = []
relu_layer_idx = []

for layer_idx in range(layer_num):
    layer = net_param.layer[layer_idx]
    if layer.type == 'ReLU':
        relu_top.append(layer.top[0])
        relu_layer_idx.append(layer_idx)

del net_param

net = caffe.Net(act_int8_prototxt, BN_quantized_caffemodel, caffe.TRAIN)
net.forward()
Example #18
def AFFine_OPT_Create_Caffemodel(original_prototxt_path, original_model_path,
                                 optimized_prototxt_path, new_model_path):
    net_param = caffe_pb2.NetParameter()
    with open(original_prototxt_path, 'rt') as f:
        Parse(f.read(), net_param)

    param_layer_type_list = [layer.type for layer in net_param.layer]
    param_layer_name_list = [layer.name for layer in net_param.layer]
    target_layer_type = ['Convolution', 'InnerProduct']
    merge_layer_type = ['Scale', 'BatchNorm']

    caffe.set_mode_cpu()
    net = caffe.Net(original_prototxt_path, original_model_path, caffe.TEST)
    new_net = caffe.Net(optimized_prototxt_path, caffe.TEST)
    for param_name in new_net.params.keys():
        param_layer_idx = param_layer_name_list.index(param_name)
        param_layer_type = param_layer_type_list[param_layer_idx]
        if param_layer_type not in target_layer_type:
            # OTHER LAYERS
            for i in range(0, len(net.params[param_name])):
                new_net.params[param_name][i].data[
                    ...] = net.params[param_name][i].data[...]
        else:
            kernel_num = net.params[param_name][0].num
            new_net.params[param_name][0].data[
                ...] = net.params[param_name][0].data[...]
            if len(net.params[param_name]) == 2:
                new_net.params[param_name][1].data[
                    ...] = net.params[param_name][1].data[...]
            #else:
            #    print new_net.params[param_name][1].data[...]
            if param_layer_idx + 1 < len(param_layer_type_list):
                for i in range(param_layer_idx + 1,
                               len(param_layer_type_list)):
                    # CHECK : CONV + BN +SCALE / CONV + BN / IP + ...
                    affine_layer_type = param_layer_type_list[i]
                    affine_layer_name = param_layer_name_list[i]
                    if affine_layer_type in merge_layer_type:
                        # MERGE BN/SCALE
                        if affine_layer_type == "Scale":
                            if len(net_param.layer[i].bottom) >= 2:
                                # NOT In-place Scale
                                try:
                                    for j in range(len(net.params[affine_layer_name])):
                                        new_net.params[affine_layer_name][j].data[...] = \
                                            net.params[affine_layer_name][j].data[...]
                                except KeyError:
                                    # layer has no parameters to copy
                                    break
                            else:
                                # In-place Scale
                                scale = net.params[affine_layer_name][0].data
                                if len(net.params[affine_layer_name]) == 2:
                                    bias = net.params[affine_layer_name][
                                        1].data
                                else:
                                    bias = 0.0 * scale
                                for k in range(0, kernel_num):
                                    new_net.params[param_name][0].data[
                                        k] = new_net.params[param_name][
                                            0].data[k] * scale[k]
                                    new_net.params[param_name][1].data[
                                        k] = new_net.params[param_name][
                                            1].data[k] * scale[k] + bias[k]
                        elif affine_layer_type == "BatchNorm":
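                            # Caffe's BatchNorm stores the running mean in
                            # params[0], the running variance in params[1], and
                            # a moving-average factor in params[2]; the stored
                            # statistics are divided by that factor to recover
                            # the true mean and variance.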
                            scale = net.params[affine_layer_name][2].data[0]
                            if scale != 0:
                                mean = net.params[affine_layer_name][
                                    0].data / scale
                                std = np.sqrt(
                                    net.params[affine_layer_name][1].data /
                                    scale)
                            else:
                                mean = net.params[affine_layer_name][0].data
                                std = np.sqrt(
                                    net.params[affine_layer_name][1].data)
                            for k in range(0, kernel_num):
                                new_net.params[param_name][0].data[
                                    k] = new_net.params[param_name][0].data[
                                        k] / std[k]
                                new_net.params[param_name][1].data[k] = (
                                    new_net.params[param_name][1].data[k] -
                                    mean[k]) / std[k]
                        else:
                            # TODO: other affine layer types are not supported yet.
                            raise AssertionError(
                                "## TODO ## : Other layers haven't been supported yet. ##")
                    else:
                        # NOT BN or SCALE, then BREAK
                        break
            else:
                # LAST LAYER, then BREAK
                break
    new_net.save(new_model_path)
    print(bcolors.OKGREEN + "BN SCALE OPT : Model at " + new_model_path + "." + bcolors.ENDC)
Example #19
def load_config(path, proto):
    return Parse(read(path), proto())
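
These examples share one pattern: construct an empty generated message, fill it from text with Parse, and serialize back with MessageToString. A minimal self-contained sketch of that round trip, assuming a hypothetical generated module config_pb2 with a MyConfig message:

from google.protobuf.text_format import Parse, MessageToString
from config_pb2 import MyConfig  # hypothetical generated module

def load_config(path):
    # Parse fills the message in place and also returns it.
    with open(path, 'r') as f:
        return Parse(f.read(), MyConfig())

def save_config(msg, path):
    with open(path, 'w') as f:
        f.write(MessageToString(msg))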