def do_fusions(self, args):
    """ Carry out the default set of fusions on the graph

    Modes (mutually exclusive):
      --list          show the available fusions and return
      --apply NAME..  run the named fusions
      --pow2/--scale8 run the corresponding predefined match group
    After matching, dimensions are rebuilt and any quantization that no
    longer verifies against the rewritten graph is discarded.
    """
    self._check_graph()
    if args.list:
        self.ppaged("\n".join(
            ["%s - %s" % (name, desc) for name, desc in get_fusions()]))
        return
    if args.apply:
        fusions = [get_fusion(name) for name in args.apply]
        # BUG FIX: get_fusion() returns None for an unknown name, so the
        # previous `if not fusions:` test only caught an empty --apply list;
        # a misspelled name slipped through and crashed later in
        # fusion.match(). Collect and report the unknown names instead.
        invalid_names = [name for name, fusion in zip(args.apply, fusions)
                         if fusion is None]
        if invalid_names:
            self.perror('fusion %s not found' % ", ".join(invalid_names))
            return
    elif args.pow2:
        fusions = [get_pow2_match_group()]
    elif args.scale8:
        fusions = [get_scale8_match_group()]
    else:
        self.perror(
            "No fusion set selected. Nothing to do. Select --pow2 or --scale8."
        )
        return
    for fusion in fusions:
        fusion.match(self.G)
    self.G.add_dimensions()
    # Fusions can invalidate previously computed quantization records.
    if self.G.quantization and not self.G.quantization.verify_quantization(
            self.G):
        self.G.quantization = None
def test_adjust8(qvww_graph):
    """Import the qvww graph with tensors, adjust tensor order and apply the
    external-bias fusion, rebuilding dimensions after each rewrite."""
    importer = TfliteImporter()
    graph = importer.create_graph(qvww_graph, {'load_tensors': True})
    graph.add_dimensions()
    graph.adjust_order()
    fusion = get_fusion("fuse_external_bias")
    fusion.match(graph)
    graph.add_dimensions()
def do_fusions(self, args):
    """ Carry out the default set of fusions on the graph"""
    # --list only prints the catalogue of available fusions; it needs no graph.
    if args.list:
        listing = texttable.Texttable()
        listing.set_cols_align(['l', 'l'])
        listing.set_max_width(120)
        listing.add_rows([['Name', 'Description']] + get_fusions())
        self.ppaged(listing.draw())
        return
    self._check_graph()
    # Constant compression is saved before rewriting and restored afterwards,
    # whatever happens in between.
    saved_compression = ConstantInputParameters.save_compression_state(self.G)
    try:
        if args.apply:
            fusions = [get_fusion(name) for name in args.apply]
            # get_fusion yields None for unknown names — report them all.
            bad = [name for name, match in zip(args.apply, fusions)
                   if match is None]
            if bad:
                plural = "s" if len(bad) > 1 else ""
                self.perror(f'fusion{plural} {", ".join(bad)} not found')
                return
        elif args.pow2:
            fusions = [get_pow2_match_group()]
        elif args.scale8:
            fusions = [get_scale8_match_group()]
        else:
            self.perror(
                "No fusion set selected. Nothing to do. Select --pow2 or --scale8."
            )
            return
        for fusion in fusions:
            fusion.match(self.G)
        self.G.add_dimensions()
        # Re-run quantization when the graph was quantized and verification
        # reports issues after the rewrite.
        if self.G.quantization and verify_quantization(self.G):
            NewQuantizer(self.G).quantize()
            problems = verify_quantization(self.G)
            if problems:
                self.perror('quantization issue after fusions')
                for problem in problems:
                    self.perror(problem)
    finally:
        ConstantInputParameters.restore_compression_state(
            self.G, saved_compression)
def do_fusions(self, args):
    """ Carry out the default set of fusions on the graph"""
    if args.list:
        # Print the catalogue of available fusions and stop.
        tbl = texttable.Texttable()
        tbl.set_cols_align(['l', 'l'])
        tbl.set_max_width(120)
        tbl.add_rows([['Name', 'Description']] + get_fusions())
        self.ppaged(tbl.draw())
        return
    self._check_graph()
    if args.apply:
        selected = [get_fusion(name) for name in args.apply]
        # get_fusion yields None for unknown names — collect and report them.
        missing = [name for name, found in zip(args.apply, selected)
                   if found is None]
        if missing:
            suffix = "s" if len(missing) > 1 else ""
            self.perror(f'fusion{suffix} {", ".join(missing)} not found')
            return
        fusions = selected
    elif args.pow2:
        fusions = [get_pow2_match_group()]
    elif args.scale8:
        fusions = [get_scale8_match_group()]
    else:
        self.perror(
            "No fusion set selected. Nothing to do. Select --pow2 or --scale8."
        )
        return
    for fusion in fusions:
        fusion.match(self.G)
    self.G.add_dimensions()
    # Drop quantization records that no longer verify against the rewritten
    # graph.
    if self.G.quantization and not self.G.quantization.verify_quantization(
            self.G):
        self.G.quantization = None
def test_activatiofusion(actfusion_graph):
    """Apply the scale8 match group to the activation-fusion graph, quantize
    it from collected activation statistics and render the AT model code."""
    graph = actfusion_graph
    get_fusion('scale8_match_group').match(graph)
    graph.add_dimensions()
    collector = ActivationStatsCollector()
    collector.collect_stats(
        graph, [np.full([10, 10, 2], 1), np.full([10, 10, 2], 1)])
    stats = collector.reduce_stats()
    quantizer = MultQuantizer(stats, force_width=8,
                              quantized_dimension="channel")
    graph.quantization = quantizer.quantize(graph)
    with tempfile.TemporaryDirectory() as tempdir:
        opts = {
            'default_input_location': 'ARG_LOC_L2',
            'default_output_location': 'ARG_LOC_L2',
            'default_global_location': 'ARG_LOC_L3_HFLASH',
            'default_local_location': 'AT_MEM_UNDEF',
            'tensor_directory': tempdir,
        }
        generator = CodeGenerator(graph, DefaultNamingConvension(graph), opts)
        default_template(graph, code_generator=generator)
def load_state(graph_file: str, return_extra=False):
    """Reload a graph from its saved state files.

    Reads the JSON state file (graph_base + STATE_EXTENSION) and, when the
    state says parameters were saved, the pickled tensors
    (graph_base + ARRS_EXTENSION). The graph itself is re-created from the
    original source file recorded in the saved identity, then every
    transform recorded at save time (order adjustment, fusions, change log,
    node extraction) is replayed so the result matches the saved graph.

    Args:
        graph_file: path whose extension is replaced to locate the state
            and tensor files.
        return_extra: when True, also return the saved 'extra' mapping.

    Returns:
        The reconstructed graph, or (graph, extra) when return_extra is True.

    Raises:
        ValueError: when the state file or the arrays file is missing.
    """
    graph_base, _ = os.path.splitext(graph_file)
    state_filename = graph_base + STATE_EXTENSION
    state_file = Path(state_filename)
    LOG.info("loading graph state from %s", state_filename)
    if not state_file.is_file():
        raise ValueError("state file not found")
    with state_file.open('r') as json_fp:
        info_state = json.load(json_fp, cls=StateDecoder)
    # JSON keys come back as strings; convert them back to their real types.
    info_state['info'] = convert_str_to_keys(info_state['info'])
    if 'node_options' in info_state:
        info_state['node_options'] = convert_str_to_keys(
            info_state['node_options'])
    else:
        # Older state files have no node options.
        info_state['node_options'] = {}
    if info_state['load_parameters']:
        # Tensors were saved alongside the state — unpickle them now and
        # attach them at the end once the graph has been rebuilt.
        pickle_filename = graph_base + ARRS_EXTENSION
        LOG.info("loading tensors from %s", pickle_filename)
        arrs_file = Path(pickle_filename)
        if not arrs_file.is_file():
            raise ValueError("arrays file not found")
        with arrs_file.open('rb') as arrs_fp:
            parameters = pickle.load(arrs_fp)
    else:
        parameters = None
    # Here load the original graph and replay the transforms that were done to it
    if info_state['info'].get('has_quantized_parameters'):
        opts = {'load_tensors': True, 'load_quantization': True}
    else:
        opts = {
            'load_tensors': False,
        }
    # Retrieve the identity of the saved state
    identity = GraphIdentity(None)
    identity.identity = info_state['identity']
    LOG.info("loading graph from %s", identity.filename)
    G = create_graph(identity.filename, opts=opts)
    if 'name' in info_state:
        G.name = info_state['name']
    G.add_dimensions()
    # Apply FIXED_ORDER options before adjusting so frozen nodes keep their
    # order through adjust_order below.
    freeze_options = {
        k: v
        for k, v in info_state['node_options'].items()
        if 'FIXED_ORDER' in list(v.set_options)
    }
    set_options(G, freeze_options)
    if identity.is_adjusted:
        # If weights were saved then don't reshape them since it was already done
        # before they were saved
        LOG.info("adjusting dimensions")
        G.adjust_order(reshape_weights=not info_state['load_parameters'])
        G.add_dimensions()
    if identity.is_fused:
        LOG.info("fusing nodes")
        # replay the fusions that were carried out
        for fusion_name in identity.fusions:
            fusion = get_fusion(fusion_name)
            fusion.match(G)
    G.add_dimensions()
    # Attach the saved tensors (no-op when parameters is None).
    set_parameters(G, parameters)
    # Update the identity to match the saved graph
    G.info = info_state['info']
    G.changes.replay(G)
    G.graph_identity = identity
    G.node_options = info_state['node_options']
    # NOTE(review): node_options is passed twice here — presumably the second
    # argument is a separate "all options" parameter; confirm against the
    # set_options signature.
    set_options(G, info_state['node_options'], info_state['node_options'])
    if identity.extracted_step is not None:
        # The saved graph was reduced to a single extracted step — redo that.
        extract_node(G, G.graph_state.steps[identity.extracted_step]['node'])
        G.add_dimensions()
    if return_extra:
        return G, info_state['extra']
    return G