def extend(op: Node):
    """Translate the node's IR destination_type string into a numpy dtype stored as 'dst_type'."""
    np_dtype = destination_type_to_np_data_type(op.destination_type)
    op['dst_type'] = np_dtype
def extend(op: Node):
    """Decode destination_type to a numpy dtype and block constant folding of quantized sub-graphs."""
    op['dst_type'] = destination_type_to_np_data_type(op.destination_type)
    # CompressQuantizeWeights emits a constant sub-graph that must NOT be const-folded:
    # Const(u8) -> Convert(to fp) -> (some eltwise operations) -> FakeQuantize
    producer_type = op.in_node().in_node().soft_get('type')
    if producer_type == 'Const':
        op['stop_value_propagation'] = True
def extend(op: Node):
    """Mark the values output for removal on tf/caffe models and decode index_element_type."""
    framework = op.graph.graph['cmd_params'].framework
    if framework in ('tf', 'caffe'):
        op['remove_values_output'] = True
    if op.has_valid('index_element_type'):
        op['index_element_type'] = destination_type_to_np_data_type(op.index_element_type)
def extend(op: Node):
    """Decode output_type into a numpy dtype, skipping nodes from the 'extension' opset."""
    if op.get_opset() == "extension":
        return
    op['output_type'] = destination_type_to_np_data_type(op.output_type)
def extend(op: Node):
    """Replace the IR output_type string with the corresponding numpy dtype."""
    np_dtype = destination_type_to_np_data_type(op.output_type)
    op['output_type'] = np_dtype
def extend(op: Node):
    """Decode output_type to a numpy dtype when the node carries that attribute."""
    if not op.has_valid('output_type'):
        return
    op['output_type'] = destination_type_to_np_data_type(op.output_type)
def extend(op: Node):
    """Set the Parameter node's numpy data_type from element_type and normalize 'shape' to a list.

    Requires the node to carry a valid 'element_type' attribute.
    """
    assert op.has_valid(
        'element_type'
    ), 'Parameter node {} has missed element_type attr!'.format(op.name)
    np_dtype = destination_type_to_np_data_type(op.element_type)
    op['data_type'] = np_dtype
    Extender.attr_to_list(op, 'shape')
def extend(op: Node):
    """Decode classes_index_type and sequence_length_type to numpy dtypes when present."""
    for attr_name in ('classes_index_type', 'sequence_length_type'):
        if op.has_valid(attr_name):
            op[attr_name] = destination_type_to_np_data_type(getattr(op, attr_name))
def __read_old_api_map_element_type(attr, layer_type):
    """Parse an old_api_map_element_type XML attribute into an OldAPIMapElementType record.

    Returns a single-entry dict keyed by ('old_api_map_element_type', version).
    """
    ver = int(attr.attrib['version'])
    legacy_type = destination_type_to_np_data_type(attr.attrib['value'])
    mapping = OldAPIMapElementType(version=ver)
    mapping.set_legacy_type(legacy_type)
    return {('old_api_map_element_type', ver): mapping}