def print_attributes():
    """Return a numbered, one-per-line listing of the custom layer attributes.

    Reads the 'customParams' parameter (a sequence of tuples whose second
    element is the attribute name — presumably (framework_name, ir_name, ...);
    verify against the caller). Returns '' when 'customParams' was never set.
    """
    if not InteractiveModule.was_param_set('customParams'):
        return ""
    attrs = InteractiveModule.get_param('customParams')
    # join + enumerate instead of repeated string concatenation (quadratic)
    return "".join(
        " {}. {}\n".format(idx, attr[1])
        for idx, attr in enumerate(attrs, start=1)
    )
def print_ir_attrs():
    """Return a one-line summary of the attributes that will appear in the IR.

    Produces "Parameters included in IR: a, b, ...\n" built from the second
    element of each 'supportedAttrs' entry, or '' when the parameter was
    never set.
    """
    if not InteractiveModule.was_param_set('supportedAttrs'):
        return ''
    pieces = ["Parameters included in IR: "]
    for attr in InteractiveModule.get_param('supportedAttrs'):
        pieces.append('{}, '.format(attr[1]))
    pieces.append('\n')
    return ''.join(pieces)
def set_supportedattrs(param, answer):
    """Interactively collect the attributes the user wants in the IR.

    `answer` is the first user reply: a 1-based index into 'customParams',
    or 'q' to stop. Further indices are read from stdin until 'q'. The
    chosen attributes are stored as 'supportedAttrs' (under `param`); every
    remaining custom attribute is stored as 'internalAttrs'.

    NOTE(review): a non-integer reply raises ValueError here — no input
    validation in the original either.
    """
    if param != 'supportedAttrs':
        log.error("Internal error")
    attrs = InteractiveModule.get_param('customParams')
    selected = []
    while answer.lower() != 'q':
        # replies are 1-based positions in the customParams list
        selected.append(attrs[int(answer) - 1])
        answer = input()
    InteractiveModule.set_answer_to_param_standard(param, selected)
    # everything the user did not pick is kept as an internal attribute
    internal = [attr for attr in attrs if attr not in selected]
    InteractiveModule.set_answer_to_param_standard('internalAttrs', internal)
def set_opname_as_layername(param, answer):
    """Record whether the operation name should mirror the layer name.

    Stores `answer` as the boolean 'isLayerNameOpName'. On a yes-answer
    ('y'/'yes', case-insensitive) 'opName' is set to the current 'name'
    parameter; otherwise any previously stored 'opName' is reset.
    """
    if param != 'isLayerNameOpName':
        log.error("Internal error")
    InteractiveModule.set_answer_to_param_bool('isLayerNameOpName', answer)
    if answer.lower() in ('y', 'yes'):
        InteractiveModule.set_answer_to_param_standard(
            'opName', InteractiveModule.get_param('name'))
    else:
        InteractiveModule.reset_param('opName')
def check_plugin_name(param_name, answer):
    """Validate and store the target plugin name.

    Re-prompts on stdin until `answer` is exactly 'cpu' or 'cldnn'
    (case-sensitive, matching the original), then records it under
    `param_name`.
    """
    while answer not in ('cldnn', 'cpu'):
        # fixed typo in the prompt: "on of" -> "one of"
        print("Incorrect plugin name, please choose one of [cpu, cldnn]")
        answer = input()
    InteractiveModule.set_answer_to_param_standard(param_name, answer)
def is_layername_set_and_opname_not_set():
    """True when a layer 'name' is known but no 'opName' was chosen yet."""
    op_name_missing = not InteractiveModule.was_param_set('opName')
    return op_name_missing and InteractiveModule.was_param_set('name')
def is_not_set_opname():
    """True while the 'opName' parameter has not been answered yet."""
    opname_already_set = InteractiveModule.was_param_set('opName')
    return not opname_already_set
def is_gpu_and_supported_not_empty():
    """True when targeting the GPU (clDNN) plugin and attributes may exist.

    Guard-clause form of the original and/or expression:
      * not clDNN                      -> False
      * 'supportedAttrs' answered      -> non-empty?
      * else 'customParams' answered   -> non-empty?
      * else (nothing answered yet)    -> True
    """
    if InteractiveModule.get_param('plugin') != 'cldnn':
        return False
    if InteractiveModule.was_param_set('supportedAttrs'):
        return len(InteractiveModule.get_param('supportedAttrs')) != 0
    if InteractiveModule.was_param_set('customParams'):
        return len(InteractiveModule.get_param('customParams')) != 0
    return True
# Generated file for Caffe layer extractor for Model Optimizer # # You need to modify this file if you need several attributes of the layer # to appear in the IR in different format than the default one. Then you # need to implement pre-processing logic here. # # Refer to the section "Extending Model Optimizer with New Primitives" in # OpenVINO* documentation (either online or offline in # <INSTALL_DIR>/deployment_tools/documentation/docs/index.html an then navigate # to the corresponding section). # =============================================================================== # [[[cog from ext_gen.interactive_module import InteractiveModule is_pythonic = InteractiveModule.get_param('isPythonic') name = InteractiveModule.get_param('name') op_name = InteractiveModule.get_param('opName') all_copy = InteractiveModule.get_param('allCopy') custom_params = InteractiveModule.get_param('customParams') attrs = 'mapping_rule = {}' if len(custom_params): attrs = ''' update_attrs = {{ {params} }} '''.format(params='\n'.join([ ' \'{}\': param.{},'.format(c[0], c[1]) for c in custom_params
# 2. lessen number of attributes to appear in the IR # (specify such a list in backend_attrs() method) # 3. handle the layer which output blob is different to the input one # (implement your own static method infer() and set it as attribute in # __init__() dictionary) # # Refer to the section "Extending Model Optimizer with New Primitives" in # OpenVINO* documentation (either online or offline in # <INSTALL_DIR>/deployment_tools/documentation/docs/index.html an then navigate # to the corresponding section). # =============================================================================== # [[[cog from ext_gen.interactive_module import InteractiveModule is_pythonic = InteractiveModule.get_param('isPythonic') op = InteractiveModule.get_param('opName') op_name = op + 'Python' if is_pythonic else op isCustom = InteractiveModule.get_param('changeShape') hasInfer = InteractiveModule.get_param('hasInfer') params = '' if hasattr(InteractiveModule.params, 'customParams') and InteractiveModule.params['customParams'][1]: for p in InteractiveModule.get_param('customParams'): params += "{}={},\n".format(p[1], str(p[2])) attrs = InteractiveModule.get_param('supportedAttrs') if attrs and len(attrs) > 0: sup_attrs = '''
def check_is_change_shape():
    """Whether the user opted to change the output shape ('changeShape')."""
    change_shape = InteractiveModule.get_param('changeShape')
    return change_shape
def check_is_not_pythonic():
    """True when the layer is NOT a pythonic (Python-implemented) layer."""
    is_pythonic = InteractiveModule.get_param('isPythonic')
    return not is_pythonic
def check_not_internalattrs_and_not_pythonic():
    """True when 'internalAttrs' is unanswered, the layer is not pythonic,
    and 'customParams' is either unanswered or non-empty."""
    if InteractiveModule.was_param_set('internalAttrs'):
        return False
    if InteractiveModule.get_param('isPythonic'):
        return False
    if InteractiveModule.was_param_set('customParams'):
        return len(InteractiveModule.get_param('customParams')) != 0
    return True
def check_set_customparams_and_not_pythonic():
    """True when 'customParams' was answered, is non-empty, and the layer is
    not a pythonic layer. Same short-circuit evaluation order as before."""
    if not InteractiveModule.was_param_set('customParams'):
        return False
    if len(InteractiveModule.get_param('customParams')) == 0:
        return False
    return not InteractiveModule.get_param('isPythonic')
def driver(output_dir, is_mo_caffe_ext_gen, is_mo_mxnet_ext_gen,
           is_mo_tf_ext_gen, is_mo_op_gen, is_ie_cpu_gen,
           is_ie_gpu_gen=False, is_from_config=False):
    """Generate the requested Model Optimizer / Inference Engine stub files.

    For each enabled flag this: (1) optionally runs the interactive
    description dialog (skipped when `is_from_config` is True), (2) queues a
    Cog code-generation command over the matching template plus post-process
    file moves/copies, (3) executes every queued job, and (4) prints where
    each stub ended up. Returns 0.

    Raises Exception when the IE CPU extension is requested but the IE
    extension sample sources cannot be located.
    """
    # Summary banner of what will be generated.
    analysis = '\n'.join([
        'Generating:',
        '\tModel Optimizer: ',
        '\t\t Extractor for Caffe Custom Layer: {}'.format(
            is_mo_caffe_ext_gen),
        '\t\t Extractor for MxNet Custom Layer: {}'.format(
            is_mo_mxnet_ext_gen),
        '\t\t Extractor for TensorFlow Custom Layer: {}'.format(
            is_mo_tf_ext_gen),
        '\t\t Framework-agnostic operation extension: {}'.format(is_mo_op_gen),
        '\t Inference Engine: ',
        '\t\t CPU extension: {}'.format(is_ie_cpu_gen),
        '\t\t GPU extension: {}'.format(is_ie_gpu_gen),
    ])
    print(analysis)
    # Create output folder trees only for the parts actually requested.
    if is_mo_caffe_ext_gen or is_mo_mxnet_ext_gen or is_mo_tf_ext_gen or is_mo_op_gen:
        [caffe_extr_path, mxnet_extr_path, tf_extr_path,
         op_path] = create_mo_folder_structure(output_dir)
    if is_ie_cpu_gen or is_ie_gpu_gen:
        [ie_cpu_path, ie_gpu_path] = create_ie_folder_structure(output_dir)
    # Interactive Q&A phase: each *Descr object asks the user for the
    # extension details unless they come from a config file.
    if is_mo_caffe_ext_gen:
        mo_extr_descr = MOExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_mxnet_ext_gen:
        mo_extr_descr = MOMXNetExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_tf_ext_gen:
        mo_extr_descr = MOTFExtractorDescr(is_mo_op_gen)
        if not is_from_config:
            mo_extr_descr.create_extension_description()
    if is_mo_op_gen:
        mo_op = MOOpDescr(is_mo_caffe_ext_gen or is_mo_tf_ext_gen)
        if not is_from_config:
            mo_op.create_extension_description()
    if is_ie_cpu_gen and not is_from_config:
        ie_ext_descr_cpu = IEExtensionDescr('cpu')
        ie_ext_descr_cpu.create_extension_description()
    if is_ie_gpu_gen and not is_from_config:
        ie_ext_descr_gpu = IEExtensionDescr('cldnn')
        ie_ext_descr_gpu.create_extension_description()
    # Templates live next to this script.
    pathname = os.path.dirname(sys.argv[0])
    path = os.path.abspath(pathname)
    jobs = []
    # `op` builds a deferred io_task closure; the outer lambda captures the
    # arguments, the inner one defers execution (with an optional force flag
    # used on overwrite confirmation below).
    op = lambda what, where, operation: lambda is_force=False: io_task(
        what, where, operation, force=is_force)
    # Queue one (cog-command, post-processing sub-jobs) pair per artifact.
    if is_mo_caffe_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/caffe_extractor.py')
        ]
        sub_jobs = [op(what, caffe_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_mxnet_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/mxnet_extractor.py')
        ]
        sub_jobs = [op(what, mxnet_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_tf_ext_gen:
        what = InteractiveModule.params['name'][0].lower() + '_ext.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/tf_extractor.py')
        ]
        sub_jobs = [op(what, tf_extr_path, move)]
        jobs.append((command, sub_jobs))
    if is_mo_op_gen:
        # Dots in the op name would break the module filename.
        what = InteractiveModule.get_param('opName').replace(
            ".", "_").lower() + '.py'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/mo_op.py')
        ]
        sub_jobs = [op(what, op_path, move)]
        jobs.append((command, sub_jobs))
    if is_ie_cpu_gen:
        # Try to find the IE extension sample, whose ext_base/ext_list files
        # must be copied alongside the generated CPU extension.
        # 1. extgen and IE samples in one packet
        if os.path.exists(
                os.path.join(
                    path,
                    "../inference_engine/samples/extension/ext_base.cpp")):
            ext_base_path = os.path.join(
                path, "../inference_engine/samples/extension/")
        else:
            # 2. we have InferenceEngine_DIR path
            if os.getenv('InferenceEngine_DIR') and os.path.exists(
                    os.path.join(os.getenv('InferenceEngine_DIR'),
                                 "../samples/extension/ext_base.cpp")):
                ext_base_path = os.path.join(os.getenv('InferenceEngine_DIR'),
                                             "../samples/extension/")
            else:
                # 3. we have path to extension sample explicitly
                #    (for development mainly)
                if os.getenv('IE_extension_sample') and os.path.exists(
                        os.path.join(os.getenv('IE_extension_sample'),
                                     "./ext_base.cpp")):
                    ext_base_path = os.getenv('IE_extension_sample')
                else:
                    raise Exception(
                        "Can not locate the Inference Engine extension sample.\n"
                        + "Please run setupenv.sh from OpenVINO toolkit or set path to "
                        + "IE sample extension explicitly in IE_extension_sample"
                    )
        what = 'ext_' + InteractiveModule.get_param('ie_name').replace(
            ".", "_").lower() + '.cpp'
        command = [
            '', '-d', '-o' + what,
            os.path.join(path, './templates/ie_extension.cpp')
        ]
        sub_jobs = [
            op(what, ie_cpu_path, move),
            op(os.path.join(path, './templates/CMakeLists.txt'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_base.cpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_base.hpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_list.cpp'), ie_cpu_path,
               copy),
            op(os.path.join(ext_base_path, './ext_list.hpp'), ie_cpu_path,
               copy)
        ]
        jobs.append((command, sub_jobs))
    if is_ie_gpu_gen:
        # GPU extension is a kernel (.cl) plus its descriptor (.xml).
        for ext in ('cl', 'xml'):
            op_file = InteractiveModule.get_param(
                'ie_name').lower() + '_kernel.{}'.format(ext)
            command = [
                '', '-d', '-o' + op_file,
                os.path.join(path, './templates/ie_gpu_ext.{}'.format(ext))
            ]
            sub_jobs = [op(op_file, ie_gpu_path, move)]
            jobs.append((command, sub_jobs))
    # Execution phase: run Cog over each template, then move/copy results.
    for job, sub_jobs in jobs:
        Cog().main(job)
        for sub_job in sub_jobs:
            try:
                sub_job()
            except shutil.Error as e:
                # shutil reports an existing destination via its message;
                # the filename is the first quoted token.
                file_name = str(e).split('\'')[1]
                if 'already exists' in str(e):
                    res = "no"
                    if not is_from_config:
                        res = input(
                            'The file {} will be overwritten and all changes can be lost. '
                            .format(file_name) + 'Are you sure (y/n)? ')
                    if res.lower() == 'yes' or res.lower() == 'y':
                        # retry with force=True to overwrite
                        sub_job(True)
                    else:
                        print(
                            '[WARNING] File {} already exist. If you want to re-generate it, remove or move the file {} and try again'
                            .format(file_name, file_name))
    # Final report of where each generated stub lives.
    if is_mo_caffe_ext_gen:
        print('Stub file for Caffe Model Optimizer extractor is in {} folder'.
              format(str(os.path.abspath(caffe_extr_path))))
    if is_mo_tf_ext_gen:
        print(
            'Stub file for TensorFlow Model Optimizer extractor is in {} folder'
            .format(str(os.path.abspath(tf_extr_path))))
    if is_mo_mxnet_ext_gen:
        print('Stub file for MxNet Model Optimizer extractor is in {} folder'.
              format(str(os.path.abspath(mxnet_extr_path))))
    if is_mo_op_gen:
        print('Stub file for Model Optimizer operation is in {} folder'.format(
            str(op_path)))
    if is_ie_cpu_gen:
        print('Stub files for Inference Engine CPU extension are in {} folder'.
              format(str(ie_cpu_path)))
    if is_ie_gpu_gen:
        print('Stub files for Inference Engine GPU extension are in {} folder'.
              format(str(ie_gpu_path)))
    return 0
def is_gpu(self):
    """True when the target plugin is the GPU (clDNN) plugin."""
    plugin = InteractiveModule.get_param('plugin')
    return plugin == 'cldnn'