def generate_code(ninja_global=None, declarations_path=None, nn_path=None, native_functions_path=None, install_dir=None, subset=None, disable_autograd=False, force_schema_registration=False, operator_selector=None):
    """Run the autograd/JIT code generators for the requested subset.

    Args:
        ninja_global: unused here; kept for caller compatibility.
        declarations_path: path to Declarations.yaml; falls back to the
            module-level DECLARATIONS_PATH when None.
        nn_path: unused here; kept for caller compatibility.
        native_functions_path: path to native_functions.yaml; falls back to
            NATIVE_FUNCTIONS_PATH when None.
        install_dir: output root for generated C++ sources; defaults to
            'torch/csrc' (with Python outputs going to the testing tree).
        subset: one of "pybindings", "libtorch", "python", or falsy to run all.
        disable_autograd: forwarded to the generators.
        force_schema_registration: forwarded to gen_unboxing_wrappers.
        operator_selector: SelectiveBuilder controlling selective build;
            a no-op selector is used when None.
    """
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.autograd.gen_annotated_fn_args import gen_annotated
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers
    from tools.codegen.selective_build.selector import SelectiveBuilder

    # Build ATen based Variable classes
    if install_dir is None:
        install_dir = 'torch/csrc'
        python_install_dir = 'torch/testing/_internal/generated'
    else:
        python_install_dir = install_dir
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir, python_install_dir):
        # exist_ok avoids the check-then-create race of exists() + makedirs()
        os.makedirs(d, exist_ok=True)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    # Hoist the repeated default fallbacks so each call site reads identically.
    declarations = declarations_path or DECLARATIONS_PATH
    native_functions = native_functions_path or NATIVE_FUNCTIONS_PATH

    if subset == "pybindings" or not subset:
        gen_autograd_python(
            declarations,
            native_functions,
            autograd_gen_dir,
            autograd_dir)

    if operator_selector is None:
        operator_selector = SelectiveBuilder.get_nop_selector()

    if subset == "libtorch" or not subset:
        gen_autograd(
            declarations,
            native_functions,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
            operator_selector=operator_selector,
        )
        gen_unboxing_wrappers(
            declarations,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            operator_selector=operator_selector,
            force_schema_registration=force_schema_registration)

    if subset == "python" or not subset:
        gen_annotated(
            declarations,
            python_install_dir,
            autograd_dir)
def generate_code(ninja_global=None, declarations_path=None, nn_path=None, install_dir=None, subset=None, disable_autograd=False, selected_op_list_path=None, selected_op_list=None, force_schema_registration=False):
    """Run the autograd/JIT code generators for the requested subset.

    Args:
        ninja_global: unused here; kept for caller compatibility.
        declarations_path: path to Declarations.yaml; falls back to the
            module-level DECLARATIONS_PATH when None.
        nn_path: unused here; kept for caller compatibility.
        install_dir: output root for generated C++ sources; defaults to
            'torch/csrc' (with Python outputs going to the testing tree).
        subset: one of "pybindings", "libtorch", "python", or falsy to run all.
        disable_autograd: forwarded to the generators.
        selected_op_list_path / selected_op_list: selective-build op list,
            merged and overload-stripped before use.
        force_schema_registration: forwarded to gen_unboxing_wrappers.
    """
    # cwrap depends on pyyaml, so we can't import it earlier
    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.insert(0, root)
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.autograd.gen_annotated_fn_args import gen_annotated
    from tools.autograd.utils import load_op_list_and_strip_overload
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers

    # Build ATen based Variable classes
    if install_dir is None:
        install_dir = 'torch/csrc'
        python_install_dir = 'torch/testing/_internal/generated'
    else:
        python_install_dir = install_dir
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir, python_install_dir):
        # exist_ok avoids the check-then-create race of exists() + makedirs()
        os.makedirs(d, exist_ok=True)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    # Hoist the repeated default fallback so each call site reads identically.
    declarations = declarations_path or DECLARATIONS_PATH

    if subset == "pybindings" or not subset:
        gen_autograd_python(
            declarations,
            autograd_gen_dir,
            autograd_dir)

    if subset == "libtorch" or not subset:
        selected_op_list = load_op_list_and_strip_overload(
            selected_op_list, selected_op_list_path)
        gen_autograd(
            declarations,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
            selected_op_list=selected_op_list,
        )
        gen_unboxing_wrappers(
            declarations,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            selected_op_list=selected_op_list,
            force_schema_registration=force_schema_registration)

    if subset == "python" or not subset:
        gen_annotated(
            declarations,
            python_install_dir,
            autograd_dir)
def generate_code(ninja_global=None, declarations_path=None, nn_path=None, install_dir=None, subset=None, disable_autograd=False, selected_op_list_path=None, selected_op_list=None, force_schema_registration=False):
    """Run the autograd/JIT code generators for the requested subset.

    Args:
        ninja_global: unused here; kept for caller compatibility.
        declarations_path: path to Declarations.yaml; falls back to the
            module-level DECLARATIONS_PATH when None.
        nn_path: unused here; kept for caller compatibility.
        install_dir: output root for generated C++ sources; defaults to
            'torch/csrc'.
        subset: "pybindings", "libtorch", or falsy to run both.
        disable_autograd: forwarded to the generators.
        selected_op_list_path / selected_op_list: selective-build op list,
            forwarded to gen_unboxing_wrappers only.
        force_schema_registration: forwarded to gen_unboxing_wrappers.
    """
    # cwrap depends on pyyaml, so we can't import it earlier
    root = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.insert(0, root)
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers

    # Build ATen based Variable classes
    install_dir = install_dir or 'torch/csrc'
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir):
        # exist_ok avoids the check-then-create race of exists() + makedirs()
        os.makedirs(d, exist_ok=True)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    # Hoist the repeated default fallback so each call site reads identically.
    declarations = declarations_path or DECLARATIONS_PATH

    if subset == "pybindings" or not subset:
        gen_autograd_python(
            declarations,
            autograd_gen_dir,
            autograd_dir)

    if subset == "libtorch" or not subset:
        # TODO: add selected op mechanism in augotrad to save learning size
        gen_autograd(
            declarations,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
        )
        gen_unboxing_wrappers(
            declarations,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            selected_op_list_path=selected_op_list_path,
            selected_op_list=selected_op_list,
            force_schema_registration=force_schema_registration)