Example #1
def generate_code(ninja_global=None,
                  declarations_path=None,
                  nn_path=None,
                  install_dir=None,
                  subset=None):
    # cwrap depends on pyyaml, so we can't import it earlier
    root = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.insert(0, root)
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.jit.gen_jit_dispatch import gen_jit_dispatch

    # Build ATen based Variable classes
    autograd_gen_dir = install_dir or 'torch/csrc/autograd/generated'
    jit_gen_dir = install_dir or 'torch/csrc/jit/generated'
    for d in (autograd_gen_dir, jit_gen_dir):
        if not os.path.exists(d):
            os.makedirs(d)

    if subset == "pybindings" or not subset:
        gen_autograd_python(declarations_path or DECLARATIONS_PATH,
                            autograd_gen_dir, 'tools/autograd')

    if subset == "libtorch" or not subset:
        gen_autograd(declarations_path or DECLARATIONS_PATH, autograd_gen_dir,
                     'tools/autograd')
        gen_jit_dispatch(declarations_path or DECLARATIONS_PATH, jit_gen_dir,
                         'tools/jit/templates')
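
A minimal invocation sketch for this version. It assumes the surrounding generate_code.py module (which defines DECLARATIONS_PATH) has been imported; the install_dir value is illustrative only.

# Hedged sketch: generate all subsets into the default in-tree directories,
# relying on the module-level DECLARATIONS_PATH constant.
generate_code()

# Restrict to the Python bindings and redirect output (hypothetical directory):
generate_code(subset="pybindings", install_dir="build/generated")
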
Example #2
def generate_code(ninja_global=None,
                  declarations_path=None,
                  nn_path=None,
                  install_dir=None,
                  subset=None,
                  disable_autograd=False,
                  selected_op_list_path=None,
                  selected_op_list=None,
                  force_schema_registration=False):
    # cwrap depends on pyyaml, so we can't import it earlier
    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.insert(0, root)
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.autograd.gen_annotated_fn_args import gen_annotated
    from tools.autograd.utils import load_op_list_and_strip_overload
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers

    # Build ATen based Variable classes
    if install_dir is None:
        install_dir = 'torch/csrc'
        python_install_dir = 'torch/testing/_internal/generated'
    else:
        python_install_dir = install_dir
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir, python_install_dir):
        if not os.path.exists(d):
            os.makedirs(d)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    if subset == "pybindings" or not subset:
        gen_autograd_python(declarations_path or DECLARATIONS_PATH, autograd_gen_dir, autograd_dir)

    if subset == "libtorch" or not subset:
        selected_op_list = load_op_list_and_strip_overload(selected_op_list, selected_op_list_path)

        gen_autograd(
            declarations_path or DECLARATIONS_PATH,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
            selected_op_list=selected_op_list,
        )
        gen_unboxing_wrappers(
            declarations_path or DECLARATIONS_PATH,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            selected_op_list=selected_op_list,
            force_schema_registration=force_schema_registration)

    if subset == "python" or not subset:
        gen_annotated(
            declarations_path or DECLARATIONS_PATH,
            python_install_dir,
            autograd_dir)
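
A hedged sketch of a selective ("libtorch"-only) invocation for this version; the operator names and the YAML path are hypothetical, and DECLARATIONS_PATH again comes from the surrounding module.

# Build only the libtorch pieces, keeping just the listed ops (hypothetical names)
# and registering schema-only stubs for everything else.
generate_code(
    subset="libtorch",
    selected_op_list=["aten::add", "aten::mul"],
    force_schema_registration=True,
)

# The op list can also be read from a YAML file instead (hypothetical path):
# generate_code(subset="libtorch", selected_op_list_path="mobile_ops.yaml")
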
Example #3
def generate_code(ninja_global=None,
                  declarations_path=None,
                  nn_path=None,
                  native_functions_path=None,
                  install_dir=None,
                  subset=None,
                  disable_autograd=False,
                  force_schema_registration=False,
                  operator_selector=None):
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.autograd.gen_annotated_fn_args import gen_annotated
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers
    from tools.codegen.selective_build.selector import SelectiveBuilder

    # Build ATen based Variable classes
    if install_dir is None:
        install_dir = 'torch/csrc'
        python_install_dir = 'torch/testing/_internal/generated'
    else:
        python_install_dir = install_dir
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir, python_install_dir):
        if not os.path.exists(d):
            os.makedirs(d)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    if subset == "pybindings" or not subset:
        gen_autograd_python(declarations_path or DECLARATIONS_PATH,
                            native_functions_path or NATIVE_FUNCTIONS_PATH,
                            autograd_gen_dir, autograd_dir)

    if operator_selector is None:
        operator_selector = SelectiveBuilder.get_nop_selector()

    if subset == "libtorch" or not subset:

        gen_autograd(
            declarations_path or DECLARATIONS_PATH,
            native_functions_path or NATIVE_FUNCTIONS_PATH,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
            operator_selector=operator_selector,
        )
        gen_unboxing_wrappers(
            declarations_path or DECLARATIONS_PATH,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            operator_selector=operator_selector,
            force_schema_registration=force_schema_registration)

    if subset == "python" or not subset:
        gen_annotated(declarations_path or DECLARATIONS_PATH,
                      python_install_dir, autograd_dir)
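
In this version the selective-build input is a SelectiveBuilder object rather than a raw op list. A sketch, assuming SelectiveBuilder.from_yaml_path is available in tools.codegen.selective_build.selector; the config path is hypothetical.

from tools.codegen.selective_build.selector import SelectiveBuilder

# Hypothetical selective-build config; omit operator_selector entirely to fall
# back to the no-op selector that keeps every operator.
selector = SelectiveBuilder.from_yaml_path("mobile_ops.yaml")
generate_code(subset="libtorch", operator_selector=selector)
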
Example #4
def generate_code(
    gen_dir: pathlib.Path,
    native_functions_path: Optional[str] = None,
    tags_path: Optional[str] = None,
    install_dir: Optional[str] = None,
    subset: Optional[str] = None,
    disable_autograd: bool = False,
    force_schema_registration: bool = False,
    operator_selector: Any = None,
) -> None:
    from torchgen.selective_build.selector import SelectiveBuilder

    from tools.autograd.gen_annotated_fn_args import gen_annotated
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python

    # Build ATen based Variable classes
    if install_dir is None:
        install_dir = os.fspath(gen_dir / "torch/csrc")
        python_install_dir = os.fspath(gen_dir /
                                       "torch/testing/_internal/generated")
    else:
        python_install_dir = install_dir
    autograd_gen_dir = os.path.join(install_dir, "autograd", "generated")
    for d in (autograd_gen_dir, python_install_dir):
        os.makedirs(d, exist_ok=True)
    autograd_dir = os.fspath(pathlib.Path(__file__).parent.parent / "autograd")

    if subset == "pybindings" or not subset:
        gen_autograd_python(
            native_functions_path or NATIVE_FUNCTIONS_PATH,
            tags_path or TAGS_PATH,
            autograd_gen_dir,
            autograd_dir,
        )

    if operator_selector is None:
        operator_selector = SelectiveBuilder.get_nop_selector()

    if subset == "libtorch" or not subset:

        gen_autograd(
            native_functions_path or NATIVE_FUNCTIONS_PATH,
            tags_path or TAGS_PATH,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
            operator_selector=operator_selector,
        )

    if subset == "python" or not subset:
        gen_annotated(
            native_functions_path or NATIVE_FUNCTIONS_PATH,
            tags_path or TAGS_PATH,
            python_install_dir,
            autograd_dir,
        )
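
A sketch of calling the pathlib-based version; gen_dir is now a required argument, the directory shown is hypothetical, and the NATIVE_FUNCTIONS_PATH / TAGS_PATH defaults come from the surrounding module.

import pathlib

# Hedged sketch: emit only the Python bindings under a hypothetical build directory.
generate_code(pathlib.Path("build/codegen"), subset="pybindings")
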
Example #5
def generate_code(ninja_global=None,
                  declarations_path=None,
                  nn_path=None,
                  install_dir=None,
                  subset=None,
                  disable_autograd=False,
                  selected_op_list_path=None,
                  selected_op_list=None,
                  force_schema_registration=False):
    # cwrap depends on pyyaml, so we can't import it earlier
    root = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.insert(0, root)
    from tools.autograd.gen_autograd import gen_autograd, gen_autograd_python
    from tools.jit.gen_unboxing_wrappers import gen_unboxing_wrappers

    # Build ATen based Variable classes
    install_dir = install_dir or 'torch/csrc'
    autograd_gen_dir = os.path.join(install_dir, 'autograd', 'generated')
    jit_gen_dir = os.path.join(install_dir, 'jit', 'generated')
    for d in (autograd_gen_dir, jit_gen_dir):
        if not os.path.exists(d):
            os.makedirs(d)
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    data_dir = os.path.join(runfiles_dir, 'pytorch') if runfiles_dir else ''
    autograd_dir = os.path.join(data_dir, 'tools', 'autograd')
    tools_jit_templates = os.path.join(data_dir, 'tools', 'jit', 'templates')

    if subset == "pybindings" or not subset:
        gen_autograd_python(declarations_path or DECLARATIONS_PATH,
                            autograd_gen_dir, autograd_dir)

    if subset == "libtorch" or not subset:
        # TODO: add selected-op mechanism in autograd to reduce binary size
        gen_autograd(
            declarations_path or DECLARATIONS_PATH,
            autograd_gen_dir,
            autograd_dir,
            disable_autograd=disable_autograd,
        )
        gen_unboxing_wrappers(
            declarations_path or DECLARATIONS_PATH,
            jit_gen_dir,
            tools_jit_templates,
            disable_autograd=disable_autograd,
            selected_op_list_path=selected_op_list_path,
            selected_op_list=selected_op_list,
            force_schema_registration=force_schema_registration)
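
A hedged sketch of a mobile-style invocation of this version, with autograd kernels disabled; the operator names are hypothetical.

# Hedged sketch: libtorch-only codegen without autograd kernels, limited to two
# hypothetical operators, with schema-only registration forced for the rest.
generate_code(
    subset="libtorch",
    disable_autograd=True,
    selected_op_list=["aten::add.Tensor", "aten::relu"],
    force_schema_registration=True,
)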