Exclude more files in torch/csrc/distributed when USE_DISTRIBUTED=0 (#28621) Summary: Changelog: - Guard the inclusion of torch/csrc/distributed source files in caffe2/CMakeLists.txt so that they are excluded from the build when USE_DISTRIBUTED=0 Pull Request resolved: https://github.com/pytorch/pytorch/pull/28621 Test Plan: - Builds should be successful - Tests should pass Differential Revision: D18145330 Pulled By: ezyang fbshipit-source-id: 7167a356b03ae783e6b0120f2ad3552db2b3ed86
diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt index 5c88b75..d33a8c5 100644 --- a/caffe2/CMakeLists.txt +++ b/caffe2/CMakeLists.txt
@@ -481,37 +481,41 @@ if (NOT INTERN_BUILD_MOBILE) list(APPEND TORCH_SRCS ${TORCH_SRC_DIR}/csrc/api/src/jit.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_container.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_context.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/engine/dist_engine.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/recvrpc_backward.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/sendrpc_backward.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/autograd_metadata.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_req.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_resp.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_req.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_resp.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/rpc_with_autograd.cpp - ${TORCH_SRC_DIR}/csrc/distributed/autograd/utils.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/future_message.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/message.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_remote_call.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_call.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_resp.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/rpc_agent.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/request_callback.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/rref_proto.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_call.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_remote_call.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_resp.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/types.cpp - ${TORCH_SRC_DIR}/csrc/distributed/rpc/utils.cpp ${TORCH_SRC_DIR}/csrc/jit/export.cpp ${TORCH_SRC_DIR}/csrc/jit/import_legacy.cpp ${TORCH_SRC_DIR}/csrc/jit/netdef_converter.cpp ${TORCH_SRC_DIR}/csrc/jit/fuser/cpu/fused_kernel.cpp 
${TORCH_SRC_DIR}/csrc/utils/byte_order.cpp ) + if (USE_DISTRIBUTED) + list(APPEND TORCH_SRCS + ${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_container.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/context/dist_autograd_context.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/engine/dist_engine.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/recvrpc_backward.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/functions/sendrpc_backward.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/autograd_metadata.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_req.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/propagate_gradients_resp.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_req.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_resp.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/rpc_messages/rpc_with_autograd.cpp + ${TORCH_SRC_DIR}/csrc/distributed/autograd/utils.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/future_message.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/message.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_remote_call.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_call.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/python_udf_resp.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/rpc_agent.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/request_callback.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/rref_proto.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_call.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_remote_call.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/script_resp.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/types.cpp + ${TORCH_SRC_DIR}/csrc/distributed/rpc/utils.cpp + ) + endif() endif() if (USE_CUDA)
diff --git a/test/cpp/dist_autograd/CMakeLists.txt b/test/cpp/dist_autograd/CMakeLists.txt index 039a1df..d66d7af 100644 --- a/test/cpp/dist_autograd/CMakeLists.txt +++ b/test/cpp/dist_autograd/CMakeLists.txt
@@ -1,27 +1,29 @@ -set(DIST_AUTOGRAD_TEST_DIR "${TORCH_ROOT}/test/cpp/dist_autograd") -set(DIST_AUTOGRAD_TEST_SOURCES - ${TORCH_ROOT}/test/cpp/common/main.cpp - ${DIST_AUTOGRAD_TEST_DIR}/test_dist_autograd.cpp -) +if (USE_DISTRIBUTED) + set(DIST_AUTOGRAD_TEST_DIR "${TORCH_ROOT}/test/cpp/dist_autograd") + set(DIST_AUTOGRAD_TEST_SOURCES + ${TORCH_ROOT}/test/cpp/common/main.cpp + ${DIST_AUTOGRAD_TEST_DIR}/test_dist_autograd.cpp + ) -add_executable(test_dist_autograd ${DIST_AUTOGRAD_TEST_SOURCES}) -target_include_directories(test_dist_autograd PRIVATE ${ATen_CPU_INCLUDE}) -target_link_libraries(test_dist_autograd PRIVATE torch gtest) + add_executable(test_dist_autograd ${DIST_AUTOGRAD_TEST_SOURCES}) + target_include_directories(test_dist_autograd PRIVATE ${ATen_CPU_INCLUDE}) + target_link_libraries(test_dist_autograd PRIVATE torch gtest) -if (USE_CUDA) - target_link_libraries(test_dist_autograd PRIVATE - ${CUDA_LIBRARIES} - ${CUDA_NVRTC_LIB} - ${CUDA_CUDA_LIB} - ${TORCH_CUDA_LIBRARIES}) + if (USE_CUDA) + target_link_libraries(test_dist_autograd PRIVATE + ${CUDA_LIBRARIES} + ${CUDA_NVRTC_LIB} + ${CUDA_CUDA_LIB} + ${TORCH_CUDA_LIBRARIES}) - target_compile_definitions(test_dist_autograd PRIVATE "USE_CUDA") -endif() + target_compile_definitions(test_dist_autograd PRIVATE "USE_CUDA") + endif() -if (INSTALL_TEST) - install(TARGETS test_dist_autograd DESTINATION bin) - # Install PDB files for MSVC builds - if (MSVC AND BUILD_SHARED_LIBS) - install(FILES $<TARGET_PDB_FILE:test_dist_autograd> DESTINATION bin OPTIONAL) + if (INSTALL_TEST) + install(TARGETS test_dist_autograd DESTINATION bin) + # Install PDB files for MSVC builds + if (MSVC AND BUILD_SHARED_LIBS) + install(FILES $<TARGET_PDB_FILE:test_dist_autograd> DESTINATION bin OPTIONAL) + endif() endif() endif()
diff --git a/torch/csrc/autograd/functions/init.cpp b/torch/csrc/autograd/functions/init.cpp index 551160b..63b5a7c 100644 --- a/torch/csrc/autograd/functions/init.cpp +++ b/torch/csrc/autograd/functions/init.cpp
@@ -5,7 +5,9 @@ #include <torch/csrc/autograd/functions/tensor.h> #include <torch/csrc/autograd/generated/python_functions.h> #include <torch/csrc/autograd/python_cpp_function.h> +#ifdef USE_DISTRIBUTED #include <torch/csrc/distributed/autograd/functions/sendrpc_backward.h> +#endif #include <torch/csrc/jit/python_tracer.h> #include <torch/csrc/utils/pybind.h> #include <torch/csrc/utils/tuple_parser.h> @@ -103,9 +105,11 @@ static PyTypeObject CopyBackwardsClass; addClass<CopyBackwards, NoCtor>(module, CopyBackwardsClass, "CopyBackwards"); +#ifdef USE_DISTRIBUTED static PyTypeObject SendRpcBackwardClass; addClass<torch::distributed::autograd::SendRpcBackward, NoCtor>( module, SendRpcBackwardClass, "SendRpcBackward"); +#endif static PyTypeObject CopySlicesClass; addClass<CopySlices, NoCtor>(module, CopySlicesClass, "CopySlices");