Add variable_factories.h to cppdocs (#14381)
Summary:
This will document `torch::from_blob` and the other generated tensor factory functions.
soumith ezyang
Pull Request resolved: https://github.com/pytorch/pytorch/pull/14381
Differential Revision: D13216560
Pulled By: goldsborough
fbshipit-source-id: 112f60e45e4d38a8a9983fa71e9cc56bc1a73465
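
For context, a minimal usage sketch of the factory functions these docs cover (the buffer, shape, and options below are illustrative and not part of this patch):

    #include <torch/torch.h>
    #include <vector>

    int main() {
      // Caller-owned buffer; from_blob does not take ownership of it.
      std::vector<float> buffer = {1, 2, 3, 4, 5, 6};

      // View the buffer as a 2x3 float tensor; the buffer must outlive `t`.
      torch::Tensor t = torch::from_blob(
          buffer.data(), /*sizes=*/{2, 3},
          torch::TensorOptions().dtype(torch::kFloat));

      // Overload with a deleter: called when the tensor would free the data.
      torch::Tensor u = torch::from_blob(
          buffer.data(), /*sizes=*/{2, 3}, /*deleter=*/[](void* /*ptr*/) {},
          torch::TensorOptions().dtype(torch::kFloat));
    }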
diff --git a/.travis.yml b/.travis.yml
index cb9f828..e4c9bc5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -27,7 +27,10 @@
install: pip install mypy mypy-extensions
script: mypy @mypy-files.txt
- env: CPP_DOC_CHECK
- install: sudo apt-get install -y doxygen
+ python: "3.6"
+ install:
+ - sudo apt-get install -y doxygen
+ - pip install -r requirements.txt
script: cd docs/cpp/source && ./check-doxygen.sh
- env: CLANG_TIDY
python: "3.6"
diff --git a/docs/cpp/source/Doxyfile b/docs/cpp/source/Doxyfile
index 8cc9073..7c5153a 100644
--- a/docs/cpp/source/Doxyfile
+++ b/docs/cpp/source/Doxyfile
@@ -29,36 +29,37 @@
# {repo_root}/docs/cpp/source/../../.. -> {repo_root}
STRIP_FROM_PATH = ../../..
# What folders / files Doxygen should process.
-INPUT = ../../../torch/csrc/api/include \
- ../../../torch/csrc/api/src \
- ../../../torch/csrc/jit/custom_operator.h \
- ../../../torch/csrc/jit/import.h \
- ../../../torch/csrc/jit/ivalue.h \
- ../../../torch/csrc/jit/script/module.h \
- ../../../aten/src/ATen/ATen.h \
+INPUT = ../../../aten/src/ATen/ATen.h \
../../../aten/src/ATen/Backend.h \
- ../../../aten/src/ATen/DeviceGuard.h \
- ../../../aten/src/ATen/Layout.h \
- ../../../aten/src/ATen/OptionsGuard.h \
- ../../../aten/src/ATen/Scalar.h \
- ../../../aten/src/ATen/TensorOptions.h \
../../../aten/src/ATen/core/Half.h \
../../../aten/src/ATen/core/ScalarType.h \
../../../aten/src/ATen/core/Tensor.h \
+ ../../../aten/src/ATen/cuda/CUDAContext.h \
../../../aten/src/ATen/cuda/CUDAGuard.h \
../../../aten/src/ATen/cuda/CUDAStream.h \
- ../../../aten/src/ATen/cuda/CUDAContext.h \
../../../aten/src/ATen/cudnn/Descriptors.h \
../../../aten/src/ATen/cudnn/Handles.h \
../../../aten/src/ATen/cudnn/Types.h \
../../../aten/src/ATen/cudnn/Utils.h \
+ ../../../aten/src/ATen/DeviceGuard.h \
+ ../../../aten/src/ATen/Layout.h \
../../../aten/src/ATen/mkl/Descriptors.h \
- ../../../c10/util/Optional.h \
- ../../../c10/util/Exception.h \
- ../../../c10/util/ArrayRef.h \
+ ../../../aten/src/ATen/OptionsGuard.h \
+ ../../../aten/src/ATen/Scalar.h \
+ ../../../aten/src/ATen/TensorOptions.h \
+ ../../../build/aten/src/ATen/Functions.h \
../../../c10/Device.h \
../../../c10/DeviceType.h \
- ../../../build/aten/src/ATen/Functions.h
+ ../../../c10/util/ArrayRef.h \
+ ../../../c10/util/Exception.h \
+ ../../../c10/util/Optional.h \
+ ../../../torch/csrc/api/include \
+ ../../../torch/csrc/api/src \
+ ../../../torch/csrc/autograd/generated/variable_factories.h \
+ ../../../torch/csrc/jit/custom_operator.h \
+ ../../../torch/csrc/jit/import.h \
+ ../../../torch/csrc/jit/ivalue.h \
+ ../../../torch/csrc/jit/script/module.h
# Don't include .cpp files!
FILE_PATTERNS = *.h
# If you need this to be YES, exhale will probably break.
diff --git a/docs/cpp/source/check-doxygen.sh b/docs/cpp/source/check-doxygen.sh
index fcc7c08..18863e2 100755
--- a/docs/cpp/source/check-doxygen.sh
+++ b/docs/cpp/source/check-doxygen.sh
@@ -10,6 +10,26 @@
mv temp.txt doxygen-log.txt
}
+pushd "$(dirname "$0")/../../.."
+
+cp aten/src/ATen/common_with_cwrap.py tools/shared/cwrap_common.py
+cp torch/_utils_internal.py tools/shared
+
+python aten/src/ATen/gen.py \
+ -s aten/src/ATen \
+ -d build/aten/src/ATen \
+ aten/src/ATen/Declarations.cwrap \
+ aten/src/THNN/generic/THNN.h \
+ aten/src/THCUNN/generic/THCUNN.h \
+ aten/src/ATen/nn.yaml \
+ aten/src/ATen/native/native_functions.yaml
+
+python tools/setup_helpers/generate_code.py \
+ --declarations-path build/aten/src/ATen/Declarations.yaml \
+ --nn-path aten/src
+
+popd
+
# Run doxygen and log all output.
doxygen 2> original-doxygen-log.txt
cp original-doxygen-log.txt doxygen-log.txt
@@ -19,7 +39,6 @@
# Filter out some warnings.
ignore_warning "warning: no uniquely matching class member found for"
-ignore_warning "warning:.*\.\./\.\./\.\./build/aten.*"
# Count the number of remaining warnings.
warnings="$(grep 'warning:' doxygen-log.txt | wc -l)"
diff --git a/docs/cpp/source/notes/tensor_basics.rst b/docs/cpp/source/notes/tensor_basics.rst
index 19ab8f3..73f6a06 100644
--- a/docs/cpp/source/notes/tensor_basics.rst
+++ b/docs/cpp/source/notes/tensor_basics.rst
@@ -107,6 +107,7 @@
which reduce the dimensions of a ``Tensor``.
.. code-block:: cpp
+
torch::Tensor two = torch::rand({10, 20});
two[1][2] = 4;
// ^^^^^^ <- zero-dimensional Tensor
diff --git a/tools/autograd/templates/variable_factories.h b/tools/autograd/templates/variable_factories.h
index 5f5a89a..59e4168 100644
--- a/tools/autograd/templates/variable_factories.h
+++ b/tools/autograd/templates/variable_factories.h
@@ -39,22 +39,37 @@
AT_FORALL_SCALAR_TYPES_EXCEPT_HALF(TENSOR)
#undef TENSOR
+/// A generic deleter function.
+using Deleter = std::function<void(void*)>;
+
+/// Exposes the given `data` as a `Tensor` without taking ownership of the
+/// original data. `sizes` should specify the shape of the tensor, `strides` the
+/// stride in each dimension. The `deleter` function (a
+/// `std::function<void(void*)>`) will be called on the `data` when the Tensor
+/// data would normally be deallocated. The `TensorOptions` specify additional
+/// configuration options for the returned tensor, such as what type to
+/// interpret the `data` as.
inline at::Tensor from_blob(
void* data,
at::IntList sizes,
at::IntList strides,
- const std::function<void(void*)>& deleter,
- const at::TensorOptions& options = {}) {
+ const Deleter& deleter,
+ const at::TensorOptions& options = at::TensorOptions()) {
at::Tensor tensor =
at::from_blob(data, sizes, strides, deleter, options.is_variable(false));
return autograd::make_variable(tensor, options.requires_grad());
}
+/// Exposes the given `data` as a `Tensor` without taking ownership of the
+/// original data. `sizes` should specify the shape of the tensor, `strides` the
+/// stride in each dimension. The `TensorOptions`
+/// specify additional configuration options for the returned tensor, such as
+/// what type to interpret the `data` as.
inline at::Tensor from_blob(
void* data,
at::IntList sizes,
at::IntList strides,
- const at::TensorOptions& options = {}) {
+ const at::TensorOptions& options = at::TensorOptions()) {
return torch::from_blob(
data,
sizes,
@@ -63,20 +78,30 @@
options);
}
+/// Exposes the given `data` as a `Tensor` without taking ownership of the
+/// original data. `sizes` should specify the shape of the tensor. The `deleter`
+/// function (a `std::function<void(void*)>`) will be called on the `data` when
+/// the Tensor data would normally be deallocated. The `TensorOptions` specify
+/// additional configuration options for the returned tensor, such as what type
+/// to interpret the `data` as.
inline at::Tensor from_blob(
void* data,
at::IntList sizes,
- const std::function<void(void*)>& deleter,
- const at::TensorOptions& options = {}) {
+ const Deleter& deleter,
+ const at::TensorOptions& options = at::TensorOptions()) {
at::Tensor tensor =
at::from_blob(data, sizes, deleter, options.is_variable(false));
return autograd::make_variable(tensor, options.requires_grad());
}
+/// Exposes the given `data` as a `Tensor` without taking ownership of the
+/// original data. `sizes` should specify the shape of the tensor. The
+/// `TensorOptions` specify additional configuration options for the returned
+/// tensor, such as what type to interpret the `data` as.
inline at::Tensor from_blob(
void* data,
at::IntList sizes,
- const at::TensorOptions& options = {}) {
+ const at::TensorOptions& options = at::TensorOptions()) {
return torch::from_blob(data, sizes, /*deleter=*/[](void*) {}, options);
}
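
Usage note (not part of the patch): a hedged sketch of the strides-and-deleter overload documented above; the buffer layout, strides, and deleter body are illustrative.

    #include <torch/torch.h>

    int main() {
      // Caller-allocated, column-major 2x3 float buffer.
      float* data = new float[6]{1, 2, 3, 4, 5, 6};

      // Explicit strides describe the column-major layout. The deleter is
      // invoked when the tensor would normally deallocate the data.
      torch::Tensor t = torch::from_blob(
          data,
          /*sizes=*/{2, 3},
          /*strides=*/{1, 2},
          /*deleter=*/[](void* ptr) { delete[] static_cast<float*>(ptr); },
          torch::TensorOptions().dtype(torch::kFloat));
    }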