[PyTorch] StorageImpl: cache size_bytes.is_symbolic() (#85309)
StorageImpl's existing bool members are followed by six bools' worth of alignment padding before the `allocator_` pointer, so we can cache `size_bytes_.is_symbolic()` in a dedicated flag without growing the object. Let's try caching it.
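For intuition, here is a minimal standalone sketch (not the real class: `DataPtr` and `SymInt` are modeled as pointer-sized stand-ins, assuming a typical LP64 ABI) showing that the extra bool lands in existing padding:

```cpp
#include <cstdio>

// Illustrative stand-ins for the real members; pointer-sized fields are
// enough to show the padding effect.
struct Before {
  void* data_ptr_;
  long size_bytes_;
  bool resizable_;
  bool received_cuda_;
  void* allocator_;  // 8-byte alignment forces 6 bytes of padding above
};

struct After {
  void* data_ptr_;
  long size_bytes_;
  bool size_bytes_is_symbolic_;  // the new cached flag
  bool resizable_;
  bool received_cuda_;
  void* allocator_;  // still 5 bytes of padding left
};

int main() {
  // Prints 32 32 on a typical 64-bit platform: the new bool occupies
  // former padding, so sizeof does not grow.
  std::printf("%zu %zu\n", sizeof(Before), sizeof(After));
}
```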
Differential Revision: [D39636570](https://our.internmc.facebook.com/intern/diff/D39636570/)
**NOTE FOR REVIEWERS**: This PR has internal Meta-specific changes or comments; please review them on [Phabricator](https://our.internmc.facebook.com/intern/diff/D39636570/)!
Pull Request resolved: https://github.com/pytorch/pytorch/pull/85309
Approved by: https://github.com/ezyang
diff --git a/c10/core/StorageImpl.h b/c10/core/StorageImpl.h
index 6ea15d1..bbf0803 100644
--- a/c10/core/StorageImpl.h
+++ b/c10/core/StorageImpl.h
@@ -42,7 +42,8 @@
at::Allocator* allocator,
bool resizable)
: data_ptr_(std::move(data_ptr)),
- size_bytes_(size_bytes),
+ size_bytes_(std::move(size_bytes)),
+ size_bytes_is_symbolic_(size_bytes_.is_symbolic()),
resizable_(resizable),
received_cuda_(false),
allocator_(allocator) {
@@ -76,6 +77,7 @@
void reset() {
data_ptr_.clear();
size_bytes_ = 0;
+ size_bytes_is_symbolic_ = false;
}

template <typename T>
@@ -95,7 +97,8 @@
}

size_t nbytes() const {
- return size_bytes_.expect_int();
+ TORCH_CHECK(!size_bytes_is_symbolic_);
+ return size_bytes_.as_int_unchecked();
}

SymInt sym_nbytes() const {
@@ -105,6 +108,7 @@
// TODO: remove later
void set_nbytes(size_t size_bytes) {
size_bytes_ = size_bytes;
+ size_bytes_is_symbolic_ = false;
}

void set_nbytes(c10::SymInt size_bytes) {
@@ -194,6 +198,7 @@
size_t size_bytes) {
data_ptr_ = std::move(data_ptr);
size_bytes_ = size_bytes;
+ size_bytes_is_symbolic_ = false;
allocator_ = nullptr;
resizable_ = false;
}
@@ -211,6 +216,7 @@
private:
DataPtr data_ptr_;
SymInt size_bytes_;
+ bool size_bytes_is_symbolic_;
bool resizable_;
// Identifies that Storage was received from another process and doesn't have
// local to process cuda memory allocation
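For reference, the invariant this diff maintains is that every path assigning `size_bytes_` also refreshes the cached flag, which is what lets `nbytes()` branch on a plain bool instead of decoding the `SymInt`. A self-contained analogue of the pattern (`SimpleSymInt` and `MiniStorage` are hypothetical stand-ins, not PyTorch APIs; `assert()` stands in for `TORCH_CHECK`):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for c10::SymInt, just enough to mirror the
// is_symbolic()/as_int_unchecked() split used in the diff.
struct SimpleSymInt {
  int64_t value = 0;
  bool symbolic = false;
  bool is_symbolic() const { return symbolic; }
  int64_t as_int_unchecked() const { return value; }  // caller checks first
};

struct MiniStorage {
  SimpleSymInt size_bytes_;
  bool size_bytes_is_symbolic_ = false;  // cached alongside the other flags

  void set_nbytes(int64_t n) {
    size_bytes_ = {n, false};
    size_bytes_is_symbolic_ = false;  // every write keeps the cache in sync
  }

  size_t nbytes() const {
    // Fail if the size is symbolic, then read the int without
    // re-inspecting the SymInt representation.
    assert(!size_bytes_is_symbolic_);
    return static_cast<size_t>(size_bytes_.as_int_unchecked());
  }
};

int main() {
  MiniStorage s;
  s.set_nbytes(128);
  return s.nbytes() == 128 ? 0 : 1;
}
```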