From c7e0101eba4c815d65b122d0f722c63c9fe1ce73 Mon Sep 17 00:00:00 2001
From: Richard Barnes
Date: Wed, 23 Oct 2024 13:27:31 -0700
Subject: [PATCH] c10::nullopt -> std::nullopt (#1032)

Summary:
X-link: https://github.com/pytorch/torchrec/pull/2515

X-link: https://github.com/pytorch/executorch/pull/6461

X-link: https://github.com/pytorch/audio/pull/3848

X-link: https://github.com/pytorch/ao/pull/1151

Pull Request resolved: https://github.com/facebookincubator/AITemplate/pull/1032

Reviewed By: houseroad

Differential Revision: D64835967

fbshipit-source-id: 9f9f65335aaf5497680561027ef9314e4b36f8d9
---
 fx2ait/fx2ait/csrc/AITModelImpl.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/fx2ait/fx2ait/csrc/AITModelImpl.cpp b/fx2ait/fx2ait/csrc/AITModelImpl.cpp
index 8530ae706..e2ebd4c70 100644
--- a/fx2ait/fx2ait/csrc/AITModelImpl.cpp
+++ b/fx2ait/fx2ait/csrc/AITModelImpl.cpp
@@ -366,7 +366,7 @@ std::vector AITModelImpl::processOutputs(
       output.unsafeGetTensorImpl()->set_sizes_contiguous(size);
     }
 
-    if (floating_point_output_dtype_ != c10::nullopt &&
+    if (floating_point_output_dtype_ != std::nullopt &&
         output.is_floating_point()) {
       outputs.emplace_back(output.to(*floating_point_output_dtype_));
     } else {
@@ -394,7 +394,7 @@ std::vector AITModelImpl::processInputs(
     auto input_name = input_names_[python_input_idx];
     const auto ait_input_idx = input_name_to_index_.at(input_name);
     auto& input = inputs[python_input_idx];
-    if (floating_point_input_dtype_ != c10::nullopt &&
+    if (floating_point_input_dtype_ != std::nullopt &&
        input.is_floating_point()) {
      // Need to keep input alive; cannot just stash result of to()
      // call in a local!