#3645: fix moreh_softmax, moreh_softmin comment
hschoi4448 committed Jan 25, 2024
1 parent 2945865 commit a7545af
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions tt_eager/tt_lib/csrc/operations/primary/module.hpp
@@ -489,7 +489,7 @@ void py_module(py::module& m_primary) {
py::arg("dim").noconvert(),
py::arg("strategy").noconvert() = MorehSoftmaxOpParallelizationStrategy::NONE,
py::arg("output_mem_config").noconvert() = operation::DEFAULT_OUTPUT_MEMORY_CONFIG,
"Performs a softmax operation. Returns a output tensor.");
"Performs a softmax operation. Returns an output tensor.");
m_primary.def(
"moreh_softmax_backward",
&moreh_softmax_backward,
@@ -498,15 +498,15 @@ void py_module(py::module& m_primary) {
py::arg("dim").noconvert(),
py::arg("strategy").noconvert() = MorehSoftmaxBackwardOpParallelizationStrategy::NONE,
py::arg("output_mem_config").noconvert() = operation::DEFAULT_OUTPUT_MEMORY_CONFIG,
"Performs a softmax backward operation. Returns a input grad tensor.");
"Performs a softmax backward operation. Returns an input grad tensor.");
m_primary.def(
"moreh_softmin",
&moreh_softmin,
py::arg("input_tensors").noconvert(),
py::arg("dim").noconvert(),
py::arg("strategy").noconvert() = MorehSoftmaxOpParallelizationStrategy::NONE,
py::arg("output_mem_config").noconvert() = operation::DEFAULT_OUTPUT_MEMORY_CONFIG,
"Performs a softmax operation. Returns a output tensor.");
"Performs a softmin operation. Returns an output tensor.");
m_primary.def(
"moreh_softmin_backward",
&moreh_softmin_backward,
@@ -515,7 +515,7 @@ void py_module(py::module& m_primary) {
py::arg("dim").noconvert(),
py::arg("strategy").noconvert() = MorehSoftmaxBackwardOpParallelizationStrategy::NONE,
py::arg("output_mem_config").noconvert() = operation::DEFAULT_OUTPUT_MEMORY_CONFIG,
"Performs a softmin backward operation. Returns a input grad tensor.");
"Performs a softmin backward operation. Returns an input grad tensor.");
m_primary.def(
"moreh_sum",
&moreh_sum,
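The strings corrected here are the docstrings that pybind11 attaches to the Python-facing ops. A minimal sketch of how the fixed text surfaces from Python, assuming the bindings are importable as tt_lib.operations.primary (the import path is an assumption; it is not shown in this diff):

# Hypothetical usage sketch, not part of the commit.
# Assumes tt_lib is built/installed and exposes the ops bound above.
import tt_lib as ttl

# pybind11 prepends the generated signature, so the corrected text appears
# inside __doc__ rather than being the whole string.
print(ttl.operations.primary.moreh_softmax.__doc__)   # contains "Performs a softmax operation. Returns an output tensor."
print(ttl.operations.primary.moreh_softmin.__doc__)   # contains "Performs a softmin operation. Returns an output tensor."

The same check applies to moreh_softmax_backward and moreh_softmin_backward, whose docstrings now read "Returns an input grad tensor."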
