This repository was archived by the owner on Apr 28, 2023. It is now read-only.

Commit 30995a2 (2 parents: 02fe737 + 1981248)

Merge pull request #115 from facebookresearch/emit-inifnities

[Hotfix] emit macros for inf/inff

File tree: 5 files changed, +10 -5 lines

  docs/source/framework/pytorch_integration/layers_database.rst
  include/tc/core/libraries.h
  src/core/polyhedral/mapped_scop.cc
  test/test_execution_engine.cc
  test_python/layers/test_softmax.py

docs/source/framework/pytorch_integration/layers_database.rst
Lines changed: 1 addition & 1 deletion

@@ -139,7 +139,7 @@ Softmax
 .. code::
 
   def softmax(float(N, D) I) -> (O, maxVal, expDistance, expSum) {
-    maxVal(n) max= I(n, d)
+    maxVal(n) max=! I(n, d)
     expDistance(n, d) = exp(I(n, d) - maxVal(n))
     expSum(n) +=! expDistance(n, d)
     O(n, d) = expDistance(n, d) / expSum(n)
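In TC, the "!" suffix on a reduction operator tells the compiler to initialize the accumulator to the reduction's neutral element before accumulating, rather than folding into whatever the output tensor already holds. For max=! that neutral element is negative infinity, which is exactly what the inf/inff macros added below in include/tc/core/libraries.h provide to the generated CUDA. A minimal CUDA sketch of what an initialized row-max reduction means operationally (one thread per row, illustrative names; not actual TC output):

#include <cstdio>
#include <cuda_runtime.h>

// Stand-in for the macro this commit adds to the generated-code preamble.
#define inff __int_as_float(0x7f800000)

// "maxVal(n) max=! I(n, d)": seed the accumulator with -inf, then reduce.
__global__ void rowMax(const float* I, float* maxVal, int N, int D) {
  int n = blockIdx.x * blockDim.x + threadIdx.x;
  if (n >= N)
    return;
  float acc = -inff; // the "!": start from the neutral element of max
  for (int d = 0; d < D; ++d)
    acc = fmaxf(acc, I[n * D + d]);
  maxVal[n] = acc;
}

int main() {
  const int N = 2, D = 4;
  float hI[N * D] = {-3.f, -1.f, -7.f, -2.f, 5.f, 9.f, 0.f, 4.f};
  float hMax[N], *dI, *dMax;
  cudaMalloc(&dI, sizeof(hI));
  cudaMalloc(&dMax, sizeof(hMax));
  cudaMemcpy(dI, hI, sizeof(hI), cudaMemcpyHostToDevice);
  rowMax<<<1, N>>>(dI, dMax, N, D);
  cudaMemcpy(hMax, dMax, sizeof(hMax), cudaMemcpyDeviceToHost);
  printf("%f %f\n", hMax[0], hMax[1]); // -1.000000 9.000000
  cudaFree(dI);
  cudaFree(dMax);
  return 0;
}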

include/tc/core/libraries.h
Lines changed: 4 additions & 0 deletions

@@ -36,7 +36,11 @@ typedef int int32;
 typedef long int64;
 typedef float float32;
 typedef double float64;
+)C";
 
+constexpr auto defines = R"C(
+#define inff __int_as_float(0x7f800000)
+#define inf __longlong_as_double(0x7ff0000000000000LL)
 )C";
 
 constexpr auto mathFunctionDecl = R"C(
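The two constants are the IEEE-754 bit patterns of +infinity (sign bit 0, all exponent bits set, zero mantissa): 0x7f800000 for 32-bit float and 0x7ff0000000000000 for 64-bit double. __int_as_float and __longlong_as_double are CUDA device intrinsics that reinterpret the bits without numeric conversion, so the macros are only usable in device code. A quick device-side sanity check (a sketch; the kernel name is made up):

#include <cstdio>
#include <cuda_runtime.h>

#define inff __int_as_float(0x7f800000)
#define inf __longlong_as_double(0x7ff0000000000000LL)

// Prints "1 1" if both macros decode to IEEE-754 +infinity on the device.
__global__ void checkInfinities() {
  printf("%d %d\n", isinf(inff) ? 1 : 0, isinf(inf) ? 1 : 0);
}

int main() {
  checkInfinities<<<1, 1>>>();
  cudaDeviceSynchronize();
  return 0;
}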

src/core/polyhedral/mapped_scop.cc
Lines changed: 2 additions & 1 deletion

@@ -564,7 +564,8 @@ std::tuple<std::string, tc::Grid, tc::Block> MappedScop::codegen(
   auto mappedScopForCodegen = makeSpecializedMappedScop(*this);
 
   std::stringstream code;
-  code << code::cpp::boundsAsTemplate << code::c::types << std::endl;
+  code << code::cpp::boundsAsTemplate << code::c::types << code::c::defines
+       << std::endl;
   if (mappedScopForCodegen->scop().treeSyncUpdateMap.size() != 0) {
     code << code::cuda::common;
     code << code::cuda::cubBlockReduce;
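With this change the preamble of every generated CUDA source carries the defines right after the type aliases, so kernel bodies can refer to inf and inff. Piecing together only the fragments visible in this diff (boundsAsTemplate is not shown here and is omitted), the emitted preamble now begins roughly like this:

typedef int int32;
typedef long int64;
typedef float float32;
typedef double float64;
#define inff __int_as_float(0x7f800000)
#define inf __longlong_as_double(0x7ff0000000000000LL)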

test/test_execution_engine.cc
Lines changed: 2 additions & 2 deletions

@@ -87,7 +87,7 @@ TEST_F(ATenCompilationUnitTest, DISABLED_SoftmaxB) {
       outputs);
 }
 
-TEST_F(ATenCompilationUnitTest, DISABLED_SoftmaxC) {
+TEST_F(ATenCompilationUnitTest, SoftmaxC) {
   at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
   std::vector<at::Tensor> inputs = {a};
   std::vector<at::Tensor> outputs;

@@ -114,7 +114,7 @@ TEST_F(ATenCompilationUnitTest, SoftmaxD) {
   Check(
       R"(
 def softmax(float(N, D) I) -> (O, maxVal, expDistance, expSum) {
-  maxVal(n) max= I(n, d)
+  maxVal(n) max=! I(n, d)
   expDistance(n, d) = exp(I(n, d) - maxVal(n))
   expSum(n) +=! expDistance(n, d)
   O(n, d) = expDistance(n, d) / expSum(n)
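SoftmaxC can come off the disabled list because the reduction's seed is now well defined. The sketch below (hypothetical, not TC-generated code) shows the failure mode that max=! with a proper -inf seed avoids: seeding a max with 0 injects a phantom element, so the computed maximum is wrong for any all-negative row, which in turn corrupts exp(I(n, d) - maxVal(n)) downstream.

#include <cstdio>
#include <cuda_runtime.h>

#define inff __int_as_float(0x7f800000)

// Reduces one all-negative row with a wrong seed (0) and a correct one (-inf).
__global__ void seedDemo(const float* row, int D, float* out) {
  float bad = 0.0f;    // wrong: acts as a phantom element of the row
  float good = -inff;  // correct: the neutral element of max
  for (int d = 0; d < D; ++d) {
    bad = fmaxf(bad, row[d]);
    good = fmaxf(good, row[d]);
  }
  out[0] = bad;  // 0.0, a value not present in the row at all
  out[1] = good; // -1.0, the true maximum
}

int main() {
  float h[3] = {-4.f, -1.f, -9.f}, res[2], *dRow, *dOut;
  cudaMalloc(&dRow, sizeof(h));
  cudaMalloc(&dOut, sizeof(res));
  cudaMemcpy(dRow, h, sizeof(h), cudaMemcpyHostToDevice);
  seedDemo<<<1, 1>>>(dRow, 3, dOut);
  cudaMemcpy(res, dOut, sizeof(res), cudaMemcpyDeviceToHost);
  printf("bad seed: %f, true max: %f\n", res[0], res[1]);
  cudaFree(dRow);
  cudaFree(dOut);
  return 0;
}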

test_python/layers/test_softmax.py
Lines changed: 1 addition & 1 deletion

@@ -27,7 +27,7 @@ class TestSoftmax(unittest.TestCase):
     def test_softmax(self):
         LANG = """
 def softmax(float(N, D) I) -> (O, maxVal, expDistance, expSum) {
-  maxVal(n) max= I(n, d)
+  maxVal(n) max=! I(n, d)
   expDistance(n, d) = exp(I(n, d) - maxVal(n))
   expSum(n) +=! expDistance(n, d)
   O(n, d) = expDistance(n, d) / expSum(n)
