From 874c04a39cf686d25514bfe9a40fb89afc627ea8 Mon Sep 17 00:00:00 2001
From: Joshua Bell
Date: Wed, 20 Mar 2024 15:27:08 -0700
Subject: [PATCH] Bugfix: Drop "re-throw" in MLActivation creation steps

The "create an MLActivation" steps don't throw, so there's no need to
re-throw.

Discussed in
https://github.com/webmachinelearning/webnn/pull/591#issuecomment-1970013201
---
 index.bs | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/index.bs b/index.bs
index d503d4fd..8ebcb5e7 100644
--- a/index.bs
+++ b/index.bs
@@ -1721,7 +1721,6 @@ partial interface MLGraphBuilder {
     1. If [=checking clamp options=] given |options| returns false, then [=exception/throw=] a {{TypeError}}.
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=], "clamp" and |options|.
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -3373,7 +3372,6 @@ partial interface MLGraphBuilder {
     The hardSigmoid(|options|) method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=], "hardSigmoid" and |options|.
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -3445,7 +3443,6 @@ partial interface MLGraphBuilder {
     The hardSwish() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "hardSwish".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -3726,7 +3723,6 @@ partial interface MLGraphBuilder {
     The leakyRelu(|options|) method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=], "leakyRelu" and |options|.
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -3809,7 +3805,6 @@ partial interface MLGraphBuilder {
     The linear(|options|) method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=], "linear" and |options|.
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -4922,7 +4917,6 @@ partial interface MLGraphBuilder {
     The relu() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "relu".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -5158,7 +5152,6 @@ partial interface MLGraphBuilder {
     The sigmoid() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "sigmoid".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -5265,7 +5258,6 @@ partial interface MLGraphBuilder {
     The softmax() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "softmax".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -5346,7 +5338,6 @@ partial interface MLGraphBuilder {
     The softplus(|options|) method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=], "softplus" and |options|.
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -5410,7 +5401,6 @@ partial interface MLGraphBuilder {
     The softsign() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "softsign".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
@@ -5558,7 +5548,6 @@ partial interface MLGraphBuilder {
     The tanh() method steps are:
     1. Let |op| be the result of [=creating an MLActivation=] given [=this=] and "tanh".
-    1. If that [=exception/throws=] an error, re-[=exception/throw=] the error.
     1. Return |op|.
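Usage sketch (illustrative only, not part of the patch): the JavaScript below
shows how the affected MLGraphBuilder activation overloads behave after this
change; option validation can still throw, but creating the MLActivation
itself never does. The context options and the comments about fused use are
assumptions for the example, not taken from this diff.

    async function buildActivations() {
      // Assumed setup: a WebNN-capable browser; "gpu" is an arbitrary choice.
      const context = await navigator.ml.createContext({ deviceType: "gpu" });
      const builder = new MLGraphBuilder(context);

      // clamp() still throws a TypeError if its options fail validation
      // (e.g. minValue > maxValue); once that check passes, "creating an
      // MLActivation" has no throwing step, which is why the re-throw
      // steps were redundant.
      const clampActivation = builder.clamp({ minValue: 0, maxValue: 6 });

      // The zero-argument overloads (relu, sigmoid, softmax, tanh, ...)
      // likewise return an MLActivation without any throwing step.
      const reluActivation = builder.relu();

      // Activations like these are then passed to ops that accept a fused
      // activation option, e.g. conv2d(input, filter, { activation: ... }).
      return { clampActivation, reluActivation };
    }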