Generate baseline data for gelu operator
BruceDai committed May 28, 2024
1 parent a7105c8 commit 991a9f0
Showing 4 changed files with 1,092 additions and 1 deletion.
6 changes: 5 additions & 1 deletion test/tools/gen-operator-with-single-input.js
@@ -4,12 +4,14 @@

 import path from 'path';
 import {softsign} from '../../src/softsign.js';
+import {gelu} from '../../src/gelu.js';
 import {Tensor} from '../../src/lib/tensor.js';
 import {utils} from './utils.js';

 (() => {
   function computeBySingleInput(operatorName, input, options = {}) {
     const operatorMappingDict = {
+      'gelu': gelu,
       'softsign': softsign,
     };
     const inputTensor = new Tensor(input.shape, input.data);
@@ -55,7 +57,9 @@ import {utils} from './utils.js';
             (typeof test.inputs[inputName].data === 'object' &&
              typeof test.inputs[inputName].data[0] === 'number') ?
                 test.inputs[inputName].data :
-                toSaveDataDict['inputsData'][test.inputs[inputName].data];
+                utils.getPrecisionDataFromDataDict(
+                    toSaveDataDict['inputsData'], test.inputs[inputName].data,
+                    test.inputs[inputName].type);
       }
       // update weights (scale, bias, and etc.) data of options
       if (test.options) {
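The change above only wires `gelu` into the generator's operator mapping; the `src/gelu.js` implementation itself is not part of the lines shown here. For context, GELU is defined as gelu(x) = 0.5 · x · (1 + erf(x / √2)). The sketch below is a minimal, self-contained illustration of that formula, not the repository's actual code, and the `erf` approximation in it is an assumption chosen only for illustration.

```js
// Sketch of an element-wise GELU reference (not the repository's src/gelu.js).
// gelu(x) = 0.5 * x * (1 + erf(x / sqrt(2)))

// Abramowitz & Stegun 7.1.26 approximation of erf(x); the real reference
// implementation may compute erf differently or more precisely.
function erf(x) {
  const sign = x < 0 ? -1 : 1;
  x = Math.abs(x);
  const t = 1 / (1 + 0.3275911 * x);
  const poly = (((((1.061405429 * t - 1.453152027) * t) + 1.421413741) * t -
      0.284496736) * t + 0.254829592) * t;
  return sign * (1 - poly * Math.exp(-x * x));
}

// Applies GELU element-wise to a flat array of numbers.
function geluReference(values) {
  return values.map((x) => 0.5 * x * (1 + erf(x / Math.SQRT2)));
}

// Example: geluReference([-1, 0, 1]) ≈ [-0.1587, 0, 0.8413]
```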
210 changes: 210 additions & 0 deletions test/tools/resources/gelu.json
@@ -0,0 +1,210 @@
{
  "tests": [
    {
      "name": "gelu float32 0D scalar",
      "inputs": {
        "input": {
          "shape": [],
          "data": "float64DataScalar",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [],
        "data": "float32DataScalar",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 0D scalar",
      "inputs": {
        "input": {
          "shape": [],
          "data": "float64DataScalar",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [],
        "data": "float16DataScalar",
        "type": "float16"
      }
    },
    {
      "name": "gelu float32 1D tensor",
      "inputs": {
        "input": {
          "shape": [24],
          "data": "float64Data",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [24],
        "data": "float32Data",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 1D tensor",
      "inputs": {
        "input": {
          "shape": [24],
          "data": "float64Data",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [24],
        "data": "float16Data",
        "type": "float16"
      }
    },
    {
      "name": "gelu float32 2D tensor",
      "inputs": {
        "input": {
          "shape": [4, 6],
          "data": "float64Data",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [4, 6],
        "data": "float32Data",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 2D tensor",
      "inputs": {
        "input": {
          "shape": [4, 6],
          "data": "float64Data",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [4, 6],
        "data": "float16Data",
        "type": "float16"
      }
    },
    {
      "name": "gelu float32 3D tensor",
      "inputs": {
        "input": {
          "shape": [2, 3, 4],
          "data": "float64Data",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 3, 4],
        "data": "float32Data",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 3D tensor",
      "inputs": {
        "input": {
          "shape": [2, 3, 4],
          "data": "float64Data",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 3, 4],
        "data": "float16Data",
        "type": "float16"
      }
    },
    {
      "name": "gelu float32 4D tensor",
      "inputs": {
        "input": {
          "shape": [2, 2, 2, 3],
          "data": "float64Data",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 2, 2, 3],
        "data": "float32Data",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 4D tensor",
      "inputs": {
        "input": {
          "shape": [2, 2, 2, 3],
          "data": "float64Data",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 2, 2, 3],
        "data": "float16Data",
        "type": "float16"
      }
    },
    {
      "name": "gelu float32 5D tensor",
      "inputs": {
        "input": {
          "shape": [2, 1, 4, 1, 3],
          "data": "float64Data",
          "type": "float32"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 1, 4, 1, 3],
        "data": "float32Data",
        "type": "float32"
      }
    },
    {
      "name": "gelu float16 5D tensor",
      "inputs": {
        "input": {
          "shape": [2, 1, 4, 1, 3],
          "data": "float64Data",
          "type": "float16"
        }
      },
      "expected": {
        "name": "output",
        "shape": [2, 1, 4, 1, 3],
        "data": "float16Data",
        "type": "float16"
      }
    }
  ],
  "inputsData": {
    "float64DataScalar": {
      "shape": [1],
      "type": "float64"
    },
    "float64Data": {
      "shape": [24],
      "type": "float64"
    }
  },
  "inputsDataRange": {
    "max": 1,
    "min": -1
  }
}
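Taken together with the JavaScript change above, a spec file like this appears to drive baseline generation roughly as follows: one float64 source array is generated per entry in "inputsData" within "inputsDataRange", each test then reuses that data at its own shape and precision, and the operator output is saved as the expected baseline. The sketch below illustrates that flow under assumed helper names (`randomArray`, `castToPrecision`); the actual helpers live in `utils.js` and `gen-operator-with-single-input.js`, whose full contents are not shown in this commit.

```js
// Hedged sketch of how a spec like gelu.json could drive baseline generation.
// Helper names here are illustrative, not the actual utils.js API.

import fs from 'fs';

// Uniform random float64 values in [min, max].
function randomArray(size, min, max) {
  return Array.from({length: size}, () => min + Math.random() * (max - min));
}

// Round-trips float64 data through the target precision so that saved inputs
// match what an implementation at that precision would actually receive.
function castToPrecision(data, type) {
  if (type === 'float32') {
    return Array.from(new Float32Array(data));
  }
  // float16 would need a dedicated rounding helper; pass float64 through here.
  return data;
}

const spec = JSON.parse(
    fs.readFileSync('test/tools/resources/gelu.json', 'utf8'));
const {min, max} = spec.inputsDataRange;

// One shared float64 source array per entry in "inputsData".
const sourceData = {};
for (const [name, info] of Object.entries(spec.inputsData)) {
  const size = info.shape.reduce((a, b) => a * b, 1);
  sourceData[name] = randomArray(size, min, max);
}

// Each test reuses the named source data at its own precision.
for (const test of spec.tests) {
  const input = test.inputs.input;
  const data = castToPrecision(sourceData[input.data], input.type);
  // ...compute gelu over `data`, then save the result under the
  // name/shape/type given in test.expected as this case's baseline.
}
```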