index.js (forked from microsoft/onnxjs)
// Load ONNX.js and bind it to a local instead of relying on the global `onnx` object it installs.
const onnx = require('onnxjs');
const assert = require('assert');

async function main() {
  // Create an ONNX inference session with the WebAssembly backend.
  const session = new onnx.InferenceSession({backendHint: 'wasm'});

  // Load the ONNX model. "add.onnx" adds two [3, 4, 5] float tensors element-wise.
  await session.loadModel("./add.onnx");

  // Build the two inputs: 60 ones and 60 twos, each shaped as [3, 4, 5].
  const x = new Float32Array(3 * 4 * 5).fill(1);
  const y = new Float32Array(3 * 4 * 5).fill(2);
  const tensorX = new onnx.Tensor(x, 'float32', [3, 4, 5]);
  const tensorY = new onnx.Tensor(y, 'float32', [3, 4, 5]);

  // Run the model with Tensor inputs and fetch the result by the output name defined in the model.
  const outputMap = await session.run([tensorX, tensorY]);
  const outputData = outputMap.get('sum');

  // Check that the result has the expected shape and that every element is 1 + 2 = 3.
  assert.deepEqual(outputData.dims, [3, 4, 5]);
  assert(outputData.data.every((value) => value === 3));
  console.log(`Got a Tensor of size ${outputData.data.length} with all elements being ${outputData.data[0]}`);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
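Usage note (a sketch of the expected setup, not verified here): assuming Node.js with the onnxjs package installed (npm install onnxjs) and an add.onnx model in the same directory whose output is named 'sum', running node index.js should print a Tensor of 60 elements with every value equal to 3.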