diff --git a/js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts b/js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts
index 6e66abacf347..cfa0b42ef9ee 100644
--- a/js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts
+++ b/js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts
@@ -30,6 +30,10 @@ export const getActivationSnippet =
           baseType}(uniforms.beta)));`;
     case 'LeakyRelu':
       return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;
+    case 'Tanh':
+      return `let e2x = exp(-2.0 * abs(value));
+      value = sign(value) * (1.0 - e2x) / (1.0 + e2x);
+      `;
     case '':
       return '';
     // TODO: adding other activations that can be fused.
diff --git a/js/web/test/data/ops/fused-conv.jsonc b/js/web/test/data/ops/fused-conv.jsonc
index 6a10e3b96a26..d88c91ebc9de 100644
--- a/js/web/test/data/ops/fused-conv.jsonc
+++ b/js/web/test/data/ops/fused-conv.jsonc
@@ -430,5 +430,38 @@
         ]
       }
     ]
+  },
+  {
+    "name": "fused conv with tanh",
+    "operator": "FusedConv",
+    "attributes": [
+      { "name": "activation", "data": "Tanh", "type": "string" },
+      { "name": "kernel_shape", "data": [2, 2], "type": "ints" }
+    ],
+    "opset": { "domain": "com.microsoft", "version": 1 },
+    "cases": [
+      {
+        "name": "T[0]",
+        "inputs": [
+          {
+            "data": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
+            "dims": [1, 1, 3, 3],
+            "type": "float32"
+          },
+          {
+            "data": [0.11, 0.12, 0.13, 0.14],
+            "dims": [1, 1, 2, 2],
+            "type": "float32"
+          }
+        ],
+        "outputs": [
+          {
+            "data": [0.15572261810302734, 0.20409323275089264, 0.29770541191101074, 0.3425688147544861],
+            "dims": [1, 1, 2, 2],
+            "type": "float32"
+          }
+        ]
+      }
+    ]
   }
 ]
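
Note (not part of the diff): the WGSL snippet computes tanh through the identity tanh(x) = sign(x) * (1 - e^(-2|x|)) / (1 + e^(-2|x|)); using -2.0 * abs(value) keeps the exponent non-positive, so exp() cannot overflow for large inputs. As a sanity check, the TypeScript sketch below re-derives the expected outputs of the new test case on the CPU with the same formulation. The stableTanh helper and the hand-rolled convolution loop are illustrative only and are not part of the PR.

// tanh written exactly as in the WGSL activation snippet above.
const stableTanh = (x: number): number => {
  const e2x = Math.exp(-2.0 * Math.abs(x));
  return (Math.sign(x) * (1.0 - e2x)) / (1.0 + e2x);
};

// 2x2 valid convolution over the 3x3 test input (N = C = 1), then the fused activation.
const input = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]; // dims [1, 1, 3, 3]
const kernel = [0.11, 0.12, 0.13, 0.14]; // dims [1, 1, 2, 2]

const output: number[] = [];
for (let oy = 0; oy < 2; oy++) {
  for (let ox = 0; ox < 2; ox++) {
    let acc = 0;
    for (let ky = 0; ky < 2; ky++) {
      for (let kx = 0; kx < 2; kx++) {
        acc += input[(oy + ky) * 3 + (ox + kx)] * kernel[ky * 2 + kx];
      }
    }
    output.push(stableTanh(acc)); // activation applied to the convolution result
  }
}

// ~[0.155723, 0.204093, 0.297705, 0.342569] in float64, matching the expected
// float32 "data" values in the test case above up to rounding.
console.log(output);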