1 parent 555b295 commit 1053e82
src/tilegym/ops/activation.py
@@ -24,7 +24,7 @@ def relu(x: torch.Tensor) -> torch.Tensor:
     raise NotImplementedError(f"relu is not implemented for {get_current_backend()}")


-@dispatch("gelu", fallback_backend="triton")
+@dispatch("gelu")
 def gelu(x: torch.Tensor, approximate: str = "none") -> torch.Tensor:
     """
     Applies the Gaussian Error Linear Unit function element-wise.
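The change above removes the explicit Triton fallback from the gelu dispatch registration, presumably leaving backend selection entirely to the dispatcher. A minimal usage sketch, assuming tilegym exposes gelu from tilegym.ops.activation and a backend is already configured; only the function name, module path, and signature are taken from the diff, the rest is illustrative:

import torch
from tilegym.ops.activation import gelu

# Hypothetical usage; backend setup is not shown in this diff.
x = torch.randn(1024, device="cuda")
y = gelu(x, approximate="none")  # dispatched to the active backend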