Skip to content

Commit 6bfb695

Browse files
committed
Change activation_fn default
1 parent 485dd34 commit 6bfb695

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

sharktank/sharktank/layers/ffn_block.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ def __init__(
2424
self,
2525
theta: Theta,
2626
is_gated: bool = True,
27-
activation_fn: Optional[Callable[[AnyTensor], AnyTensor]] = None,
27+
activation_fn: Callable[[AnyTensor], AnyTensor] = F.silu,
2828
):
2929
super().__init__(theta)
3030

3131
self.is_gated = is_gated
32-
self.activation_fn = activation_fn or F.silu
32+
self.activation_fn = activation_fn
3333
if self.is_gated:
3434
self.add_module("ffn_gate", LinearLayer(theta("ffn_gate")))
3535
self.add_module("ffn_up", LinearLayer(theta("ffn_up")))

0 commit comments

Comments (0)