Skip to content

Commit 300676a

Browse files
committed
stabilize fp16 training
1 parent eb3b606 commit 300676a

1 file changed

Lines changed: 2 additions & 2 deletions

File tree

modules/commons/common_layers.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -120,8 +120,8 @@ def forward(self, x):
120120
gate_min, gate_max = torch.aminmax(gate.detach())
121121
max_abs_out = torch.max(-out_min, out_max).float()
122122
max_abs_gate = torch.max(-gate_min, gate_max).float()
123-
if max_abs_out * max_abs_gate > 65504:
124-
return (out.float() * gate.float()).clamp(-65504, 65504).half()
123+
if max_abs_out * max_abs_gate > 1000:
124+
return (out.float() * gate.float()).clamp(-1000, 1000).half()
125125
return out * gate
126126

127127

0 commit comments

Comments (0)