Skip to content

Commit

Permalink
🔧 Refactor heatmap activation and improve numerical stability in SoftmaxND
Browse files Browse the repository at this point in the history
  • Loading branch information
jejon committed Nov 22, 2024
1 parent c275b1c commit b0487de
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 2 deletions.
1 change: 0 additions & 1 deletion src/landmarker/heatmap/decoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,6 @@ def _activate_norm_heatmap(
if activation is not None:
if activation == "softmax":
heatmap = torch.exp(t * heatmap)
heatmap = heatmap / torch.sum(heatmap, dim=dim, keepdim=True)
elif activation == "sigmoid":
heatmap = torch.sigmoid(heatmap)
elif activation == "ReLU":
Expand Down
2 changes: 1 addition & 1 deletion src/landmarker/models/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ def __init__(self, spatial_dims):
self.dim = (-2, -1) if spatial_dims == 2 else (-3, -2, -2)

def forward(self, x):
    """Softmax over the spatial dimensions ``self.dim`` of ``x``.

    Subtracting the per-sample spatial maximum before exponentiating is the
    standard log-sum-exp stabilization: it prevents ``exp`` overflow for
    large logits and cancels exactly in the normalization, so the result is
    mathematically unchanged.

    Args:
        x: input tensor; ``self.dim`` indexes its spatial axes
           (presumably the trailing 2 or 3 dims per ``__init__`` — confirm).

    Returns:
        Tensor of the same shape as ``x`` whose entries are non-negative
        and sum to 1 over the axes in ``self.dim``.
    """
    # torch.max does not accept a tuple of dims; torch.amax does, and it
    # also returns the values directly (no ``[0]`` indexing needed).
    shifted = x - torch.amax(x, dim=self.dim, keepdim=True)
    out = torch.exp(shifted)
    return out / torch.sum(out, dim=self.dim, keepdim=True)


Expand Down

0 comments on commit b0487de

Please sign in to comment.