Skip to content

Commit

Permalink
Fix concatenation error when the utterance tensor is on a CUDA device (zero-padding was created on CPU)
Browse files Browse the repository at this point in the history
  • Loading branch information
yistLin committed May 20, 2021
1 parent 876394a commit a7ac334
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions modules/dvector.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,8 @@ def embed_utterance(self, utterance: Tensor) -> Tensor:
# Pad to multiple of hop length
hop_len = self.seg_len // 2
tgt_len = math.ceil(utterance.size(0) / hop_len) * hop_len
padded = torch.cat(
[utterance, torch.zeros(tgt_len - utterance.size(0), utterance.size(1))]
)
zero_padding = torch.zeros(tgt_len - utterance.size(0), utterance.size(1))
padded = torch.cat([utterance, zero_padding.to(utterance.device)])

segments = padded.unfold(0, self.seg_len, self.seg_len // 2)
segments = segments.transpose(1, 2) # (batch, seg_len, mel_dim)
Expand Down

0 comments on commit a7ac334

Please sign in to comment.