From b7dfde82e914d73d1ca4d95e45291df7698167eb Mon Sep 17 00:00:00 2001
From: areiner222
Date: Tue, 25 Mar 2025 12:58:06 -0400
Subject: [PATCH] Update padding.py

For tensors of dim==3, the order of the padding is incorrect (this became
evident when testing examples with EmbeddingOps). I've updated the pad_diff
to apply to the sequence axis.
---
 transformers4rec/torch/utils/padding.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/transformers4rec/torch/utils/padding.py b/transformers4rec/torch/utils/padding.py
index b34333ad20..5d11dbfb38 100644
--- a/transformers4rec/torch/utils/padding.py
+++ b/transformers4rec/torch/utils/padding.py
@@ -25,7 +25,7 @@ def _pad_dense_tensor(t: torch.Tensor, length: int) -> torch.Tensor:
         return F.pad(input=t, pad=(0, pad_diff, 0, 0))
     elif len(t.shape) == 3:
         pad_diff = length - t.shape[1]
-        return F.pad(input=t, pad=(0, pad_diff, 0, 0, 0, 0))
+        return F.pad(input=t, pad=(0, 0, 0, pad_diff, 0, 0))
     else:
         return t
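
Note (not part of the patch): a minimal standalone sketch of why the pad tuple
is reordered, assuming the 3D tensors have a (batch, sequence, feature) layout
as the commit message implies. torch.nn.functional.pad reads the pad tuple from
the last dimension backwards, so (0, 0, 0, pad_diff, 0, 0) leaves the feature
axis untouched and right-pads the sequence axis, whereas the old tuple padded
the feature axis instead.

    import torch
    import torch.nn.functional as F

    t = torch.ones(2, 3, 4)          # (batch=2, seq=3, feature=4)
    length = 5
    pad_diff = length - t.shape[1]   # amount needed on the sequence axis

    # Old ordering: pads the last (feature) dimension.
    old = F.pad(input=t, pad=(0, pad_diff, 0, 0, 0, 0))
    # New ordering: pads the middle (sequence) dimension.
    new = F.pad(input=t, pad=(0, 0, 0, pad_diff, 0, 0))

    print(old.shape)  # torch.Size([2, 3, 6])  -- feature axis grew, not intended
    print(new.shape)  # torch.Size([2, 5, 4])  -- sequence axis padded to `length`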