test(core): fix FeedForward and MultiHeadAttention tests for unified interface and tuple outputs

This commit is contained in:
Sergey Penkovsky
2025-10-05 19:26:18 +03:00
parent c39e68d71a
commit 3843e64098
3 changed files with 14 additions and 14 deletions

View File

@@ -98,7 +98,7 @@ def test_multi_head_attention():
batch_size, seq_len = 2, 16
inputs = torch.randn(batch_size, seq_len, emb_size)
-    output = attention(inputs)
+    output, _ = attention(inputs)
assert output.shape == inputs.shape
print("✅ Multi-head attention test passed")