From 13d1d7c6cf067f812f9c0940dda971e8f5e522ef Mon Sep 17 00:00:00 2001
From: Deep Patel <35742688+deepsworld@users.noreply.github.com>
Date: Thu, 2 Mar 2023 17:01:50 -0500
Subject: [PATCH] Fix hardcoded heads in `BoxMultiHeadedAttention`

---
 models/RelationTransformerModel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/models/RelationTransformerModel.py b/models/RelationTransformerModel.py
index 189d3ab..0d70c52 100644
--- a/models/RelationTransformerModel.py
+++ b/models/RelationTransformerModel.py
@@ -267,7 +267,7 @@ def __init__(self, h, d_model, trignometric_embedding=True, legacy_extra_skip=Fa
         #matrices W_q, W_k, W_v, and one last projection layer
         self.linears = clones(nn.Linear(d_model, d_model), 4)
-        self.WGs = clones(nn.Linear(geo_feature_dim, 1, bias=True), 8)
+        self.WGs = clones(nn.Linear(geo_feature_dim, 1, bias=True), h)
         self.attn = None
         self.dropout = nn.Dropout(p=dropout)
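
The sketch below illustrates why the change matters: `self.WGs` holds one scalar projection of the geometric features per attention head, so cloning a fixed 8 modules breaks any configuration with `h != 8`. This is a minimal, hypothetical reconstruction rather than the repository's full class; the `clones` helper, the `BoxMultiHeadedAttentionSketch` name, the `geo_feature_dim` values, and the `geometric_bias` method are assumptions for illustration only.

# Minimal sketch, assuming the repo's clones() helper and a 64-dim
# trigonometric (or 4-dim raw) geometric feature; not the full implementation.
import copy
import torch
import torch.nn as nn


def clones(module, N):
    # N independent deep copies of a module, mirroring the repo's helper.
    return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])


class BoxMultiHeadedAttentionSketch(nn.Module):
    def __init__(self, h, d_model, trignometric_embedding=True, dropout=0.1):
        super().__init__()
        assert d_model % h == 0
        self.h = h
        self.d_k = d_model // h
        # Assumed feature size: 64 with trigonometric embedding, 4 for raw box deltas.
        geo_feature_dim = 64 if trignometric_embedding else 4
        # W_q, W_k, W_v and the final output projection.
        self.linears = clones(nn.Linear(d_model, d_model), 4)
        # One geometric-bias projection per head: `h`, not a hardcoded 8.
        self.WGs = clones(nn.Linear(geo_feature_dim, 1, bias=True), h)
        self.dropout = nn.Dropout(p=dropout)

    def geometric_bias(self, box_relation_embds):
        # box_relation_embds: (batch, n_boxes, n_boxes, geo_feature_dim)
        flat = box_relation_embds.view(-1, box_relation_embds.size(-1))
        # Apply each head's WG and stack -> (batch, h, n_boxes, n_boxes);
        # with a hardcoded 8 this loop would not match self.h.
        per_head = [wg(flat).view(box_relation_embds.shape[:3]) for wg in self.WGs]
        return torch.stack(per_head, dim=1)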