From 0557b164164c84a48da1590aa49f62c4e938c28f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?BU=20Fanchen=20=E5=8D=9C=E5=87=A1=E8=BE=B0?=
Date: Fri, 5 Apr 2024 10:22:33 +0900
Subject: [PATCH] Update attention.py

Fix a small line-break typo that makes the rendered docstring look
strange. See
https://rl4co.readthedocs.io/en/latest/_content/api/models/zoo.html#rl4co.models.zoo.ham.attention.HeterogenousMHA:~:text=possible%20(Mask%20_sphinx_paramlinks_rl4co.models.zoo.ham.attention.HeterogenousMHA.forward.should%20contain%201%20if%20attention%20is%20not)%20%E2%80%93
---
 rl4co/models/zoo/ham/attention.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/rl4co/models/zoo/ham/attention.py b/rl4co/models/zoo/ham/attention.py
index 4636b88d..0c4d593e 100644
--- a/rl4co/models/zoo/ham/attention.py
+++ b/rl4co/models/zoo/ham/attention.py
@@ -56,7 +56,8 @@ def forward(self, q, h=None, mask=None):
             q: queries (batch_size, n_query, input_dim)
             h: data (batch_size, graph_size, input_dim)
             mask: mask (batch_size, n_query, graph_size) or viewable as that (i.e. can be 2 dim if n_query == 1)
-                Mask should contain 1 if attention is not possible (i.e. mask is negative adjacency)
+
+        Mask should contain 1 if attention is not possible (i.e. mask is negative adjacency)
         """
         if h is None:
             h = q  # compute self-attention
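
For reference, here is a minimal sketch of the mask convention the amended
docstring describes, written in plain PyTorch rather than through
HeterogenousMHA itself (whose constructor arguments are not shown in this
patch): entries equal to 1 mark query/node pairs where attention is not
allowed.

    import torch

    # The docstring's mask convention: mask[b, i, j] == 1 means query i in
    # batch b may NOT attend to graph node j (a "negative adjacency").
    batch_size, n_query, graph_size, input_dim = 2, 3, 5, 8
    q = torch.randn(batch_size, n_query, input_dim)
    h = torch.randn(batch_size, graph_size, input_dim)

    # Forbid attention from every query to the first graph node.
    mask = torch.zeros(batch_size, n_query, graph_size, dtype=torch.bool)
    mask[:, :, 0] = True

    # Scaled dot-product scores; masked pairs are set to -inf so they
    # receive zero weight after the softmax.
    scores = q @ h.transpose(-2, -1) / input_dim**0.5
    scores = scores.masked_fill(mask, float("-inf"))
    attn = torch.softmax(scores, dim=-1)

    # Masked positions get exactly zero attention weight.
    assert torch.all(attn[:, :, 0] == 0)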