
Revert "Merge pull request #747 from AIStream-Peelout/crossformer_fixes"
This reverts commit 224b67c, reversing
changes made to 9074dc0.
isaacmg committed May 7, 2024
1 parent 224b67c commit 2dd6d0d
Showing 3 changed files with 3 additions and 7 deletions.
6 changes: 1 addition & 5 deletions flood_forecast/transformer_xl/cross_former.py
@@ -19,7 +19,6 @@ def __init__(
         e_layers=3,
         dropout=0.0,
         baseline=False,
-        n_targs=None,
         device=torch.device("cuda:0"),
     ):
         """Crossformer: Transformer Utilizing Cross-Dimension Dependency for Multivariate Time Series Forecasting.
@@ -58,7 +57,6 @@ def __init__(
         self.out_len = forecast_length
         self.seg_len = seg_len
         self.merge_win = win_size
-        self.n_targs = n_time_series if n_targs is None else n_targs
 
         self.baseline = baseline
 
@@ -128,9 +126,7 @@ def forward(self, x_seq: torch.Tensor):
         )
         predict_y = self.decoder(dec_in, enc_out)
 
-        result = base + predict_y[:, : self.out_len, :]
-        res = result[:, :, :self.n_targs]
-        return res
+        return base + predict_y[:, : self.out_len, :]
 
 
 class SegMerging(nn.Module):
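For readers skimming the diff: the effect of this revert on Crossformer's forward pass is that the model once again returns a forecast for every input series, rather than slicing the output down to the first n_targs channels as PR #747 did. A minimal sketch of the two behaviors, using illustrative (assumed) tensor shapes; only the slicing logic mirrors the diff above:

import torch

# Illustrative shapes only: (batch, forecast_length, n_time_series)
batch, out_len, n_time_series, n_targs = 4, 24, 8, 3
base = torch.zeros(batch, out_len, n_time_series)
predict_y = torch.randn(batch, out_len, n_time_series)

# Behavior being reverted (PR #747): keep only the first n_targs channels.
result = base + predict_y[:, :out_len, :]
res = result[:, :, :n_targs]
print(res.shape)  # torch.Size([4, 24, 3])

# Restored behavior: return all n_time_series channels.
print((base + predict_y[:, :out_len, :]).shape)  # torch.Size([4, 24, 8])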
2 changes: 1 addition & 1 deletion flood_forecast/transformer_xl/multi_head_base.py
@@ -5,7 +5,7 @@


 class MultiAttnHeadSimple(torch.nn.Module):
-    """A simple multi-head attention model inspired by Vas.,wani et al."""
+    """A simple multi-head attention model inspired by Vaswani et al."""
 
     def __init__(
         self,
2 changes: 1 addition & 1 deletion setup.py
@@ -16,7 +16,7 @@

 setup(
     name='flood_forecast',
-    version='1.0001dev',
+    version='1.001dev',
     packages=[
         'flood_forecast',
         'flood_forecast.transformer_xl',
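A quick way to confirm the restored version string once the package is reinstalled from this commit (a sketch assuming a standard `pip install .` of this checkout):

from importlib.metadata import version

# Assumes flood_forecast was reinstalled from this commit.
# Note: setuptools may PEP 440-normalize '1.001dev' to '1.001.dev0'.
print(version("flood_forecast"))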
