From 8bee499328e9d242361fb11289b63bca6bf17d9d Mon Sep 17 00:00:00 2001 From: Yinon Polak Date: Sun, 19 Mar 2023 17:03:36 +0200 Subject: [PATCH] modify feedforward net, move layer norm to start of the block --- freqtrade/freqai/prediction_models/PyTorchMLPModel.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/freqtrade/freqai/prediction_models/PyTorchMLPModel.py b/freqtrade/freqai/prediction_models/PyTorchMLPModel.py index 91e496c5d..482b3f889 100644 --- a/freqtrade/freqai/prediction_models/PyTorchMLPModel.py +++ b/freqtrade/freqai/prediction_models/PyTorchMLPModel.py @@ -22,7 +22,7 @@ class PyTorchMLPModel(nn.Module): def forward(self, x: Tensor) -> Tensor: x = self.relu(self.input_layer(x)) x = self.dropout(x) - x = self.relu(self.blocks(x)) + x = self.blocks(x) logits = self.output_layer(x) return logits @@ -35,8 +35,8 @@ class Block(nn.Module): self.ln = nn.LayerNorm(hidden_dim) def forward(self, x): - x = self.dropout(self.ff(x)) - x = self.ln(x) + x = self.ff(self.ln(x)) + x = self.dropout(x) return x @@ -46,7 +46,6 @@ class FeedForward(nn.Module): self.net = nn.Sequential( nn.Linear(hidden_dim, hidden_dim), nn.ReLU(), - nn.Linear(hidden_dim, hidden_dim), ) def forward(self, x):