clean code

Yinon Polak
2023-03-20 19:28:30 +02:00
parent bf4aa91aab
commit 6b4d9f97c1
3 changed files with 4 additions and 5 deletions


@@ -36,7 +36,7 @@ class PyTorchMLPModel(nn.Module):
"""
def __init__(self, input_dim: int, output_dim: int, **kwargs):
super(PyTorchMLPModel, self).__init__()
super().__init__()
hidden_dim: int = kwargs.get("hidden_dim", 256)
dropout_percent: int = kwargs.get("dropout_percent", 0.2)
n_layer: int = kwargs.get("n_layer", 1)
@@ -65,7 +65,7 @@ class Block(nn.Module):
"""
def __init__(self, hidden_dim: int, dropout_percent: int):
super(Block, self).__init__()
super().__init__()
self.ff = FeedForward(hidden_dim)
self.dropout = nn.Dropout(p=dropout_percent)
self.ln = nn.LayerNorm(hidden_dim)
@@ -85,7 +85,7 @@ class FeedForward(nn.Module):
"""
def __init__(self, hidden_dim: int):
super(FeedForward, self).__init__()
super().__init__()
self.net = nn.Sequential(
nn.Linear(hidden_dim, hidden_dim),
nn.ReLU(),

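For reference, the hunks above replace the legacy two-argument form super(ClassName, self).__init__() with the equivalent zero-argument super().__init__() available in Python 3. A minimal standalone sketch of the idiom; TinyBlock and its layers are illustrative only, not code from this repository:

    import torch.nn as nn


    class TinyBlock(nn.Module):
        """Illustrative module; the zero-argument super() call below is
        equivalent to the legacy super(TinyBlock, self).__init__() form."""

        def __init__(self, hidden_dim: int = 256, dropout_percent: float = 0.2):
            super().__init__()  # modern form, as adopted by this commit
            self.net = nn.Sequential(
                nn.Linear(hidden_dim, hidden_dim),
                nn.ReLU(),
                nn.Dropout(p=dropout_percent),
            )

        def forward(self, x):
            return self.net(x)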

@@ -47,4 +47,4 @@ class PyTorchRegressor(BasePyTorchModel):
         y = self.model.model(x)
         pred_df = DataFrame(y.detach().numpy(), columns=[dk.label_list[0]])
-        return (pred_df, dk.do_predict)
+        return (pred_df, dk.do_predict)
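For reference, the hunk above wraps the model's tensor output in a pandas DataFrame before returning it alongside dk.do_predict. A minimal sketch of that conversion step in isolation; the tensor, label name, and shape below are made up for illustration:

    import torch
    from pandas import DataFrame

    # Stand-ins for the model output and the data-kitchen label list used in
    # PyTorchRegressor.predict(); the values here are hypothetical.
    y = torch.randn(8, 1)
    label_list = ["&-target"]

    pred_df = DataFrame(y.detach().numpy(), columns=[label_list[0]])
    print(pred_df.shape)  # -> (8, 1)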