alkkofficial

Untitled

Jun 25th, 2023
import torch
import torch.nn as nn
import torch.nn.init as init

# The paste relies on a global device handle `d`; it is defined here so the
# snippet is self-contained (this definition is assumed, not in the original).
d = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class Tanh200(nn.Module):
    """Scaled tanh activation: squashes x / 200 into (-1, 1)."""

    def forward(self, x):
        return torch.tanh(x / 200)


class Agent(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(833, 2048, dtype=torch.float32).to(d)
        self.bn1 = nn.BatchNorm1d(2048, dtype=torch.float32).to(d)
        self.dropout1 = nn.Dropout(p=0.45)
        self.relu = nn.LeakyReLU(0.05)
        self.layer2 = nn.Linear(2048, 1, dtype=torch.float32).to(d)
        self.dropout2 = nn.Dropout(p=0.45)
        self.tanh200 = Tanh200()
        self.hidden_layers = nn.ModuleList()

        # Initialize weights of the Linear layers with Xavier initialization.
        # init.xavier_uniform_ modifies the weight in place; the original
        # chained .to(d) onto its return value, which only built a copy that
        # was immediately discarded.
        init.xavier_uniform_(self.fc1.weight)
        init.xavier_uniform_(self.layer2.weight)

        self.loss = nn.MSELoss()

    def forward(self, x):
        # Every layer already lives on `d`, so the per-call .to(d) from the
        # original is redundant; the caller just needs to move x onto `d`.
        x = self.fc1(x)
        x = self.bn1(x)
        x = self.dropout1(x)
        x = self.relu(x)
        x = self.layer2(x)
        x = self.dropout2(x)
        x = self.tanh200(x)
        return x
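For reference, here is a minimal usage sketch. The batch size, optimizer, learning rate, and dummy data below are illustrative assumptions, not part of the original paste; the only details taken from the code are the 833-dimensional float input, the MSE loss stored on the model, and the (-1, 1) output range of Tanh200.

# Minimal usage sketch (assumed: batch size 16, Adam with lr=1e-3).
model = Agent().to(d)

# BatchNorm1d requires batch_size > 1 in training mode.
x = torch.randn(16, 833, dtype=torch.float32, device=d)
target = torch.randn(16, 1, device=d).clamp(-1, 1)  # dummy targets in Tanh200's range

optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

model.train()
pred = model(x)                  # shape: (16, 1)
loss = model.loss(pred, target)  # the MSELoss attached to the model
optimizer.zero_grad()
loss.backward()
optimizer.step()

# Inference: eval() disables dropout and switches BatchNorm to running stats,
# which also allows a single-sample batch.
model.eval()
with torch.no_grad():
    score = model(torch.randn(1, 833, device=d))
print(score.item())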