
example_torchcode.py 705 B

import torch
import torch.nn as nn


class Model(nn.Module):
    """
    Simple model that performs a ReLU activation.
    """

    def __init__(self):
        super(Model, self).__init__()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies ReLU activation to the input tensor.

        Args:
            x (torch.Tensor): Input tensor of any shape.

        Returns:
            torch.Tensor: Output tensor with ReLU applied, same shape as input.
        """
        return torch.relu(x)


batch_size = 16
dim = 16384


def get_inputs():
    x = torch.randn(batch_size, dim)
    return [x]


def get_init_inputs():
    return []  # No special initialization inputs needed
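
For reference, a minimal sketch of how a driver might exercise this file: construct the model from get_init_inputs() and run a forward pass on get_inputs(). The driver below is an assumption for illustration, not part of this repository.

# Hypothetical driver, not part of example_torchcode.py.
if __name__ == "__main__":
    model = Model(*get_init_inputs())   # get_init_inputs() is empty, so Model()
    inputs = get_inputs()               # a single (16, 16384) random tensor
    output = model(*inputs)             # applies ReLU elementwise
    print(output.shape)                 # torch.Size([16, 16384]), same as input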