import torch
import torch.nn as nn


class Model(nn.Module):
    """
    Simple model that performs a ReLU activation.
    """
    def __init__(self):
        super(Model, self).__init__()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies ReLU activation to the input tensor.

        Args:
            x (torch.Tensor): Input tensor of any shape.

        Returns:
            torch.Tensor: Output tensor with ReLU applied, same shape as input.
        """
        return torch.relu(x)


batch_size = 16
dim = 16384


def get_inputs():
    x = torch.randn(batch_size, dim)
    return [x]


def get_init_inputs():
    return []  # No special initialization inputs needed
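

# A minimal usage sketch (an assumption, not part of the original file): the
# get_inputs()/get_init_inputs() pair suggests a benchmark-harness convention
# in which the model is constructed with *get_init_inputs() and invoked with
# *get_inputs(). The block below illustrates that convention.
if __name__ == "__main__":
    model = Model(*get_init_inputs())
    out = model(*get_inputs())
    print(out.shape)  # torch.Size([16, 16384]); same shape as the input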