import torch
from torch.cuda.amp import autocast, GradScaler
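
# Note: recent PyTorch releases expose the same utilities under torch.amp
# (torch.amp.autocast / torch.amp.GradScaler); torch.cuda.amp still works
# but may emit a deprecation warning depending on your version.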


def train(epochs):
    # GradScaler applies dynamic loss scaling so small fp16 gradients
    # do not underflow to zero during the backward pass.
    scaler = GradScaler()
    x = torch.randn(3, 100).cuda()
    y = torch.randn(3, 5).cuda()
    model = torch.nn.Linear(100, 5).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
    for _ in range(epochs):
        optimizer.zero_grad()
        # Run the forward pass in mixed precision.
        with autocast():
            y_pred = model(x)
            loss = torch.nn.functional.mse_loss(y_pred, y)
        # Backward on the scaled loss; step() unscales the gradients before
        # the optimizer update, and update() adjusts the scale factor.
        scaler.scale(loss).backward()
        scaler.step(optimizer)
        scaler.update()
    print("training complete")


if __name__ == "__main__":
    if torch.cuda.is_available():
        train(10)  # demo value; any positive epoch count works
    else:
        print("CUDA is not available")