# L2 regularization via the optimizer: Adam's weight_decay folds an
# L2 penalty on every parameter into the gradient update, so no extra
# loss term is needed for it.
# NOTE(review): Adam's weight_decay is L2-in-gradient (coupled), not
# decoupled weight decay; use torch.optim.AdamW if decoupled decay is
# intended.
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4, weight_decay=1e-5)
# One optimization step: task loss (MSE) plus a manual L1 penalty on the
# parameters, scaled by lambda_l1.
optimizer.zero_grad()  # was missing: without this, gradients accumulate across steps
loss = mse(pred, target)
# L1 penalty: sum of absolute values of every parameter tensor.
# NOTE(review): this iterates net.parameters() while the optimizer was
# built from model.parameters() — confirm `net is model`, otherwise the
# penalty's gradients flow into parameters the optimizer never updates.
l1 = sum(p.abs().sum() for p in net.parameters())
loss = loss + lambda_l1 * l1
loss.backward()
optimizer.step()