[PyTorch] Softmax and Cross Entropy
import torch
import torch.nn as nn
import numpy as np
# softmax: exponentiate each score and normalize so the outputs sum to 1
def softmax(x):
    return np.exp(x) / np.sum(np.exp(x), axis=0)
x = np.array([2.0, 1.0, 0.1])
outputs = softmax(x)
outputs
array([0.65900114, 0.24243297, 0.09856589])
# the same computation with PyTorch's built-in softmax (dim=0: normalize along that axis)
x = torch.tensor([2.0, 1.0, 0.1])
outputs = torch.softmax(x, dim=0)
outputs
tensor([0.6590, 0.2424, 0.0986])
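One refinement worth knowing: np.exp overflows for large scores, so softmax is usually computed after subtracting the maximum value, which cancels in the ratio and leaves the result unchanged. A minimal sketch of that numerically stable variant (the helper name stable_softmax is my own):

def stable_softmax(x):
    # subtracting the max leaves the result unchanged but keeps np.exp from overflowing
    exps = np.exp(x - np.max(x))
    return exps / np.sum(exps)

stable_softmax(np.array([2.0, 1.0, 0.1]))       # same values as above
stable_softmax(np.array([1000.0, 999.0, 0.1]))  # no overflow, unlike the naive version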
# cross entropy: actual must be one-hot encoded, predicted must be probabilities
def cross_entropy(actual, predicted):
    loss = -np.sum(actual * np.log(predicted))
    return loss
# one-hot label for a single sample: class 0 is the true class
y = np.array([1, 0, 0])
y_pred_good = np.array([0.7, 0.2, 0.1])
y_pred_bad = np.array([0.1, 0.3, 0.6])
l1 = cross_entropy(y, y_pred_good)
l2 = cross_entropy(y, y_pred_bad)
print(f"loss 1 numpy : {l1:.4f}")
print(f"loss 2 numpy : {l2:.4f}")
loss 1 numpy : 0.3567
loss 2 numpy : 2.3026
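With a one-hot label the sum collapses to a single term, so the loss is just the negative log of the probability assigned to the true class: 0.3567 = -log(0.7) for the good prediction and 2.3026 = -log(0.1) for the bad one. A quick sanity check:

print(-np.log(0.7))   # 0.3567
print(-np.log(0.1))   # 2.3026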
# nn.CrossEntropyLoss applies LogSoftmax + NLLLoss internally,
# so it expects raw logits (no softmax) and class indices (not one-hot labels)
loss = nn.CrossEntropyLoss()
y = torch.tensor([2, 0, 1])
# predictions have shape (n_samples, n_classes) = (3, 3)
y_pred_good = torch.tensor([[0.1, 1.0, 2.1], [2.0, 1.0, 0.1], [0.1, 3.0, 0.1]])
y_pred_bad = torch.tensor([[2.1, 1.0, 0.1], [0.1, 1.0, 2.1], [0.1, 5.0, 0.1]])
l1 = loss(y_pred_good, y)
l2 = loss(y_pred_bad, y)
print(f"loss 1 torch : {l1:.4f}")
print(f"loss 2 torch : {l2:.4f}")
loss 1 torch : 0.3018
loss 2 torch : 1.5943
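Since nn.CrossEntropyLoss combines LogSoftmax and NLLLoss, the same number can be reproduced step by step. A minimal sketch verifying that equivalence (reusing y and y_pred_good from above):

import torch.nn.functional as F

log_probs = F.log_softmax(y_pred_good, dim=1)  # log of the softmax probabilities
manual = F.nll_loss(log_probs, y)              # mean negative log likelihood of the true classes
print(manual)                                  # tensor(0.3018), same as loss 1 above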
# torch.max along dim=1 returns (values, indices); the indices are the predicted classes
_, pred1 = torch.max(y_pred_good, 1)
_, pred2 = torch.max(y_pred_bad, 1)
print(pred1, pred2)
tensor([2, 0, 1]) tensor([0, 2, 1])
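The same rule carries over to real models: the last layer returns raw logits and no softmax is applied in forward, because nn.CrossEntropyLoss takes care of it. A minimal sketch of such a multi-class classifier (layer sizes and the dummy batch are arbitrary, chosen only for illustration):

class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super().__init__()
        self.linear1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.linear2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        out = self.relu(self.linear1(x))
        # no softmax here: nn.CrossEntropyLoss expects raw logits
        return self.linear2(out)

model = NeuralNet(input_size=28 * 28, hidden_size=5, num_classes=3)
criterion = nn.CrossEntropyLoss()
logits = model(torch.randn(4, 28 * 28))                     # batch of 4 samples
loss_value = criterion(logits, torch.tensor([0, 2, 1, 0]))  # targets are class indices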