3. PyTorch Activation Functions
2018-06-29
FantDing
Common activation functions
- relu
- sigmoid
- tanh
- softplus
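For reference, these four functions have the following standard definitions (well-known formulas, not spelled out in the original post):

$$\mathrm{relu}(x) = \max(0, x)$$
$$\mathrm{sigmoid}(x) = \frac{1}{1 + e^{-x}}$$
$$\tanh(x) = \frac{e^x - e^{-x}}{e^x + e^{-x}}$$
$$\mathrm{softplus}(x) = \ln(1 + e^x)$$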
Example code
import torch
from torch.nn import functional as F
import matplotlib.pyplot as plt

if __name__ == "__main__":
    # 100 evenly spaced input points on [-5, 5]
    # (Variable from torch.autograd is no longer needed: since PyTorch 0.4
    # it was merged into Tensor, so we operate on the tensor directly)
    x = torch.linspace(-5, 5, 100)
    x_np = x.numpy()  # extract the ndarray for plotting

    # apply each activation and convert the results to ndarrays
    y_relu = F.relu(x).numpy()
    y_sigmoid = torch.sigmoid(x).numpy()  # F.sigmoid is deprecated
    y_tanh = torch.tanh(x).numpy()        # F.tanh is deprecated
    y_softplus = F.softplus(x).numpy()

    # draw the four curves in a 2x2 grid of subplots
    plt.subplot(2, 2, 1)
    plt.plot(x_np, y_relu, label="relu")
    plt.legend()
    plt.subplot(2, 2, 2)
    plt.plot(x_np, y_sigmoid, label="sigmoid")
    plt.legend()
    plt.subplot(2, 2, 3)
    plt.plot(x_np, y_tanh, label="tanh")
    plt.legend()
    plt.subplot(2, 2, 4)
    plt.plot(x_np, y_softplus, label="softplus")
    plt.legend()
    plt.show()
Figure: the four activation function curves (relu, sigmoid, tanh, softplus) plotted over [-5, 5].
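Besides the functional interface used above, each of these activations also exists in module form in torch.nn (nn.ReLU, nn.Sigmoid, nn.Tanh, nn.Softplus), which is convenient when composing layers with nn.Sequential. A minimal sketch (the layer sizes are arbitrary, chosen just for illustration):

import torch
from torch import nn

# the same activations as modules, chained inside nn.Sequential
net = nn.Sequential(
    nn.Linear(10, 10),
    nn.ReLU(),      # module form of F.relu
    nn.Linear(10, 1),
    nn.Sigmoid(),   # module form of torch.sigmoid
)

out = net(torch.randn(4, 10))  # batch of 4 samples, 10 features each
print(out.shape)               # torch.Size([4, 1])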