A Summary of Activation / Loss Functions


From the column 機器學習隨記 (Machine Learning Notes)

Defining the activation function base class

import numpy as np
import matplotlib.pyplot as plt


class Activation(object):
    """Base class: subclasses implement forward (the function) and backward (its derivative)."""

    def __init__(self, x):
        self.x = x              # input values
        self.p = None           # forward output
        self.derivative = None  # derivative w.r.t. the input

    def forward(self):
        pass

    def backward(self):
        pass

    def __call__(self):
        # Calling an instance returns [forward output, derivative],
        # which the plotting snippets below index as [0] and [1].
        res = list()
        res.append(self.forward())
        res.append(self.backward())
        return res

Sigmoid

class Sigmoid(Activation):
    def __init__(self, x):
        super(Sigmoid, self).__init__(x)

    def forward(self):
        self.p = 1. / (1 + np.exp(np.negative(self.x)))
        return self.p

    def backward(self):
        self.derivative = self.p * (1 - self.p)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Sigmoid(x)()[0], label="sigmoid_forward")
    plt.plot(x, Sigmoid(x)()[1], label="sigmoid_backward")
    plt.legend(loc="best")
    plt.show()
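For reference, forward computes sigmoid(x) = 1 / (1 + exp(-x)) and backward returns its derivative sigmoid(x) * (1 - sigmoid(x)).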

Tanh

class Tanh(Activation):
    def __init__(self, x):
        super(Tanh, self).__init__(x)

    def forward(self):
        self.p = (np.exp(self.x) - np.exp(np.negative(self.x))) / (np.exp(self.x) + np.exp(np.negative(self.x)))
        return self.p

    def backward(self):
        self.derivative = 1 - np.power(self.p, 2)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Tanh(x)()[0], label="tanh_forward")
    plt.plot(x, Tanh(x)()[1], label="tanh_backward")
    plt.legend(loc="best")
    plt.show()
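For reference, tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x)), with derivative 1 - tanh(x)^2.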

Relu

class Relu(Activation):
    def __init__(self, x):
        super(Relu, self).__init__(x)

    def forward(self):
        self.p = np.maximum(0, self.x)
        return self.p

    def backward(self):
        self.derivative = np.sign(self.p)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Relu(x)()[0], label="relu_forward")
    plt.plot(x, Relu(x)()[1], label="relu_backward")
    plt.legend(loc="best")
    plt.show()
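For reference, relu(x) = max(0, x); its derivative is 1 for x > 0 and 0 otherwise, which is exactly what np.sign of the non-negative output yields.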

Identity

class Identity(Activation):
    def __init__(self, x):
        super(Identity, self).__init__(x)

    def forward(self):
        self.p = self.x
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Identity(x)()[0], label="identity_forward")
    plt.plot(x, Identity(x)()[1], label="identity_backward")
    plt.legend(loc="best")
    plt.show()
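For reference, the identity simply returns x, so its derivative is 1 everywhere.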

LeakyRelu

class LeakyRelu(Activation):
    def __init__(self, x, alpha=0.1):
        super(LeakyRelu, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = np.maximum(self.alpha * self.x, self.x)
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        self.derivative[self.p < 0] = self.alpha
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, LeakyRelu(x)()[0], label="leakyRelu_forward")
    plt.plot(x, LeakyRelu(x)()[1], label="leakyRelu_backward")
    plt.legend(loc="best")
    plt.show()
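For reference, leaky_relu(x) = max(alpha * x, x) with 0 < alpha < 1; the derivative is 1 for x > 0 and alpha otherwise.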

Relu6

class Relu6(Activation):
    def __init__(self, x, alpha=6):
        super(Relu6, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = np.minimum(np.maximum(0, self.x), self.alpha)
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 0)
        # The derivative is 1 only where the input is not saturated, i.e. 0 < x <= alpha.
        self.derivative[np.logical_and(self.x > 0, self.x <= self.alpha)] = 1
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Relu6(x)()[0], label="Relu6_forward")
    plt.plot(x, Relu6(x)()[1], label="Relu6_backward")
    plt.legend(loc="best")
    plt.show()
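For reference, relu6(x) = min(max(0, x), 6); the derivative is 1 on the interval (0, 6) and 0 in the two saturated regions.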

ELU

class ELU(Activation):
    def __init__(self, x, alpha=0.9):
        super(ELU, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = np.maximum(0, self.x) + np.minimum(0, self.alpha * (np.exp(self.x) - 1))
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        # For x < 0 the derivative is alpha * exp(x), which equals elu(x) + alpha.
        self.derivative[self.p < 0] = self.p[self.p < 0] + self.alpha
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, ELU(x)()[0], label="elu_forward")
    plt.plot(x, ELU(x)()[1], label="elu_backward")
    plt.legend(loc="best")
    plt.show()
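For reference, elu(x) = x for x > 0 and alpha * (exp(x) - 1) for x <= 0; the derivative is 1 for x > 0 and alpha * exp(x) (equivalently elu(x) + alpha) for x <= 0.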

PRelu

class PRelu(Activation):
    def __init__(self, x, alpha=0.7):
        super(PRelu, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = np.maximum(0, self.x) + self.alpha * np.minimum(0, self.x)
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        self.derivative[self.p < 0] = self.alpha
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, PRelu(x)()[0], label="PRelu_forward")
    plt.plot(x, PRelu(x)()[1], label="PRelu_backward")
    plt.legend(loc="best")
    plt.show()
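For reference, prelu(x) = max(0, x) + alpha * min(0, x), the same form as LeakyReLU; the derivative is 1 for x > 0 and alpha otherwise. In a network alpha would be a learnable parameter, here it is simply a fixed constant for plotting.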

Threshold

class Threshold(Activation):
    def __init__(self, x, alpha=1, beta=0.1):
        super(Threshold, self).__init__(x)
        self.alpha = alpha
        self.beta = beta

    def forward(self):
        self.p = np.copy(self.x)
        self.p[self.x < self.alpha] = self.beta
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        # The derivative is 0 wherever the input was replaced by the constant beta.
        self.derivative[self.x < self.alpha] = 0
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Threshold(x)()[0], label="threshold_forward")
    plt.plot(x, Threshold(x)()[1], label="threshold_backward")
    plt.legend(loc="best")
    plt.show()
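For reference, this keeps x where x >= alpha and replaces it with the constant beta elsewhere; the derivative is 1 where the input passes through and 0 where it is replaced.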

HardTanh

class HardTanh(Activation):
    def __init__(self, x, alpha=-1, beta=1):
        super(HardTanh, self).__init__(x)
        self.alpha = alpha
        self.beta = beta

    def forward(self):
        self.p = np.copy(self.x)
        self.p[self.x > self.beta] = self.beta
        self.p[self.x < self.alpha] = self.alpha
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 1)
        self.derivative[np.logical_or(self.p >= self.beta, self.p <= self.alpha)] = 0
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, HardTanh(x)()[0], label="hardTanh_forward")
    plt.plot(x, HardTanh(x)()[1], label="hardTanh_backward")
    plt.legend(loc="best")
    plt.show()
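For reference, hardtanh clamps x to the interval [alpha, beta] (here [-1, 1]); the derivative is 1 inside the interval and 0 in the saturated regions.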

LogSigmoid

class LogSigmoid(Activation):
    def __init__(self, x):
        super(LogSigmoid, self).__init__(x)

    def forward(self):
        self.p = np.log(1. / (1 + np.exp(-self.x)))
        return self.p

    def backward(self):
        self.derivative = 1 - 1. / (1 + np.exp(-self.x))
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, LogSigmoid(x)()[0], label="logSigmoid_forward")
    plt.plot(x, LogSigmoid(x)()[1], label="logSigmoid_backward")
    plt.legend(loc="best")
    plt.show()
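For reference, logsigmoid(x) = log(1 / (1 + exp(-x))); its derivative is 1 - sigmoid(x).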

SoftPlus

class SoftPlus(Activation):
    def __init__(self, x, alpha=0.5):
        super(SoftPlus, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = 1. / self.alpha * np.log(1 + np.exp(self.alpha * self.x))
        return self.p

    def backward(self):
        self.derivative = 1 - 1. / (1 + np.exp(self.alpha * self.x))
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, SoftPlus(x)()[0], label="softPlus_forward")
    plt.plot(x, SoftPlus(x)()[1], label="softPlus_backward")
    plt.legend(loc="best")
    plt.show()
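For reference, softplus(x) = (1 / alpha) * log(1 + exp(alpha * x)); its derivative is sigmoid(alpha * x), written in backward as the equivalent expression 1 - 1 / (1 + exp(alpha * x)).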

SoftShrink

class SoftShrink(Activation):
    def __init__(self, x, alpha=0.5):
        super(SoftShrink, self).__init__(x)
        self.alpha = alpha

    def forward(self):
        self.p = np.full_like(self.x, 0)
        self.p[self.x > self.alpha] = self.x[self.x > self.alpha] - self.alpha
        self.p[self.x < -self.alpha] = self.x[self.x < -self.alpha] + self.alpha
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 0)
        self.derivative[np.logical_or(self.p > 0, self.p < 0)] = 1
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, SoftShrink(x)()[0], label="softShrink_forward")
    plt.plot(x, SoftShrink(x)()[1], label="softShrink_backward")
    plt.legend(loc="best")
    plt.show()
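For reference, softshrink(x) = x - alpha for x > alpha, x + alpha for x < -alpha, and 0 in between; the derivative is 1 outside [-alpha, alpha] and 0 inside.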

SoftSign

class SoftSign(Activation):
    def __init__(self, x):
        super(SoftSign, self).__init__(x)

    def forward(self):
        self.p = self.x / (1 + np.abs(self.x))
        return self.p

    def backward(self):
        self.derivative = np.full_like(self.p, 0)
        self.derivative[self.x >= 0] = np.power(1 - self.p[self.x >= 0], 2)
        self.derivative[self.x < 0] = np.power(1 + self.p[self.x < 0], 2)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, SoftSign(x)()[0], label="softSign_forward")
    plt.plot(x, SoftSign(x)()[1], label="softSign_backward")
    plt.legend(loc="best")
    plt.show()
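For reference, softsign(x) = x / (1 + |x|); its derivative is 1 / (1 + |x|)^2, which the two branches above compute separately for x >= 0 and x < 0.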

TanhShrink

class TanhShrink(Activation):
    def __init__(self, x):
        super(TanhShrink, self).__init__(x)

    def forward(self):
        self.p = self.x - np.tanh(self.x)
        return self.p

    def backward(self):
        # d/dx (x - tanh(x)) = 1 - (1 - tanh(x)^2) = tanh(x)^2
        self.derivative = np.power(np.tanh(self.x), 2)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, TanhShrink(x)()[0], label="tanhShrink_forward")
    plt.plot(x, TanhShrink(x)()[1], label="tanhShrink_backward")
    plt.legend(loc="best")
    plt.show()
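For reference, tanhshrink(x) = x - tanh(x); its derivative is 1 - (1 - tanh(x)^2) = tanh(x)^2.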

Abs

class Abs(Activation):
    def __init__(self, x):
        super(Abs, self).__init__(x)

    def forward(self):
        self.p = np.abs(self.x)
        return self.p

    def backward(self):
        self.derivative = np.sign(self.x)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Abs(x)()[0], label="Abs_forward")
    plt.plot(x, Abs(x)()[1], label="Abs_backward")
    plt.legend(loc="best")
    plt.show()
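For reference, the derivative of |x| is sign(x) (with the value 0 at x = 0, as np.sign returns).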

Swish

class Swish(Activation):
    def __init__(self, x):
        super(Swish, self).__init__(x)

    def forward(self):
        self.p = self.x / (1 + np.exp(-self.x))
        return self.p

    def backward(self):
        self.derivative = (1 + np.exp(-self.x) + self.x * np.exp(-self.x)) / np.power(1 + np.exp(-self.x), 2)
        return self.derivative


if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    plt.plot(x, Swish(x)()[0], label="Swish_forward")
    plt.plot(x, Swish(x)()[1], label="Swish_backward")
    plt.legend(loc="best")
    plt.show()
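For reference, swish(x) = x * sigmoid(x); its derivative is sigmoid(x) + x * sigmoid(x) * (1 - sigmoid(x)), which matches the expression returned by backward.

Since every class shares the same Activation interface, a minimal sketch like the one below (not part of the original post, and assuming all of the classes above are defined in the same module) can plot all forward curves on one figure for comparison:

if __name__ == "__main__":
    x = np.linspace(-10, 10, 500)
    # Plot every forward pass on a single grid for a quick visual comparison.
    for act in (Sigmoid, Tanh, Relu, Identity, LeakyRelu, Relu6, ELU, PRelu,
                Threshold, HardTanh, LogSigmoid, SoftPlus, SoftShrink,
                SoftSign, TanhShrink, Abs, Swish):
        plt.plot(x, act(x)()[0], label=act.__name__)
    plt.legend(loc="best", ncol=2)
    plt.show()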

