This article is a study memo on *Deep Learning from Scratch*.
- A perceptron is an algorithm that receives multiple signals as inputs and outputs a single signal. Given an input and fixed parameters — a "bias" and "weights" — it outputs a fixed value. The bias is a parameter that controls how easily the neuron fires (activates); each weight is a parameter that controls the importance of the corresponding input.
- The AND, NAND, and OR gates of a logic circuit can each be expressed by a (single-layer) perceptron.
- An XOR gate can be built by combining AND, NAND, and OR gates. In other words, what cannot be expressed with a single layer becomes expressible with a multi-layer perceptron.
\begin{eqnarray}
y=\left\{ \begin{array}{ll}
0 & (b + w_1 x_1 + w_2 x_2 \leqq 0) \\
1 & (b + w_1 x_1 + w_2 x_2 \gt 0) \\
\end{array} \right.
\end{eqnarray}
The full implementation (`perceptron.py`) follows:
import numpy as np
# x1,x2:Input w1,w2:Weight b:bias
def perceptron(x1, x2, w1, w2, b):
    """Two-input perceptron.

    Fires (returns 1) when w1*x1 + w2*x2 + b > 0, otherwise returns 0.
    x1, x2 are the inputs, w1, w2 their weights, and b the bias.
    """
    inputs = np.array([x1, x2])
    weights = np.array([w1, w2])
    activation = np.dot(weights, inputs) + b
    return 1 if activation > 0 else 0
def AND(x1, x2):
    """AND gate: fires only when both inputs are 1."""
    return perceptron(x1, x2, w1=0.5, w2=0.5, b=-0.7)
def NAND(x1, x2):
    """NAND gate: the negation of AND (weights and bias sign-flipped)."""
    return perceptron(x1, x2, w1=-0.5, w2=-0.5, b=0.7)
def OR(x1, x2):
    """OR gate: fires when at least one input is 1."""
    return perceptron(x1, x2, w1=0.5, w2=0.5, b=0.0)
def XOR(x1, x2):
    """XOR gate as a two-layer perceptron.

    A single-layer perceptron cannot represent XOR (it is not linearly
    separable), so it is composed from NAND and OR feeding into AND.
    """
    return AND(NAND(x1, x2), OR(x1, x2))
# Verify each gate against its full truth table.
gates = [("AND", AND), ("NAND", NAND), ("OR", OR), ("XOR", XOR)]
truth_table = [(0, 0), (0, 1), (1, 0), (1, 1)]
for label, gate in gates:
    print(label)
    for a, b in truth_table:
        print(gate(a, b))
Recommended Posts