I wrote a basic implementation of a genetic algorithm and a neural network in Python. I am not an expert in genetic algorithms, so please treat this as an amateur implementation.
A neural network is a computational model that imitates the networks of nerve cells in living organisms. [Link to Wikipedia](https://en.wikipedia.org/wiki/Artificial_neural_network)
The target is the XOR logical function: it outputs 0 if the two inputs are the same and 1 if they are different.
Input 1 | Input 2 | output |
---|---|---|
0 | 0 | 0 |
0 | 1 | 1 |
1 | 0 | 1 |
1 | 1 | 0 |
What I know about genetic algorithms:

- Select individuals with good characteristics.
- Create new individuals that inherit characteristics from the good ones (crossover).
- Repeat generational turnover so that better individuals emerge, eventually finding an answer.

There seem to be many variants, but as an amateur I chose the following procedure:

- Sort the population by score.
- Replace the bottom 10% with newly generated random individuals.
- Replace a further 50% with children produced by crossing top-50% individuals with other individuals (described later).
- Repeat this process until an individual that gives the most appropriate answer is found.
I understand that it is to take out the characteristics from two individuals and create a new one. It imitates the inheritance of a parent's gene by a child in the real world of living things. The actual inheritance is an image of digitally inheriting the gene of either parent bit by bit, This time, we arranged the synaptic weights (float type) of the neural network. By taking the internal division of each numerical value of the two sequences indicating the synaptic weights of the two individuals I imitated inheriting the characteristics of two individuals. Example
w1 | w2 | w3 | w4 | |
---|---|---|---|---|
Synaptic weight of individual 1 | 1 | 1 | 3 | 4 |
Synaptic weight of individual 2 | 0 | 0 | 1 | 1 |
Inheritance ratio from individual 1 | 1 | 0 | 0.5 | 0.1 |
Inheritance ratio from individual 2 | 0 | 1 | 0.5 | 0.9 |
New individual synaptic weight | 1 | 0 | 2 | 1.3 |
However, since interpolation alone risks converging to a local optimum, we also allow the child's weight to fall outside the range between the two parents' weights (external division, i.e. extrapolation).
sample.py
from numpy.random import *
import numpy as np
import math
import sys
class Body:
    """One individual in the population.

    Each individual is a small feed-forward neural network
    (input -> hidden -> output, sigmoid activations) whose flat array of
    synapse weights is the genome manipulated by the genetic algorithm.
    """

    # Number of input-layer neurons
    nn_input = 2
    # Number of hidden-layer neurons
    nn_hidden = 3
    # Number of output-layer neurons
    nn_output = 1
    # Half-width of the uniform range for initial synapse weights
    w_range = 10
    # Total number of synapse weights consumed by nn():
    # each hidden neuron takes nn_input weights + 1 bias, and the output
    # neuron takes nn_hidden weights + 1 bias.
    # (The original formula (nn_input+1)*(nn_hidden+1)+(nn_hidden+1)*nn_output
    # allocated 16 weights while nn() only reads 13; the surplus was unused.)
    w_length = (nn_input + 1) * nn_hidden + (nn_hidden + 1) * nn_output
    # Smallest / largest x for which math.e ** -x neither over- nor underflows
    sigmoidinputmin = math.log(sys.float_info.min)
    sigmoidinputmax = math.log(sys.float_info.max)
    # Crossover spread factor:
    #   0  ... child weight is the average of the two parents
    #   1  ... child weight lies anywhere between the two parents
    #   >1 ... external division (values outside the parents) is possible
    crossRatio = 2

    def __init__(self):
        self.w = None       # synapse weights (the genome)
        self.score = -1     # evaluation result: sum of squared errors (lower is better)
        self.result = None  # network outputs for each training sample

    @staticmethod
    def createNewBodyAtInit():
        """Create a brand-new individual with uniform random weights and evaluate it.

        NOTE(review): calc() reads the module-level globals in_sample /
        out_sample, so main() must have set them before this is called.
        """
        body = Body()
        body.w = np.random.uniform(-Body.w_range, Body.w_range, Body.w_length)
        body.calc()
        return body

    def showResult(self):
        """Print this individual's score and its raw outputs as "score:result"."""
        aaa = str(self.score) + ":" + str(self.result)
        print(aaa)

    def cross(self, otherBody):
        """Breed a child from self and otherBody.

        Each child weight is an internal/external division of the two
        parents' corresponding weights: w1*a + w2*(1-a), where
        a ~ Uniform(0, crossRatio). With crossRatio > 1, a may exceed 1,
        allowing extrapolation beyond the parents (helps escape local optima).

        Argument: the individual to cross with.
        Return value: the newly generated (already evaluated) individual.
        """
        w1 = self.w
        w2 = otherBody.w
        newW = []
        for i in range(len(w1)):
            a = np.random.random() * Body.crossRatio
            newW.append(w1[i] * a + w2[i] * (1 - a))
        newBody = Body()
        newBody.w = newW
        newBody.calc()
        return newBody

    def calc(self):
        """Evaluate this individual on the global training set.

        Stores the per-sample outputs in self.result and the sum of
        squared errors against out_sample in self.score.
        """
        result = [self.nn(sample) for sample in in_sample]
        diff = out_sample - result  # numpy broadcasts the list to an array
        self.result = result
        self.score = sum(diff * diff)

    def sigmoid(self, x):
        """Numerically safe logistic function 1 / (1 + e**-x).

        For extreme x, math.e ** -x would overflow/underflow, so we clamp
        to the mathematical limits: 0 as x -> -inf, 1 as x -> +inf.
        (BUG FIX: the original returned the two limits swapped, making the
        activation discontinuous and inverted at the extremes.)
        """
        if x < Body.sigmoidinputmin:
            return 0
        if Body.sigmoidinputmax < x:
            return 1
        return 1 / (1 + math.e ** -x)

    def nn(self, inp):
        """Forward pass of the network for one input vector.

        Weights are consumed sequentially from self.w: for each hidden
        neuron, nn_input weights then a bias; finally nn_hidden weights
        and a bias for the single output neuron.

        Argument: inp, a sequence of nn_input numbers.
        Return value: the scalar network output in (0, 1).
        """
        w = self.w
        w_num = 0
        hidden_o = []
        for i in range(Body.nn_hidden):
            o = 0
            for j in range(Body.nn_input):
                o += w[w_num] * inp[j]
                w_num += 1
            o += w[w_num]  # hidden-neuron bias
            w_num += 1
            hidden_o.append(self.sigmoid(o))
        o = 0
        for i in range(Body.nn_hidden):
            o += w[w_num] * hidden_o[i]
            w_num += 1
        o += w[w_num]  # output-neuron bias
        w_num += 1
        return self.sigmoid(o)
def main():
    """Evolve a population of neural networks until they learn XOR."""
    # Training data: the four 2-bit input combinations and their XOR targets.
    # These are module-level globals because Body.calc() reads them.
    global in_sample
    global out_sample
    in_sample = np.array([[0, 0],
                          [0, 1],
                          [1, 0],
                          [1, 1]])
    out_sample = np.array([0, 1, 1, 0])

    population_size = 100   # number of individuals per generation
    generations = 1000      # number of generational turnovers

    # Build the initial population of random individuals.
    bodies = [Body.createNewBodyAtInit() for _ in range(population_size)]

    # Evolve, printing the best individual of each generation.
    for _ in range(generations):
        print("***")
        bodies = generate(bodies)
        bodies[0].showResult()
def generate(bodies):
    """Perform one generational turnover of the population (in place).

    Steps:
      1. Sort by score, best (lowest error) first.
      2. Replace the worst ~10% with brand-new random individuals (mutation).
      3. Replace the next ~50% with children bred by crossing the top-ranked
         individuals, in order, with randomly chosen mates.

    Argument: population before the generation change.
    Return value: population after the generation change.

    (Removed the original's dead statement `a2 += -1`: a2 was
    unconditionally reassigned at the top of every loop iteration.)
    """
    bodies = sort(bodies)
    length = len(bodies)
    # Index of the next slot to overwrite, walking backward from the worst.
    newgennum = length - 1

    # Fraction of the population replaced by random newcomers.
    mutationRatio = 0.1
    stop = newgennum - int(length * mutationRatio)
    while stop < newgennum:
        bodies[newgennum] = Body.createNewBodyAtInit()
        newgennum += -1

    # Fraction of the population replaced by crossover children.
    crossRatio = 0.5
    a1 = 0  # next top-ranked parent
    stop = newgennum - int(length * crossRatio)
    while stop < newgennum:
        a2 = int(length * np.random.random())  # random mate index
        bodies[newgennum] = bodies[a1].cross(bodies[a2])
        a1 += 1
        newgennum += -1
    return bodies
def sort(bodies):
    """Sort the population by score, best (lowest error) first.

    BUG FIX: the original built a dict keyed by score, so individuals with
    equal scores overwrote each other and the population silently shrank
    across generations. sorted() with a key keeps every individual and is
    stable for ties.

    Argument: population before sorting.
    Return value: new sorted list (the input list is not modified).
    """
    return sorted(bodies, key=lambda body: body.score)
# Script entry point: run the GA training loop when executed directly.
if __name__ =="__main__":
    main()
Recommended Posts