This article deals with the basics of graph theory.
I wrote an article in the past, Basics of Graph Theory (https://qiita.com/maskot1977/items/e1819b7a1053eb9f7d61), which received "likes" from many people. This time I would like to make the same content easier to understand with animations.
A simple animation can be drawn as follows.
# -*- coding: UTF-8 -*-
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML

fig = plt.figure()
ims = []  # A video is a list that stores a set of still images
for i in range(360):
    rad = math.radians(i)
    x1, y1 = math.cos(rad), math.sin(rad)
    x2, y2 = math.cos(rad * 2), math.sin(rad * 2)
    im = plt.scatter(x1, y1)  # Still image part 1 (not a list)
    im2 = plt.scatter(x2, y2)  # Still image part 2 (not a list)
    im3 = plt.plot([x1, x2], [y1, y2])  # Still image part 3 (a list)
    image = [im, im2] + im3  # One list represents one still image
    ims.append(image)  # Add one still image

# Convert to video
ani = animation.ArtistAnimation(fig, ims, interval=10, repeat_delay=1000)
ani.save("Animation1.gif", writer='pillow')  # Save as a gif file
HTML(ani.to_jshtml())  # Display in HTML
Use the same data as in Basics of Graph Theory. It contains the coordinates (latitude / longitude) of the cities where the prefectural offices are located. Each such city is called a "vertex", a straight line connecting two cities is called an "edge", and cities connected by an edge are said to be "adjacent".
import urllib.request
url = 'https://raw.githubusercontent.com/maskot1977/ipython_notebook/master/toydata/location.txt'
urllib.request.urlretrieve(url, 'location.txt') #Download data
('location.txt', <http.client.HTTPMessage at 0x11c9c2320>)
Let's read it using pandas, which was not used in Basics of Graph Theory.
import pandas as pd
japan = pd.read_csv('location.txt')
japan
| | Town | Longitude | Latitude |
|---|---|---|---|
| 0 | Sapporo | 43.06417 | 141.34694 |
| 1 | Aomori | 40.82444 | 140.74000 |
| 2 | Morioka | 39.70361 | 141.15250 |
| 3 | Sendai | 38.26889 | 140.87194 |
| 4 | Akita | 39.71861 | 140.10250 |
| 5 | Yamagata | 38.24056 | 140.36333 |
| ... | ... | ... | ... |
| 45 | Kagoshima | 31.56028 | 130.55806 |
| 46 | Naha | 26.21250 | 127.68111 |
Let's plot it.
%matplotlib inline
import matplotlib.pyplot as plt

plt.figure(figsize=(10, 8))
plt.scatter(japan['Latitude'], japan['Longitude'])
for city, x, y in zip(japan['Town'], japan['Latitude'], japan['Longitude']):
    plt.text(x, y, city, alpha=0.5, size=12)
plt.grid()
Using the above data, let's create animations of depth-first search, breadth-first search, and best-first search, which are basic algorithms of graph theory.
Although it was not used in Basics of Graph Theory, scipy lets us compute the distance matrix (the distances between all pairs of vertices) as follows.
import numpy as np
from scipy.spatial import distance
mat = japan[['Latitude', 'Longitude']].values
dist_mat = distance.cdist(mat, mat, metric='euclidean') #Euclidean distance
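As a quick sanity check (these two lines are illustrative and not part of the original article), we can look at the shape of the matrix and confirm that the diagonal is zero:

print(dist_mat.shape)    # (47, 47): one row and one column per prefectural capital
print(dist_mat[12][12])  # 0.0: the distance from a city to itself is zero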
In order to draw a straight line between two points when drawing the graph, I define my own function that converts a set of edges (pairs of vertex indices) into coordinates.
def get_edges(routes):
    edges = []
    for route in routes:
        if len(route) == 2:
            town1, y1, x1 = [value for value in japan.values][route[0]]
            town2, y2, x2 = [value for value in japan.values][route[1]]
            edges.append([[x1, x2], [y1, y2]])
    return edges
The usage example looks like this.
get_edges([[1, 2], [3, 4], [5, 6]])
[[[140.74, 141.1525], [40.82444, 39.70361]],
[[140.87194, 140.1025], [38.26889, 39.71861]],
[[140.36333, 140.46778], [38.240559999999995, 37.75]]]
Now, as the first graph search algorithm, let's perform a depth-first search.
In graph search, how you construct the "adjacency list" (which vertices can be reached from which vertex) is very important. Here I will go with the policy of "connecting an edge between any two vertices whose distance is at most a certain threshold". Using numpy and the distance matrix, which were not used in Basics of Graph Theory, it can be computed as follows.
def neighbor(town, dist_mat=dist_mat, threshold=1):  # Function to get an adjacency list, version 1
    return [x[0] for x in enumerate(np.where(dist_mat[town] <= threshold, True, False)) if x[1]]
The usage example looks like this.
neighbor(12) #Which cities are within 1 distance from Tokyo?
[7, 8, 9, 10, 11, 12, 13]
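If you want the city names rather than the row indices, a small extra line (illustrative, not in the original) can look them up in the DataFrame:

japan['Town'].iloc[neighbor(12)].tolist()  # city names of Tokyo's neighbours (Tokyo itself is included)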
In Basics of Graph Theory, a while loop solved the whole search at once, but here I want to show the progress step by step, so I defined a function that advances the search by one step as follows.
def traverse(i=0):  # One step of depth-first search
    if len(stack) != 0:  # If the stack is not empty
        next_town, current_town = stack.pop()  # Get the next route (next city and current location)
        current_direction = [[next_town, current_town]]  # For drawing
        if next_town not in visited_towns:  # If the next city has not been visited yet
            used_routes.append([next_town, current_town])  # Register the route
            visited_towns.append(next_town)  # Mark it as visited
            for nei in neighbor(next_town):  # Take the cities adjacent to the visited city one by one
                if nei not in visited_towns:  # If it has not been visited
                    stack.append([nei, next_town])  # Put the route on the stack
        return current_direction  # For drawing
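Before animating, here is a minimal dry run (illustrative only, not part of the original) that initializes the state and calls traverse a few times so you can watch the search advance one route at a time:

stack = [[12, 12]]   # the starting point is Tokyo
visited_towns = []   # visited cities
used_routes = []     # routes actually used
for _ in range(5):
    print(traverse())  # each call pops one route from the stack and returns it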
Now let's animate it.
# -*- coding: UTF-8 -*-
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML

fig = plt.figure(figsize=(10, 8))
stack = [[12, 12]]  # The starting point is Tokyo
visited_towns = []  # A list that stores visited cities
used_routes = []  # A list that stores the routes actually used
current_direction = []  # The route currently being checked
ims = []  # A list for storing still images
i = 0
while len(stack) > 0:  # While the stack is not empty
    if i != 0:  # The first frame shows the initial state, so the search does not advance
        current_direction = traverse()  # Advance the search by one step
    image = []  # A list that stores the parts drawn in one still image
    for edge in get_edges(stack):  # The red lines are the "candidate" routes on the stack
        image += plt.plot(edge[0], edge[1], 'r', alpha=0.5)
    for edge in get_edges(current_direction):  # The blue line is the route currently being checked
        image += plt.plot(edge[0], edge[1], 'b', lw=5, alpha=0.5)
    for edge in get_edges(used_routes):  # The black lines are the routes actually used
        image += plt.plot(edge[0], edge[1], 'k', lw=2)
    for city, x, y in zip(japan['Town'], japan['Latitude'], japan['Longitude']):
        image.append(plt.text(x, y, city, alpha=0.5, size=12))  # Show the city names
    if len(current_direction) > 0:
        current_town = current_direction[0][0]
        image += plt.plot(japan.iloc[current_town, :]['Latitude'],
                          japan.iloc[current_town, :]['Longitude'],
                          markersize=20, marker='o')  # The circle marks the city currently being checked
    ims.append(image)  # Store one still image
    i += 1

# Convert to video
ani = animation.ArtistAnimation(fig, ims, interval=500, repeat_delay=1000)
ani.save("Animation2.gif", writer='pillow')  # Save as a gif file
HTML(ani.to_jshtml())  # Display in HTML
Depth-first search animation 1
In the depth-first search above, the last city put on the stack becomes the next destination candidate. When you move to a city, the cities adjacent to it are put on the stack. The order in which they are put on the stack changes the behavior, even though it is still the same depth-first search. Let's modify it so that nearby cities are preferentially chosen as the next destination.
import numpy as np

def neighbor(town, dist_mat=dist_mat, threshold=1):  # Function to get an adjacency list, version 2
    return np.argsort(dist_mat[town])[1:np.where(dist_mat[town] <= threshold, 1, 0).sum()][::-1]
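To make the argsort trick easier to follow, here is a tiny self-contained illustration on a made-up distance row (not from the original article):

row = np.array([0.0, 0.3, 1.2, 0.7])    # distances from one vertex to every vertex (itself included)
count = np.where(row <= 1, 1, 0).sum()  # 3: number of vertices within the threshold, itself included
print(np.argsort(row)[1:count])         # [1 3]: neighbours sorted nearest first, itself excluded
print(np.argsort(row)[1:count][::-1])   # [3 1]: reversed, so the nearest city is appended last and popped first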
The code below is essentially the same as "Depth-first search animation 1" (apart from the name of the saved video file and a few lines for drawing the final frame). Let's see how changing the function neighbor changes the behavior.
# -*- coding: UTF-8 -*-
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML

fig = plt.figure(figsize=(10, 8))
stack = [[12, 12]]  # The starting point is Tokyo
visited_towns = []  # A list that stores visited cities
used_routes = []  # A list that stores the routes actually used
current_direction = []  # The route currently being checked
ims = []  # A list for storing still images
i = 0
while len(stack) > 0:  # While the stack is not empty
    if i != 0:  # The first frame shows the initial state, so the search does not advance
        if stack[0] != []:  # The search does not advance on the final frame either
            current_direction = traverse()  # Advance the search by one step
    image = []  # A list that stores the parts drawn in one still image
    for edge in get_edges(stack):  # The red lines are the "candidate" routes on the stack
        image += plt.plot(edge[0], edge[1], 'r', alpha=0.5)
    for edge in get_edges(current_direction):  # The blue line is the route currently being checked
        image += plt.plot(edge[0], edge[1], 'b', lw=5, alpha=0.5)
    for edge in get_edges(used_routes):  # The black lines are the routes actually used
        image += plt.plot(edge[0], edge[1], 'k', lw=2)
    for city, x, y in zip(japan['Town'], japan['Latitude'], japan['Longitude']):
        image.append(plt.text(x, y, city, alpha=0.5, size=12))  # Show the city names
    if len(current_direction) > 0:
        current_town = current_direction[0][0]
        image += plt.plot(japan.iloc[current_town, :]['Latitude'],
                          japan.iloc[current_town, :]['Longitude'],
                          markersize=20, marker='o')  # The circle marks the city currently being checked
    ims.append(image)  # Store one still image
    if len(stack) == 0:  # For the final frame
        current_direction = []
        stack.append([])
    elif stack[0] == []:  # To escape the loop after the final frame
        break
    i += 1

# Convert to video
ani = animation.ArtistAnimation(fig, ims, interval=500, repeat_delay=1000)
ani.save("Animation3.gif", writer='pillow')  # Save as a gif file
HTML(ani.to_jshtml())  # Display in HTML
Depth-first search animation 2
Next, let's do a breadth-first search. The basic algorithm is almost the same; we just turn the stack (last in, first out) into a queue (first in, first out).
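To see the difference in isolation, here is a minimal illustration (not from the original article) of the two pop operations on a plain Python list:

frontier = [1, 2, 3]
print(frontier.pop())   # 3: stack behaviour (last in, first out), used by depth-first search
print(frontier.pop(0))  # 1: queue behaviour (first in, first out), used by breadth-first search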
We rewrite the function traverse. There is only one change, marked "change point". Since the list used as a stack now acts as a queue, it would be natural to rename the variable stack as well, but doing so would increase the number of changes, so let's leave the name stack as it is.
def traverse(i=0):  # One step of breadth-first search
    if len(stack) != 0:
        next_town, current_town = stack.pop(0)  # change point
        current_direction = [[next_town, current_town]]
        if next_town not in visited_towns:
            used_routes.append([next_town, current_town])
            visited_towns.append(next_town)
            for nei in neighbor(next_town):
                if nei not in visited_towns:
                    stack.append([nei, next_town])
        return current_direction
The function to get the adjacency list is also rewritten, but it is basically the same as before. Since the stack has become a queue, we simply drop the order reversal at the end.
import numpy as np

def neighbor(town, dist_mat=dist_mat, threshold=1):  # Function to get an adjacency list, version 3
    return np.argsort(dist_mat[town])[1:np.where(dist_mat[town] <= threshold, 1, 0).sum()]  # Only the end has changed
The following code is the same as for "Depth-first search animation 2" (only the file name of the saved video differs). Let's see how changing the function traverse changes the behavior.
# -*- coding: UTF-8 -*-
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML

fig = plt.figure(figsize=(10, 8))
stack = [[12, 12]]  # The starting point is Tokyo
visited_towns = []  # A list that stores visited cities
used_routes = []  # A list that stores the routes actually used
current_direction = []  # The route currently being checked
ims = []  # A list for storing still images
i = 0
while len(stack) > 0:  # While the queue is not empty
    if i != 0:  # The first frame shows the initial state, so the search does not advance
        if stack[0] != []:  # The search does not advance on the final frame either
            current_direction = traverse()  # Advance the search by one step
    image = []  # A list that stores the parts drawn in one still image
    for edge in get_edges(stack):  # The red lines are the "candidate" routes in the queue
        image += plt.plot(edge[0], edge[1], 'r', alpha=0.5)
    for edge in get_edges(current_direction):  # The blue line is the route currently being checked
        image += plt.plot(edge[0], edge[1], 'b', lw=5, alpha=0.5)
    for edge in get_edges(used_routes):  # The black lines are the routes actually used
        image += plt.plot(edge[0], edge[1], 'k', lw=2)
    for city, x, y in zip(japan['Town'], japan['Latitude'], japan['Longitude']):
        image.append(plt.text(x, y, city, alpha=0.5, size=12))  # Show the city names
    if len(current_direction) > 0:
        current_town = current_direction[0][0]
        image += plt.plot(japan.iloc[current_town, :]['Latitude'],
                          japan.iloc[current_town, :]['Longitude'],
                          markersize=20, marker='o')  # The circle marks the city currently being checked
    ims.append(image)  # Store one still image
    if len(stack) == 0:  # For the final frame
        current_direction = []
        stack.append([])
    elif stack[0] == []:  # To escape the loop after the final frame
        break
    i += 1

# Convert to video
ani = animation.ArtistAnimation(fig, ims, interval=500, repeat_delay=1000)
ani.save("Animation4.gif", writer='pillow')  # Save as a gif file
HTML(ani.to_jshtml())  # Display in HTML
Breadth-first search animation
Finally, best-first search. In the two searches above, nearby cities were merely given priority at the moment they were added to the stack (or queue). Best-first search instead re-sorts the entire stack (which is actually being used as a queue) after each addition, so the edge with the shortest distance is always taken next. The result is a minimum spanning tree.
The only change from before is this re-sorting of the whole stack.
def sort_stack(stack):
    return [stack[i] for i in np.argsort([dist_mat[edge[0]][edge[1]] for edge in stack])]
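As a quick usage sketch (the indices come from the table above; the exact distances depend on the data):

# 12 = Tokyo, 0 = Sapporo, 46 = Naha. Sapporo is closer to Tokyo than Naha is,
# so the Tokyo-Sapporo pair should come out in front after sorting.
sort_stack([[12, 46], [12, 0]])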
The code below is basically the same as before. The only changes are the call to the function sort_stack and the name of the file the video is saved to.
# -*- coding: UTF-8 -*-
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML

fig = plt.figure(figsize=(10, 8))
stack = [[12, 12]]  # The starting point is Tokyo
visited_towns = []  # A list that stores visited cities
used_routes = []  # A list that stores the routes actually used
current_direction = []  # The route currently being checked
ims = []  # A list for storing still images
i = 0
while len(stack) > 0:  # While the stack is not empty
    if i != 0:  # The first frame shows the initial state, so the search does not advance
        if stack[0] != []:  # The search does not advance on the final frame either
            stack = sort_stack(stack)  # Sort the stack for best-first search
            current_direction = traverse()  # Advance the search by one step
    image = []  # A list that stores the parts drawn in one still image
    for edge in get_edges(stack):  # The red lines are the "candidate" routes on the stack
        image += plt.plot(edge[0], edge[1], 'r', alpha=0.5)
    for edge in get_edges(current_direction):  # The blue line is the route currently being checked
        image += plt.plot(edge[0], edge[1], 'b', lw=5, alpha=0.5)
    for edge in get_edges(used_routes):  # The black lines are the routes actually used
        image += plt.plot(edge[0], edge[1], 'k', lw=2)
    for city, x, y in zip(japan['Town'], japan['Latitude'], japan['Longitude']):
        image.append(plt.text(x, y, city, alpha=0.5, size=12))  # Show the city names
    if len(current_direction) > 0:
        current_town = current_direction[0][0]
        image += plt.plot(japan.iloc[current_town, :]['Latitude'],
                          japan.iloc[current_town, :]['Longitude'],
                          markersize=20, marker='o')  # The circle marks the city currently being checked
    ims.append(image)  # Store one still image
    if len(stack) == 0:  # For the final frame
        current_direction = []
        stack.append([])
    elif stack[0] == []:  # To escape the loop after the final frame
        break
    i += 1

# Convert to video
ani = animation.ArtistAnimation(fig, ims, interval=500, repeat_delay=1000)
ani.save("Animation5.gif", writer='pillow')  # Save as a gif file
HTML(ani.to_jshtml())  # Display in HTML
Best-first search animation