Informed search algorithms

 

Expt No: 2                                           Informed search algorithms

Date:

 

Aim: To write programs to demonstrate informed search algorithms – A* and memory-bounded A* algorithms

 

Program


# A* algorithm

import heapq

class Graph:
    def __init__(self):
        self.nodes = set()
        self.edges = {}

    def add_node(self, value):
        self.nodes.add(value)
        if value not in self.edges:
            self.edges[value] = []

    def add_edge(self, from_node, to_node, cost):
        # Undirected graph: record the edge in both directions
        self.edges[from_node].append((to_node, cost))
        self.edges[to_node].append((from_node, cost))

    def get_neighbors(self, node):
        return self.edges[node]

def heuristic(node, goal):
    # Manhattan distance between two nodes
    x1, y1 = node
    x2, y2 = goal
    return abs(x1 - x2) + abs(y1 - y2)
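    # Example: heuristic((0, 0), (2, 1)) = |0 - 2| + |0 - 1| = 3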


 

 

def astar_search(graph, start, goal):
    frontier = [(0, start)]      # Priority queue of (f-cost, node)
    came_from = {}               # Best-known predecessor of each node
    cost_so_far = {start: 0}     # Cheapest known cost g(n) from start to each node

    while frontier:
        current_cost, current_node = heapq.heappop(frontier)

        if current_node == goal:
            break

        for next_node, edge_cost in graph.get_neighbors(current_node):
            new_cost = cost_so_far[current_node] + edge_cost
            if next_node not in cost_so_far or new_cost < cost_so_far[next_node]:
                cost_so_far[next_node] = new_cost
                priority = new_cost + heuristic(next_node, goal)   # f(n) = g(n) + h(n)
                heapq.heappush(frontier, (priority, next_node))
                came_from[next_node] = current_node

    # Reconstruct the path by walking back from the goal to the start
    path = []
    current_node = goal
    while current_node != start:
        path.append(current_node)
        current_node = came_from[current_node]
    path.append(start)
    path.reverse()

    return path

 

# Create graph and add nodes

graph = Graph()
graph.add_node((0, 0))
graph.add_node((1, 0))
graph.add_node((0, 1))
graph.add_node((1, 1))
graph.add_node((2, 1))
graph.add_node((2, 2))

# Add edges and assign weights

graph.add_edge((0, 0), (0, 1), 1)
graph.add_edge((0, 0), (1, 1), 4)
graph.add_edge((0, 0), (1, 0), 3)
graph.add_edge((0, 1), (2, 2), 2)
graph.add_edge((1, 1), (1, 0), 2)
graph.add_edge((1, 1), (2, 1), 4)
graph.add_edge((1, 1), (2, 2), 3)
graph.add_edge((1, 0), (2, 1), 5)
graph.add_edge((2, 1), (2, 2), 4)

# Call search algorithm

start_node = (0, 0)
goal_node = (2, 1)

path = astar_search(graph, start_node, goal_node)
print("Shortest path:", path)

 

 


 

# Memory-bounded A* algorithm

import heapq

class Graph:
    def __init__(self):
        self.nodes = set()
        self.edges = {}

    def add_node(self, value):
        self.nodes.add(value)
        if value not in self.edges:
            self.edges[value] = []

    def add_edge(self, from_node, to_node, cost):
        # Undirected graph: record the edge in both directions
        self.edges[from_node].append((to_node, cost))
        self.edges[to_node].append((from_node, cost))

    def get_neighbors(self, node):
        return self.edges[node]

def heuristic(node, goal):
    # Manhattan distance between two nodes
    x1, y1 = node
    x2, y2 = goal
    return abs(x1 - x2) + abs(y1 - y2)

 


 

def memory_bound_astar_search(graph, start, goal, memory_limit):
    frontier = [(0, start)]      # Priority queue of (f-cost, node)
    came_from = {}               # Best-known predecessor of each node
    cost_so_far = {start: 0}     # Cheapest known cost g(n) from start to each node

    while frontier:
        current_cost, current_node = heapq.heappop(frontier)

        if current_node == goal:
            break

        # Simplified memory bound: if the stored cost table exceeds the limit,
        # discard the entry with the highest cost so far. The current node is
        # kept, since its cost is needed when expanding its neighbors below.
        if len(cost_so_far) > memory_limit:
            max_cost_node = max(
                (node for node in cost_so_far if node != current_node),
                key=cost_so_far.get,
            )
            del cost_so_far[max_cost_node]

        for next_node, edge_cost in graph.get_neighbors(current_node):
            new_cost = cost_so_far[current_node] + edge_cost
            if next_node not in cost_so_far or new_cost < cost_so_far[next_node]:
                cost_so_far[next_node] = new_cost
                priority = new_cost + heuristic(next_node, goal)   # f(n) = g(n) + h(n)
                heapq.heappush(frontier, (priority, next_node))
                came_from[next_node] = current_node

    # Reconstruct the path by walking back from the goal to the start
    path = []
    current_node = goal
    while current_node != start:
        path.append(current_node)
        current_node = came_from[current_node]
    path.append(start)
    path.reverse()

    return path

 


 

# Create graph and add nodes

graph = Graph()
graph.add_node((0, 0))
graph.add_node((1, 0))
graph.add_node((0, 1))
graph.add_node((1, 1))
graph.add_node((2, 1))
graph.add_node((2, 2))

# Add edges and assign weights

graph.add_edge((0, 0), (0, 1), 1)
graph.add_edge((0, 0), (1, 1), 4)
graph.add_edge((0, 0), (1, 0), 3)
graph.add_edge((0, 1), (2, 2), 2)
graph.add_edge((1, 1), (1, 0), 2)
graph.add_edge((1, 1), (2, 1), 4)
graph.add_edge((1, 1), (2, 2), 3)
graph.add_edge((1, 0), (2, 1), 5)
graph.add_edge((2, 1), (2, 2), 4)

# Call search algorithm

start_node = (0, 0)
goal_node = (2, 1)
memory_limit = int(input("Enter memory limit : "))

path = memory_bound_astar_search(graph, start_node, goal_node, memory_limit)
print("Shortest path:", path)

 

 

 

 

 

 

Result: Thus the programs to demonstrate informed search algorithms were written and executed

 

 

 

Input Graph

 


Sample Output

 

# A* algorithm

 

>python Astar.py

Shortest path: [(0, 0), (0, 1), (2, 2), (2, 1)]

 

 

# Memory-bounded A* algorithm

 

>python MAstar.py

Enter memory limit : 5

Shortest path: [(0, 0), (1, 1), (2, 1)]

 

>python MAstar.py

Enter memory limit : 10

Shortest path: [(0, 0), (0, 1), (2, 2), (2, 1)]
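
The two runs differ because of the pruning rule: the example graph has only six nodes, so a limit of 10 is never exceeded and the search reduces to plain A*, while a limit of 5 forces one stored cost entry to be discarded mid-search, and the goal ends up being re-reached through (1, 1) along the costlier path. A quick way to confirm the first case (a sketch, assuming the graph built above) is to pass the node count itself as the limit:

path = memory_bound_astar_search(graph, start_node, goal_node, len(graph.nodes))
print("Shortest path:", path)   # expected to match the plain A* result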

 

 

AIML Lab



1.    Implementation of Uninformed search algorithms (BFS, DFS)


2.    Implementation of Informed search algorithms (A*, memory-bounded A*)


3.    Implement naïve Bayes models


4.    Implement Bayesian Networks


5.    Build Regression models


6.    Build decision trees and random forests


7.    Build SVM models


8.    Implement ensembling techniques


9.    Implement clustering algorithms


10.   Implement EM for Bayesian networks


11.   Build simple NN models


12.   Build deep learning NN models


Uninformed search algorithms

 

Expt No: 1                                           Uninformed search algorithms

Date:

 

Aim: To write a program to demonstrate uninformed search algorithms – BFS and DFS algorithms

 

Program

 

# Uninformed search algorithms

from collections import deque

class TreeNode:
    def __init__(self, value):
        self.value = value
        self.children = []

    def add_child(self, child_node):
        self.children.append(child_node)

def bfs_traversal(root):
    # Breadth-first traversal using a FIFO queue
    if not root:
        return

    queue = deque([root])

    while queue:
        node = queue.popleft()
        print(node.value, end='\t')

        for child in node.children:
            queue.append(child)

def dfs_preorder_traversal(node):
    # Depth-first traversal: visit the node first, then its children left to right
    if not node:
        return

    print(node.value, end='\t')

    for child in node.children:
        dfs_preorder_traversal(child)

def dfs_inorder_traversal(node):
    # Inorder traversal, treating the first child as the left subtree
    # and the second child as the right subtree (binary-tree view)
    if not node:
        return

    if len(node.children) >= 1:
        dfs_inorder_traversal(node.children[0])

    print(node.value, end='\t')

    if len(node.children) >= 2:
        dfs_inorder_traversal(node.children[1])

def dfs_postorder_traversal(node):
    # Depth-first traversal: visit the children first, then the node
    if not node:
        return

    for child in node.children:
        dfs_postorder_traversal(child)

    print(node.value, end='\t')

 

# Create nodes
root = TreeNode("A")
node_b = TreeNode("B")
node_c = TreeNode("C")
node_d = TreeNode("D")
node_e = TreeNode("E")

# Connect nodes
root.add_child(node_b)
root.add_child(node_c)
node_b.add_child(node_d)
node_b.add_child(node_e)

# BFS traversal
print("BFS Traversal:")
bfs_traversal(root)
print()

# DFS traversals
print("DFS Preorder Traversal:")
dfs_preorder_traversal(root)
print()

print("DFS Inorder Traversal:")
dfs_inorder_traversal(root)
print()

print("DFS Postorder Traversal:")
dfs_postorder_traversal(root)

 

Result: Thus the program to demonstrate uninformed search algorithms was written and executed

 

 

 

Input Tree

 



 

 

Sample Output

 

AIMLLab>python bfsdfs.py

BFS Traversal:

A       B       C       D       E

DFS Preorder Traversal:

A       B       D       E       C

DFS Inorder Traversal:

D       B       E       A       C

DFS Postorder Traversal:

D       E       B       C       A
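
For comparison, the preorder traversal above can also be written iteratively with an explicit stack. This is only a sketch using the same TreeNode class (not part of the recorded program); for the sample tree it prints the same order, A B D E C:

def dfs_preorder_iterative(root):
    # Explicit-stack version of the recursive preorder traversal
    stack = [root] if root else []
    while stack:
        node = stack.pop()
        print(node.value, end='\t')
        # Push children in reverse so the leftmost child is visited first
        for child in reversed(node.children):
            stack.append(child)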