This gist contains all the lab programs of AIML 7th sem (18CS71).
# ===== AO* Algorithm =====
def recAOStar(n):
    print("Expanding Node :", n)
    and_nodes = []
    or_nodes = []
    # Segregate the AND and OR successors of n
    if n in allNodes:
        if 'AND' in allNodes[n]:
            and_nodes = allNodes[n]['AND']
        if 'OR' in allNodes[n]:
            or_nodes = allNodes[n]['OR']
    # If n is a leaf node, there is nothing to expand
    if len(and_nodes) == 0 and len(or_nodes) == 0:
        return
    solvable = False
    marked = {}
    while not solvable:
        # If all child groups are visited and expanded, take the least-cost group
        if len(marked) == len(and_nodes) + len(or_nodes):
            min_cost_least, min_cost_group_least = least_cost_group(and_nodes, or_nodes, {})
            solvable = True
            change_heuristic(n, min_cost_least)
            optimal_child_group[n] = min_cost_group_least
            continue
        # Least cost among the unmarked child groups
        min_cost, min_cost_group = least_cost_group(and_nodes, or_nodes, marked)
        is_expanded = False
        # If the child nodes have subtrees, recursively visit them to
        # recalculate the heuristic of each child node
        if len(min_cost_group) > 1:
            if min_cost_group[0] in allNodes:
                is_expanded = True
                recAOStar(min_cost_group[0])
            if min_cost_group[1] in allNodes:
                is_expanded = True
                recAOStar(min_cost_group[1])
        else:
            if min_cost_group in allNodes:
                is_expanded = True
                recAOStar(min_cost_group)
        # If any child node was expanded, verify that the new heuristic value
        # is still the least among all child groups
        if is_expanded:
            min_cost_verify, min_cost_group_verify = least_cost_group(and_nodes, or_nodes, {})
            if min_cost_group == min_cost_group_verify:
                solvable = True
                change_heuristic(n, min_cost_verify)
                optimal_child_group[n] = min_cost_group
        # If the child nodes have no subtrees, the heuristic is final, so
        # record the min cost of the current node
        else:
            solvable = True
            change_heuristic(n, min_cost)
            optimal_child_group[n] = min_cost_group
        # Mark the child group that was just examined
        marked[min_cost_group] = 1
    return heuristic(n)
# Returns the minimum cost among all unmarked child groups of a node
def least_cost_group(and_nodes, or_nodes, marked):
    node_wise_cost = {}
    for node_pair in and_nodes:
        if node_pair[0] + node_pair[1] not in marked:
            # An AND group costs both child heuristics plus two edge costs
            cost = heuristic(node_pair[0]) + heuristic(node_pair[1]) + 2
            node_wise_cost[node_pair[0] + node_pair[1]] = cost
    for node in or_nodes:
        if node not in marked:
            # An OR group costs the child heuristic plus one edge cost
            cost = heuristic(node) + 1
            node_wise_cost[node] = cost
    min_cost = 999999
    min_cost_group = None
    # Pick the group with the least cost
    for costKey in node_wise_cost:
        if node_wise_cost[costKey] < min_cost:
            min_cost = node_wise_cost[costKey]
            min_cost_group = costKey
    return [min_cost, min_cost_group]

# Returns the heuristic of a node
def heuristic(n):
    return H_dist[n]

# Updates the heuristic of a node
def change_heuristic(n, cost):
    H_dist[n] = cost
    return

# Prints the nodes on the optimal-cost path
def print_path(node):
    print(optimal_child_group[node], end="")
    node = optimal_child_group[node]
    if len(node) > 1:
        if node[0] in optimal_child_group:
            print("->", end="")
            print_path(node[0])
        if node[1] in optimal_child_group:
            print("->", end="")
            print_path(node[1])
    else:
        if node in optimal_child_group:
            print("->", end="")
            print_path(node)
# Define the heuristic values here
H_dist = {
    'A': -1,
    'B': 4,
    'C': 2,
    'D': 3,
    'E': 6,
    'F': 8,
    'G': 2,
    'H': 0,
    'I': 0,
    'J': 0
}
# Define the graph here
allNodes = {
    'A': {'AND': [('C', 'D')], 'OR': ['B']},
    'B': {'OR': ['E', 'F']},
    'C': {'OR': ['G'], 'AND': [('H', 'I')]},
    'D': {'OR': ['J']}
}
optimal_child_group = {}
optimal_cost = recAOStar('A')
print('Nodes which give the optimal cost are')
print_path('A')
print('\nOptimal Cost is :: ', optimal_cost)
print(optimal_child_group)
# OUTPUT :-
# Nodes which give the optimal cost are
# CD->HI->J
# Optimal Cost is :: 5
# {'B': 'E', 'C': 'HI', 'D': 'J', 'A': 'CD'}
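
# The helper below is NOT part of the original lab program: it is a small
# sketch showing how the optimal_child_group map built by recAOStar can be
# walked to collect the solution nodes into a list instead of printing them.
# The name collect_path is hypothetical.
def collect_path(node, acc=None):
    if acc is None:
        acc = []
    group = optimal_child_group.get(node)
    if group is None:
        return acc
    acc.append(group)
    # An AND group is a concatenated pair like 'CD'; an OR group is one label
    children = list(group) if len(group) > 1 else [group]
    for child in children:
        if child in optimal_child_group:
            collect_path(child, acc)
    return acc
# For the graph above, collect_path('A') returns ['CD', 'HI', 'J'],
# matching the output of print_path('A').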
# ===== A* Algorithm =====
Graph_nodes = {
    'A': [('B', 6), ('F', 3)],
    'B': [('C', 3), ('D', 2)],
    'C': [('D', 1), ('E', 5)],
    'D': [('C', 1), ('E', 8)],
    'E': [('I', 5), ('J', 5)],
    'F': [('G', 1), ('H', 7)],
    'G': [('I', 3)],
    'H': [('I', 2)],
    'I': [('E', 5), ('J', 3)],
}
# Returns the heuristic of a node
def heuristic(v):
    H_dist = {
        'A': 10,
        'B': 8,
        'C': 5,
        'D': 7,
        'E': 3,
        'F': 6,
        'G': 5,
        'H': 3,
        'I': 1,
        'J': 0,
    }
    return H_dist[v]

# Returns the neighbours of a node, or None if it has none
def neighbors(v):
    if v in Graph_nodes:
        return Graph_nodes[v]
    else:
        return None
def aStarAlgo(start_node, stop_node):
    open_set = {start_node}
    closed_set = set()
    g = {}        # cost from start_node to each discovered node
    parents = {}  # parent map used to reconstruct the path
    g[start_node] = 0
    parents[start_node] = start_node
    while len(open_set) > 0:
        # Pick the open node with the lowest f(v) = g(v) + h(v)
        n = None
        for v in open_set:
            if n is None or g[v] + heuristic(v) < g[n] + heuristic(n):
                n = v
        if n == stop_node or neighbors(n) is None:
            pass
        else:
            for (m, weight) in neighbors(n):
                if m not in open_set and m not in closed_set:
                    open_set.add(m)
                    parents[m] = n
                    g[m] = g[n] + weight
                else:
                    # A cheaper route to m was found: update it and, if m was
                    # already closed, reopen it
                    if g[m] > g[n] + weight:
                        g[m] = g[n] + weight
                        parents[m] = n
                        if m in closed_set:
                            closed_set.remove(m)
                            open_set.add(m)
        if n is None:
            print("Path doesn't exist!!")
            return None
        # Goal reached: walk the parent map back to the start
        if n == stop_node:
            path = []
            while parents[n] != n:
                path.append(n)
                n = parents[n]
            path.append(start_node)
            path.reverse()
            print("The path is : ", path)
            return path
        open_set.remove(n)
        closed_set.add(n)
    print("Path doesn't exist")
    return None
aStarAlgo('A', 'J')
# Output:- ['A', 'F', 'G', 'I', 'J']
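
# A quick sanity check, not part of the original program: recompute the cost
# of the returned path directly from Graph_nodes. path_cost is a hypothetical
# helper, assuming the Graph_nodes adjacency list defined above.
def path_cost(path):
    total = 0
    for u, v in zip(path, path[1:]):
        total += dict(Graph_nodes[u])[v]   # weight of the edge u -> v
    return total
# path_cost(['A', 'F', 'G', 'I', 'J']) -> 3 + 1 + 3 + 3 = 10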
# ===== Backpropagation Neural Network =====
import numpy as np

X = np.array(([2, 9], [1, 5], [3, 6]), dtype='float')
Y = np.array(([92], [86], [89]), dtype='float')
X = X / np.amax(X, axis=0)   # normalise inputs column-wise
Y = Y / 100                  # scale targets into [0, 1]

epochs = 1000
learning_rate = 0.6
inputLayers = 2
hiddenLayers = 3
outputLayers = 1

# Random initial weights and biases
wh = np.random.uniform(size=(inputLayers, hiddenLayers))
bh = np.random.uniform(size=(1, hiddenLayers))
w0 = np.random.uniform(size=(hiddenLayers, outputLayers))
b0 = np.random.uniform(size=(1, outputLayers))

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def derivative(x):
    # derivative of the sigmoid, expressed in terms of its output
    return x * (1 - x)

for i in range(epochs):
    # Forward propagation
    z_h = np.dot(X, wh) + bh
    sigmoid_h = sigmoid(z_h)
    z_0 = np.dot(sigmoid_h, w0) + b0
    output = sigmoid(z_0)
    # Backward propagation
    deltaK = (Y - output) * derivative(output)
    deltaH = deltaK.dot(w0.T) * derivative(sigmoid_h)
    w0 = w0 + learning_rate * sigmoid_h.T.dot(deltaK)
    wh = wh + learning_rate * X.T.dot(deltaH)
    # Bias updates (missing in the original, added so the biases also learn)
    b0 = b0 + learning_rate * np.sum(deltaK, axis=0, keepdims=True)
    bh = bh + learning_rate * np.sum(deltaH, axis=0, keepdims=True)

print(f"Input:\n {X}")
print(f"Actual Output:\n {Y}")
print(f"Predicted Output:\n {output}")
# OUTPUT :-
# Input:
# [[0.66666667 1. ]
# [0.33333333 0.55555556]
# [1. 0.66666667]]
# Actual Output:
# [[0.92]
# [0.86]
# [0.89]]
# Predicted Output:
# [[0.89561426]
# [0.87785989]
# [0.89594741]]
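
# A minimal convergence check, not part of the original program: it assumes
# X, Y, and output as defined above and reports the final mean squared error.
# Printing the same expression inside the training loop (e.g. every 100
# epochs) shows the error decreasing as the weights are updated.
mse = np.mean(np.square(Y - output))
print(f"Final training MSE: {mse:.5f}")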
# ===== ID3 Decision Tree =====
import pandas as pd
from math import log
from collections import Counter
from pprint import pprint

df_tennis = pd.read_csv('./Data/PlayTennis.csv')

def entropy(probs):
    return sum([-prob * log(prob, 2) for prob in probs])

def entropy_of_list(a_list):
    # Entropy of the class distribution in a_list
    cnt = Counter(x for x in a_list)
    num_instances = len(a_list) * 1.0
    probs = [x / num_instances for x in cnt.values()]
    return entropy(probs)

def information_gain(df, split_attribute_name, target_attribute_name):
    df_split = df.groupby(split_attribute_name)
    nobs = len(df.index) * 1.0
    # Entropy and proportion of observations for each value of the split attribute
    df_agg_ent = df_split.agg({target_attribute_name: [entropy_of_list, lambda x: len(x) / nobs]})[target_attribute_name]
    df_agg_ent.columns = ['Entropy', 'PropObservations']
    new_entropy = sum(df_agg_ent['Entropy'] * df_agg_ent['PropObservations'])
    old_entropy = entropy_of_list(df[target_attribute_name])
    return old_entropy - new_entropy

def id3(df, target_attribute_name, attribute_names, default_class=None):
    cnt = Counter(x for x in df[target_attribute_name])
    print(cnt)  # trace: class counts at this node
    if len(cnt) == 1:                        # pure node: return its class
        return next(iter(cnt))
    elif df.empty or (not attribute_names):  # no data or no attributes left
        return default_class
    else:
        default_class = max(cnt, key=cnt.get)  # majority class at this node
        gainz = [information_gain(df, attr, target_attribute_name) for attr in attribute_names]
        index_of_max = gainz.index(max(gainz))
        best_attr = attribute_names[index_of_max]
        tree = {best_attr: {}}
        remaining_attribute_names = [i for i in attribute_names if i != best_attr]
        for attr_val, data_subset in df.groupby(best_attr):
            subtree = id3(data_subset, target_attribute_name, remaining_attribute_names, default_class)
            tree[best_attr][attr_val] = subtree
        return tree

attribute_names = list(df_tennis.columns)
print("List of attributes: ", attribute_names)
attribute_names.remove('Play Tennis')
print("Predicting Attributes: ", attribute_names)
tree = id3(df_tennis, 'Play Tennis', attribute_names)
print("\n\nThe Resultant Decision Tree is: \n")
pprint(tree)
# OUTPUT :-
# The Resultant Decision Tree is:
# {'Outlook': {'Overcast': 'Yes',
# 'Rain': {'Wind': {'Strong': 'No', 'Weak': 'Yes'}},
# 'Sunny': {'Humidity': {'High': 'No', 'Normal': 'Yes'}}}}
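
# A hypothetical classify helper, not part of the original program: it walks
# the nested-dict tree returned by id3 for one instance (a dict mapping
# attribute -> value) and returns default for unseen attribute values.
def classify(instance, tree, default=None):
    attribute = next(iter(tree))       # root attribute of this (sub)tree
    subtree = tree[attribute].get(instance[attribute], default)
    if isinstance(subtree, dict):      # internal node: keep descending
        return classify(instance, subtree, default)
    return subtree                     # leaf: the predicted class
# e.g. classify({'Outlook': 'Sunny', 'Humidity': 'High',
#                'Temperature': 'Hot', 'Wind': 'Weak'}, tree) -> 'No'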
# ===== Candidate Elimination =====
import pandas as pd

df = pd.read_csv("./Data/EnjoySport.csv")
print(df.head())
concepts = df.values[:, :-1]
target = df.values[:, -1]

def learn(concepts, target):
    # Initialise the specific hypothesis with the first training example
    specific_h = concepts[0].copy()
    print(specific_h)
    general_h = [["?" for i in range(len(specific_h))] for i in range(len(specific_h))]
    print(general_h)
    for i, h in enumerate(concepts):
        # Positive example: generalise the specific hypothesis
        if target[i] == "yes":
            for x in range(len(specific_h)):
                if h[x] != specific_h[x]:
                    specific_h[x] = "?"
                    general_h[x][x] = "?"
        # Negative example: specialise the general hypotheses
        if target[i] == "no":
            for x in range(len(specific_h)):
                if h[x] != specific_h[x]:
                    general_h[x][x] = specific_h[x]
                else:
                    general_h[x][x] = "?"
    # Drop hypotheses that stayed fully general
    indices = [i for i, val in enumerate(general_h) if val == ["?"] * len(specific_h)]
    for i in indices:
        general_h.remove(["?"] * len(specific_h))
    print(specific_h)
    print(general_h)

learn(concepts, target)
# OUTPUT:-
# ['sunny' 'warm' '?' 'strong' '?' '?']
# [['sunny', '?', '?', '?', '?', '?'], ['?', 'warm', '?', '?', '?', '?']]
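
# For reference, the program above expects a CSV shaped like Mitchell's
# classic EnjoySport data, with lowercase yes/no labels in the last column.
# The column names below are an assumption, not taken from the gist:
#
# sky,airtemp,humidity,wind,water,forecast,enjoysport
# sunny,warm,normal,strong,warm,same,yes
# sunny,warm,high,strong,warm,same,yes
# rainy,cold,high,strong,warm,change,no
# sunny,warm,high,strong,cool,change,yes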