MLExp 3
Write a program to demonstrate the working of the decision tree based ID3
algorithm. Use an appropriate data set for building the decision tree and apply this knowledge
to classify a new sample.
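At every node, ID3 selects the attribute with the highest information gain. For a set of examples S containing a fraction p+ of positive and p- of negative examples,

Entropy(S) = -p+ log2(p+) - p- log2(p-)
Gain(S, A) = Entropy(S) - sum over each value v of A of (|Sv| / |S|) * Entropy(Sv)

where Sv is the subset of S on which attribute A takes the value v. The program below computes these two quantities in entropy() and info_gain().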
Dataset:
The PlayTennis dataset is saved as a .csv (comma-separated values) file in the current
working directory; otherwise, use the complete path of the dataset file in the program.
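For reference, the first few rows of 3-dataset.csv might look like this (the attribute names outlook, temperature, humidity and wind are assumed from the standard PlayTennis data; the program itself only requires that the class column is named answer):

outlook,temperature,humidity,wind,answer
sunny,hot,high,weak,no
sunny,hot,high,strong,no
overcast,hot,high,weak,yes
rain,mild,high,weak,yes

Program: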
import pandas as pd
import math
import numpy as np

# Load the dataset and collect the attribute names, excluding the class column "answer"
data = pd.read_csv("3-dataset.csv")
features = [feat for feat in data]
features.remove("answer")
class Node:
    # A node of the decision tree: an attribute test, a branch value, or a leaf
    def __init__(self):
        self.children = []   # child nodes
        self.value = ""      # attribute name (decision node) or attribute value (branch/leaf)
        self.isLeaf = False  # True if the node holds a final prediction
        self.pred = ""       # predicted class label at a leaf
def entropy(examples):
    # Entropy of the class column "answer" over the given examples
    pos = 0.0
    neg = 0.0
    for _, row in examples.iterrows():
        if row["answer"] == "yes":
            pos += 1
        else:
            neg += 1
    # A pure subset (all "yes" or all "no") has zero entropy
    if pos == 0.0 or neg == 0.0:
        return 0.0
    else:
        p = pos / (pos + neg)
        n = neg / (pos + neg)
        return -(p * math.log(p, 2) + n * math.log(n, 2))
def info_gain(examples, attr):
    # Information gain from splitting the examples on attribute attr
    uniq = np.unique(examples[attr])
    gain = entropy(examples)
    for u in uniq:
        subdata = examples[examples[attr] == u]
        sub_e = entropy(subdata)
        gain -= (float(len(subdata)) / float(len(examples))) * sub_e
    return gain
def ID3(examples, attrs):
    # Build the tree recursively: choose the attribute with the
    # highest information gain as the test at this node
    root = Node()
    max_gain = 0
    max_feat = ""
    for feature in attrs:
        gain = info_gain(examples, feature)
        if gain > max_gain:
            max_gain = gain
            max_feat = feature
    root.value = max_feat
    uniq = np.unique(examples[max_feat])
    for u in uniq:
        subdata = examples[examples[max_feat] == u]
        if entropy(subdata) == 0.0:
            # Pure subset: create a leaf holding the remaining class
            newNode = Node()
            newNode.isLeaf = True
            newNode.value = u
            newNode.pred = np.unique(subdata["answer"])
            root.children.append(newNode)
        else:
            # Impure subset: recurse on the remaining attributes
            dummyNode = Node()
            dummyNode.value = u
            new_attrs = attrs.copy()
            new_attrs.remove(max_feat)
            child = ID3(subdata, new_attrs)
            dummyNode.children.append(child)
            root.children.append(dummyNode)
    return root
def printTree(root, depth=0):
    # Print the tree with one tab of indentation per level
    for i in range(depth):
        print("\t", end="")
    print(root.value, end="")
    if root.isLeaf:
        print(" -> ", root.pred, end="")
    print()
    for child in root.children:
        printTree(child, depth + 1)

root = ID3(data, features)
printTree(root)
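The listing above builds and prints the decision tree but does not itself classify a new sample. A minimal sketch of that final step, assuming the tree returned by ID3() above and a sample whose keys match the CSV attribute names, could look like this:

def classify(root, sample):
    # root.value holds the attribute tested at this node;
    # follow the branch whose value matches the sample
    for child in root.children:
        if child.value == sample[root.value]:
            if child.isLeaf:
                return child.pred                         # leaf branch: stored class label
            return classify(child.children[0], sample)    # descend into the subtree
    return None                                           # value not seen during training

# Hypothetical new sample; the attribute names and values are assumptions about the CSV
new_sample = {"outlook": "sunny", "temperature": "cool",
              "humidity": "high", "wind": "strong"}
print("Prediction for the new sample:", classify(root, new_sample))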