Practical File
Submitted in partial fulfillment for the evaluation of
“Fundamentals of Machine Learning Lab”
Submitted By:
NAME: Kunal Saini, Aditya Jain, Aditi Jain, Ananya Tyagi, Aditya Vikram
ENROLL NO: 07317711621, 07617711621, 08217711621, 08817711621, 09117711621
BRANCH & SECTION: AI & ML-B
Submitted To:
Dr. Sonakshi Vij
INDEX
S.No. | Details | Page No. | Date | Grade/Evaluation | Sign
1 | Study and implement Linear Regression | | | |
2 | Study and implement Logistic Regression | | | |
3 | Study and implement K Nearest Neighbor | | | |
4 | Study and implement Classification using SVM | | | |
5 | Study and implement Bagging using Random Forests | | | |
6 | Study and implement Naive Bayes | | | |
7 | Study and implement Decision Trees | | | |
8 | Study and implement K Means Clustering | | | |
9 | Study and implement Gaussian Mixture Model | | | |
10 | Study and implement Classification based on Association Rules | | | |
EXPERIMENT-1
(LINEAR REGRESSION)
ABSTRACT:
CODE:
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
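# NOTE (reconstruction): the data-loading and model-fitting lines did not survive
# extraction; the steps below are assumed, using the Wisconsin breast cancer
# dataset saved locally as 'data.csv' (its column names match those used later).
data = pd.read_csv('data.csv')
X = data[['perimeter_mean']]   # single predictor
y = data['concavity_mean']     # target variable

# Fit a simple linear regression model
model = LinearRegression()
model.fit(X, y)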
# Perform predictions
y_pred = model.predict(X)
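# Report the mean squared error of the fit
print("Mean Squared Error:", mean_squared_error(y, y_pred))

# Plot the data points and the fitted line (these two calls are assumed;
# only the axis labels and legend survived extraction)
plt.scatter(X, y, color='blue', label='Actual')
plt.plot(X, y_pred, color='red', linewidth=2, label='Predicted')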
plt.xlabel('Perimeter_mean')
plt.ylabel('Concavity_mean')
plt.title('Linear Regression')
plt.legend()
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-2
(LOGISTIC REGRESSION)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.model_selection import train_test_split
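# NOTE (reconstruction): the data-loading, splitting, and model-fitting steps did
# not survive extraction; the block below is assumed, using the breast cancer
# dataset in 'data.csv' with the diagnosis column as the binary target.
data = pd.read_csv('data.csv')
X = data[['perimeter_mean', 'area_mean', 'concavity_mean', 'radius_mean']]
y = data['diagnosis']

# Split into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the logistic regression classifier and predict on the test set
model = LogisticRegression(max_iter=1000)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

# Confusion matrix of the test-set predictions
print("Confusion Matrix:\n", confusion_matrix(y_test, y_pred))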
# Calculate the accuracy of the model
accuracy = accuracy_score(y_test, y_pred)
print("Accuracy:", accuracy)
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-3
(K NEAREST NEIGHBOR)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score, confusion_matrix
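# NOTE (reconstruction): the data-loading, splitting, and model-fitting steps did
# not survive extraction; the block below is assumed, using the breast cancer
# dataset in 'data.csv' with the diagnosis column as the class label.
data = pd.read_csv('data.csv')
X = data[['radius_mean', 'concavity_mean']]
y = data['diagnosis']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Fit a k-nearest-neighbours classifier (k assumed to be 5) and evaluate it
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X_train, y_train)
y_pred = knn.predict(X_test)
accuracy = accuracy_score(y_test, y_pred)
print("Confusion Matrix:\n", confusion_matrix(y_test, y_pred))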
print("Accuracy:", accuracy)
# Scatter plot
plt.scatter(data['radius_mean'], data['concavity_mean'], c=data['id'], cmap='coolwarm')
plt.xlabel('Radius_mean')
plt.ylabel('Concavity_mean')
plt.title('KNN')
plt.colorbar(label='Id')
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-4
(CLASSIFICATION USING SVM)
ABSTRACT:
CODE:
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVR
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import r2_score
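# NOTE (reconstruction): the data-loading, scaling, and model-fitting steps did not
# survive extraction; the block below is assumed, regressing concavity_mean on a
# single feature (perimeter_mean) from 'data.csv' so the fit can be plotted.
data = pd.read_csv('data.csv')
X = data[['perimeter_mean']]
y = data['concavity_mean']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Standardise the feature before fitting the support vector regressor
scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)

# Fit the SVR model and predict on the (scaled) test features
svr = SVR(kernel='rbf')
svr.fit(X_train_scaled, y_train)
y_pred = svr.predict(X_test_scaled)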
# Calculate the coefficient of determination (R^2) on the test data
r2 = r2_score(y_test, y_pred)
print('R^2 score:', r2)
# Plot the predicted values and the actual values on the test data
plt.figure(figsize=(10, 5))
plt.scatter(X_test, y_test, color='red')
plt.plot(X_test, y_pred, color='blue', linewidth=3)
plt.xlabel('perimeter_mean')
plt.ylabel('concavity_mean')
plt.title('Support Vector Regression')
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-5
(BAGGING USING RANDOM FORESTS)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, confusion_matrix
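# NOTE (reconstruction): the data-loading, splitting, and model-fitting steps did
# not survive extraction; the block below is assumed, using the breast cancer
# dataset in 'data.csv' with diagnosis as the class label.
data = pd.read_csv('data.csv')
X = data[['perimeter_mean', 'area_mean', 'concavity_mean', 'radius_mean',
          'texture_mean', 'smoothness_mean']]
y = data['diagnosis']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train a bagged ensemble of decision trees (the random forest) and predict
rf = RandomForestClassifier(n_estimators=100, random_state=42)
rf.fit(X_train, y_train)
y_pred = rf.predict(X_test)
print("Confusion Matrix:\n", confusion_matrix(y_test, y_pred))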
accuracy = accuracy_score(y_test, y_pred)
print("Accuracy:", accuracy)
# Feature Importance
feature_importance = rf.feature_importances_
feature_names = X.columns
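# Plot the feature importances (the plotting code did not survive extraction;
# a horizontal bar chart is assumed here)
sns.barplot(x=feature_importance, y=feature_names)
plt.xlabel('Importance')
plt.title('Random Forest Feature Importance')
plt.show()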
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-6
(NAIVE BAYES)
ABSTRACT:
CODE:
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import confusion_matrix, classification_report
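# NOTE (reconstruction): the line loading the dataset did not survive extraction;
# it is assumed here that the breast cancer data is read from 'data.csv'.
data = pd.read_csv('data.csv')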
# Split the dataset into features (X) and target variable (y)
X = data[['perimeter_mean', 'area_mean', 'concavity_mean', 'radius_mean', 'texture_mean', 'smoothness_mean']]
#X = X.dropna()
y = data['diagnosis']
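# NOTE (reconstruction): the train/test split and model-fitting steps did not
# survive extraction; the block below is assumed.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the Gaussian Naive Bayes classifier and predict on the test set
nb = GaussianNB()
nb.fit(X_train, y_train)
y_pred = nb.predict(X_test)

# Summarise precision, recall, and F1 per class
print(classification_report(y_test, y_pred))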
cm = confusion_matrix(y_test, y_pred)
# Plot the confusion matrix
plt.figure(figsize=(8, 6))
sns.heatmap(cm, annot=True, cmap="Blues", fmt="d", cbar=False)
plt.title("Confusion Matrix")
plt.xlabel("Predicted")
plt.ylabel("Actual")
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-7
(DECISION TREES)
ABSTRACT:
CODE:
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier, plot_tree
from sklearn.metrics import confusion_matrix
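# NOTE (reconstruction): the line loading the dataset did not survive extraction;
# it is assumed here that the breast cancer data is read from 'data.csv'.
data = pd.read_csv('data.csv')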
# Split the dataset into features (X) and target variable (y)
X = data[['perimeter_mean', 'area_mean', 'concavity_mean', 'radius_mean', 'texture_mean', 'smoothness_mean']]
#X = X.dropna()
y = data['diagnosis']
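# NOTE (reconstruction): the train/test split, model fitting, evaluation, and the
# plot_tree call did not survive extraction; the block below is assumed.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train a shallow decision tree so the plotted tree stays readable
dt = DecisionTreeClassifier(max_depth=3, random_state=42)
dt.fit(X_train, y_train)
y_pred = dt.predict(X_test)
print("Confusion Matrix:\n", confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))

# Visualise the fitted tree
plt.figure(figsize=(16, 8))
plot_tree(dt, feature_names=list(X.columns), class_names=list(dt.classes_), filled=True)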
plt.title("Decision Tree")
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-8
(K MEANS CLUSTERING)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
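# NOTE (reconstruction): the data-loading and clustering steps did not survive
# extraction; the block below is assumed, clustering two features of the breast
# cancer dataset in 'data.csv' into two groups.
data = pd.read_csv('data.csv')
X = data[['concavity_mean', 'perimeter_mean']].to_numpy()

# Fit k-means and read off the cluster assignments and centres
kmeans = KMeans(n_clusters=2, n_init=10, random_state=42)
kmeans.fit(X)
labels = kmeans.labels_
centers = kmeans.cluster_centers_

print("Cluster Labels:")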
print(labels)
print("Cluster Centers:")
print(centers)
# Scatter plot of the data points with cluster assignments and centers
plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='viridis')
plt.scatter(centers[:, 0], centers[:, 1], marker='X', color='red', label='Centers')
plt.xlabel('Concavity_mean')
plt.ylabel('Perimeter_mean')
plt.title('K-means Clustering')
plt.legend()
plt.show()
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-9
(GAUSSIAN MIXTURE MODEL)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.mixture import GaussianMixture
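# NOTE (reconstruction): the data-loading and model-fitting steps did not survive
# extraction; the block below is assumed, fitting a two-component Gaussian mixture
# on two features of the breast cancer dataset in 'data.csv'.
data = pd.read_csv('data.csv')
X = data[['concavity_mean', 'perimeter_mean']].to_numpy()

# Fit the mixture, then read off hard assignments and per-component probabilities
gmm = GaussianMixture(n_components=2, random_state=42)
gmm.fit(X)
labels = gmm.predict(X)
probs = gmm.predict_proba(X)

print("Cluster Labels:")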
print(labels)
print("Probabilities:")
print(probs)
OUTPUT:
LEARNING OUTCOMES:
EXPERIMENT-10
(CLASSIFICATION BASED ON ASSOCIATION RULES)
ABSTRACT:
CODE:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix
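# NOTE (reconstruction): only the imports and the final accuracy print survived
# extraction. The block below is assumed and, following the surviving imports,
# approximates rule-based classification with a shallow decision tree whose
# root-to-leaf paths act as if-then rules, using the breast cancer data in 'data.csv'.
data = pd.read_csv('data.csv')
X = data[['perimeter_mean', 'area_mean', 'concavity_mean', 'radius_mean']]
y = data['diagnosis']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# A shallow tree keeps the induced rule set small and readable
clf = DecisionTreeClassifier(max_depth=3, random_state=42)
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)

accuracy = accuracy_score(y_test, y_pred)
print("Confusion Matrix:\n", confusion_matrix(y_test, y_pred))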
print("Accuracy:", accuracy)
OUTPUT:
LEARNING OUTCOMES: