Machine Learning Algorithms From Scratch
Linear Regression From Scratch:
import numpy as np
from tqdm import tqdm

class LinearRegression:
    def __init__(self, lr, n_iter):
        self.lr = lr            # learning rate
        self.n_iter = n_iter    # number of gradient-descent iterations
        self.weights = None
        self.bias = None

    def fit_model(self, X, y):
        """Train the model with batch gradient descent."""
        n_samples, n_features = X.shape
        self.weights = np.random.rand(n_features)
        self.bias = 0
        for _ in tqdm(range(self.n_iter)):
            # Calculate y_predicted
            y_pred = np.dot(X, self.weights) + self.bias
            # Compute gradients of the mean squared error
            delw = (1 / n_samples) * np.dot(X.T, (y_pred - y))
            delb = (1 / n_samples) * np.sum(y_pred - y)
            # Gradient-descent parameter update
            self.weights -= self.lr * delw
            self.bias -= self.lr * delb

    def predict(self, X):
        return np.dot(X, self.weights) + self.bias
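A quick sanity check on synthetic data (a minimal sketch; the learning rate, iteration count, and ground-truth coefficients below are illustrative, not from the original):
X = np.random.rand(200, 3)
true_w = np.array([2.0, -1.0, 0.5])          # illustrative ground-truth weights
y = X @ true_w + 0.1 * np.random.randn(200)  # linear signal plus small noise
model = LinearRegression(lr=0.1, n_iter=1000)
model.fit_model(X, y)
print(model.weights, model.bias)             # weights should approach true_w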
Logistic Regression From Scratch:
class LogisticRegression:
    def __init__(self, lr, n_iter):
        self.lr = lr            # learning rate
        self.n_iter = n_iter    # number of gradient-descent iterations
        self.weights = None
        self.bias = None

    def sigmoid(self, z):
        return 1 / (1 + np.exp(-z))

    def fit_model(self, X, y):
        """Train with gradient descent on the binary cross-entropy loss."""
        n_samples, n_features = X.shape
        self.weights = np.zeros(n_features)
        self.bias = 0
        for _ in range(self.n_iter):
            # Predicted probabilities
            y_pred = self.sigmoid(np.dot(X, self.weights) + self.bias)
            # Compute gradients (same form as linear regression, with sigmoid outputs)
            delw = (1 / n_samples) * np.dot(X.T, (y_pred - y))
            delb = (1 / n_samples) * np.sum(y_pred - y)
            # Gradient-descent parameter update
            self.weights -= self.lr * delw
            self.bias -= self.lr * delb

    def predict_class(self, X):
        linear_output = np.dot(X, self.weights) + self.bias
        y_pred = self.sigmoid(linear_output)
        y_pred_class = [1 if p > 0.5 else 0 for p in y_pred]
        return y_pred_class
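A minimal usage sketch on linearly separable toy data (the hyperparameter values are illustrative):
X = np.random.rand(200, 2)
y = (X[:, 0] + X[:, 1] > 1).astype(int)   # label 1 above the line x0 + x1 = 1
clf = LogisticRegression(lr=0.5, n_iter=2000)
clf.fit_model(X, y)
print(np.mean(np.array(clf.predict_class(X)) == y))  # training accuracy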
K-Means Clustering From Scratch:
def kmeans(data, K, max_iterations=100, tolerance=1e-4):
    # Randomly initialize centroids by sampling K distinct data points
    rng = np.random.default_rng()
    centroids = data[rng.choice(len(data), size=K, replace=False)]
    for _ in range(max_iterations):
        # Assignment step: group points by the index of their nearest centroid
        clusters = {k: [] for k in range(K)}
        for point in data:
            nearest_centroid = np.argmin(np.linalg.norm(centroids - point, axis=1))
            clusters[nearest_centroid].append(point)
        # Update step: move each centroid to the mean of its assigned points
        new_centroids = np.array([
            np.mean(clusters[k], axis=0) if clusters[k] else centroids[k]
            for k in range(K)
        ])
        # Stop once the centroids have effectively stopped moving
        if np.all(np.linalg.norm(new_centroids - centroids, axis=1) < tolerance):
            return new_centroids
        centroids = new_centroids
    return centroids
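A short usage sketch on two well-separated blobs (K and the cluster offsets are illustrative):
data = np.vstack([np.random.randn(50, 2), np.random.randn(50, 2) + 5])
centroids = kmeans(data, K=2)
print(centroids)  # one centroid near (0, 0), the other near (5, 5)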
DBSCAN Clustering From Scratch:
class DBSCAN:
    def __init__(self, epsilon, min_points):
        self.epsilon = epsilon        # neighborhood radius
        self.min_points = min_points  # neighbors required for a core point

    def fit_predict(self, data):
        labels = np.full(len(data), -1)   # -1 marks noise / unassigned points
        cluster = -1
        for i in range(len(data)):
            if labels[i] != -1:
                continue
            neighbors = np.where(np.linalg.norm(data - data[i], axis=1) <= self.epsilon)[0]
            if len(neighbors) < self.min_points:
                continue                  # not a core point; stays noise unless absorbed later
            # Start a new cluster and grow it outward through density-connected core points
            cluster += 1
            seeds = list(neighbors)
            while seeds:
                j = seeds.pop()
                if labels[j] != -1:
                    continue
                labels[j] = cluster
                j_neighbors = np.where(np.linalg.norm(data - data[j], axis=1) <= self.epsilon)[0]
                if len(j_neighbors) >= self.min_points:
                    seeds.extend(j_neighbors)
        return labels
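A brief usage sketch (the epsilon and min_points values are illustrative and data-dependent):
data = np.vstack([np.random.randn(50, 2), np.random.randn(50, 2) + 5])
labels = DBSCAN(epsilon=1.0, min_points=5).fit_predict(data)
print(np.unique(labels))  # expect clusters 0 and 1, with -1 for any noise points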
Gradient Boosting for Regression From Scratch:
import numpy as np
from sklearn.tree import DecisionTreeRegressor  # shallow tree used as the weak learner

class GradientBoostingRegression:
    def __init__(self, num_iterations=100, learning_rate=0.1):
        self.num_iterations = num_iterations
        self.learning_rate = learning_rate
        self.models = []

    def fit(self, X, y):
        # The ensemble prediction F starts at zero; each round fits a weak
        # learner to the residuals (the negative gradient of the squared loss)
        F = np.zeros(len(y))
        for _ in range(self.num_iterations):
            weak_learner = DecisionTreeRegressor(max_depth=1)
            weak_learner.fit(X, y - F)
            prediction = weak_learner.predict(X)
            F += self.learning_rate * prediction
            self.models.append(weak_learner)

    def predict(self, X):
        # Sum the scaled contributions of all weak learners
        return self.learning_rate * np.sum([m.predict(X) for m in self.models], axis=0)
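A short usage sketch on a 1-D toy regression problem (the hyperparameters are illustrative):
X = np.linspace(0, 10, 200).reshape(-1, 1)
y = np.sin(X).ravel()
gbr = GradientBoostingRegression(num_iterations=200, learning_rate=0.1)
gbr.fit(X, y)
print(np.mean((gbr.predict(X) - y) ** 2))  # training MSE should be small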
AdaBoost From Scratch:
class DecisionStump:
    """Weak learner: thresholds a single feature and predicts +1 or -1."""
    def __init__(self):
        self.feature_index = None
        self.threshold = None
        self.polarity = 1
        self.alpha = None  # vote weight assigned by AdaBoost

    def fit(self, X, y, sample_weights):
        # Exhaustively pick the (feature, threshold, polarity) with the lowest weighted error
        best_error = np.inf
        for feature_index in range(X.shape[1]):
            for threshold in np.unique(X[:, feature_index]):
                for polarity in (1, -1):
                    y_pred = np.where(polarity * (X[:, feature_index] - threshold) >= 0, 1, -1)
                    error = np.sum(sample_weights[y_pred != y])
                    if error < best_error:
                        best_error = error
                        self.feature_index = feature_index
                        self.threshold = threshold
                        self.polarity = polarity

    def predict(self, X):
        return np.where(self.polarity * (X[:, self.feature_index] - self.threshold) >= 0, 1, -1)

class AdaBoost:
    def __init__(self, num_iterations=50):
        self.num_iterations = num_iterations
        self.classifiers = []
        self.alphas = []

    def fit(self, X, y):
        # Start with uniform sample weights; y must use labels in {-1, +1}
        sample_weights = np.full(len(y), 1 / len(y))
        for _ in range(self.num_iterations):
            classifier = DecisionStump()
            classifier.fit(X, y, sample_weights)
            y_pred = classifier.predict(X)
            weighted_error = np.sum(sample_weights[y_pred != y]) / np.sum(sample_weights)
            # Classifier vote weight: larger when the weighted error is small
            classifier.alpha = 0.5 * np.log((1 - weighted_error + 1e-10) / (weighted_error + 1e-10))
            # Re-weight samples: misclassified points gain weight
            sample_weights *= np.exp(-classifier.alpha * y * y_pred)
            sample_weights /= np.sum(sample_weights)
            self.classifiers.append(classifier)
            self.alphas.append(classifier.alpha)

    def predict(self, X):
        final_predictions = sum(a * c.predict(X) for a, c in zip(self.alphas, self.classifiers))
        return np.sign(final_predictions)
# Example usage:
if __name__ == "__main__":
    # Generate synthetic data for binary classification with labels in {-1, +1}
    np.random.seed(0)
    X = np.random.rand(100, 2)
    y = np.where(X[:, 0] + X[:, 1] > 1, 1, -1)

    # Train the ensemble (the iteration count here is illustrative)
    adaboost = AdaBoost(num_iterations=10)
    adaboost.fit(X, y)

    # Make predictions
    X_test = np.array([[0.7, 0.3], [0.4, 0.6]])
    y_pred = adaboost.predict(X_test)
    print("Predicted:", y_pred)
Neural Network for Binary Classification From Scratch:
import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    # Derivative of the sigmoid, expressed in terms of its output x = sigmoid(z)
    return x * (1 - x)

class NeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.1):
        # Initialize network architecture and hyperparameters
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Random weight initialization for the two fully connected layers
        self.weights_input_hidden = np.random.randn(input_size, hidden_size)
        self.weights_hidden_output = np.random.randn(hidden_size, output_size)

    def forward(self, X):
        # Forward pass: input -> hidden -> output, with sigmoid activations
        self.hidden_output = sigmoid(np.dot(X, self.weights_input_hidden))
        self.predicted_output = sigmoid(np.dot(self.hidden_output, self.weights_hidden_output))
        return self.predicted_output

    def train(self, X, y, epochs):
        for epoch in range(epochs):
            self.forward(X)
            error = y - self.predicted_output
            # Calculate gradients by backpropagation
            delta_output = error * sigmoid_derivative(self.predicted_output)
            d_weights_hidden_output = np.dot(self.hidden_output.T, delta_output)
            delta_hidden = np.dot(delta_output, self.weights_hidden_output.T) * sigmoid_derivative(self.hidden_output)
            d_weights_input_hidden = np.dot(X.T, delta_hidden)
            # Update weights in the direction that reduces the squared error
            self.weights_hidden_output += self.learning_rate * d_weights_hidden_output
            self.weights_input_hidden += self.learning_rate * d_weights_input_hidden
            # Calculate and print the mean squared error for this epoch
            mse = np.mean(np.square(y - self.predict(X)))
            print(f"Epoch {epoch + 1}/{epochs}, Mean Squared Error: {mse:.4f}")

    def predict(self, X):
        return self.forward(X)
# Example usage:
if __name__ == "__main__":
    # Generate synthetic data for binary classification
    np.random.seed(0)
    X = np.random.rand(100, 2)
    y = np.where(X[:, 0] + X[:, 1] > 1, 1, 0).reshape(-1, 1)  # column vector to match the output layer

    # Build and train the network (hidden size, learning rate, and epochs are illustrative)
    nn = NeuralNetwork(input_size=2, hidden_size=4, output_size=1, learning_rate=0.5)
    nn.train(X, y, epochs=100)

    # Threshold the output probabilities at 0.5 to get class labels
    predictions = (nn.predict(X) > 0.5).astype(int)
    print("Training accuracy:", np.mean(predictions == y))