import numpy as np
"""
Predict the target variable using the logistic regression model.
Parameters:
X (numpy.ndarray): Feature matrix of shape (n, p), where n is the number of
samples and p is the number of features.
theta (numpy.ndarray): Model parameters of shape (p, 1).
Returns:
probabilities (numpy.ndarray): Predicted probabilities of shape (n, 1).
"""
return probabilities
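

# Quick sanity check for predict (a minimal sketch: the 3x2 feature matrix and
# zero weight vector below are made-up illustration values). With theta = 0 the
# linear scores are all zero, so every predicted probability should be 0.5.
_X_demo = np.array([[1.0, 2.0], [1.0, -1.0], [1.0, 0.5]])
_theta_demo = np.zeros((2, 1))
print(predict(_X_demo, _theta_demo))  # expected: [[0.5], [0.5], [0.5]]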
"""
Compute the cost function for logistic regression.
Parameters:
X (numpy.ndarray): Feature matrix of shape (n, p), where n is the number of
samples and p is the number of features.
y (numpy.ndarray): Target values of shape (n, 1).
theta (numpy.ndarray): Model parameters of shape (p, 1).
Returns:
cost (float): Cost value corresponding to the logistic loss.
"""
n = len(y)
return cost
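

# Sanity check for compute_cost (a sketch on made-up toy data): at theta = 0
# every probability is 0.5, so the logistic loss equals log(2) ~= 0.6931
# regardless of the labels.
_X_toy = np.array([[1.0, 2.0], [1.0, -1.0]])
_y_toy = np.array([[1.0], [0.0]])
print(compute_cost(_X_toy, _y_toy, np.zeros((2, 1))))  # expected: ~0.6931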
def compute_gradient(X, y, theta):
    """
    Compute the gradient of the logistic loss with respect to theta.
    Returns:
        gradient (numpy.ndarray): Gradient vector of shape (p, 1).
    """
    n = len(y)
    # Average gradient of the logistic loss: (1/n) * X^T (sigmoid(X theta) - y).
    gradient = (1.0 / n) * X.T @ (predict(X, theta) - y)
    return gradient
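

# Optional check for compute_gradient (a sketch reusing the toy arrays defined
# above): compare the analytic gradient to a central finite difference of the
# cost along the second coordinate of theta. The two values should agree to
# several decimal places (about -0.75 here).
_eps = 1e-5
_t0 = np.zeros((2, 1))
_t_plus, _t_minus = _t0.copy(), _t0.copy()
_t_plus[1, 0] += _eps
_t_minus[1, 0] -= _eps
_fd = (compute_cost(_X_toy, _y_toy, _t_plus) - compute_cost(_X_toy, _y_toy, _t_minus)) / (2 * _eps)
print(_fd, compute_gradient(_X_toy, _y_toy, _t0)[1, 0])  # the two values should match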
# Train the logistic regression model: learn the weights using gradient descent.
def train_model(X_train, y_train, learning_rate, num_iterations):
    """
    Train the logistic regression model using gradient descent optimization.
    Parameters:
        X_train (numpy.ndarray): Feature matrix of shape (n, p) for training data.
        y_train (numpy.ndarray): Target values of shape (n, 1) for training data.
        learning_rate (float): Learning rate for gradient descent.
        num_iterations (int): Number of iterations for training.
    Returns:
        theta (numpy.ndarray): Model parameters of shape (p, 1).
        costs_train (list): List of training costs at each iteration.
    """
    n, p = X_train.shape
    theta = np.zeros((p, 1))
    costs_train = []
    for _ in range(num_iterations):
        # Gradient descent update: step against the gradient of the training loss.
        gradient = compute_gradient(X_train, y_train, theta)
        theta = theta - learning_rate * gradient
        # Record the training cost at each iteration.
        costs_train.append(compute_cost(X_train, y_train, theta))
    return theta, costs_train
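

# Fit the model (a sketch: X_train and y_train are assumed to be prepared
# earlier in the script, and the learning rate / iteration count below are
# placeholder values rather than tuned settings).
theta_hat, costs_train = train_model(X_train, y_train, learning_rate=0.1, num_iterations=1000)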
probabilities_train = predict(X_train, theta_hat)
probabilities_test = predict(X_test, theta_hat)
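
# Convert predicted probabilities to hard class labels (a sketch using the
# conventional 0.5 threshold; pick a different cutoff if another operating
# point is needed).
y_pred_train = (probabilities_train >= 0.5).astype(int)
y_pred_test = (probabilities_test >= 0.5).astype(int)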