# (notebook export artifact: stray "Code" cell markers removed)
# --- Notebook setup: warning suppression, imports, plotting defaults, RNG seed ---
# NOTE(review): `warnings` and `plt` are used here but imported in an earlier,
# unexported cell (presumably `import warnings` / `import matplotlib.pyplot as plt`) — confirm.
warnings.filterwarnings("ignore")  # NOTE(review): silences ALL warnings; consider narrowing
import time
import numpy as np
import h5py
import scipy
%matplotlib inline
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
%load_ext autoreload
%autoreload 2
np.random.seed(1)  # fixed seed so parameter initialization is reproducible
# Display one training example together with its label.
# NOTE(review): `train_x_orig`, `train_y`, `classes` come from a dataset-loading
# cell lost in the export; `train_x_orig[index]` is presumably an RGB image
# array and `classes[...]` a bytes label — confirm against the loader.
index = 10
plt.imshow(train_x_orig[index])
print ("y = " + str(train_y[0,index]) + ". It's a " + classes[train_y[0,index]].decode("utf-8") + " picture.")
# Explore the dataset dimensions.
# NOTE(review): this block appeared TWICE verbatim in the original export
# (an accidental copy-paste); the duplicate — including its repeated
# print — has been removed.
m_train = train_x_orig.shape[0]  # number of training examples
num_px = train_x_orig.shape[1]   # image side length; assumes square images — TODO confirm
m_test = test_x_orig.shape[0]    # number of test examples
print ("Each image is of size: (" + str(num_px) + ", " + str(num_px) + ", 3)")
# Layer-size constants for the 2-layer model:
# n_h — number of hidden units; n_y — output size (one sigmoid unit).
n_h, n_y = 7, 1
# NOTE(review): incomplete notebook export of a 2-layer training function
# (LINEAR->RELU->LINEAR->SIGMOID). The `def two_layer_model(...)` header,
# the docstring's opening quotes, the forward/backward-propagation calls
# and the `update_parameters` call were lost during extraction, and the
# closing `"""` was fused onto the `np.random.seed(1)` line. All original
# lines are kept byte-identical below; this block is NOT runnable as-is.
Arguments:
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
print_cost -- If set to True, this will print the cost every 100 iterations
Returns:
"""np.random.seed(1)
grads = {}
# Initialize parameters dictionary, by calling one of the functions you'd previously implemented
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Forward propagation: LINEAR -> RELU -> LINEAR -> SIGMOID. Inputs: "X, W1, b1". Output: "A1,
cache1, A2, cache2".
# Compute cost
cost = compute_cost(A2, Y)
# Backward propagation. Inputs: "dA2, cache2, cache1". Outputs: "dA1, dW2, db2; also dA0 (not
used), dW1, db1".
# Collect gradients for the update step. dW1/db1/dW2/db2 presumably come
# from backward-propagation calls lost in the export — TODO confirm.
grads['dW1'] = dW1
grads['db1'] = db1
grads['dW2'] = dW2
grads['db2'] = db2
# Update parameters.
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Record the cost and plot the learning curve before returning.
costs.append(cost)
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
plt.show()
return parameters
# Train the 2-layer model. The statement continues onto the next line
# (valid implicit continuation inside the open parenthesis).
parameters = two_layer_model(train_x, train_y, layers_dims = (n_x, n_h, n_y), num_iterations = 2500,
print_cost=True)
# NOTE(review): incomplete export of an L-layer training function. The
# `def L_layer_model(...)` header, the docstring's opening quotes, and the
# forward-propagation, backward-propagation and parameter-update calls
# were lost during extraction. Original lines kept byte-identical; this
# block is NOT runnable as-is.
Arguments:
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
layers_dims -- list containing the input size and each layer size, of length (number of layers + 1).
Returns:
parameters -- parameters learnt by the model. They can then be used to predict.
"""
np.random.seed(1)
parameters = initialize_parameters_deep(layers_dims)
# AL presumably comes from an L_model_forward call lost in the export — TODO confirm.
cost = compute_cost(AL, Y)
# Backward propagation.
# Update parameters.
# Record the cost and plot the learning curve before returning.
costs.append(cost)
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
plt.show()
return parameters
# Train the L-layer model on the (presumably flattened and normalized) training set.
parameters = L_layer_model(train_x, train_y, layers_dims, num_iterations = 2500, print_cost = True)
# --- Predict on a user-supplied image with the trained L-layer model ---
# NOTE(review): `my_image`, `my_predicted_image` and `image` are produced by
# cells lost in the export (image loading, reshaping, and a predict() call) —
# this fragment is not runnable on its own.
my_label_y = [1] # the true class of your image (1 -> cat, 0 -> non-cat)
my_image = my_image/255.
# z is the prediction truncated to a plain int (0 or 1).
z=int(my_predicted_image[0][0])
plt.imshow(image)
# NOTE(review): `my_predicted_image+1-(2*z)` maps 1 -> 0 and 0 -> 1, i.e. it
# INVERTS the predicted class before printing — looks suspicious; verify the
# intended label mapping. The print continues onto the next line (valid
# implicit continuation inside the open parenthesis).
print ("y = " + str(np.squeeze(my_predicted_image+1-(2*z))) + ", your L-layer model predicts a \"" +
classes[int(np.squeeze(my_predicted_image+1-(2*z))),].decode("utf-8") + "\" picture.")