Deep Learning Program
# Encode n small numbers (each assumed to be < 100 — TODO confirm) into a
# single base-100 integer while printing the positional weight of each
# input, then decode the base-100 digits back out.
# NOTE(review): the original excerpt never defined `n` before using it,
# and its final decode loop extracted each digit into `y` but discarded
# it; both are reconstructed here.
n = int(input())
print("n=", n)

# w = 100**(n-1): the base-100 weight of the first (most significant) input.
w = 1
for _ in range(n - 1):
    w *= 100

total = 0  # renamed from `sum` to avoid shadowing the builtin
for i in range(n):
    total *= 100  # shift previously read digits one base-100 place left
    x = int(input())
    total += x
    print("W", i + 1, "is:", w)
    w //= 100  # floor division keeps the weights exact ints (w /= 100 printed floats)

print("Sum=", total)

# Decode: peel off base-100 digits from least to most significant.
while total > 0:
    y = total % 100
    total = int(total / 100)
    n -= 1
    # Reconstructed output — the original discarded y. TODO confirm label format.
    print("x", n + 1, "is:", y)
GRU
# Reconstructed from a garbled excerpt: the original was missing the
# `keras`/`layers` imports, left `model.compile(` unclosed, left
# `model.fit(` without arguments, called `summary()` before the model was
# built, and used an undefined `result` in the final loop.
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Load MNIST and scale pixel values into [0, 1].
mnist = keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0

# Simple classifier: flatten the 28x28 images, normalize activations, and
# project to 10 class logits (hence from_logits=True in the loss below).
model = keras.Sequential([
    layers.Flatten(input_shape=(28, 28)),
    layers.BatchNormalization(),
    layers.Dense(10),
])
model.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer="sgd",
    metrics=["accuracy"],
)
# Training hyperparameters are reconstructed — TODO confirm against original.
model.fit(x_train, y_train, epochs=1, batch_size=64, validation_split=0.1)
print(model.summary())

# Compare predicted class vs. true label for the first 10 test images.
logits = model.predict(x_test[:10])
for i in range(10):
    result = tf.argmax(logits[i])
    print(result.numpy(), y_test[i])
Bidirectional LSTM
# Bidirectional LSTM sentiment classifier.
# Reconstructed from a garbled excerpt: the original was missing every
# keras/matplotlib import, never defined x_train/x_test/y_train/y_test
# (presumably the IMDB dataset, given max_len and the sigmoid output —
# TODO confirm), had no Embedding layer, and never compiled the model.
import numpy as np
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, Bidirectional, LSTM, Dropout, Dense
from matplotlib import pyplot

max_features = 20000  # vocabulary size — reconstructed, TODO confirm
max_len = 200         # truncate/pad every review to 200 tokens

(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
x_train = pad_sequences(x_train, maxlen=max_len)
x_test = pad_sequences(x_test, maxlen=max_len)
y_test = np.array(y_test)
y_train = np.array(y_train)

# Embedding -> Bi-LSTM(64) -> Dropout -> sigmoid for binary sentiment.
model = Sequential()
model.add(Embedding(max_features, 128, input_length=max_len))
model.add(Bidirectional(LSTM(64)))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])

history = model.fit(x_train, y_train,
                    batch_size=64,
                    epochs=4,
                    validation_data=(x_test, y_test))
print(history.history['loss'])
print(history.history['accuracy'])

# Plot training loss and accuracy curves per epoch.
pyplot.plot(history.history['loss'])
pyplot.plot(history.history['accuracy'])
pyplot.xlabel('epoch')
pyplot.show()
# GRU regression on a toy univariate sequence (9-step window -> next value).
# Reconstructed from a garbled excerpt: the original was missing the keras
# imports, never built any training data, never fitted the model, and
# printed an undefined `test_input`.
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import GRU, Dense

# Toy data: sliding 9-step windows over 10, 20, 30, ... — reconstructed,
# TODO confirm against the original exercise.
seq = np.arange(10, 200, 10, dtype=float)
X = np.array([seq[i:i + 9] for i in range(len(seq) - 9)])
y = seq[9:]
X = X.reshape((X.shape[0], 9, 1))  # (samples, timesteps, features)

model = Sequential()
model.add(GRU(64, activation='relu', input_shape=(9, 1)))
model.add(Dense(1))
model.compile(optimizer='adam', loss='mse')
model.fit(X, y, epochs=50, verbose=0)

# Predict the value following the last window.
test_input = seq[-9:].reshape((1, 9, 1))
print(test_input)
print(model.predict(test_input))
Bidirectional GRU
# Bidirectional GRU sentiment classifier — GRU twin of the Bi-LSTM example.
# Reconstructed from a garbled excerpt: the original was missing every
# keras/matplotlib import, never defined x_train/x_test/y_train/y_test
# (presumably the IMDB dataset — TODO confirm), had no Embedding layer,
# never compiled the model, and (unlike its LSTM sibling) dropped the
# loss plot — restored here for consistency.
import numpy as np
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, Bidirectional, GRU, Dropout, Dense
from matplotlib import pyplot

max_features = 20000  # vocabulary size — reconstructed, TODO confirm
max_len = 200         # truncate/pad every review to 200 tokens

(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
x_train = pad_sequences(x_train, maxlen=max_len)
x_test = pad_sequences(x_test, maxlen=max_len)
y_test = np.array(y_test)
y_train = np.array(y_train)

# Embedding -> Bi-GRU(64) -> Dropout -> sigmoid for binary sentiment.
model = Sequential()
model.add(Embedding(max_features, 128, input_length=max_len))
model.add(Bidirectional(GRU(64)))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])

history = model.fit(x_train, y_train,
                    batch_size=64,
                    epochs=4,
                    validation_data=(x_test, y_test))
print(history.history['loss'])
print(history.history['accuracy'])

# Plot training loss and accuracy curves per epoch.
pyplot.plot(history.history['loss'])
pyplot.plot(history.history['accuracy'])
pyplot.xlabel('epoch')
pyplot.show()